Skip to content

Instantly share code, notes, and snippets.

@raisinbl
raisinbl / main.go
Last active November 20, 2023 03:46
Minimal example of using Syft as a library, version 0.94
package main
import (
// "github.com/anchore/syft/syft/pkg/cataloger/python"
"fmt"
"github.com/anchore/syft/syft"
"github.com/anchore/syft/syft/format"
"github.com/anchore/syft/syft/format/cyclonedxjson"
"github.com/anchore/syft/syft/pkg/cataloger"
"github.com/anchore/syft/syft/sbom"
@raisinbl
raisinbl / log-maven-scala
Last active November 10, 2023 09:58
scala 2.12.18 installation log
[INFO] Error stacktraces are turned on.
[INFO] Scanning for projects...
[INFO]
[INFO] ---------------------------< org.vss:kafka >----------------------------
[INFO] Building kafka 1.0-SNAPSHOT
[INFO] --------------------------------[ jar ]---------------------------------
[INFO]
[INFO] >>> maven-dependency-plugin:2.8:analyze (default-cli) > test-compile @ kafka >>>
[INFO]
[INFO] --- scala-maven-plugin:4.8.1:add-source (default) @ kafka ---
! [SYNTAX] https://help.adblockplus.org/hc/en-us/articles/360062733293
/banhkhuc/,/thapcam/,/xoilac/,/90phut/,/vebo/##.box-host.box
/banhkhuc/,/thapcam/,/xoilac/,/90phut/,/vebo/##.vebo-sp.container
/banhkhuc/,/thapcam/,/xoilac/,/90phut/,/vebo/##.mc_padding
/banhkhuc/,/thapcam/,/xoilac/,/90phut/,/vebo/##.to-left
/banhkhuc/,/thapcam/,/xoilac/,/90phut/,/vebo/##.to-right
/banhkhuc/,/thapcam/,/xoilac/,/90phut/,/vebo/##.ft-box,
/banhkhuc/,/thapcam/,/xoilac/,/90phut/,/vebo/##.sidebar-right,
/banhkhuc/,/thapcam/,/xoilac/,/90phut/,/vebo/##.jw-logo.jw-logo-bottom-right.jw-reset
#! /opt/miniconda3/bin/python
# Fetch a web page through a bit.ly shortlink and parse its HTML with BeautifulSoup.
import requests
from bs4 import BeautifulSoup
# NOTE(review): no timeout or status-code check on the request — best-effort script; confirm this is intentional.
tiengruoi = requests.get('https://bit.ly/tiengruoi')
# Parse the raw response body; 'html.parser' is the stdlib parser (no lxml dependency).
soup = BeautifulSoup(tiengruoi.text, 'html.parser')
# Find all elements with class "cl_item"
@raisinbl
raisinbl / test_spark.py
Created May 3, 2023 05:04
Testing whether the PySpark library is installed correctly and spark-submit works
from pyspark.sql import SparkSession

# Smoke test: build a local Spark session, create a tiny DataFrame, and print it.
session = SparkSession.builder.appName("SimpleApp").getOrCreate()
people = [("Alice", 1), ("Bob", 2), ("Charlie", 3)]
frame = session.createDataFrame(people, ["Name", "Age"])
frame.show()