Skip to content

Instantly share code, notes, and snippets.

View chapmanjacobd's full-sized avatar
🥅
goal_net

Jacob Chapman chapmanjacobd

🥅
goal_net
View GitHub Profile
@chapmanjacobd
chapmanjacobd / bchunk_multi_bin.fish
Created November 4, 2023 01:46
Split multiple-track BIN files with a single CUE on Linux; afterwards, get metadata from MusicBrainz Picard
# Convert each per-track BIN file to WAV with bchunk (-w), synthesizing a
# one-track CUE sheet on the fly for every track.
# NOTE(review): assumes tracks are numbered 01..50 — confirm against the disc dump.
for i in (seq -w 1 50)
# psub exposes the generated CUE text as a temporary file path for bchunk
bchunk -w Dolphin\'s\ Mind\ -\ Fluid\ \(Germany\)\ \(The\ Full\ Dream\ Dance\ Megamix\)\ \(Track\ $i\).bin (
echo "FILE \"Dolphin\'s Mind - Fluid (Germany) (The Full Dream Dance Megamix) (Track $i).bin\" BINARY
TRACK $i AUDIO
"| psub
) DolpinMindMegamix
end
@chapmanjacobd
chapmanjacobd / shadows_be_gone.py
Created October 29, 2023 05:32
Shadow DOM WebElement generator
# Walk WebElements, asking the browser for each element's shadowRoot so nested
# shadow DOM content can be reached.
# NOTE(review): truncated in this view — the rest of get_all_elements and the
# outer driver-level logic are not visible here; indentation was lost in paste.
def flatten_shadows(driver):
# Shadow DOM can go to hell !!
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait
# Recursive helper: probe every element for an attached shadow root via JS.
def get_all_elements(driver, elements):
for el in elements:
shadow_root = driver.execute_script("return arguments[0].shadowRoot", el)
if shadow_root:
@chapmanjacobd
chapmanjacobd / gitls.md
Last active December 1, 2023 05:09
a few interesting git commands

gitls

git ls-files --sparse --full-name -z | 
  xargs -0 -I FILE -P 20 git log --date=iso-strict-local --format='%ad %>(14) %cr %<(5) %an  %h ./FILE' -- FILE | 
  sort --general-numeric-sort

This lists the commit date, relative age, author, and abbreviated hash for each tracked file:

2021-12-05T13:32:32-06:00  1 year, 11 months ago  Jacob Chapman  cc91fa0 ./.gitattributes

2021-12-05T13:32:32-06:00 1 year, 11 months ago Jacob Chapman cc91fa0 ./.gitignore

@chapmanjacobd
chapmanjacobd / gnu_parallel_global_timeout.fish
Created October 18, 2023 16:23
GNU Parallel global timeout
# Global wall-clock cap: SIGHUP the entire `parallel` invocation after 3s,
# while each individual job also has its own per-job --timeout of 10s.
timeout -s HUP 3s parallel --timeout 10 -j2 sleep {} ::: 10 12 15 10 8 15
# Same command, but feeding the job arguments via stdin instead of :::
string split ' ' 10 12 15 10 8 15 | timeout -s HUP 3s parallel --timeout 10 -j2 sleep {}
@chapmanjacobd
chapmanjacobd / ffmpeg_check_file.py
Created September 28, 2023 01:20
how to correctly scan a video file to check for corruption
# Build an ffprobe command that decodes every video frame (-count_frames) so
# corruption surfaces as decode errors or a frame-count/duration mismatch.
# NOTE(review): truncated in this view — the code that executes the command and
# interprets its JSON output is not visible here; indentation was lost in paste.
def decode_full_scan(path):
ffprobe_cmd = [
'ffprobe', '-show_entries', 'stream=r_frame_rate,nb_read_frames,duration',
'-select_streams', 'v', '-count_frames',
'-of', 'json',
'-threads', '20',
'-v', '0',
path,
]
// Bookmarklet: records the current scroll position, then scrolls down 3000px
// (infinite-scroll pagination helper).
// NOTE(review): truncated in this view — the comparison against previousScroll
// and the interval/timeout wiring are not visible here.
javascript:(function(){
var previousScroll;
var scrollInterval;
var nextPageTimeout;
function scrollAndCheck() {
previousScroll = window.scrollY;
window.scrollBy(0, 3000);
# Rank every numeric column of a DataFrame; columns named in ascending_cols
# rank ascending, all others descending.
# NOTE(review): truncated in this view — what is done with `ranks` afterwards
# (aggregation / return) is not visible here; indentation was lost in paste.
def rank_dataframe(df, ascending_cols=None):
if ascending_cols is None:
ascending_cols = []
numeric_cols = df.select_dtypes(include=["number"]).columns
# min-rank with NaNs forced to the bottom; sort direction flips per column
ranks = df[numeric_cols].apply(
lambda x: x.rank(
method="min", na_option="bottom", ascending=x.name in ascending_cols
)
)
# Plan a re-binning of files so that no directory holds more than
# max_files_per_folder entries.
# NOTE(review): truncated in this view — relies on a `chunks` helper plus
# Counter and Path imported elsewhere; the code that maps chunks to numbered
# sub-folders is not visible here; indentation was lost in paste.
def rebin_folders(paths, max_files_per_folder=16000):
parent_paths = Counter(Path(p).parent for p in paths)
# parent directories currently holding more files than the cap
rebin_parents = set(k for k,v in parent_paths.items() if v > max_files_per_folder)
rebinned = set(p for p in paths if Path(p).parent in rebin_parents)
untouched = set(paths) - rebinned
rebinned_tuples = []
chunked = list(chunks(list(rebinned), max_files_per_folder))
# digits needed for zero-padded 1-based chunk-folder names
min_len = len(str(len(chunked) + 1)) # start at 1
# Attach mpv watch-later timestamps to each media dict, keyed by the MD5 that
# mpv uses for its per-file watch-later metadata.
# NOTE(review): truncated in this view — the remainder of the else branch and
# the return of `media` are not visible here; indentation was lost in paste.
def mpv_enrich(args, media) -> List[Dict]:
for m in media:
md5 = path_to_mpv_watchlater_md5(m["path"])
metadata_path = Path(args.watch_later_directory, md5)
if metadata_path.exists():
# st_ctime here — presumably treated as creation time on this platform; verify
m["time_partial_first"] = int(metadata_path.stat().st_ctime)
m["time_partial_last"] = int(metadata_path.stat().st_mtime)
else:
m["time_partial_first"] = 0
def calculate_sparseness(stat) -> float:
    """Return the fraction of a file's logical size that is actually allocated.

    Args:
        stat: an ``os.stat_result`` (or any object exposing ``st_size`` and
            ``st_blocks``). Per POSIX, ``st_blocks`` counts 512-byte units.

    Returns:
        ``allocated_bytes / st_size`` — below 1.0 for sparse files, around 1.0
        for fully allocated files (may exceed 1.0 due to block rounding).
        Returns 0.0 for empty files, avoiding a division by zero.

    Note:
        The original annotation was ``-> int``, but true division always yields
        a float; the annotation is corrected here.
    """
    if stat.st_size == 0:
        # Empty file: no meaningful ratio, and st_size would divide by zero.
        return 0.0
    blocks_allocated = stat.st_blocks * 512  # POSIX st_blocks unit is 512 bytes
    return blocks_allocated / stat.st_size