Here are some helpful commands for managing ZFS and zpools on Ubuntu.

Useful for populating /etc/zfs/vdev_id.conf:

printDisks() {
  for i in /dev/sd[b-i]; do
    fdisk -l "$i"
  done
}
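For reference, vdev_id.conf maps persistent device links to friendly aliases, one "alias <name> <devlink>" entry per disk; a minimal sketch, with made-up by-id names:

# /etc/zfs/vdev_id.conf -- the device IDs below are placeholders
alias d1 /dev/disk/by-id/ata-EXAMPLE_DISK_SERIAL1
alias d2 /dev/disk/by-id/ata-EXAMPLE_DISK_SERIAL2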
Windows Registry Editor Version 5.00

[HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows\CurrentVersion\App Paths\npp.exe]
@="C:\\Program Files (x86)\\Notepad++\\notepad++.exe"
# ~/.tigrc
# View diffs using delta
# Via https://github.com/jonas/tig/issues/26#issuecomment-1923835137
bind diff D >sh -c "git show %(commit) | delta --paging always"
bind diff S >sh -c "git show %(commit) | delta --paging always --side-by-side"
bind stage D >sh -c "git diff HEAD -- %(file) | delta --paging always"
bind stage S >sh -c "git diff HEAD -- %(file) | delta --paging always --side-by-side"
bind status D >sh -c "git diff HEAD -- %(file) | delta --paging always"
bind status S >sh -c "git diff HEAD -- %(file) | delta --paging always --side-by-side"
# ~/.tmux.conf: show the client tty plus Mountain and UTC clocks on the right of the status bar
set-option -g status-right '#{client_tty} (#(TZ=US/Mountain date +%%H:%%M)MT #(TZ=UTC date +%%H:%%M)Z) %Y-%m-%d %H:%M'
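To pick the change up in a running session, reload the config:

tmux source-file ~/.tmux.conf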
# TDM.cmake: toolchain file to configure for MinGW instead of nmake
#
#   C:\TDM-GCC-64\mingwvars.bat
#   cmake . -G "MinGW Makefiles"
#
# Makefile: cmake ... -DCMAKE_TOOLCHAIN_FILE=TDM.cmake
set(CMAKE_SYSTEM_NAME Windows)
set(CMAKE_C_COMPILER C:/TDM-GCC-64/bin/gcc.exe)
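If the project also compiles C++, the toolchain file presumably needs the matching C++ compiler; a sketch assuming TDM-GCC's g++.exe sits alongside gcc.exe:

set(CMAKE_CXX_COMPILER C:/TDM-GCC-64/bin/g++.exe)  # path assumed to mirror the gcc entry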
[cmdletbinding()]
Param(
    [Parameter(Mandatory=$true)]
    [string[]]$servers,
    [ValidateScript({
        if (-Not ($_ | Test-Path)) {
            throw "Folder does not exist"
        }
        return $true
    })]
    [string]$folder  # closing braces and this parameter name added; the original snippet ends mid-block
)
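A hypothetical invocation; the script name (and the $folder parameter above) are made up for illustration:

.\Copy-ToServers.ps1 -servers web01,web02 -folder C:\staging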
# The Thrift server is not finding the Delta jars in the Ivy2 cache, even when
# spark.sql.catalog.spark_catalog is specified in spark-defaults.conf.
# Probably a bad solution: copy the jars into Spark's jars/ directory directly.
wget https://repo1.maven.org/maven2/io/delta/delta-core_2.12/2.4.0/delta-core_2.12-2.4.0.jar && \
  mv delta-core_2.12-2.4.0.jar jars/
wget https://repo1.maven.org/maven2/io/delta/delta-storage/2.4.0/delta-storage-2.4.0.jar && \
  mv delta-storage-2.4.0.jar jars/
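For reference, the spark-defaults.conf entries the comment refers to are the standard Delta Lake settings (per the Delta 2.4.x docs):

spark.sql.extensions            io.delta.sql.DeltaSparkSessionExtension
spark.sql.catalog.spark_catalog org.apache.spark.sql.delta.catalog.DeltaCatalog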
#!/bin/bash
# build/build-spark-pip.sh
# https://spark.apache.org/docs/3.4.1/building-spark.html
export MAVEN_OPTS="-Xss64m -Xmx2g -XX:ReservedCodeCacheSize=1g"
#./build/mvn -DskipTests clean package
pushd ..
# Remaining steps assumed from the linked docs: build a pip-installable sdist from python/
cd python
python setup.py sdist
popd
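The resulting sdist lands in python/dist/ under the repository root (setup.py's default output directory) and can be pip-installed from there:

pip install python/dist/pyspark-*.tar.gz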
""" | |
Run DuckDB query on over SSH to avoid scanning full file set on a remote server, | |
and make results availble to local DuckDB process | |
""" | |
import io, paramiko, duckdb | |
sql = "SELECT * FROM read_json_auto('/path/to/data.json')" | |
cmd = f'duckdb -csv -s "{sql}"' |
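The relation can then be queried locally, e.g. rel.df() to materialize a pandas DataFrame, without shipping the raw JSON across the wire.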
# Vendor the pinned requirements into vendor/ without resolving transitive dependencies
pip download --no-deps --dest vendor -r requirements.txt
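The vendored files can then be installed offline from that directory:

pip install --no-index --find-links vendor -r requirements.txt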