Archive tar and gzip
tar -czf output.tar.gz input_folder1 input_folder2 input_file …
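To verify an archive without extracting it, list its contents (assuming the output name above):
tar -tzf output.tar.gz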
Extract tar and gzip
tar -xzf input.tar.gz
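To extract into a specific directory instead of the current one (target_dir is a placeholder):
tar -xzf input.tar.gz -C target_dir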
Encode file with Base64
openssl base64 -in <input> -out <output>
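The matching decode, assuming the same placeholder names:
openssl base64 -d -in <input> -out <output>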
Rename all files to lowercase
for f in *; do
  mv "$f" "$f.tmp"
  mv "$f.tmp" "$(echo "$f" | tr '[:upper:]' '[:lower:]')"
done
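If your shell is bash 4 or newer (macOS ships 3.2 by default), the ${f,,} expansion lowercases without tr:
for f in *; do mv "$f" "$f.tmp"; mv "$f.tmp" "${f,,}"; done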
Remove all .DS_Store files
find . -name ".DS_Store" -delete
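To preview what would be deleted first, swap -delete for -print:
find . -name ".DS_Store" -print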
Reset git-lfs files
git rm --cached -r .
git reset --hard
git checkout -f .
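To verify which files LFS tracks afterwards (requires git-lfs to be installed):
git lfs ls-files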
Convert PNG to ICO
convert input.png -define icon:auto-resize=48,32,16 output.ico
Convert all PNG images to JPEG format
mogrify -format jpg *.png
Resize all JPEG images to a maximum of 256x256
mogrify -resize 256x256 *.jpg
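Note that mogrify overwrites files in place; to keep the originals, write the copies to another directory (thumbs is a placeholder and must already exist):
mogrify -path thumbs -resize 256x256 *.jpg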
Resize and fill proportionally
mogrify -resize "300x400^" -gravity center -extent 300x400 *.jpg
Append images vertically
convert *.jpg -append result.jpg
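To place them side by side instead, use +append:
convert *.jpg +append result.jpg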
Convert gif to mp4
ffmpeg -f gif -i input.gif output.mp4
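Some players require even dimensions and yuv420p pixel format; a commonly used variant that handles both:
ffmpeg -i input.gif -movflags +faststart -pix_fmt yuv420p -vf "scale=trunc(iw/2)*2:trunc(ih/2)*2" output.mp4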
Convert gif to webm
ffmpeg -i input.gif -c:v libvpx -crf 12 -b:v 500K -auto-alt-ref 0 output.webm
Create solid color
convert -size 100x100 xc:#990000 dark-red.png
convert -size 100x100 'xc:rgba(255,0,0,0.4)' translucent-red.png
Convert avi to uncompressed mov
ffmpeg -i input.avi -vcodec rawvideo -pix_fmt uyvy422 output.mov
Extract image from video every 10 seconds
ffmpeg -i video.mkv -r 1/10 -f image2 output_%03d.jpg
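To grab a single frame at a given timestamp instead (the time is an example value):
ffmpeg -ss 00:00:10 -i video.mkv -frames:v 1 frame.jpg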
Convert mov to gif (frame rate: -r, size: -s)
ffmpeg -i input.mov -pix_fmt rgb24 -r 10 -s 320x240 output.gif
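For better colors, a two-pass palette approach (reusing the fps and size from above) is an option:
ffmpeg -i input.mov -vf "fps=10,scale=320:-1:flags=lanczos,palettegen" palette.png
ffmpeg -i input.mov -i palette.png -filter_complex "fps=10,scale=320:-1:flags=lanczos[x];[x][1:v]paletteuse" output.gif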
Edit cronjobs
env EDITOR=nano crontab -e
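A cron entry is five time fields (minute hour day-of-month month day-of-week) followed by a command; for example, to run a script every 5 minutes (the path is a placeholder):
*/5 * * * * /path/to/script.sh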
Compress pdf
gs -sDEVICE=pdfwrite -dCompatibilityLevel=1.4 -dPDFSETTINGS=/printer -dNOPAUSE -dQUIET -dBATCH -sOutputFile=output.pdf input.pdf
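-dPDFSETTINGS also accepts /screen, /ebook, and /prepress; /ebook usually compresses harder than /printer:
gs -sDEVICE=pdfwrite -dCompatibilityLevel=1.4 -dPDFSETTINGS=/ebook -dNOPAUSE -dQUIET -dBATCH -sOutputFile=output.pdf input.pdf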
Merge pdfs
gs -dNOPAUSE -sDEVICE=pdfwrite -sOutputFile=combine.pdf -dBATCH 1.pdf 2.pdf
Execute string as shell script
sh -c "echo 'Hello, world!'"
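Arguments after the string are passed as $0, $1, and so on; for example (the underscore fills the $0 slot):
sh -c 'echo "Hello, $1!"' _ world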
Generate a new SSH key
ssh-keygen -t rsa -b 4096 -C "your_email@example.com"
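Ed25519 is a widely recommended alternative to RSA:
ssh-keygen -t ed25519 -C "your_email@example.com"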
Copy public key to clipboard
cat ~/.ssh/id_rsa.pub | pbcopy
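pbcopy is macOS-only; on Linux, xclip is a common substitute (if installed):
cat ~/.ssh/id_rsa.pub | xclip -selection clipboard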
Download entire website
wget --recursive --no-clobber --page-requisites --html-extension --convert-links --restrict-file-names=windows --domains website.com --no-parent www.website.com
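To be gentler on the server, a delay and a bandwidth cap can be added:
wget --wait=1 --limit-rate=200k --recursive --no-clobber --page-requisites --html-extension --convert-links --restrict-file-names=windows --domains website.com --no-parent www.website.com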
Grep all URLs from a website and save them to a txt file
wget -m http://www.example.com 2>&1 | grep '^--' | awk '{ print $3 }' | grep -v '\.\(css\|js\|png\|gif\|jpg\|JPG\)$' > urls.txt
Change user to "apache"
sudo su - apache
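An alternative that skips the intermediate su (assuming sudo allows it):
sudo -u apache -s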