This is a list of shell commands I can't live without on UNIX-based systems.
Using Homebrew (yes, I am opinionated), you can install these tools from the packages listed below:
#!/bin/bash
# Check that a remote URL answers HTTP 200 before downloading it.
#
# Example usage:
#   if validate_url "$url" >/dev/null; then dosomething; else echo "does not exist"; fi
#
# Prints "true" (and returns 0) when the server replies 200; prints
# nothing (and returns 1) otherwise.
validate_url() {
  local url=$1
  # --spider requests headers only (downloads nothing); -S echoes the
  # response headers on stderr.  Match any HTTP version and any reason
  # phrase — the original pattern 'HTTP/1.1 200 OK' missed HTTP/2
  # responses and servers that omit "OK".
  if wget -S --spider -- "$url" 2>&1 | grep -q 'HTTP/[0-9.]* 200'; then
    echo "true"
  fi
}
from scrapy import log | |
from scrapy.item import Item | |
from scrapy.http import Request | |
from scrapy.contrib.spiders import XMLFeedSpider | |
def NextURL(): | |
""" | |
Generate a list of URLs to crawl. You can query a database or come up with some other means | |
Note that if you generate URLs to crawl from a scraped URL then you're better of using a |
#!/usr/bin/env python
#
# Log in to MEGA (prerequisite for any up/download with mega.py).
#
# Requires: https://github.com/richardasaurus/mega.py
#
import os
import sys

from mega import Mega

# NOTE(review): credentials were hard-coded; read them from the
# environment so they do not end up in version control or `ps` output.
# The original placeholder strings are kept as defaults, so existing
# behavior is unchanged when the variables are unset.
mega = Mega({'verbose': True})
m = mega.login(os.environ.get('MEGA_EMAIL', 'megauseremail'),
               os.environ.get('MEGA_PASSWORD', 'megapass'))
# Build megatools from source on Debian/Ubuntu.

# Build dependencies.
sudo apt-get update && sudo apt-get install -y libglib2.0-dev libtool autoconf glib-networking fuse curl wget gettext gobject-introspection libcurl4-openssl-dev
sudo apt-get install -y lib32gmp-dev lib32gmp10 lib32gmpxx4 libgmp-dev libgmp10 libgmp3-dev

# nettle (crypto library) is built from source first.
wget https://ftp.gnu.org/gnu/nettle/nettle-3.0.tar.gz && tar xvf nettle-3.0.tar.gz && cd nettle-3.0/ && ./configure && make && sudo make install && cd ../

# megatools itself.  Fetched over https — the original plain-http URL
# was open to tampering.
wget https://megatools.megous.com/builds/megatools-1.9.93.tar.gz && tar xvf megatools-1.9.93.tar.gz && cd megatools-1.9.93/ && ./configure && make && sudo make install
require 'forwardable' | |
module RtmpMeta | |
class Parser | |
PATTERN = /duration\s+(?<duration>\d+\.?\d+)$/ | |
attr_reader :raw_data | |
def initialize raw_data | |
@raw_data = raw_data | |
end |
#!/bin/bash
# Rename the output html files from redditPostArchiver with the reddit
# thread title, e.g. "abc123.html" -> "Some Thread Title_abc123.html".
# https://github.com/sJohnsonStoever/redditPostArchiver
#
# FIX (review): the original loop was missing its closing `done`.

for f in *.html; do
  # Guard against an empty glob (no .html files -> literal "*.html").
  [[ -e "$f" ]] || continue

  # Pull the text between <title> and </title>.  IGNORECASE and RS=EOF
  # are GNU awk extensions — this script requires gawk.
  title=$(awk 'BEGIN{IGNORECASE=1;FS="<title>|</title>";RS=EOF} {print $2}' "$f")

  # Strip characters that are unsafe in filenames, then rename;
  # -i prompts before overwriting an existing file.
  mv -i "$f" "${title//[^a-zA-Z0-9\._\- ]}_$f"
done
#!/bin/bash
# Build an aria2c command line that fetches a SourceForge file from many
# mirrors in parallel.  $1 is the project download URL.
# NOTE(review): this chunk only assembles $cmd; the remainder of the
# script (outside this view) is expected to execute it, so $cmd stays a
# plain string for compatibility.

if [[ $1 == "" ]]; then
  # Usage errors should exit non-zero (the original exited 0).
  echo "Usage: $0 <downloadurl>"; exit 1
fi

# FIX (review): the original used `(echo ...; exit)` — the exit only
# left the subshell, so the script carried on without aria2 installed.
command -v aria2c >/dev/null 2>&1 || { echo "aria2 is not installed" >&2; exit 1; }

# Strip any existing query string from the URL.
url=$(echo "$1" | cut -f1 -d"?")

cmd="aria2c"
for mirror in kent freefr garr switch netcologne surfnet heanet iweb superb-dca3 superb-dca2 jaist tenet ufpt internode; do
  cmd="${cmd} \"${url}?use_mirror=${mirror}\""
done
# Remux mkv into an mp4 container without re-encoding — the container
# may not accept every codec an mkv can carry.
ffmpeg -i some.mkv -acodec copy -vcodec copy some.mp4

# Create a 30-second sample clip.
ffmpeg -t 30 -i some.mkv -acodec copy -vcodec copy some.mp4

# mp4 with audio downmixed to stereo AAC at 128 kbit/s.
# FIX (review): libfaac support was removed from FFmpeg in 2016; use the
# built-in "aac" encoder with an explicit bitrate instead.
ffmpeg -i some.mkv -acodec aac -b:a 128k -ac 2 -vcodec copy some.mp4

# Torrent download with forced encryption and pinned DHT/listen ports.
aria2c --bt-min-crypto-level=arc4 --bt-require-crypto=true --dht-listen-port=34450-34459 --listen-port=34460-34469
# rsync as a drop-in cp with resume + progress.  NOTE(review): dropped
# the redundant flags — -a already implies -r, and -P already implies
# --progress.  Be aware this shadows cp's normal semantics.
alias cp="rsync -avP"

# aria2c defaults: 16 connections per download, 1 MiB piece size.
alias aria2c="aria2c -s 16 -x 16 -k 1M"

# List listening sockets.  NOTE(review): this shadows the nl(1) utility.
alias nl="sudo lsof -i | grep -i listen"