I hereby claim:
- I am dbeley on github.
- I am dbeley (https://keybase.io/dbeley) on keybase.
- I have a public key ASBTGLUBKbliZ_B1PX9Q-OzxZ0uqTZhQn8nOImMrOGYPgQo
To claim this, I am signing this object:
""" | |
Python script skeleton | |
""" | |
import logging | |
import time | |
import argparse | |
logger = logging.getLogger() | |
temps_debut = time.time() |
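A typical completion of this skeleton, continuing the snippet above, could look like the following (the --debug flag and the log messages are illustrative, not part of the original):

def main(args):
    logger.info("Script started with arguments: %s", args)

def parse_args():
    parser = argparse.ArgumentParser(description="Python script skeleton")
    # illustrative flag: switch the root logger to DEBUG level
    parser.add_argument("--debug", action="store_true", help="display debugging information")
    args = parser.parse_args()
    logging.basicConfig(level=logging.DEBUG if args.debug else logging.INFO)
    return args

if __name__ == "__main__":
    main(parse_args())
    logger.info("Runtime: %.2f seconds.", time.time() - temps_debut)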
import requests
import pandas as pd
from bs4 import BeautifulSoup

url_base = "https://en.wikipedia.org/wiki/Napoleon"
# parse the page and collect every list entry of the first navigation box
soup_base = BeautifulSoup(requests.get(url_base).content, "lxml")
links = soup_base.find("div", {"class": "navbox"}).find_all("li")
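A natural follow-up, continuing the snippet above, is to turn those list entries into absolute URLs (this step is hypothetical, not from the original):

from urllib.parse import urljoin

# resolve the relative wiki links found inside each navbox entry
urls = [
    urljoin(url_base, a["href"])
    for li in links
    for a in li.find_all("a", href=True)
]
print("\n".join(urls))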
The Gathering  ████░░░░░░░░░░░░░ 52 plays
Paradise Lost  ███▋░░░░░░░░░░░░░ 48 plays
Agalloch       ██▏░░░░░░░░░░░░░░ 28 plays
Anathema       █▊░░░░░░░░░░░░░░░ 24 plays
Katatonia      █▋░░░░░░░░░░░░░░░ 22 plays
Riverside      █▏░░░░░░░░░░░░░░░ 15 plays
Primordial     ▊░░░░░░░░░░░░░░░░ 10 plays
Ride           ▌░░░░░░░░░░░░░░░░ 8 plays
Myrath         ▌░░░░░░░░░░░░░░░░ 7 plays
Thy Catafalque ▍░░░░░░░░░░░░░░░░ 6 plays
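Bars like these can be drawn with eighth-block characters; a minimal sketch that roughly reproduces the chart above (the scale of 220 plays per full bar is a guess chosen to match the output, not from the original tool):

def bar_chart(rows, width=17, max_plays=220):
    # partial blocks from 1/8 to 7/8 of a cell wide
    eighth_blocks = "▏▎▍▌▋▊▉"
    name_width = max(len(name) for name, _ in rows)
    for name, plays in rows:
        eighths = int(plays / max_plays * width * 8)
        bar = "█" * (eighths // 8)
        if eighths % 8:
            bar += eighth_blocks[eighths % 8 - 1]
        bar += "░" * (width - len(bar))
        print(f"{name:<{name_width}} {bar} {plays} plays")

bar_chart([("The Gathering", 52), ("Paradise Lost", 48), ("Agalloch", 28)])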
""" | |
Extract youtube urls from a json file returned by the google takeout export. | |
Usage : python extract_urls.py <name of json file> | |
""" | |
import logging | |
import argparse | |
import json | |
from pathlib import Path | |
logger = logging.getLogger() |
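The extraction step itself is not shown; a minimal sketch continuing the snippet above, assuming the Takeout watch-history format where each entry stores its video link under a "titleUrl" key:

def main():
    parser = argparse.ArgumentParser(
        description="Extract youtube urls from a google takeout export."
    )
    parser.add_argument("file", help="json file exported by google takeout")
    args = parser.parse_args()

    entries = json.loads(Path(args.file).read_text(encoding="utf-8"))
    for entry in entries:
        # entries without "titleUrl" (e.g. removed videos) are skipped
        if "titleUrl" in entry:
            print(entry["titleUrl"])

if __name__ == "__main__":
    main()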
#!/usr/bin/env bash
# download the favicon of google.com and save it as google.ico
./favd.sh google.com google.ico
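favd.sh itself is not included here; a sketch of the same idea in Python, assuming the favicon lives at the conventional /favicon.ico path (sites may serve their icon elsewhere):

import sys
import requests

domain, output_file = sys.argv[1], sys.argv[2]
# fetch the icon from the conventional location and save it as-is
response = requests.get(f"https://{domain}/favicon.ico", timeout=10)
response.raise_for_status()
with open(output_file, "wb") as f:
    f.write(response.content)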
#!/usr/bin/env bash
set -eEu -o pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd -P)"
usage() { printf "%s" "\
Usage: ./archive_github_user.sh [-h] USER
"; exit 0;
}
# ${1-} avoids an unbound-variable error under set -u when no argument is given
if [ "${1-}" == "-h" ]; then
    usage
fi
#!/usr/bin/env bash
set -eEu -o pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd -P)"
usage() { printf "%s" "\
Usage: ./archive_github_user_starred.sh [-h] USER
"; exit 0;
}
if [ "${1-}" == "-h" ]; then
    usage
fi
#!/usr/bin/env bash
set -eEu -o pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd -P)"
# URLs.txt is a text file containing one URL per line
while read -r URL; do
    # you can add other extensions to the accept list
    wget -r -nd --accept=mp3,MP3 -k -l 1 --show-progress --progress=bar:force:noscroll -e robots=off --span-hosts "$URL" -q
done < "$DIR/URLs.txt"
#!/usr/bin/env bash
# remove all unused docker data (images, containers, networks, build cache)
docker system prune -a
# remove dead and exited containers
docker ps --filter status=dead --filter status=exited -aq | xargs -r docker rm -v
# remove dangling <none> images
docker images --no-trunc | grep '<none>' | awk '{ print $3 }' | xargs -r docker rmi