Created
February 26, 2021 07:20
-
-
Save F5Training/b3a3ead41cd7166ef3e4529d49440f27 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/bin/bash
#
# Install required apt packages.
# Fixed: removed duplicate entries (python3-passlib, python3-docopt and
# python3-ruamel.yaml were each listed twice in the original array).
#
REQUIRED_PKG=(apt-transport-https
              zsh
              ansible
              powerline
              python3
              python3-powerline
              python3-paramiko
              python3-venv
              python3-boto3
              python3-passlib
              python3-docopt
              python3-ruamel.yaml
              python3-flake8
              flake8
              expect
              jq
              httpie
              python3-jinja2
              python3-magic
              python-is-python2
              socat
              p7zip-full)
# Grant the current user passwordless sudo for the rest of the setup.
echo "${USER} ALL=(ALL) NOPASSWD:ALL" | sudo tee /etc/sudoers.d/99-${USER} > /dev/null 2>&1
sudo locale-gen en_SG.UTF-8
# Fixed: original set LANG=en_SG.UTF8, which does not match the locale
# generated above (en_SG.UTF-8).
sudo update-locale LANG=en_SG.UTF-8
sudo apt update; sudo apt install -y --no-install-recommends "${REQUIRED_PKG[@]}"
#
# Make required directories.
# Fixed: use -p everywhere so re-running the script does not fail when a
# directory already exists.
#
mkdir -p ~/.config
mkdir -p ~/Projects
mkdir -p ~/.local/bin
mkdir -p ~/.local/man
# z (directory-jumping tool) expects its datafile to exist.
touch ~/.z
#
# Create the SSH agent bridge script and install npiperelay, which
# relays the Windows OpenSSH agent named pipe into a UNIX socket so WSL
# clients can use keys held by KeeAgent / Windows ssh-agent.
# (The original comment mentioned msysgit2unix-socket.py, but what is
# actually downloaded below is wsl-ssh-agent/npiperelay.)
#
cat <<EOF > ~/.local/bin/start-ssh-agent.sh
#!/bin/bash
# KeeAgent
touch /tmp/.ssh-agent.sock
export SSH_AUTH_SOCK=/tmp/.ssh-agent.sock
sshpid=\$(ss -ap | grep "\$SSH_AUTH_SOCK")
if [ "\$1" = "-k" ] || [ "\$1" = "-r" ]; then
    sshpid=\${sshpid//*pid=/}
    sshpid=\${sshpid%%,*}
    if [ -n "\${sshpid}" ]; then
        kill "\${sshpid}"
    else
        echo "'socat' not found"
    fi
    if [ "\$1" = "-k" ]; then
        exit
    fi
    unset sshpid
fi
if [ -z "\${sshpid}" ]; then
    rm -f \$SSH_AUTH_SOCK
    ( setsid socat UNIX-LISTEN:\$SSH_AUTH_SOCK,fork EXEC:"/usr/local/bin/npiperelay.exe -ei -s //./pipe/openssh-ssh-agent",nofork & ) >/dev/null 2>&1
fi
EOF
chmod +x ~/.local/bin/start-ssh-agent.sh
curl -SL https://github.com/rupor-github/wsl-ssh-agent/releases/download/v1.4.2/wsl-ssh-agent.7z -o /tmp/wsl-ssh-agent.7z
sudo 7z e -y /tmp/wsl-ssh-agent.7z -o/usr/local/bin
echo "Type the following:"
echo "~/.local/bin/start-ssh-agent.sh"
#
# Create runme1st.sh: clone the dotfiles repo (dotdrop) and install fzf.
#
cat <<EOF > ~/runme1st.sh
#!/bin/bash
# Fail fast before touching the filesystem if MYREPO is unset.
if [[ -z \$MYREPO ]]; then
    echo "MYREPO not set. Please set MYREPO and re-run this script."
    exit 1
fi
cd ~/.config
git clone git@\${MYREPO} --recurse-submodules
cd ~/.config/dotdrop
./dotdrop.sh install --profile demo
# fzf
git clone --depth 1 https://github.com/junegunn/fzf.git ~/.fzf
# NOTE(review): aliases are not expanded in non-interactive shells, so
# this alias has no effect on the fzf installer's curl calls — confirm
# whether it was ever intended to work here.
alias curl='curl --silent'
yes | ~/.fzf/install
EOF
#
# Create runme3rd.sh: install Python build dependencies, pyenv and its
# plugins, the latest stable CPython 3, and pipx-managed tools, then
# clone the lab-automation project.
#
cat <<EOF > ~/runme3rd.sh
#!/bin/bash
REQUIRED_PKG=(make
              build-essential
              libssl-dev zlib1g-dev
              libbz2-dev
              libreadline-dev
              libsqlite3-dev
              wget
              curl
              llvm
              libncurses5-dev
              xz-utils
              tk-dev
              libxml2-dev
              libxmlsec1-dev
              libffi-dev
              liblzma-dev)
sudo apt update; sudo apt install -y --no-install-recommends "\${REQUIRED_PKG[@]}"
if [ \$? != 0 ]; then
    echo "apt update failed. Re-run this script '\$0' again."
    exit 127
fi
git clone --depth 1 https://github.com/pyenv/pyenv.git ~/.pyenv
PYENV_MODULES=(doctor
               installer
               update
               virtualenv
               which-ext)
for module in "\${PYENV_MODULES[@]}"; do
    git clone --depth 1 https://github.com/pyenv/pyenv-\${module}.git ~/.pyenv/plugins/pyenv-\${module}
done
if [[ "\$PATH" != *"\$HOME/.local/bin"* ]]; then
    export PATH="\$HOME/.local/bin:\$PATH"
fi
export PYENV_ROOT="\$HOME/.pyenv"
export PATH="\$PYENV_ROOT/bin:\$PATH"
# Latest stable 3.x, excluding dev/alpha/beta/rc builds.
# Fixed: plain 'sort -r' is lexicographic and would rank 3.9 above 3.10;
# -V sorts by version. Also replaced the deprecated egrep with grep -E.
PYTHON_VER=\$(pyenv install -l | grep "\s3\." | grep -E -v "dev|a[0-9]|b|rc" | sort -rV | head -1)
pyenv install \${PYTHON_VER}
pyenv global \${PYTHON_VER}
eval "\$(pyenv init -)"
pip install --upgrade pip
# Fixed: the PyPI package is 'python-magic'; 'python3-magic' is the
# Debian apt name and does not exist on PyPI, so pip would fail.
PIP_INSTALLS=(wheel
              docopt
              ruamel.yaml
              jinja2
              python-magic
              "--user pipx")
for pip in "\${PIP_INSTALLS[@]}"; do
    pip install --use-feature=2020-resolver \${pip}
done
PIPX_INSTALLS=(pipenv
               http-prompt
               httpie
               ipython
               black
               pytest
               pytube)
for pipx in "\${PIPX_INSTALLS[@]}"; do
    pipx install \${pipx}
done
cd ~/Projects
git clone git@\${LABAUTO}
cd ~/Projects/lab-automation
git checkout v14.1-dev
git checkout v14.1
EOF
#
# Create 1-setup.py: downloads the latest GitHub release assets for a
# set of CLI tools and verifies their checksums.
#
cat <<EOF > ~/1-setup.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Download the latest release assets of several GitHub projects into
./tmp/<tool>/ and verify their sha256 checksums where available.
"""
import hashlib
import os
import pprint
import re
from pathlib import Path, PurePosixPath
from urllib.parse import urlsplit

from requests import get
from ruamel.yaml import YAML
def load_data():
    """Return the tool-download configuration as a dict.

    Each top-level key names a tool; its value describes the GitHub
    owner/repo, a regex matching the wanted release asset, and optional
    checksum-verification settings (file name, format, field position).
    The YAML nesting below was reconstructed; a raw string keeps the
    regex backslashes intact.
    """
    yaml_str = r"""
cli:
  github: exercism
  name: cli
  regex_str: 'exercism-.*-linux-x86_64\.tar\.gz'
  content_type: application/gzip
  checksum: true
  checksum_file: exercism_checksums.txt
  checksum_sig: exercism_checksums.txt.sig
  checksum_type: 'multiline|normal'
starship:
  github: starship
  name: starship
  regex_str: 'starship-x86_64-unknown-linux-gnu\.tar\.gz'
  content_type: application/gzip
  checksum: true
  checksum_ext: sha256
  checksum_type: 'singleline|checksum_only'
yq:
  github: mikefarah
  name: yq
  regex_str: 'yq_linux_amd64'
  content_type: application/octet-stream
  checksum: true
  checksum_file: checksums
  checksum_order: 18
  checksum_type: 'multiline|ordered'
task:
  github: go-task
  name: task
  regex_str: 'task_linux_amd64\.tar\.gz'
  checksum: true
  checksum_file: task_checksums.txt
  checksum_type: 'multiline|normal'
direnv:
  github: direnv
  name: direnv
  regex_str: 'direnv\.linux-amd64'
  content_type: application/octet-stream
fd:
  github: sharkdp
  name: fd
  regex_str: 'fd-.*-x86_64-unknown-linux-gnu\.tar\.gz'
  content_type: application/gzip
bat:
  github: sharkdp
  name: bat
  regex_str: 'bat-.*-x86_64-unknown-linux-gnu\.tar\.gz'
  content_type: application/gzip
k9s:
  github: derailed
  name: k9s
  regex_str: 'k9s_Linux_x86_64\.tar\.gz'
  content_type: application/gzip
  checksum: true
  checksum_file: checksums.txt
  checksum_type: 'multiline|normal'
kubectx:
  github: ahmetb
  name: kubectx
  regex_str: 'kubectx_.*_linux_x86_64\.tar\.gz'
  content_type: application/gzip
  checksum: true
  checksum_file: checksums.txt
  checksum_type: 'multiline|normal'
kubens:
  github: ahmetb
  name: kubectx
  regex_str: 'kubens_.*_linux_x86_64\.tar\.gz'
  content_type: application/gzip
  checksum: true
  checksum_file: checksums.txt
  checksum_type: 'multiline|normal'
"""
    return YAML(typ='safe').load(yaml_str)
def json_extract(obj, key):
    """Recursively collect every value of *key* from nested dicts/lists.

    Returns the values in document order as a flat list.
    """
    arr = []

    def extract(obj, arr, key):
        """Recursively search for values of key in the JSON tree."""
        if isinstance(obj, dict):
            for k, v in obj.items():
                if isinstance(v, (dict, list)):
                    extract(v, arr, key)
                elif k == key:
                    arr.append(v)
        elif isinstance(obj, list):
            for item in obj:
                extract(item, arr, key)
        return arr

    return extract(obj, arr, key)
def get_tag_name(data):
    """Return the release tag from a GitHub API payload, '*' if absent."""
    return data.get('tag_name', '*')


def get_repos_mapping(data):
    """Map each tool key to just its GitHub owner and repository name."""
    return {key: dict(github=value['github'], name=value['name'])
            for key, value in data.items()}


def get_json(data):
    """Decode a requests Response body as JSON."""
    return data.json()
def retrieve_github_download_urls(data):
    """Query the GitHub releases API for every configured tool.

    Attaches 'tag_name' and the regex-matched asset 'urls' to each entry
    of *data* and returns the mutated dict. Requires network access.
    """
    gh_url = "https://api.github.com/repos/{}/{}/releases/latest"
    headers = {"Accept": "application/vnd.github.v3+json"}
    repos = get_repos_mapping(data)
    for software, value in repos.items():
        github = value.get('github')
        name = value.get('name')
        resp = get(gh_url.format(github, name), headers=headers)
        resp_json = get_json(resp)
        # Fixed: was data.get(name)['tag_name'], which keyed by the repo
        # name. For 'kubens' (repo 'kubectx') that overwrote the kubectx
        # entry's tag and left 'kubens' without a tag_name.
        data[software]['tag_name'] = get_tag_name(resp_json)
        browser_download_urls = json_extract(resp_json, 'browser_download_url')
        regex = data[software].get('regex_str')
        urls = []
        for url in browser_download_urls:
            # Match the regex against the asset's file name only.
            filename = PurePosixPath(urlsplit(url)[2]).name
            if re.findall(regex, filename):
                urls.append(url)
        data[software]['urls'] = urls
    return data
def mkdir(dir):
    """Create *dir* (and parents) if missing; return it as a Path.

    Fixed: the original exists-then-create sequence was race-prone and
    failed for nested paths; mkdir(parents=True, exist_ok=True) is
    atomic in intent and idempotent.
    """
    dir = Path(dir)
    dir.mkdir(parents=True, exist_ok=True)
    return dir


def download_file(url, dir):
    """Stream *url* into directory *dir*; return the local file Path."""
    local_filename = dir / Path(url.split('/')[-1])
    # stream=True downloads in 8 KiB chunks instead of holding the
    # whole asset in memory.
    with get(url, stream=True) as r:
        r.raise_for_status()
        with open(local_filename, 'wb') as f:
            for chunk in r.iter_content(chunk_size=8192):
                f.write(chunk)
    return local_filename
def download_files(dir, data):
    """Download each tool's matched assets into *dir*/<tool>/.

    Also fetches the tool's checksum file (when configured) from the
    same release directory, then prints '<tool>-<tag>' as progress.
    """
    for name, item in data.items():
        subdir = mkdir(dir / Path(name))
        for file in item.get('urls'):
            download_file(file, subdir)
        if item.get('checksum'):
            if filename := item.get('checksum_file'):
                # The checksum file lives next to the first asset.
                url = item.get('urls')[0]
                url = url[:url.rfind('/')]
                # Fixed: the original interpolated a literal placeholder
                # instead of the checksum file name bound above.
                download_file(f'{url}/{filename}', subdir)
        print(f'{name}-{item.get("tag_name")}')
def verify_checksum(item):
    """Verify the sha256 of *item*'s first asset in the current directory.

    Supports three checksum-file layouts via item['checksum_type']:
    'multiline|normal' (hash + filename per line), 'singleline|checksum_only'
    (<asset>.sha256 holding just the hash), and 'multiline|ordered'
    (hash at a fixed space-split index, item['checksum_order']).
    Returns 'passed' or 'failed'.
    """
    status = 'passed'
    urls = item.get('urls')
    file_name = urls[0].split('/')[-1]
    checksum_type = item.get('checksum_type')
    with open(file_name, 'rb') as f:
        file = f.read()
    file_hash = hashlib.sha256()
    file_hash.update(file)
    if checksum_type == 'multiline|normal':
        with open(item.get('checksum_file'), 'r') as f:
            lines = f.readlines()
        for line in lines:
            if file_name in line:
                if file_hash.hexdigest() != line.split(' ')[0]:
                    print(f'{item.get("name")} checksum failed')
                    status = 'failed'
    elif checksum_type == 'singleline|checksum_only':
        with open(file_name + '.sha256', 'r') as f:
            checksum = f.read().rstrip()
        if file_hash.hexdigest() != checksum:
            print(f'{item.get("name")} checksum failed')
            status = 'failed'
    elif checksum_type == 'multiline|ordered':
        # Fixed: was open(item.get('checksum_file', 'r')) — the mode
        # string was passed as the .get() default instead of to open().
        with open(item.get('checksum_file'), 'r') as f:
            lines = f.readlines()
        for line in lines:
            if file_name in line:
                if file_hash.hexdigest() != line.split(' ')[item.get('checksum_order')]:
                    print(f'{item.get("name")} checksum failed')
                    status = 'failed'
    return status
def verify_checksums(data, dir_name):
    """Run verify_checksum() inside each tool's subdirectory of *dir_name*.

    Returns a dict mapping tool name to 'passed', 'failed', or
    'no checksum' for tools with no checksum configured.
    """
    pwd = os.getcwd()
    status = {}
    for name, item in data.items():
        if item.get('checksum') == True:
            os.chdir(dir_name / name)
            # Fixed: restore the working directory even if verification
            # raises, so later iterations chdir from a known place.
            try:
                status[name] = verify_checksum(item)
            finally:
                os.chdir(pwd)
        else:
            status[name] = 'no checksum'
    return status
if __name__ == '__main__': | |
data = load_data() | |
data = retrieve_github_download_urls(data) | |
dir_name = mkdir('tmp') | |
download_files(dir_name, data) | |
results = verify_checksums(data, dir_name) | |
pp = pprint.PrettyPrinter(indent=4) | |
pp.pprint(results) | |
EOF | |
#
# Create move-bin.sh: unpack/relocate the downloaded release binaries
# into ~/.local/bin (man pages into ~/.local/man), then fetch kubectl.
# Fixed: the original used single backslash continuations inside this
# unquoted heredoc, which the shell consumed while writing the file;
# bash array literals may span lines without continuations, so none are
# used. Also added the missing ';;' before esac.
#
cat <<EOF > ~/move-bin.sh
#!/bin/bash
PKGS=("cli"
      "starship"
      "yq"
      "direnv"
      "fd"
      "bat"
      "task"
      "k9s"
      "kubectx"
      "kubens")
for pkg in "\${PKGS[@]}"; do
    case \${pkg} in
        "starship")
            cd ~/tmp/starship
            STARSHIP_TGZ=starship-x86_64-unknown-linux-gnu.tar.gz
            tar -C ~/.local/bin -zxvf \${STARSHIP_TGZ}
            ;;
        "cli")
            cd ~/tmp/cli
            EXERCISM_TGZ=\$(ls exercism-*-linux-*.tar.gz)
            tar -C ~/.local/bin -zxvf \${EXERCISM_TGZ} exercism
            ;;
        "yq")
            cd ~/tmp/yq
            YQ_BIN=yq_linux_amd64
            mv \${YQ_BIN} ~/.local/bin/yq
            ;;
        "direnv")
            cd ~/tmp/direnv
            DIRENV_BIN=direnv.linux-amd64
            mv \${DIRENV_BIN} ~/.local/bin/direnv
            ;;
        "fd")
            cd ~/tmp/fd
            FD_TGZ=\$(ls fd-*-x86_64-unknown-linux-gnu.tar.gz)
            tar -zxvf \${FD_TGZ} --no-anchored --strip-components=1 fd fd.1
            mv fd ~/.local/bin/fd
            mv fd.1 ~/.local/man/fd.1
            ;;
        "bat")
            cd ~/tmp/bat
            BAT_TGZ=\$(ls bat-*-x86_64-unknown-linux-gnu.tar.gz)
            tar -zxvf \${BAT_TGZ} --no-anchored --strip-components=1 bat bat.1
            mv bat ~/.local/bin/bat
            mv bat.1 ~/.local/man/bat.1
            ;;
        "task")
            cd ~/tmp/task
            TASK_TGZ=task_linux_amd64.tar.gz
            tar -C ~/.local/bin -zxvf \${TASK_TGZ} task
            ;;
        "k9s")
            cd ~/tmp/k9s
            K9S_TGZ=k9s_Linux_x86_64.tar.gz
            tar -C ~/.local/bin -zxvf \${K9S_TGZ} --no-anchored --strip-components=1 k9s
            ;;
        "kubectx")
            cd ~/tmp/kubectx
            KUBECTX_TGZ=\$(ls kubectx_*_linux_x86_64.tar.gz)
            tar -C ~/.local/bin -zxvf \${KUBECTX_TGZ} --no-anchored --strip-components=1 kubectx
            ;;
        "kubens")
            cd ~/tmp/kubens
            KUBENS_TGZ=\$(ls kubens_*_linux_x86_64.tar.gz)
            tar -C ~/.local/bin -zxvf \${KUBENS_TGZ} --no-anchored --strip-components=1 kubens
            ;;
    esac
done
# kubectl: fetch the latest stable release binary.
KUBECTL_VER=\$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)
curl -sSLO "https://storage.googleapis.com/kubernetes-release/release/\${KUBECTL_VER}/bin/linux/amd64/kubectl"
mv kubectl ~/.local/bin/kubectl
echo kubectl-\${KUBECTL_VER}
chmod +x ~/.local/bin/*
EOF
#
# Make the generated helper scripts in $HOME executable.
#
chmod +x ~/*.{sh,py}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment