Chukwudi Nwachukwu (ichux)

🏠
Working from home
View GitHub Profile
gist:ec666ef4de3205b6a1bd80944c8eaf28
# chromedriver switch: an empty --whitelisted-ips whitelist accepts connections from any IP
chrome_options.add_argument("--whitelisted-ips=''")
https://github.com/SeleniumHQ/selenium/issues/4813#issuecomment-381123451
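For context, a minimal sketch of how that option plugs into a driver session (the local chromedriver and the httpbin URL are assumptions, not from the gist):

from selenium import webdriver

chrome_options = webdriver.ChromeOptions()
chrome_options.add_argument("--whitelisted-ips=''")
driver = webdriver.Chrome(options=chrome_options)  # assumes chromedriver on PATH
driver.get("http://httpbin.org/ip")
print(driver.page_source)
driver.quit()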
###
# Remove all containers at once (-q prints IDs only, -a includes stopped ones)
docker rm $(docker ps -qa)
###
# Show Docker's disk usage, then clear out stopped containers,
# unused networks, dangling images, and build cache
docker system df
docker system prune
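If unused images and volumes should go too, prune has a more aggressive form (destructive; run it only when nothing cached on the host is worth keeping):

# --all removes every unused image, not just dangling ones
docker system prune --all --volumes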
ichux / get_output.py
Last active May 3, 2021
gets the output of a git call
import os
import subprocess
from datetime import datetime

date_format = "%Y-%m-%d %H:%M:%S"
# os.environ["TZ"] = "UTC"
try:
    committer_date = datetime.strptime(
        subprocess.check_output(
            [
                # preview truncated here; a plausible git call (assumption):
                "git", "log", "-1", "--format=%cd",
                "--date=format:%Y-%m-%d %H:%M:%S",
            ]
        ).decode().strip(),
        date_format,
    )
except (subprocess.CalledProcessError, ValueError):
    committer_date = None
Ubuntu-20.04.sh
# Step 1: Update OS
1. sudo su
2. apt update -y && apt upgrade -y && apt dist-upgrade -y && apt autoclean -y && apt clean -y && apt autoremove -y && init 6

# Step 2: Install Docker
sudo apt install make docker.io docker-compose -y && sudo systemctl enable --now docker
# add yourself to the docker group, then kill the session so the new group applies
sudo groupadd docker; sudo usermod -aG docker ${USER}; sudo pkill -u ${USER}
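After logging back in, a quick smoke test confirms the group change took (hello-world is Docker's stock test image):

# should run without sudo if the group membership applied
docker run --rm hello-world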
curl-proxy.sh
# one-off request through the proxy (-k skips TLS certificate verification)
curl -k --proxy 192.168.1.16:9137 http://httpbin.org/ip

# or point the whole shell session at the proxy
export http_proxy=192.168.1.16:9137
export https_proxy=192.168.1.16:9137
curl -k http://httpbin.org/ip
unset http_proxy
unset https_proxy
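If the proxy requires credentials, both forms take the user:pass@ syntax (the credentials here are placeholders):

# placeholder credentials against the same proxy as above
curl -k --proxy http://user:pass@192.168.1.16:9137 http://httpbin.org/ip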
celery_log.py
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
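Inside a task it reads like this (the Celery app, broker URL, and add task are illustrative, not from the gist):

from celery import Celery
from celery.utils.log import get_task_logger

app = Celery("tasks", broker="redis://localhost:6379/0")  # illustrative broker
logger = get_task_logger(__name__)

@app.task
def add(x, y):
    # appears in the worker log tagged with the task name
    logger.info("adding %s and %s", x, y)
    return x + y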
nuggets.py
import secrets
import keyword as _keyword
from difflib import get_close_matches
ALLOWED_CHARS = (
'abcdefghijklmnopqrstuvwxyz'
'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'
)
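The preview cuts off before any functions; one guess at how these imports might combine (random_name, the length default, and the cutoff are all hypothetical) is a generator for identifiers that dodge Python keywords:

def random_name(length=12):
    # hypothetical helper: redraw until the name is neither a keyword
    # nor close to one (cutoff=0.8 is an arbitrary similarity threshold)
    while True:
        candidate = "".join(secrets.choice(ALLOWED_CHARS) for _ in range(length))
        if not _keyword.iskeyword(candidate) and not get_close_matches(
            candidate, _keyword.kwlist, cutoff=0.8
        ):
            return candidate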
ichux / crawler.py
Created Mar 3, 2021 — forked from AO8/crawler.py
Crawl a website and gather all internal links with Python and BeautifulSoup.
# Adapted from example in Ch.3 of "Web Scraping With Python, Second Edition" by Ryan Mitchell
import re
import requests
from bs4 import BeautifulSoup

pages = set()

def get_links(page_url):
    global pages
    # preview truncated; body reconstructed from the book's Wikipedia example
    html = requests.get(f"http://en.wikipedia.org{page_url}").text
    for link in BeautifulSoup(html, "html.parser").find_all("a", href=re.compile("^(/wiki/)")):
        if link.attrs["href"] not in pages:
            pages.add(link.attrs["href"])
            get_links(link.attrs["href"])

get_links("")
routeros.txt
Chateau LTE12
http://192.168.88.1/webfig/#Wireless.Security_Profiles.0
http://192.168.88.1/webfig/#Wireless.WiFi_Interfaces.1
- change the SSID name/Hide SSID
http://192.168.88.1/webfig/#IP:DHCP_Server.Leases
http://192.168.88.1/webfig/#Interfaces.LTE_APNs
http://192.168.88.1/webfig/#System:Users.Change_Password
missing-lectures.sh
#!/bin/bash
USERAGENT="Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/600.7.12 (KHTML, like Gecko) Version/8.0.7 Safari/600.7.12"
# declare -a URLS=("https://www.youtube.com/watch?v=Z56Jmr9Z34Q" "https://www.youtube.com/watch?v=kgII-YWo3Zw" "https://www.youtube.com/watch?v=a6Q8Na575qc" "https://www.youtube.com/watch?v=sz_dsktIjt4" "https://www.youtube.com/watch?v=e8BO_dYxk5c" "https://www.youtube.com/watch?v=2sjqTHE0zok" "https://www.youtube.com/watch?v=l812pUnKxME" "https://www.youtube.com/watch?v=_Ms1Z4xfqv4" "https://www.youtube.com/watch?v=tjwobAmnKTo" "https://www.youtube.com/watch?v=JZDt-PRq0uo" "https://www.youtube.com/watch?v=Wz50FvGG6xU" )
# for value in "${URLS[@]}"; do
#   youtube-dl --restrict-filenames \
#     --no-check-certificate \
#     --user-agent "$USERAGENT" "$value"
# done
es_requests.py
import requests

URL = "192.168.88.19:9200"

# match_all query against every index on the node
response = requests.get(
    f"http://{URL}/_search",
    data='{"query": {"match_all": {}}}',
    headers={"Content-Type": "application/json"},
)
print(response.status_code)
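Unpacking the body, assuming the standard Elasticsearch _search response shape:

# hits.hits is where Elasticsearch puts the matching documents
for hit in response.json()["hits"]["hits"]:
    print(hit["_index"], hit["_id"], hit["_source"])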