# (GitHub Gist page chrome captured along with the snippets — not part of any code)
# Skip to content
# Instantly share code, notes, and snippets.

from pathlib import Path
import subprocess
from tqdm import tqdm
import re
# ────────────────── SETTINGS ──────────────────
root_input = Path(r"D:\Downloads\Breaking Bad")  # source tree: where your current seasons are
root_output = Path(r"F:\Breaking Bad")  # destination tree: where the "Goblin" copies will be created
PROGRESS_STEPS = 5  # progress-update granularity in percent; 1 = report every 1 % (slowest but smoothest)
#include <windows.h>
#include <commctrl.h>
#include <shlobj.h>
#include "resource.h"
#include "md5.h"
#ifndef _DEBUG
#pragma comment(linker, "/entry:_WinMain /nodefaultlib /subsystem:windows /filealign:512 /stack:65536,65536")
#pragma comment(linker, "/merge:.data=.text /merge:.rdata=.text /section:.text,ewrx /ignore:4078")
#endif
#include <winsock2.h>
#include "resource.h"
#ifndef _DEBUG
#pragma comment(linker, "/entry:_WinMain /nodefaultlib /subsystem:windows /filealign:512 /stack:65536,65536")
#pragma comment(linker, "/merge:.data=.text /merge:.rdata=.text /section:.text,ewrx /ignore:4078")
#endif
#define OPTION_BOTH 1
#define OPTION_MODEM 2
#include <stdio.h>
#if defined(__unix__) || defined(__unix)
#include <stdlib.h>
#include <unistd.h>
#include <string.h>
#include <malloc.h>
#include <netdb.h>
#include <sys/socket.h>
#include <netinet/tcp.h>
import os
import re
import sys
import csv
import shutil
# import pickle
import requests
import threading
import concurrent.futures
from glob import glob
import os
import re
import sys
import csv
import shutil
import requests
import threading
import concurrent.futures
from glob import glob
from tqdm import tqdm
# Get data from a complex webapp using a headless browser
#
# Load scraper settings from settings_scrap.txt into a flat dict.
# File layout (inferred from the slice below — TODO confirm): line 1 is a
# header and is skipped; lines 2-10 hold "key = value" pairs.
settings = {}
with open('settings_scrap.txt', encoding='utf-8') as f:
    lines = f.read().splitlines()
for line in lines[1:10]:  # skip the header line; read the nine settings lines
    # Split on the first '=' only, so values may themselves contain '='.
    key, value = line.split('=', 1)
    # Normalize keys: trim surrounding whitespace and drop inner spaces.
    settings[key.strip().replace(' ', '')] = value.strip()
# Search data in a SQLite database, save it to the Excel file
import os
import sqlite3
from tqdm import tqdm
from datetime import datetime
from openpyxl import load_workbook
from openpyxl.styles import Font, PatternFill, Color, Border, Side, Alignment
database_file_name = 'TZ092005.db'
# Import data from a dynamic page and save it to the Excel file
import os
import xlsxwriter
from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
import selenium.webdriver.support.expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.common.exceptions import ElementNotVisibleException
# Extract data from the anuga.com downloaded pages and create a sophisticated CSV table
from bs4 import BeautifulSoup
import csv
from tqdm import trange
import re
# Inclusive range of downloaded anuga.com result pages to process.
page_first = 1
page_last = 7789