@arush15june
Last active December 27, 2022 12:27
Parse an HTML document, upload its local and remote images to Imgur, and update the document with the Imgur links.
import os
import sys
import requests

from base64 import b64encode
from bs4 import BeautifulSoup
from urllib.parse import urlparse

# Client ID of a registered Imgur application (required for anonymous uploads).
IMGUR_CLIENT_ID = ""

# Imgur anonymous image upload endpoint.
IMGUR_UPLOAD_URL = "https://api.imgur.com/3/upload.json"


def _soup(file):
    """Parse an HTML file into a BeautifulSoup tree."""
    with open(file, "r") as f:
        soup = BeautifulSoup(f.read(), "html.parser")
    return soup


def _upload_imgur(name, data, client_id=IMGUR_CLIENT_ID):
    """Upload raw image bytes to Imgur and return the hosted image link."""
    file = b64encode(data)
    headers = {"Authorization": f"Client-ID {client_id}"}
    req = requests.post(
        IMGUR_UPLOAD_URL,
        headers=headers,
        data={
            "image": file,
            "type": "base64",
            "name": name,
        },
    )
    data = req.json()
    return data["data"]["link"]


def _upload_image_local(path, client_id=IMGUR_CLIENT_ID):
    """Read an image from the local filesystem and upload it to Imgur."""
    with open(path, "rb") as img:
        print(f"[+] uploading {path}")
        return _upload_imgur(os.path.basename(path), img.read(), client_id)


def _upload_image_url(url, client_id=IMGUR_CLIENT_ID):
    """Fetch a remote image and re-upload it to Imgur."""
    print(f"[+] fetching from url {url}")
    resp = requests.get(url)
    print(f"[+] uploading {url}")
    return _upload_imgur(os.path.basename(url), resp.content, client_id)


def uri_validator(x):
    """Return True if x looks like an absolute URL (scheme and host present)."""
    try:
        result = urlparse(x)
        return all([result.scheme, result.netloc])
    except ValueError:
        return False


if __name__ == "__main__":
    if len(sys.argv) != 2:
        print("""Usage: python upload.py <file.html>
file.html can contain local files or remote resources; these will be uploaded to Imgur""")
        sys.exit(1)

    file = sys.argv[1]
    soup = _soup(file)

    # Upload every <img> and point its src at the Imgur copy.
    images = soup.find_all("img")
    for img in images:
        src = img["src"]
        try:
            if uri_validator(src):
                link = _upload_image_url(src)
            else:
                link = _upload_image_local(src)
            print(f"[+] uploaded {src} to {link}")
            img["src"] = link
        except Exception as e:
            print(f"[!] failed to upload {src}: {e}")

    # Replace every <a> href with the {{.URL}} template placeholder.
    placeholder = "{{.URL}}"
    hrefs = soup.find_all("a")
    for href in hrefs:
        if not href.has_attr("href"):
            continue
        lnk = href["href"]
        print(f"[+] changing {lnk} to {placeholder}")
        href["href"] = placeholder

    # Write the rewritten document back over the input file.
    with open(file, "wb") as f_output:
        f_output.write(soup.prettify("utf-8"))
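
A minimal usage sketch, assuming the script is saved as upload.py and IMGUR_CLIENT_ID is filled in with a valid Imgur application client ID; the file names, URL, and client ID below are hypothetical:

# Rewrite all <img> tags in page.html in place:
#   python upload.py page.html

# The helpers can also be called directly from other code:
link = _upload_image_local("figures/chart.png", client_id="0123456789abcde")
print(link)  # e.g. an https://i.imgur.com/... URL returned by the upload endpoint

link = _upload_image_url("https://example.com/photo.jpg", client_id="0123456789abcde")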