@safebuffer
Created October 3, 2021 15:41
mixed-content finder (i don't remember why i created this script)
import sys
import logging as log

import requests
import urllib3
import coloredlogs
from bs4 import BeautifulSoup
from multiprocessing.dummy import Pool  # thread pool, despite the multiprocessing name

# Silence the InsecureRequestWarning triggered by verify=False below.
urllib3.disable_warnings()
coloredlogs.install()

TIMEOUT = 30

# HTML tags mapped to the attributes that can carry a URL worth checking.
TARGET_TAGS = {
    'audio':  ['src'],
    'embed':  ['src'],
    'form':   ['action'],
    'iframe': ['src'],
    'img':    ['src', 'srcset', 'data-src'],
    'link':   ['href'],
    'object': ['data'],
    'param':  ['value'],
    'script': ['src'],
    'source': ['src', 'srcset'],
    'video':  ['src'],
}

def found_mixed_content(url):
    # Log the hit and append the offending URL to a results file.
    log.critical(f"[+] Found something at {url}")
    with open('mixed_content.txt', 'a') as out:
        out.write(url + '\n')

def check_mixed_content(base):
    headers = {
        'User-Agent': "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87",
    }
    try:
        html = requests.get(base, timeout=TIMEOUT, verify=False, headers=headers)
    except Exception as e:
        log.error(f"error requests.get {e}")
        return None
    if not html or not html.text:
        # A Response is falsy for 4xx/5xx status codes; also skip empty bodies.
        log.warning(f"bad request {base} {html}")
        return None
    soup = BeautifulSoup(html.text, "html.parser")
    for tag, attrs in TARGET_TAGS.items():
        for foundtag in soup.find_all(tag):
            for attr in attrs:
                if attr not in foundtag.attrs:
                    continue
                value = foundtag.attrs[attr]
                # Substring match, so srcset lists containing several URLs are caught too.
                if 'http://' in value:
                    found_mixed_content(value)
                    print(f"[+] found mixed-content at {tag} {attr} : {base} {foundtag}")
                else:
                    log.info(f"NO mixed at {tag} {attr} : {base} {foundtag}")

def start_mixed_content_scan(url):
    # Bare hostnames get an http:// prefix; URLs that already carry a scheme
    # are passed through unchanged.
    if 'http://' not in url and 'https://' not in url:
        url = 'http://' + url
    try:
        check_mixed_content(url)
    except Exception as e:
        log.error(f"bad check_mixed_content {e}")

if __name__ == "__main__":
    if len(sys.argv) > 2:
        hostsarg = sys.argv[1]
        try:
            # Deduplicate hosts while preserving their order in the input file.
            to_go = []
            with open(hostsarg, 'r') as hosts:
                for host in hosts:
                    host = host.strip()
                    if host and host not in to_go:
                        to_go.append(host)
            pl = Pool(int(sys.argv[2]))
            pl.map(start_mixed_content_scan, to_go)
        except Exception as e:
            log.error(f"[-] error in main {e}")
    else:
        log.error(f"usage: {sys.argv[0]} urls.txt multiprocessing-count")
        sys.exit(1)
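
To run the scanner, pass a file of hosts or URLs (one per line) and a thread count for the Pool. The script filename and the count of 10 below are just examples; hits are appended to mixed_content.txt in the working directory:

    python3 mixed_content_finder.py urls.txt 10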
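
For a quick offline sanity check of the matching logic, here is a minimal sketch against a made-up HTML string (same BeautifulSoup substring test as check_mixed_content; the URLs and tag subset are hypothetical):

    from bs4 import BeautifulSoup

    # Hypothetical HTTPS page that still loads one resource over plain http://.
    sample = '''
    <html>
      <img src="http://cdn.example.com/logo.png">
      <script src="https://cdn.example.com/app.js"></script>
    </html>
    '''

    soup = BeautifulSoup(sample, "html.parser")
    for tag, attrs in {'img': ['src'], 'script': ['src']}.items():
        for found in soup.find_all(tag):
            for attr in attrs:
                value = found.attrs.get(attr, '')
                if 'http://' in value:
                    print(f"mixed content hit: <{tag} {attr}={value}>")

Only the http:// image is reported; the https:// script passes.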