Skip to content

Instantly share code, notes, and snippets.

@href href/cloudbleed.py
Last active Feb 24, 2017

Embed
What would you like to do?
Checks exported 1Password URLs against the domains affected by Cloudbleed.
#!/usr/bin/python3
""" Checks exported 1password urls against the domains affected by Cloudbleed.
Usage:
1. Export the 1password urls (as csv containing *only* the url column!!).
2. Store the exported file in the current directory as 'export.csv'.
3. Run python3 ./cloudbleed.py
"""
import bisect
import os
from urllib.parse import urlparse
from urllib.request import urlretrieve
from zipfile import ZipFile
def read_domains():
    """Yield each non-empty domain from the Cloudbleed-affected sites list.

    Downloads the ``pirate/sites-using-cloudflare`` archive to a file under
    /tmp on first use; later runs reuse the cached zip. Domains are yielded
    lazily, one per line of the sorted list inside the archive, so the whole
    (large) file is never held in memory at once.
    """
    url = 'https://github.com/pirate/sites-using-cloudflare/archive/master.zip'
    path = 'sites-using-cloudflare-master/sorted_unique_cf.txt'
    temp = '/tmp/sites-using-cloudflare.zip'
    if not os.path.exists(temp):
        urlretrieve(url, temp)
    with open(temp, 'rb') as f:
        with ZipFile(f) as zipfile:
            # Use a context manager for the member too: the original
            # zipfile.open(path).readlines() leaked the handle and loaded
            # every line into memory before the generator streamed them.
            with zipfile.open(path) as member:
                for raw in member:
                    line = raw.decode('utf-8').strip('\n')
                    if line:
                        yield line
def extract_urls(urls):
    """Yield parsed URLs from the export file named by *urls*.

    Each line is stripped of surrounding quotes, commas, and whitespace,
    parsed with :func:`urllib.parse.urlparse`, and yielded only when it has
    a network location (i.e. looks like an absolute URL).
    """
    with open(urls, 'r') as f:
        # Iterate the file object directly: readlines() materialized the
        # whole export in memory for no benefit.
        for line in f:
            parsed = urlparse(line.strip('", \n'))
            if parsed.netloc:
                yield parsed
def subdomains(hostname):
    """Yield *hostname* and each successively shorter parent domain.

    Stops before the bare top-level label: for ``'a.b.com'`` this yields
    ``'a.b.com'`` then ``'b.com'`` (never ``'com'``). A hostname with no
    dot yields nothing.
    """
    labels = hostname.split('.')
    while len(labels) > 1:
        yield '.'.join(labels)
        labels = labels[1:]
def contains(lst, value):
    """Return True iff *value* occurs in the sorted sequence *lst*.

    Uses binary search, so membership is O(log n) instead of the O(n)
    of the ``in`` operator on a list/tuple.
    """
    i = bisect.bisect_left(lst, value)
    if i == len(lst):
        return False
    return lst[i] == value
def find_vulnerable_urls(export):
    """Print every Cloudbleed-affected domain matched by a URL in *export*.

    Loads the affected-domain list once, then checks each exported URL's
    hostname and every parent domain against it.
    """
    affected = tuple(read_domains())
    for parsed in extract_urls(export):
        hits = (d for d in subdomains(parsed.netloc) if contains(affected, d))
        for hit in hits:
            print(hit)


if __name__ == '__main__':
    find_vulnerable_urls('./export.csv')
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
You can’t perform that action at this time.