Skip to content

Instantly share code, notes, and snippets.

@vivekn
Created January 5, 2016 11:50
Show Gist options
  • Save vivekn/52bf04dae2f4418314d2 to your computer and use it in GitHub Desktop.
Save vivekn/52bf04dae2f4418314d2 to your computer and use it in GitHub Desktop.
Crawl and download Varadhan's lecture notes on probability, measure theory, and large deviations. Requires: gevent, bs4.
import gevent
from gevent import monkey
monkey.patch_all()
from bs4 import BeautifulSoup
from urllib2 import urlopen
from os import mkdir, path
BASE = 'http://math.nyu.edu/faculty/varadhan'
def mkpath(url):
    """Create, one level at a time, the local directory hierarchy for *url*.

    *url* is a site-relative path such as '/notes/prob/ch1.pdf'; every
    parent directory ('notes', then 'notes/prob') is created under the
    current working directory if missing.  The trailing file component
    is ignored.
    """
    # Strip slashes first so '' / '/' yield an empty folder list; the
    # unconditional [:-1] drops the file component (and is a no-op on []).
    folders = url.strip('/').split('/')[:-1]
    p = ''
    for folder in folders:
        # BUG FIX: the original did `p += folders[i]`, concatenating
        # segments with no separator ('a' + 'b' -> 'ab'), so nested
        # directories were created with mangled names.  path.join
        # inserts the platform separator.
        p = path.join(p, folder)
        if not path.exists(p):
            print('Creating path ' + p)
            mkdir(p)
def download(url):
    """Fetch BASE + url over HTTP and save it under the matching local path.

    Parent directories are created first via mkpath.  Failures are
    reported and skipped so one bad link does not abort the whole crawl.
    (The original used a bare `except: pass`, which silently swallowed
    every exception — including KeyboardInterrupt — and could leak the
    file handle if the write failed.)
    """
    print('Downloading ' + BASE + url)
    try:
        mkpath(url)
        resp = urlopen(BASE + url)
        # `with` guarantees the file is closed even if the write raises.
        with open(url.strip('/'), 'wb') as f:
            f.write(resp.read())
    except Exception as e:  # narrowed from bare `except:`; report, don't hide
        print('Failed ' + url + ': ' + repr(e))
def get_links(url):
    """Fetch the page at BASE + url and return the href of every anchor.

    Anchors without an href attribute are skipped — the original indexed
    tag['href'] directly and raised KeyError on a bare <a> tag.
    """
    print('Crawling ' + BASE + url)
    # Name the parser explicitly: bs4 warns, and may silently pick a
    # different parser per machine, when none is given.  find_all is the
    # current name for the deprecated findAll alias.
    soup = BeautifulSoup(urlopen(BASE + url).read(), 'html.parser')
    return [tag.get('href') for tag in soup.find_all('a') if tag.get('href')]
def crawl():
    """Walk the site from BASE, collecting PDF links and downloading them.

    Pages are visited depth-first from a stack seeded with the site root.
    PDF links not already present on disk are buffered and downloaded
    concurrently on a gevent pool in batches of 10 (plus a final flush
    when the stack drains).
    """
    # `import gevent` alone does not load the gevent.pool submodule, so
    # the original's gevent.pool.Pool() could raise AttributeError.
    from gevent.pool import Pool
    queue = ['']
    seen = set()  # BUG FIX: without this, link cycles re-crawl pages forever
    pdfs = []
    while queue:
        page = queue.pop()
        if page in seen:
            continue
        seen.add(page)
        for link in get_links(page):
            if link.startswith('./'):
                link = link[1:]  # normalize './x' -> '/x'
            if link.endswith('.pdf') and not path.exists(link.strip('/')):
                pdfs.append(link)
            elif link.endswith('/') or link.endswith('html'):
                queue.append(link)
        # Flush in batches of 10, or whatever is left once the stack empties.
        if len(pdfs) >= 10 or (pdfs and not queue):
            Pool().map(download, pdfs)
            pdfs = []


if __name__ == '__main__':
    # Guarded so that importing this module does not kick off a crawl.
    crawl()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment