Fetches Mila Kunis pics from Reddit. Doesn't use the Reddit API; I'll work on that later. Wanted to download a few pics fast, so I hacked this up. Downloaded 28 pics and then got restricted by Reddit. I'll also add a User-Agent header later.
from BeautifulSoup import BeautifulSoup
import urllib2
import os
import re

def getLinks(URL, pics=None):
    # Accumulate pic links across recursive calls into one list.
    if pics is None:
        pics = []
    print len(pics), "pics yet."
    print "fetching", URL
    try:
        page = urllib2.urlopen(URL)
    except urllib2.HTTPError:
        print "Reddit restricts bots"
        print "Couldn't fetch"
        print URL
        print "Returning all the pics. Hope it works."
        print len(pics)
        return pics
    print "page fetched"
    soup = BeautifulSoup(page)
    links = soup.findAll("a")
    for link in links:
        pic_url = link.get("href")
        # The substring filter's original value was elided in this copy.
        if pic_url and "" in pic_url:
            pics.append(pic_url)
    pics = list(set(pics))
    #print pics
    # Follow Reddit's "next" pagination link recursively, capturing the
    # return value so links from deeper pages aren't lost.
    next = soup.find("a", rel="nofollow next")
    if next:
        next_url = next.get("href")
        if next_url:
            pics = getLinks(next_url, pics)
    return pics

def fetchPics(pics, default_dir):
    print len(pics), "pics"
    #pics = list(set(pics))
    for pic in pics:
        data = urllib2.urlopen(pic).read()
        filename = re.split("/", pic)[-1]
        filename = os.path.join(default_dir, "Mila_Kunis_" + filename)
        print "downloading and saving to", filename
        f = open(filename, "wb")
        f.write(data)
        f.close()

#URL = ""
URLs = ["", ""]  # the original subreddit listing URLs were elided in this copy
default_dir = os.path.join(os.path.expanduser("~"), "Pictures/")
for URL in URLs:
    pics = getLinks(URL)
    fetchPics(pics, default_dir)
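
The description mentions adding a header later. A minimal sketch of what that could look like: urllib2 accepts custom headers through a Request object, and a descriptive User-Agent is what Reddit expects from scripts. The fetch helper and the User-Agent string below are placeholders, not part of the original gist.

import urllib2

def fetch(URL):
    # Hypothetical helper: wraps urlopen with a custom User-Agent.
    # Reddit asks bots to send a unique, descriptive User-Agent; the
    # value here is a placeholder.
    req = urllib2.Request(URL, headers={"User-Agent": "mila-pics-fetcher/0.1"})
    return urllib2.urlopen(req)

Swapping urllib2.urlopen(URL) in getLinks for fetch(URL) would be enough to attach the header.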
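
The description also notes the script doesn't use the Reddit API yet. One way to do that, sketched under the assumption of an old-style subreddit listing URL: appending ".json" to a listing URL returns the same page as JSON, which avoids HTML parsing entirely. getLinksJSON is a hypothetical replacement for getLinks, and the field names follow Reddit's public listing format.

import json
import urllib2

def getLinksJSON(listing_url, pics=None):
    # Hypothetical API-based replacement for getLinks.
    if pics is None:
        pics = []
    req = urllib2.Request(listing_url + ".json",
                          headers={"User-Agent": "mila-pics-fetcher/0.1"})
    listing = json.load(urllib2.urlopen(req))
    for child in listing["data"]["children"]:
        url = child["data"]["url"]
        # Keep only direct image links.
        if url.endswith((".jpg", ".jpeg", ".png", ".gif")):
            pics.append(url)
    # listing["data"]["after"] holds the token for fetching the next page.
    return pics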