Skip to content
Create a gist now

Instantly share code, notes, and snippets.

Simple script for faster parallel URL fetching with urllib2, including exception handling
import Queue
import httplib
import threading
import urllib2
def read_url(url, queue):
    """Fetch *url* with urllib2 and put the response body on *queue*.

    Intended to run as a thread target (see fetch_parallel). On any
    fetch error the exception is logged via checksLogger and nothing
    is enqueued.

    :param url: URL string to fetch.
    :param queue: a Queue.Queue that collects response bodies.
    """
    # NOTE(review): checksLogger is not defined in this file — presumably
    # a module-level logger configured elsewhere; verify before running.
    try:
        data = urllib2.urlopen(url).read()
    except urllib2.HTTPError as e:
        checksLogger.error('HTTPError = ' + str(e.code))
    except urllib2.URLError as e:
        checksLogger.error('URLError = ' + str(e.reason))
    except httplib.HTTPException as e:
        checksLogger.error('HTTPException = ' + str(e))
    except Exception:
        # Catch-all boundary: log the full traceback rather than
        # letting the worker thread die silently.
        import traceback
        checksLogger.error('generic exception: ' + traceback.format_exc())
    else:
        # Success path only: 'data' is unbound when any handler above
        # ran, so the put/print must live in this else clause.
        queue.put(data)
        print('Fetched %s from %s' % (len(data), url))
def fetch_parallel(list_of_urls):
    """Fetch every URL in *list_of_urls* concurrently, one thread each.

    Blocks until all fetches finish, then returns a Queue.Queue holding
    the response bodies (in completion order, not input order). URLs
    that failed contribute nothing to the queue.

    :param list_of_urls: iterable of URL strings.
    :return: Queue.Queue of response bodies.
    """
    result = Queue.Queue()
    threads = [threading.Thread(target=read_url, args=(url, result))
               for url in list_of_urls]
    # Start all workers first so the fetches overlap, then wait for
    # each one to complete before handing the queue back.
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    return result
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Something went wrong with that request. Please try again.