@lhk
Forked from Mitmischer/gist:4196882
Created December 3, 2012 19:05
import concurrent.futures
import urllib.request

# Ask the user for the range of query indices to fetch.
lowerBound = int(input("from: "))
upperBound = int(input("to: "))

# Retrieve a single page for query index i and return its payload as a string.
def load_url(i, timeout):
    conn = urllib.request.urlopen(
        'http://www.entwickler-ecke.de/files/eeags2012paranuss.php?q=' + str(i),
        timeout=timeout)
    # The body is read as bytes; the payload is taken as whatever sits between
    # the first pair of single quotes in its string form.
    s = str(conn.read())
    return s.split('\'')[1]

responses = []

# A with statement ensures the worker threads are cleaned up promptly.
with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
    # Start the load operations and map each future to its query index.
    future_to_index = {executor.submit(load_url, i, 60): i
                       for i in range(lowerBound, upperBound + 1)}
    for future in concurrent.futures.as_completed(future_to_index):
        idx = future_to_index[future]
        try:
            data = future.result()
        except Exception as exc:
            print('query %r generated an exception: %s' % (idx, exc))
        else:
            # print('query %r delivers the result %d' % (idx, int(data)))
            responses.append((idx, int(data)))

# as_completed yields results in completion order, so sort by query index.
responses = sorted(responses, key=lambda item: item[0])
print(responses)
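
# For reference, a minimal sketch of the same submit/as_completed pattern with
# a stand-in loader, so it can be run without network access. fake_load and its
# squaring payload are illustrative assumptions, not part of the original gist.
import concurrent.futures

def fake_load(i):
    # Stub in place of load_url: just square the index.
    return i * i

results = []
with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
    future_to_index = {executor.submit(fake_load, i): i for i in range(1, 6)}
    for future in concurrent.futures.as_completed(future_to_index):
        idx = future_to_index[future]
        try:
            results.append((idx, future.result()))
        except Exception as exc:
            print('query %r generated an exception: %s' % (idx, exc))

print(sorted(results))  # e.g. [(1, 1), (2, 4), (3, 9), (4, 16), (5, 25)]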