@andrefsp
Created February 10, 2016 18:26
This gist shows how to build a non-blocking, parallel URL fetcher using aiohttp and asyncio.
from datetime import datetime
import logging
import asyncio

from aiohttp import web
import requests

logger = logging.getLogger()


@asyncio.coroutine
def parallel_fetch(request):
    loop = asyncio.get_event_loop()

    @asyncio.coroutine
    def get_url(url, future):
        # requests.get is blocking, so run it in the default thread-pool
        # executor and resolve the future with the response once it returns.
        logger.warning("%s fetching %s" % (datetime.utcnow().isoformat(), url))
        future.set_result((yield from loop.run_in_executor(None, requests.get, url)))
        logger.warning("%s finish %s" % (datetime.utcnow().isoformat(), url))

    # Schedule one fetch task per URL; they all run concurrently.
    future_results = []
    for url in ("http://ebay.co.uk", "http://www.google.com", "http://www.facebook.com"):
        future = asyncio.Future()
        future_results.append(future)
        asyncio.async(get_url(url, future))  # asyncio.ensure_future() on Python >= 3.4.4

    # Block the coroutine (not the event loop) until every fetch has finished,
    # then collect the responses.
    yield from asyncio.wait(future_results)
    statuses = [future.result() for future in future_results]
    return web.Response(body=str("%s" % statuses).encode('utf-8'))


app = web.Application()
app.router.add_route('GET', '/', parallel_fetch)
web.run_app(app)
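
On newer Python and aiohttp versions the same idea can be expressed without requests or the thread-pool executor at all. Below is a minimal, untested sketch of that variant, assuming aiohttp 3.x and Python 3.7+: aiohttp's own ClientSession replaces requests, and asyncio.gather replaces the manual Future bookkeeping.

import asyncio

from aiohttp import ClientSession, web


async def parallel_fetch(request):
    urls = ("http://ebay.co.uk", "http://www.google.com", "http://www.facebook.com")
    async with ClientSession() as session:

        async def get_status(url):
            # Each request runs concurrently on the event loop; no threads needed.
            async with session.get(url) as response:
                return response.status

        statuses = await asyncio.gather(*(get_status(url) for url in urls))
    return web.Response(text="%s" % list(statuses))


app = web.Application()
app.router.add_route('GET', '/', parallel_fetch)
web.run_app(app)

With either version, running the script and requesting http://localhost:8080/ (web.run_app's default port) should return all three results in roughly the time of the slowest fetch, since the requests overlap instead of running sequentially.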