@gbrault
Created May 10, 2019 14:01
Calling synchronous code from asyncio
import asyncio
import time
from urllib.request import urlopen

@asyncio.coroutine
def count_to_10():
    for i in range(11):
        print("Counter: {}".format(i))
        yield from asyncio.sleep(.5)

def get_page_len(url):
    # This is the blocking sleep (not the async-friendly one)
    time.sleep(2)
    page = urlopen(url).read()
    return len(page)

@asyncio.coroutine
def run_get_page_len():
    loop = asyncio.get_event_loop()
    # Run the blocking function in the default ThreadPoolExecutor
    future1 = loop.run_in_executor(None, get_page_len, 'http://calebmadrigal.com')
    #data1 = yield from future1
    return future1

@asyncio.coroutine
def print_data_size():
    # Awaiting the coroutine resolves the returned future to the page length
    data = yield from run_get_page_len()
    print("Data size: {}".format(data))

loop = asyncio.get_event_loop()
tasks = [
    # asyncio.async() only parses on Python <= 3.6; it became asyncio.ensure_future()
    asyncio.async(count_to_10()),
    asyncio.async(print_data_size())]
loop.run_until_complete(asyncio.wait(tasks))
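
For reference, a minimal sketch of the same pattern written with modern async/await syntax instead of the generator-based @asyncio.coroutine and asyncio.async helpers. It keeps the URL and the default executor from the gist; it is an equivalent rewrite, not the original file:

import asyncio
import time
from urllib.request import urlopen

async def count_to_10():
    for i in range(11):
        print("Counter: {}".format(i))
        await asyncio.sleep(.5)

def get_page_len(url):
    time.sleep(2)                     # blocking sleep, runs in a worker thread
    return len(urlopen(url).read())

async def print_data_size():
    loop = asyncio.get_running_loop()
    # Offload the blocking call to the default ThreadPoolExecutor
    data = await loop.run_in_executor(None, get_page_len, 'http://calebmadrigal.com')
    print("Data size: {}".format(data))

async def main():
    await asyncio.gather(count_to_10(), print_data_size())

asyncio.run(main())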
gbrault commented May 10, 2019
Counter: 0
Counter: 1
Counter: 2
Counter: 3
Counter: 4
Counter: 5
Data size: 337581
Counter: 6
Counter: 7
Counter: 8
Counter: 9
Counter: 10
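
On Python 3.9+, the executor plumbing can also be replaced by asyncio.to_thread, which produces the same interleaving as the output above. A minimal sketch, assuming the same URL as the gist:

import asyncio
from urllib.request import urlopen

async def print_data_size():
    # asyncio.to_thread (Python 3.9+) runs the blocking call in a worker thread
    data = await asyncio.to_thread(lambda: len(urlopen('http://calebmadrigal.com').read()))
    print("Data size: {}".format(data))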
