@wtneal
wtneal / async_scraper.py
Last active April 17, 2019 07:38
asyncio scraper
import asyncio
import aiofiles
import aiohttp
import logging
import re
import sys
import os
import lxml.html
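
The preview shows only the imports. Given that mix (aiohttp for requests, lxml.html for parsing, aiofiles for non-blocking writes), a minimal sketch of how such a scraper might be wired together could look like the following; the URL, output file, and function names are illustrative, not taken from the gist.

import asyncio
import aiofiles
import aiohttp
import lxml.html

async def fetch(session, url):
    # Download a page and return its HTML text.
    async with session.get(url) as response:
        return await response.text()

async def scrape(url, out_path):
    async with aiohttp.ClientSession() as session:
        html = await fetch(session, url)
        # Extract the link targets with lxml before writing the page to disk.
        tree = lxml.html.fromstring(html)
        links = tree.xpath('//a/@href')
        async with aiofiles.open(out_path, 'w') as f:
            await f.write(html)
        return links

if __name__ == '__main__':
    print(asyncio.run(scrape('https://example.org', 'page.html')))
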
@lgliducik
lgliducik / asyncio_test.py
Last active December 9, 2016 07:45
asyncio_test
import funcy
import requests
import aiohttp
import asyncio
import concurrent.futures
pages = ["https://yandex.ru", "https://google.com", "http://selectel.ru"]
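
Only the imports and the pages list survive in this preview; requests and concurrent.futures are presumably there for a blocking comparison. One way the three pages could be fetched concurrently with aiohttp is sketched below; the coroutine names are illustrative, not from the gist.

import asyncio
import aiohttp

pages = ["https://yandex.ru", "https://google.com", "http://selectel.ru"]

async def fetch(session, url):
    # Return just the status code to keep the example small.
    async with session.get(url) as response:
        return url, response.status

async def main():
    async with aiohttp.ClientSession() as session:
        # gather() runs the three requests concurrently on one event loop.
        results = await asyncio.gather(*(fetch(session, url) for url in pages))
    for url, status in results:
        print(url, status)

if __name__ == '__main__':
    asyncio.run(main())
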
@imbolc
imbolc / keep_alive.py
Created October 27, 2015 16:59
Understanding aiohttp.ClientSession
from time import time
import asyncio
import aiohttp
from aiohttp import web
async def web_handler(request):
    n = int(request.GET.get('n', 0))
    return web.Response(text=str(n + 1))
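
The preview shows only the server half (note that request.GET is the pre-1.0 aiohttp API; current releases expose request.query instead). A guess at the client half of the keep-alive experiment: reusing one ClientSession keeps the underlying TCP connection open across requests, which is presumably what the gist is timing. The URL and request count below are illustrative.

import asyncio
from time import time
import aiohttp

async def client(url, count=10):
    started = time()
    async with aiohttp.ClientSession() as session:
        n = 0
        for _ in range(count):
            # Feed the previous answer back in, reusing the session's
            # keep-alive connection for every request.
            async with session.get(url, params={'n': str(n)}) as response:
                n = int(await response.text())
    print(n, 'requests in', time() - started, 'seconds')

if __name__ == '__main__':
    asyncio.run(client('http://localhost:8080/'))
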
@FND
FND / sample0.py
Last active December 9, 2016 07:47
Python asyncio
import time
import random
URIS = ("http://example.org/foo", "http://example.org/bar",
        "http://example.org/baz")

def main():
    for uri in URIS:
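
The preview cuts off inside main(). Given the imports (time, random, no asyncio yet) and the sample0 name, a plausible reading is a synchronous baseline that simulates fetching each URI with a random delay; the sketch below is that guess, not the gist's actual code.

import time
import random

URIS = ("http://example.org/foo", "http://example.org/bar",
        "http://example.org/baz")

def fetch(uri):
    # Simulate a blocking request with a random delay.
    time.sleep(random.random())
    return uri

def main():
    started = time.time()
    for uri in URIS:
        print(fetch(uri))
    print('took %.2f seconds' % (time.time() - started))

if __name__ == '__main__':
    main()
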
@madjar
madjar / scrapper.py
Last active March 5, 2023 15:02
An example of a scraper using asyncio and aiohttp
import asyncio
import aiohttp
import bs4
import tqdm
@asyncio.coroutine
def get(*args, **kwargs):
    response = yield from aiohttp.request('GET', *args, **kwargs)
    return (yield from response.read_and_close(decode=True))
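
The preview uses the old generator-based coroutine style (@asyncio.coroutine with yield from) and aiohttp's read_and_close(), both long removed. A rough modern equivalent of the same get() helper, with a ClientSession added by me and the gist's bs4/tqdm parsing and progress reporting omitted:

import asyncio
import aiohttp

async def get(session, *args, **kwargs):
    # Fetch a URL and return the decoded body text.
    async with session.get(*args, **kwargs) as response:
        return await response.text()

async def main(urls):
    async with aiohttp.ClientSession() as session:
        # Download all pages concurrently.
        return await asyncio.gather(*(get(session, url) for url in urls))

if __name__ == '__main__':
    pages = asyncio.run(main(['https://example.org']))
    print(len(pages), 'page(s) fetched')
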