Created
April 23, 2018 19:09
-
-
Save neighthan/9d9d066fc7114d2881f98d67abaf8318 to your computer and use it in GitHub Desktop.
Download all links from webpage (Python asyncio)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import requests | |
import lxml.html | |
from urllib.parse import unquote | |
from tqdm import tqdm | |
import asyncio | |
from aiohttp import ClientSession, TCPConnector | |
import aiofiles | |
# import async_timeout | |
from argparse import ArgumentParser | |
from typing import List, Optional | |
def parse_links(address: str, filters: Optional[List[str]]=None, verbose: bool=False) -> List[str]:
    """Fetch a web page and return the targets of all its anchor links.

    :param address: URL of the page to fetch and parse.
    :param filters: substrings that must ALL appear in a link for it to be
        kept; ``None`` (the default) keeps every link.
    :param verbose: if True, print a message before fetching the page.
    :return: list of href values (as plain strings) that passed every filter.
    """
    if verbose:
        print(f"Parsing links for {address}")
    filters = [] if filters is None else filters
    dom = lxml.html.fromstring(requests.get(address).content)
    # '//a/@href' selects the href attribute of every <a> tag on the page;
    # all() replaces the original manual keep-flag loop with the same logic.
    return [str(link) for link in dom.xpath('//a/@href')
            if all(filter_ in link for filter_ in filters)]
async def fetch_binary(session, url: str):
    """GET ``url`` through the given session and return the raw body as bytes."""
    async with session.get(url) as response:
        body = await response.read()
    return body
async def save_content(fname: str, content, base_dir: str='./'):
    """Asynchronously write ``content`` (bytes) to ``base_dir + fname``."""
    destination = base_dir + fname
    async with aiofiles.open(destination, 'wb') as out_file:
        await out_file.write(content)
async def download_and_save(session, url: str, semaphore) -> None:
    """Download ``url`` and save it locally under its unquoted basename.

    :param session: aiohttp ClientSession used for the request.
    :param url: URL to download; the filename is everything after the last '/'.
    :param semaphore: asyncio.Semaphore bounding concurrent downloads.

    A timeout is reported but swallowed so one slow link doesn't abort the batch.
    """
    try:
        # ``with await semaphore`` was deprecated in Python 3.8 and removed in
        # 3.10; ``async with`` is the supported way to hold an asyncio.Semaphore.
        async with semaphore:
            content = await fetch_binary(session, url)
            # Take the path component after the last '/' and undo %-encoding.
            fname = unquote(url[url.rindex('/') + 1:])
            await save_content(fname, content)
    except asyncio.TimeoutError:
        print(f"TIMEOUT while downloading {url}")
async def download_links(links, semaphore):
    """Download every URL in ``links`` within a single HTTP session, with a progress bar."""
    async with ClientSession() as session:
        downloads = [download_and_save(session, url, semaphore) for url in links]
        await wait_with_progressbar(downloads)
async def wait_with_progressbar(coroutines):
    """Await the coroutines in completion order, driving a tqdm progress bar."""
    progress = tqdm(asyncio.as_completed(coroutines), total=len(coroutines))
    for finished in progress:
        await finished
if __name__ == '__main__':
    parser = ArgumentParser(description="Parse (and optionally download) all links from a site.")
    parser.add_argument('site', help="URL of site from which to parse links.")
    parser.add_argument('-f', '--filters', help='Only links that contain each filter will be retained.', nargs='+')
    parser.add_argument('-d', '--download', action='store_true', help="Whether to download all links (otherwise, they're printed).")
    parser.add_argument('-t', '--test', action='store_true', help="Testing; if so, print args then exit.")
    args = parser.parse_args()

    if args.test:
        print(args)
    else:
        links = parse_links(args.site, args.filters)
        if args.download:
            max_concurrent_downloads = 8

            async def _main() -> None:
                # Create the semaphore inside the running event loop: before
                # Python 3.10 an asyncio.Semaphore binds to the loop that is
                # current at construction time.
                semaphore = asyncio.Semaphore(max_concurrent_downloads)
                await download_links(links, semaphore)

            # asyncio.run replaces the deprecated get_event_loop /
            # run_until_complete / close sequence and guarantees loop cleanup.
            asyncio.run(_main())
        else:
            for link in links:
                print(link)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment