Skip to content

Instantly share code, notes, and snippets.

@rvvvt
Forked from colyk/vk_ip_async.py
Created February 25, 2019 15:11
Show Gist options
  • Save rvvvt/a048727cee423604df30e080a4d958ca to your computer and use it in GitHub Desktop.
Save rvvvt/a048727cee423604df30e080a4d958ca to your computer and use it in GitHub Desktop.
async
import requests
import asyncio
from bs4 import BeautifulSoup
# Shared accumulator: proxies_parse() appends 'ip:port' strings scraped from
# xseo.in; the __main__ block then feeds them to the checker coroutines.
proxy_list = []
def get_html(URL):
    """Fetch *URL* and return the response body as text.

    On any non-200 status the script prints 'Error' and exits the whole
    program (SystemExit) — fail-fast behavior kept from the original.
    """
    # timeout keeps the script from hanging forever on an unresponsive host
    r = requests.get(URL, timeout=10)
    if r.status_code == 200:
        return r.text
    print('Error')
    exit()
def proxies_parse(html):
    """Parse the xseo.in free-proxy page, appending 'ip:port' strings to proxy_list.

    The extraction is position-dependent: the proxies live in the
    second-to-last <table height="100%">, rows 5 .. -12 of that table.
    NOTE(review): this breaks silently if the site changes its layout.
    """
    soup = BeautifulSoup(html, 'lxml')
    tables = soup.find_all('table', height="100%")
    rows = tables[-2].find_all('tr')[5:-12]
    for row in rows:
        cell = row.find('td')
        # guard: skip malformed rows instead of crashing on None.text
        if cell is None:
            continue
        proxy_list.append(cell.text.strip())
def make_proxy():
    """Download the free-proxy page and populate the global proxy_list.

    (The original body also contained a dead local alias ``a = proxy_list``;
    it was never used and has been removed.)
    """
    URL = 'http://xseo.in/freeproxy'
    html = get_html(URL)
    proxies_parse(html)
async def download_coroutine(ip):
    """Probe one proxy (an 'ip:port' string) by fetching vk.com through it.

    ``requests`` is a blocking library: calling it directly inside a
    coroutine would stall the event loop and make all checks run strictly
    sequentially.  The call is therefore pushed onto the default thread-pool
    executor so the checks actually overlap.
    Prints the outcome; returns nothing.
    """
    URL = 'https://vk.com'
    headers = {'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36'}
    proxy = {'https': 'https://' + ip, 'http': 'http://' + ip}
    print(proxy)
    loop = asyncio.get_event_loop()
    try:
        r = await loop.run_in_executor(
            None,
            lambda: requests.get(URL, headers=headers, proxies=proxy, timeout=10),
        )
        print(r.status_code)
    # narrowed from bare Exception: every requests failure (connect, proxy,
    # timeout, ...) derives from RequestException
    except requests.RequestException:
        print('Error')
    else:
        print('good ip : ', ip)
async def main(urls):
    """Run a proxy check concurrently for every entry in *urls*."""
    # asyncio.wait() with bare coroutine objects is deprecated since 3.8 and
    # removed in 3.11; gather() schedules them as tasks and awaits them all.
    await asyncio.gather(*(download_coroutine(url) for url in urls))
if __name__ == '__main__':
    # Scrape the proxy list, then validate every proxy concurrently.
    make_proxy()
    loop = asyncio.get_event_loop()
    try:
        loop.run_until_complete(main(proxy_list))
    finally:
        loop.close()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment