check_proxy
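A small proxy-checker script: it fetches the free proxy list from http://xseo.in/freeproxy, parses the proxy addresses (ip:port) out of the page, and tests each one against https://vk.com, reporting the proxies that respond.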
import requests
from bs4 import BeautifulSoup

proxy_list = []


def get_html(url):
    """Fetch a page and return its HTML, or abort on a non-200 response."""
    r = requests.get(url)
    if r.status_code == 200:
        return r.text
    raise SystemExit('Error')


def proxies_parse(html):
    """Collect proxy addresses from the xseo.in free-proxy table."""
    soup = BeautifulSoup(html, 'lxml')
    tables = soup.find_all('table', height='100%')
    table = tables[-2]
    # The leading and trailing rows of the table are headers/ads,
    # so only the middle slice holds actual proxy entries.
    trs = table.find_all('tr')[5:-12]
    for tr in trs:
        proxy = tr.find('td').text.strip()
        proxy_list.append(proxy)


def check_proxies():
    """Try each collected proxy against https://vk.com and report the result."""
    headers = {
        'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) '
                      'AppleWebKit/537.36 (KHTML, like Gecko) '
                      'Chrome/58.0.3029.110 Safari/537.36'
    }
    url = 'https://vk.com'
    for ip in proxy_list:
        proxy = {'https': 'https://' + ip}
        print(proxy)
        try:
            # A timeout keeps dead proxies from hanging the whole run.
            r = requests.get(url, headers=headers, proxies=proxy, timeout=5)
            print(r.status_code)
        except Exception:
            print('Error')
        else:
            print('good ip : ', ip)


def main():
    url = 'http://xseo.in/freeproxy'
    html = get_html(url)
    proxies_parse(html)
    check_proxies()


if __name__ == '__main__':
    main()
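To run it, the only dependencies are the libraries the script imports plus the lxml parser that BeautifulSoup is asked to use: pip install requests beautifulsoup4 lxml, then python check_proxy.py.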