Skip to content

Instantly share code, notes, and snippets.

@Alexsandr0x
Created August 2, 2018 12:59
Show Gist options
  • Save Alexsandr0x/311069164c0622f208cd0a27e5cc9154 to your computer and use it in GitHub Desktop.
Captura de proxies gratuitos
import random
import requests
import json
from bs4 import BeautifulSoup
import os
"""
Esse modulo tem como objetivo gerar uma lista de proxies mais recentemente
validados como funcionais pelo free-proxy-list.net.
Essa lista sera gerada para randomicamente escolher proxies para o webdriver
Created by @Alexsandr0x
"""
class ProxyList(object):
    """Fetch and cache a list of recently-validated free Brazilian proxies.

    Proxies are scraped from proxynova.com, geo-tagged with the Brazilian
    state of each IP via ip-api.com, and persisted to a local JSON file so
    a cached list can be used when the site is unreachable.
    """

    def __init__(self):
        # Geo-lookup endpoint: GET <url><ip> returns JSON with a 'region' key.
        self._FREE_GEO_IP_URL = 'http://ip-api.com/json/'
        # Source page listing free proxies located in Brazil.
        self._FREE_PROXY_LIST_URL = 'https://www.proxynova.com/proxy-server-list/country-br/'
        # Local cache file used as a fallback when scraping fails.
        self.proxies_filename = 'proxies.json'
        self.proxies = []

    def get_proxy(self, state=None):
        """Return a random proxy dict, optionally restricted to one state.

        The proxy list is loaded lazily on first call. When `state` is given
        but no proxy matches, a warning is printed and a random proxy from
        any state is returned instead. Returns None when no proxies could be
        loaded at all (BUG FIX: previously this path raised UnboundLocalError
        because `proxy` was never assigned).
        """
        if not self.proxies:
            self._load_proxies_list()
        if not self.proxies:
            return None
        if state:
            try:
                return random.choice([x for x in self.proxies if x['estado'] == state])
            except IndexError:
                # random.choice raises IndexError on an empty candidate list.
                print('[WARNING] Não foi encontrado proxy para estado {}. pegando proxy aleatorio...'.format(state))
        return random.choice(self.proxies)

    def _load_proxies_list(self):
        """Scrape the proxy page, geo-tag each entry and persist to disk.

        On any failure (network error, parse error, ...) a warning is printed
        and the previously cached JSON file is loaded instead, if present.
        """
        try:
            page = requests.get(self._FREE_PROXY_LIST_URL, timeout=60).content
            # Renamed from `list` — that name shadows the builtin.
            proxies = self._parse_geo_ip_page(page)
            proxies = self._add_geo_pos(proxies)
            self.proxies = proxies
            # BUG FIX: open with 'w' (was 'a') — appending concatenates
            # multiple JSON documents across runs and corrupts the cache,
            # breaking the json.load fallback below.
            with open(self.proxies_filename, "w") as f:
                f.write(json.dumps(self.proxies))
        except Exception as error:
            print('[WARNING] Erro ao tentar capturar os proxies mais recentes' + str(error) +
                  'recuperando proxies no arquivo [{}]'.format(self.proxies_filename))
            if os.path.isfile(self.proxies_filename):
                with open(self.proxies_filename) as proxies_input:
                    self.proxies = json.load(proxies_input)

    def _add_geo_pos(self, list_):
        """Annotate each proxy dict in-place with 'estado' via ip-api.com.

        Makes one HTTP request per proxy; the 'region' field of the JSON
        response is stored under the 'estado' key. Returns the same list.
        """
        for proxy in list_:
            geo_response = requests.get(self._FREE_GEO_IP_URL + proxy['ip'], timeout=180)
            geo_response = json.loads(geo_response.text)
            proxy['estado'] = geo_response['region']
        return list_

    def _parse_geo_ip_page(self, page):
        """Parse the proxynova HTML table into a list of {'ip', 'port'} dicts.

        Only rows with more than 4 cells and an uptime 'data-value' above 50
        are kept. The IP cell is obfuscated by the site; the slicing and
        single '.8' replace below undo that obfuscation (site-specific —
        NOTE(review): verify against the live page if scraping breaks).
        """
        p_list = []
        soup = BeautifulSoup(page, 'lxml')
        ip_lists = soup.find('table', {'id': 'tbl_proxy_list'})
        # Skip the header row of the table.
        for row in ip_lists.find_all('tr')[1:]:
            celulas = row.find_all('td')
            if len(celulas) > 4:
                if int(celulas[3].find('div').attrs['data-value']) > 50:
                    # Keep digits/dots only, then strip the obfuscation prefix.
                    ip = ''.join(c for c in celulas[0].text.strip() if c.isdigit() or c == '.')[3:]
                    ip = ip[6:].replace('.8', '', 1)
                    p_list.append({
                        'ip': ip,
                        'port': celulas[1].text.strip()
                    })
        return p_list
if __name__ == '__main__':
    # Demo: print one random proxy for each of a handful of Brazilian states.
    proxy_list = ProxyList()
    states = ('SP', 'MG', 'RJ', 'RR', 'PR')
    for state in states:
        print(proxy_list.get_proxy(state))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment