Skip to content

Instantly share code, notes, and snippets.

@gravcat
Last active June 28, 2018 01:26
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
Save gravcat/61a61e0c4d8d6fc2154fc2daf695c169 to your computer and use it in GitHub Desktop.
import random
import time
from random import randrange

import mechanize
import requests
# Target of the spam campaign (placeholder TLD in this gist).
scammers_url = 'https://baddies.online.notarealtld/'
# Official BIP-0039 English word list, fetched once at startup.
word_list_url = 'https://raw.githubusercontent.com/bitcoin/bips/master/bip-0039/english.txt'
# Common desktop browser User-Agent strings to rotate through.
useragents = [
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.181 Safari/537.36',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.87 Safari/537.36',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:60.0) Gecko/20100101 Firefox/60.0',
    'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.181 Safari/537.36',
    'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:60.0) Gecko/20100101 Firefox/60.0',
]
# Count of form submissions performed so far (1-based).
runs = 1
def get_random_useragent():
    """Pick one plausible browser User-Agent string at random.

    Returns:
        str: an entry drawn from the module-level ``useragents`` list.
    """
    return random.choice(useragents)
def get_new_proxy():
    """Fetch one fresh HTTP proxy from the pubproxy.com API.

    Returns:
        list: ``[ip_port, ip]`` — the proxy endpoint ("ip:port") and its
        bare IP address, taken from the first entry of the API response.

    Raises:
        requests.RequestException: on network failure or timeout.
        KeyError, IndexError: if the API response carries no proxy data.
    """
    # FIX: query parameters must be joined with '&'.  The original used '?'
    # as every separator, so only ``type=http`` was actually honoured and
    # the https/user_agent/limit filters were silently ignored.
    proxy_api_url = ('http://pubproxy.com/api/proxy'
                     '?type=http&https=true&user_agent=true&limit=1')
    # Bounded timeout so a dead API cannot hang the submission loop forever.
    proxy_api_data = requests.get(proxy_api_url, timeout=10)
    proxy_entry = proxy_api_data.json()['data'][0]
    return [proxy_entry['ipPort'], proxy_entry['ip']]
# Initialize the browser client to look like a real browser — the target
# site blocks obvious bots.
br = mechanize.Browser()
# Mimic ordinary browser behaviour (meta-equiv, gzip, redirects, referer)
# and deliberately ignore robots.txt.
br.set_handle_equiv(True)
br.set_handle_gzip(True)
br.set_handle_redirect(True)
br.set_handle_referer(True)
br.set_handle_robots(False)

# Fetch and parse the BIP-0039 word list ONCE, before the loop.  The
# original checked the status code inside the loop and, on failure, fell
# through to use `random_12_words` before it was ever assigned (NameError
# on the first iteration); it also re-split the word list every pass.
response = requests.get(word_list_url)
if response.status_code != 200:
    raise SystemExit('Error in web request: {}'.format(response.status_code))
wordlist = response.text.strip().split('\n')

# Get a proxy to start with.
proxy = get_new_proxy()

while True:
    # Random user agent drawn from the list of most common ones.
    br.addheaders = [("User-agent", get_random_useragent())]
    # Rotate proxies at irregular intervals to look less mechanical.
    if runs % 4 == 0 or runs % 9 == 0 or runs % 13 == 0 or runs % 17 == 0:
        time.sleep(2)
        print("+ Switched proxies!")
        proxy = get_new_proxy()
    # Route this client through the current proxy.
    br.set_proxies({"http": proxy[0]})
    # Open the target page and select its only form.
    scam_page = br.open(scammers_url)
    br.select_form(nr=0)
    # Build 12 garbage words from the real BIP-0039 list so the submission
    # looks like a plausible seed phrase, and drop it into the form field.
    random_12_words = ' '.join(random.sample(wordlist, 12))
    br.form['phrase'] = random_12_words
    # The form also stores the visitor IP in a hidden control; overwrite it
    # with the proxy's IP to make the captured data harder to de-dupe.
    captured_user_ip = br.form.find_control("IP")
    captured_user_ip.readonly = False
    captured_user_ip.value = proxy[1]
    print("===============")
    print("Submitted {} times".format(runs))
    print(br.form['phrase'])
    print(br.addheaders)
    print(captured_user_ip.value)
    # Ship it!
    scam_page = br.submit()
    runs += 1
mechanize
requests
random
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment