Short and dirty page scraper to tell you the different hats that are available on http://scrap.tf. Loops on a ten second delay. If this in any way causes problems for the scrap.tf guys, I'll take it down. However, it's a super useful tool for other fellow hat-hounds.
# scrap.tf-scraper.py
import urllib2
from bs4 import BeautifulSoup
import time

def getItemDivs(url):
    opener = urllib2.build_opener()
    # For this to work, you need to paste your scrap.tf cookies into here. I used the following javascript bookmarklet to get the cookies:
    # javascript:void(document.cookie=prompt(document.cookie,document.cookie));
    opener.addheaders.append(("Cookie", "PASTE YOUR COOKIES HERE!"))
    scrap1 = opener.open(url).read()
    soup = BeautifulSoup(scrap1)
    try:
        divChil = soup.find('div', {'class': 'items-wrapper'})
        children = divChil.findChildren()
        strChildren = []
        for child in children:
            strChildren.append(str(child))
    except:
        strChildren = ['title="no hats :(']
    return strChildren

def getHatNames():
    urllist = ["http://scrap.tf/hats?bot=8", "http://scrap.tf/hats?bot=9", "http://scrap.tf/hats?bot=9"]
    hatArray = []
    for url in urllist:
        divs = getItemDivs(url)
        for div in divs:
            hatArray.append(div.split('title="')[1].split('"')[0])  # Extracts the hat name from the div string, storing it in hatArray
    return hatArray

def main():
    # Yeah, I realize this never actually stops running. Just CTRL-C it like a man!
    while True:
        hats = getHatNames()
        for hat in hats:
            print hat
        time.sleep(10)  # If you want the delay to be longer or shorter, edit this number!

main()
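For anyone on Python 3 (where urllib2 and the print statement are gone), here is a minimal sketch of the same idea using requests and bs4's html.parser. The cookie placeholder, the bot URLs, and the items-wrapper class are carried over from the script above; whether scrap.tf still serves that markup is an assumption. It also reads the title attribute directly from each tag, which is less brittle than splitting the div's string representation.

# scrap_tf_scraper_py3.py -- a hedged Python 3 sketch of the scraper above.
# Assumes the same page structure (a div with class "items-wrapper" whose
# children carry the hat name in a "title" attribute); that may have changed.
import time

import requests                    # third-party: pip install requests
from bs4 import BeautifulSoup      # third-party: pip install beautifulsoup4

COOKIES = "PASTE YOUR COOKIES HERE!"   # same bookmarklet trick as above
URLS = [
    "http://scrap.tf/hats?bot=8",
    "http://scrap.tf/hats?bot=9",
]

def get_hat_names(url):
    """Fetch one bot page and return the hat names found in its items-wrapper."""
    html = requests.get(url, headers={"Cookie": COOKIES}, timeout=30).text
    soup = BeautifulSoup(html, "html.parser")
    wrapper = soup.find("div", class_="items-wrapper")
    if wrapper is None:
        return ["no hats :("]
    # Pull the title attribute straight off each tag instead of string-splitting.
    return [tag["title"] for tag in wrapper.find_all(attrs={"title": True})]

def main():
    while True:
        for url in URLS:
            for hat in get_hat_names(url):
                print(hat)
        time.sleep(10)  # same ten second delay as the original

if __name__ == "__main__":
    main()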