Skip to content

Instantly share code, notes, and snippets.

@aznnico
Last active December 21, 2015 12:29
Show Gist options
  • Save aznnico/6306128 to your computer and use it in GitHub Desktop.
Auto bump in d2lounge.com. Uses chrome cookies. User must login through chrome first.
# d2lounge.com Auto Bumper
# Bumps every 31 min. Uses chrome cookies. User must login through chrome first.
import base64
import cookielib
import getpass
import gzip
import htmlentitydefs
import httplib
import re
import socket
import sqlite3
import StringIO
import sys
import time
import urllib
import urllib2
import zlib
# Path to Chrome's SQLite cookie store for the current Windows user
# (Chrome must have a logged-in dota2lounge.com session already).
COOKIE_LOCATION = r'C:\Users\%s\AppData\Local\Google\Chrome\User Data\Default\Cookies' % getpass.getuser()
# Seconds between bump passes: 31 minutes (see header comment).
BUMP_TIMEOUT = 60*31
class simpleurllib:
"""Simple urllib class with limited functionality used for scraping"""
def __init__(self,timeout=None):
"""Init function. Note that headers is an ordered list."""
if timeout:
self.timeout = timeout
else:
self.timeout = 30
self.hh = urllib2.HTTPHandler()
self.hh.set_http_debuglevel(0)
httplib.HTTPConnection.debuglevel = 1
self.rh = urllib2.HTTPRedirectHandler()
self.cj = cookielib.LWPCookieJar()
self.cj.clear()
self.proxy_handler = urllib2.ProxyHandler({})
self.opener = urllib2.build_opener(self.hh,
urllib2.HTTPCookieProcessor(self.cj),
self.proxy_handler)
self.headers = [
('User-agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1500.72 Safari/537.36'),
('Accept', '*/*'),
('Accept-Language', 'en-us,en;q=0.8'),
('Accept-Encoding', 'gzip,deflate,sdch'),
('Accept-Charset' , 'ISO-8859-1,utf-8;q=0.7,*;q=0.7'),
('Connection' , 'keep-alive'),
('Content-Type', 'application/x-www-form-urlencoded; charset=UTF-8'),
]
self.opener.addheaders = self.headers
def decode(self,page):
"""Uncompress/unzip if response page is compressed/zipped.
Args:
page - a string, likely HTML source of a page from a url GET
request, which may be compressed or zipped
Returns: a string
"""
encoding = page.info().get('Content-Encoding')
if encoding in ('gzip', 'x-gzip', 'deflate'):
content = page.read()
if encoding == 'deflate':
data = StringIO.StringIO(zlib.decompress(content))
else:
data = gzip.GzipFile('', 'rb', 9, StringIO.StringIO(content))
return data.read()
else:
return page.read()
def GetSource(self, url, data=None, request_type=None, more_headers=[],
username=None, password=None, retries=3):
"""Fetches the url and returns the results.
Args:
url - a string, ie: 'http://google.com'
data - a string of the data for the POST request, must be quoted
and joined properly
request_type - a string for the type of request, ie: HEAD, GET
retries - int (optional), specifies how many times to retry
Returns: a string if successful
None if something failed
"""
if data:
if not isinstance(data, str):
return
try:
if data:
request = urllib2.Request(url,data)
else:
request = urllib2.Request(url)
if ((request_type is not None) and (request_type.lower() in
('get','head', 'post', 'put', 'delete', 'trace', 'connect'))):
request.get_method = lambda: request_type
for key,value in more_headers:
request.add_header(key, value)
if username and password:
auth_encoded = base64.encodestring('%s:%s' % (username, password))[:-1]
request.add_header('Authorization', 'Basic %s' % auth_encoded)
res = self.opener.open(request,timeout=self.timeout)
return self.decode(res)
except Exception, e:
if retries > 0:
return self.GetSource(url, data=data, request_type=request_type,
more_headers=more_headers, retries=retries-1)
class d2lounge:
def __init__(self):
print "Starting auto-bumper."
try:
self.conn = sqlite3.connect(COOKIE_LOCATION)
print "Cookie file found."
except:
print "Cookie file could not be found."
sys.exit(1)
self.c = self.conn.cursor()
self.mygetter = simpleurllib()
self.last_bump = 0
def get_cookies(self):
#self.c.execute('PRAGMA table_info(cookies);')
#print self.c.fetchall()
self.c.execute('select name,value from cookies where host_key like "dota2lounge.com";')
cookies = self.c.fetchall()
return dict(cookies)
def add_cookies(self,new_cookies):
for k,v in new_cookies.iteritems():
ck = cookielib.Cookie(version=0, name=k, value=v, port=None, port_specified=False, domain='dota2lounge.com', domain_specified=False, domain_initial_dot=False, path='/', path_specified=True, secure=False, expires=None, discard=True, comment=None, comment_url=None, rest={'HttpOnly': None}, rfc2109=False)
self.mygetter.cj.set_cookie(ck)
def find_trades(self):
a = re.compile('<div class="tradepoll" id="trade(\d+?)">', re.M|re.I|re.DOTALL)
source = self.mygetter.GetSource('http://dota2lounge.com/mytrades')
results = a.findall(source)
return results
def bump(self,trades):
url = 'http://dota2lounge.com/ajax/bumpTrade.php'
more_headers = [('Origin','http://dota2lounge.com'),
('Referer','http://dota2lounge.com/mytrades'),
('X-Requested-With','XMLHttpRequest'),
]
for trade in trades:
data = 'trade=%s'%trade
print "Bumping trade %s" %str(trade)
self.mygetter.GetSource(url,data)
self.last_bump = time.time()
def run(self):
try:
cookies = self.get_cookies()
print "Cookies found."
except:
print "Cookies not found."
sys.exit(1)
self.add_cookies(cookies)
try:
trades = self.find_trades()
if trades:
print "Trades found:",
print ",".join(trades)
self.bump(trades)
else:
print "No trades found."
except Exception, e:
print "exception: %s" %str(e)
def run_forever(self):
while True:
if time.time() - self.last_bump > BUMP_TIMEOUT:
self.run()
else:
print "Waiting %s seconds." %BUMP_TIMEOUT
time.sleep(BUMP_TIMEOUT)
if __name__ == '__main__':
    # Entry point: build the bumper and keep bumping until interrupted.
    bumper = d2lounge()
    bumper.run_forever()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment