Skip to content

Instantly share code, notes, and snippets.

Created May 21, 2017 01:07
Show Gist options
  • Save anonymous/5f6d5bad9e351410e84f150b88273de7 to your computer and use it in GitHub Desktop.
Save anonymous/5f6d5bad9e351410e84f150b88273de7 to your computer and use it in GitHub Desktop.
#!/usr/bin/env python
# don't fuck too hard with it n-st :P
from twisted.words.protocols import irc
from twisted.internet import protocol
from twisted.internet import reactor
from twisted.internet import task
from twisted.python import log
import cfscrape
import validators
import duckduckgo
import tweepy
import sys
from easyprocess import Proc
from bs4 import BeautifulSoup
from datetime import datetime
from subprocess import call
import re
import json
import soundcloud
import requests
import lxml.html
import urbandict
import feedparser
import urllib
import urllib2
import time, sys
import hashlib
import wolframalpha
import mmap
import random
from apixu.client import ApixuClient, ApixuException
import os
# Run relative to the script's own directory so the *.db hash files land
# next to the script regardless of the launch cwd.
abspath = os.path.abspath(__file__)
dname = os.path.dirname(abspath)
os.chdir(dname)
#import antispam

datad = '-----'   # scratch buffer used by the (commented-out) nmap branch
magic = 0         # default feed index for .seclist/.hostingsec/.tpost

# Python 2 hack: reload() re-exposes sys.setdefaultencoding so the call
# below can force UTF-8 everywhere.
reload(sys)

# Twitter API credentials (redacted placeholders -- supply real values).
consumer_key = 'GU'
consumer_secret = 'u'
access_token = '8'  # FIX: original literal was unterminated ('8 with no closing quote)
access_token_secret = 'Zc'
search_text = '#VenteFlash'

auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)

sys.setdefaultencoding('utf-8')

# Browser-like headers; some of the scraped feeds refuse default UAs.
headers = {'Cache-Control':'no-cache','User-Agent':'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:45.0) Gecko/20100101 Firefox/45.0'}
class Bot(irc.IRCClient):
    """Freenode helper bot: relays feed/tweet updates and answers dot-commands."""

    def _get_nickname(self):
        # The nickname is owned by the factory so a reconnect reuses it.
        return self.factory.nickname

    nickname = property(_get_nickname)
def lebfeed(self):
    """Seed hashbox.db with the md5 of the newest LowEndBox feed entry.

    Also sets the module-global `lasthash`. The shortened URL is computed
    for parity with the announce path but is unused here (the announce
    line is commented out).
    """
    scraper = cfscrape.create_scraper()
    feed_xml = scraper.get("https://lowendbox.com/feed/", headers=headers).content
    feed = feedparser.parse(feed_xml)
    title = str(feed.entries[0].title)
    link = str(feed.entries[0].link)
    payload = {'longUrl': link}
    ga = requests.post("https://www.googleapis.com/urlshortener/v1/url?key=AIzaSyC3WlAMmbZjfjsqf2oEc5-jz1C8VOcoFNA", data=json.dumps(payload), headers={'Content-type': 'application/json'}).content
    surl = str(json.loads(ga)['id'])
    global lasthash
    lasthash = hashlib.md5(title + link).hexdigest()
    # FIX: close the db file (the original leaked the handle).  The trailing
    # newline reproduces the old bare `print >> filehash` statement.
    with open('hashbox.db', 'a+') as filehash:
        filehash.write(lasthash + '\n')
    #self.msg("#test",str('[Top] '+let_title+' - '+surl))
def kimo(self):
    """Record md5 hashes of current #VenteFlash tweets into kimsu.db.

    Used to seed/refresh the seen-set that kimsufi() checks against.
    """
    flash_tweets = api.user_timeline('ovh_deals')
    # FIX: open the db once -- the original re-opened (and leaked) a handle
    # for every matching tweet inside the loop.
    with open('kimsu.db', 'a+') as hahme:
        for tweet in flash_tweets:
            if search_text in tweet.text:
                # Newline reproduces the original bare `print >> hahme`.
                hahme.write(hashlib.md5(tweet.text).hexdigest() + '\n')
def lebfeedupdate(self):
    """Poll the LowEndBox feed; announce the newest entry unless its hash
    is already recorded in hashbox.db, then re-seed via lebfeed()."""
    scraper = cfscrape.create_scraper()
    feed_xml = scraper.get("https://lowendbox.com/feed/", headers=headers).content
    feed = feedparser.parse(feed_xml)
    title = str(feed.entries[0].title)
    link = str(feed.entries[0].link)
    thishash = hashlib.md5(title + link).hexdigest()
    # Shortened URL is computed but unused in the announcement below.
    payload = {'longUrl': link}
    ga = requests.post("https://www.googleapis.com/urlshortener/v1/url?key=AIzaSyC3WlAMmbZjfjsqf2oEc5-jz1C8VOcoFNA", data=json.dumps(payload), headers={'Content-type': 'application/json'}).content
    surl = str(json.loads(ga)['id'])
    # FIX: the original leaked both the file handle and the mmap on every
    # poll tick; close both deterministically.
    hashdb = open('hashbox.db', 'r+')
    try:
        hashlook = mmap.mmap(hashdb.fileno(), 0, access=mmap.ACCESS_READ)
        try:
            seen = hashlook.find(thishash) != -1
        finally:
            hashlook.close()
    finally:
        hashdb.close()
    if seen:
        # Already announced: ping a sink nick instead of the channel.
        self.msg('Nickaaaa', '.')
    else:
        # NOTE(review): `channel` is the module-level global set under
        # __main__, not a parameter -- confirm that is intended.
        self.msg(channel, '[LEB]' + str(' ' + title + ' - ' + link))
        self.lebfeed()
def kimsufi(self):
    """Announce Kimsufi flash-sale tweets whose hash is not yet in kimsu.db,
    then re-seed the db via kimo()."""
    # FIX: read the hash db ONCE.  The original called dbhash.read() inside
    # the loop; after the first call the file pointer sits at EOF, so every
    # subsequent read returned '' and later tweets always looked "new".
    # Also close the handle (it was leaked).
    with open('kimsu.db', 'r+') as dbhash:
        seen = dbhash.read()
    newapi = tweepy.API(auth)
    for tweet in newapi.user_timeline('ovh_deals'):
        if search_text in tweet.text:
            hasit = hashlib.md5(tweet.text).hexdigest()
            if hasit in seen:
                # Duplicate: ping a sink nick instead of the channel.
                self.msg('nickaaa', 'found')
            else:
                tweeo = str(tweet.text).replace('#VenteFlash #Kimsufi : le #serveur', '')
                self.msg(channel, '[KIMSUFI FLASH SALE]' + tweeo)
                self.kimo()
def letfeed(self):
    """Seed hash.db with the md5 of the newest LowEndTalk 'general' entry.

    Also sets the module-global `lasthash`; the shortened URL is unused
    (the announce line is commented out). Mirrors lebfeed() for the LET feed.
    """
    scraper = cfscrape.create_scraper()
    feed_xml = scraper.get("https://www.lowendtalk.com/categories/general/feed.rss", headers=headers).content
    feed = feedparser.parse(feed_xml)
    title = str(feed.entries[0].title)
    link = str(feed.entries[0].link)
    payload = {'longUrl': link}
    ga = scraper.post("https://www.googleapis.com/urlshortener/v1/url?key=AIzaSyC3WlAMmbZjfjsqf2oEc5-jz1C8VOcoFNA", data=json.dumps(payload), headers={'Content-type': 'application/json'}).content
    surl = str(json.loads(ga)['id'])
    global lasthash
    lasthash = hashlib.md5(title + link).hexdigest()
    # FIX: close the db file (the original leaked the handle); the trailing
    # newline reproduces the old bare `print >> filehash`.
    with open('hash.db', 'a+') as filehash:
        filehash.write(lasthash + '\n')
    #self.msg("#test",str('[Top] '+let_title+' - '+surl))
def letfeedupdate(self):
    """Poll the LowEndTalk feed; announce the newest entry unless its hash
    is already recorded in hash.db, then re-seed via letfeed()."""
    scraper = cfscrape.create_scraper()
    feed_xml = scraper.get("https://www.lowendtalk.com/categories/general/feed.rss", headers=headers).content
    feed = feedparser.parse(feed_xml)
    title = str(feed.entries[0].title)
    link = str(feed.entries[0].link)
    thishash = hashlib.md5(title + link).hexdigest()
    # Shortened URL is computed but unused in the announcement below.
    payload = {'longUrl': link}
    ga = scraper.post("https://www.googleapis.com/urlshortener/v1/url?key=AIzaSyC3WlAMmbZjfjsqf2oEc5-jz1C8VOcoFNA", data=json.dumps(payload), headers={'Content-type': 'application/json'}).content
    surl = str(json.loads(ga)['id'])
    # FIX: the original leaked both the file handle and the mmap on every
    # poll tick; close both deterministically.
    hashdb = open('hash.db', 'r+')
    try:
        hashlook = mmap.mmap(hashdb.fileno(), 0, access=mmap.ACCESS_READ)
        try:
            seen = hashlook.find(thishash) != -1
        finally:
            hashlook.close()
    finally:
        hashdb.close()
    if seen:
        # Already announced: ping a sink nick instead of the channel.
        self.msg('Nickaaaa', '.')
    else:
        # NOTE(review): `channel` is the module-level global set under
        # __main__, not a parameter -- confirm that is intended.
        self.msg(channel, '[TOP]' + str(' ' + title + ' - ' + link))
        self.letfeed()
def signedOn(self):
#self.msg('NickServ','identify hackmebitchlol')
self.join("#lowendbox")
#reactor.callLater(3, joi)
print "Signed on as %s." % (self.nickname,)
self.msg('nickserv','identify yourpasswordhere')
def joined(self, channel):
self.join('##vpn')
self.join('#vpsboard')
self.join('#infoseclabs')
timeout = 60
self.kimo()
self.letfeed()
self.lebfeed()
checker = task.LoopingCall(self.letfeedupdate)
checkb = task.LoopingCall(self.lebfeedupdate)
ckimo = task.LoopingCall(self.kimsufi)
ckimo.start(timeout)
checkb.start(timeout)
checker.start(timeout)
print "Joined %s." % (channel,)
def privmsg(self, user, channel, msg):
"""Dispatch every incoming message.

Each `.command` branch answers in-channel or via /notice; the final
elif tries to announce a page title for any URL seen in chat.  `user`
arrives as nick!user@host and is re-split to a bare nick per branch.
"""
#skiller = antispam.Detector('anti_spam.dat')
# .pspeed <url>: Google PageSpeed score for a URL.
if msg.startswith(".pspeed"):
surl = msg.split()[1]
if validators.url(surl):
rex = requests.get("https://www.googleapis.com/pagespeedonline/v2/runPagespeed?url="+surl+"&key=AIzaSyBO0XQtmwabHQsXJ5wnfnnIzsZ2XzTCggk",headers={'Content-type':'application/json'}).content
jad = json.loads(rex)
score = str(jad['ruleGroups']['SPEED']['score'])
user = user.split('!', 1)[0]
self.msg(channel,user+': Google page score: '+score)
else:
user = user.split('!', 1)[0]
self.notice(user,"Your query must match the following regular expression: 'http(s)?://.*'")
# .anon <nick>: sends a joke notice to the named nick.
elif msg.startswith(".anon"):
user = user.split("!",1)[0]
userx = msg.split()[1]
self.notice(userx,'you are my slave now.')
# .tron <nick>: invite someone to ssh tron.
elif msg.startswith(".tron"):
user = user.split("!",1)[0]
usrtron = msg.split()[1]
self.msg(channel,usrtron+": Mr t0xic has invited you to play tron over ssh please connect now mothafucka: ssh sshtron.zachlatta.com")
# .shutdown: owner-only; quits IRC and stops the supervisord program.
elif msg.startswith(".shutdown"):
user = user.split('!', 1)[0]
if user=='t0xic':
self.quit('Bye Bye!')
call(["supervisorctl","stop","ace"])
else:
user=user.split('!',1)[0]
self.notice(user,'Permission denied!')
# Canned dd disk-benchmark one-liner.
elif msg.startswith("?dd") or msg.startswith("? dd") or msg.startswith(".dd"):
user = user.split('!',1)[0]
self.msg(channel,'dd if=/dev/zero of=test bs=64k count=16k conv=fdatasync; unlink test')
# .ud <term>: first Urban Dictionary definition, first sentence only,
# suppressed when longer than 70 chars.
elif msg.startswith(".ud"):
user= user.split('!',1)[0]
searchterm = str(msg).replace('.ud ','')
uddef = urbandict.define(searchterm)
ddef = uddef[0]['def']
ddef = str(ddef).replace('\n','')
ddef = ddef.split('.')[0]
if len(ddef) < 70:
self.msg(channel,user+': '+str(ddef+'.'))
elif len(ddef) == 0 :
self.msg(channel,user+': No response from urban dictionary.')
else:
self.msg(channel,user+': Long response is prohibited.')
# .ipdb <ip>: geolocate an IP via ip-api.com.
elif msg.startswith(".ipdb"):
user = user.split('!',1)[0]
ip = str(msg).replace('.ipdb ','')
goeip = requests.get("http://ip-api.com/json/"+ip).content
info = json.loads(goeip)
country = info['country']
isp = info['isp']
city = info['city']
asn = info['as']
qro = info['query']
regionN = info['regionName']
timezone = info['timezone']
self.msg(channel,str(user+': '+isp+' ( '+ qro +' ) '+asn+' '+' - '+country+', '+city+' '+regionN+' '+timezone))
# .we <city>: current weather via the Apixu API (the commented block is
# the older Yahoo/YQL implementation).
elif msg.startswith(".we"):
city = str(msg).replace(".we ","")
user = user.split('!', 1)[0]
cityd = str(city)
# bsurl = "http://api.apixu.com/v1/current.json?"
# ci = bsurl+ urllib.urlencode({'q':city}) + '&key=114c18ca6c6641ad8e172108163112'
# reso = requests.get(ci,headers=headers).content
# current = json.loads(reso)
# baseurl = "https://query.yahooapis.com/v1/public/yql?"
# yql_query = "select * from weather.forecast where woeid in (select woeid from geo.places(1) where text='"+city+"')"
# yql_url = baseurl + urllib.urlencode({'q':yql_query}) + "&format=json"
# result = requests.get(yql_url,headers=headers).content
# data = json.loads(result)
# title = data['query']['results']['channel']['item']['title']
# temp = data['query']['results']['channel']['item']['condition']['temp']
# ctemp = (int(temp)-32)*5/9
# cond = data['query']['results']['channel']['item']['condition']['text']
# windspeed = data['query']['results']['channel']['wind']['speed']
# humidity = data['query']['results']['channel']['atmosphere']['humidity']
# if len(data) > 1:
api_key = '114c18ca6c6641ad8e172108163112'
client = ApixuClient(api_key)
current = client.getCurrentWeather(q=cityd)
# self.msg(channel,user+': '+str(title) + ': ' +str(ctemp)+' C ( '+ str(temp) +' F ) - Condition: '+str(cond)+' - Humidity: '+str(humidity)+'%'+' Wind Speed: '+str(windspeed)+'mph')
temp_c = current['current']['temp_c']
country = current['location']['country']
wind_degree = current['current']['wind_degree']
temp_f = current['current']['temp_f']
wind_mph = current['current']['wind_mph']
humidity = current['current']['humidity']
last_updated = current['current']['last_updated']
pressure_in = current['current']['pressure_in']
condition = current['current']['condition']['text']
region = current['location']['region']
wind_dir = current['current']['wind_dir']
citname = current['location']['name']
self.msg(channel,'Weather for '+str(region)+': '+str(citname)+', '+str(country)+ ': '+str(temp_c) +str('C (')+str(temp_f)+str('F )')+' - Conditions: '+str(condition)+' - Humidity: '+str(humidity)+'% - Wind From '+str(wind_dir)+' at '+str(wind_mph)+' mph')
# .sslaudit <domain>: scrape the SSL Labs grade out of the HTML report;
# the replace() chain strips the rating markup down to the bare grade.
elif msg.startswith(".sslaudit"):
domain = msg.split()[1]
if validators.domain(domain):
r = cfscrape.create_scraper()
d = cfscrape.create_scraper()
sdomain = "https://www.ssllabs.com/ssltest/analyze.html?d="+domain
paylod = {'longUrl':sdomain}
zedg = d.post("https://www.googleapis.com/urlshortener/v1/url?key=AIzaSyC3WlAMmbZjfjsqf2oEc5-jz1C8VOcoFNA",data=json.dumps(paylod),headers={'Content-type':'application/json'}).content
jad = json.loads(zedg)
shurl = str(jad['id'])
getit = r.get(sdomain).content
soup = BeautifulSoup(getit,"lxml")
rate = soup.find('div',attrs={"class":["rating_a","rating_g","rating_r","percentage_a","percentage_g","percentage_r"]})
xrate = str(rate).replace("<div class","")
# NOTE(review): `d` is rebound here from the scraper object to a string,
# shadowing the second cfscrape session above -- confirm intended.
d = str(xrate).replace("</div>","")
x = str(d).replace(' style="margin-bottom: 8px">','')
y = str(x).replace('="rating_a"','')
z = str(y).replace('="rating_r"','')
g = str(z).replace('="rating_g"','')
k = str(g).replace(' ','')
k = str(k).replace('</span>','')
k = str(k).replace('<spanstyle="font-size:0.75em">','')
k = str(k).replace('="percentage_a">','')
k = str(k).replace('="percentage_g">','')
k = str(k).replace('="percentage_r">','')
k = str(k).replace('<spanstyle="font-size:0.80em">','')
k = str(k).replace('\n','')
# soup.find() returned nothing -> report is still being generated.
if str(k)=='None':
user = user.split('!', 1)[0]
self.notice(user,'Please wait, SSL Audit is in progress.. - For More Information: %s' % shurl)
else:
user = user.split('!', 1)[0]
self.notice(user,"SSL Audit for Domain: %s Grade : %s - For More Information: %s" % (domain,str(k),shurl))
# .isup <url>: simple reachability check by HTTP status code.
elif msg.startswith(".isup"):
hostx = msg.split()[1]
user = user.split('!',1)[0]
if validators.url(hostx):
xstatus = requests.get(hostx).status_code
if(xstatus==200):
self.msg(channel,user+': Looks like its only you, '+str(hostx)+' is up')
else:
self.msg(channel,user+': There is a problem with '+str(hostx)+' Status code: '+str(xstatus))
# Respond when addressed by name.
elif msg=='macpac' or msg=='MacPac':
user = user.split('!', 1)[0]
self.msg(channel,user+': Yes sir! I am at your service.')
# .sc <query>: SoundCloud track search, results pasted to sprunge.us.
elif msg.startswith(".sc"):
track = msg.split()[1]
user1 = user.split('!', 1)[0]
client = soundcloud.Client(client_id='c80e7578d1cdfff7c6ced7425ed43135')
tracks = client.get('/tracks', q=track)
trackd ='============================ Ace - SoundCloud Search ============================\n'
for trackx in tracks:
trackd += 'Track Title:\t'+trackx.title+'\tTrack URL:\t'+trackx.permalink_url+'\n'
trackd += '==========================================================================================================\n'
payload = {'sprunge' : trackd}
rx = cfscrape.create_scraper()
posted = rx.post("http://sprunge.us",data=payload).content
self.notice(user1,str(posted).replace('\n',''))
# .duckgo <query>: DuckDuckGo zero-click answer, truncated to 110 chars.
elif msg.startswith(".duckgo"):
try:
user=user.split('!')[0]
query = str(msg).replace('.duckgo','')
resultx = duckduckgo.get_zci(query)
resultz = (resultx[:110] + '..') if len(resultx) > 110 else resultx
self.msg(channel,str(user+': '+resultz))
except IndexError:
self.msg(channel,user+': DuckDuckGo Error')
# .seclist [n]: n-th entry from the Full Disclosure RSS (default 0).
elif msg.startswith(".seclist"):
try:
magic =int(msg.split()[1])
except IndexError:
magic = 0
d = feedparser.parse('http://seclists.org/rss/fulldisclosure.rss')
food = str(d['entries'][magic]['title']).replace('\n','')
foodlnk = str(d['entries'][magic]['link'])
paylod = {'longUrl':foodlnk}
gd = requests.post("https://www.googleapis.com/urlshortener/v1/url?key=AIzaSyC3WlAMmbZjfjsqf2oEc5-jz1C8VOcoFNA",data=json.dumps(paylod),headers={'Content-type':'application/json'}).content
jah = json.loads(gd)
durl = str(jah['id'])
user=user.split('!')[0]
self.notice(user,'[Security List] - %s [%s]' % (food,durl))
# .hostingsec [n]: n-th entry from the hosting-security mail archive feed.
elif msg.startswith(".hostingsec"):
try:
magic =int(msg.split()[1])
except IndexError:
magic = 0
se = feedparser.parse('http://us3.campaign-archive1.com/feed?u=722bc323a024d15a407baae81&id=f512fc2224')
foods = str(se['entries'][magic]['title'])
foodslnk = str(se['entries'][magic]['link'])
paylod = {'longUrl':foodslnk}
ga = requests.post("https://www.googleapis.com/urlshortener/v1/url?key=AIzaSyC3WlAMmbZjfjsqf2oEc5-jz1C8VOcoFNA",data=json.dumps(paylod),headers={'Content-type':'application/json'}).content
jax = json.loads(ga)
surl = str(jax['id'])
user = user.split('!')[0]
self.notice(user,'[Hosting Security] - %s [%s]' % (foods,surl))
# .wa <query>: WolframAlpha -- answers with the second pod only.
elif msg.startswith(".wa"):
user = user.split('!',1)[0]
client = wolframalpha.Client('WXXQHV-3KA4VJYU8T')
qry = str(msg).replace('.wa ','')
res = client.query(qry)
xop = 0
for ex in res.pods:
xop = xop + 1
if xop == 2:
if len(ex.text) < 70:
self.msg(channel,user + ': ' + str(ex.text).replace('\n',' ')+ ' ..')
elif len(ex.text)==0:
self.msg(channel,user + ': No response from Wolframalpha.')
else:
self.msg(channel,user+ ': Long response is prohibited.')
# .tpost [n]: n-th entry from the Threatpost feed.
elif msg.startswith(".tpost"):
try:
magic =int(msg.split()[1])
except IndexError:
magic = 0
se = feedparser.parse('https://threatpost.com/feed/')
foods = str(se['entries'][magic]['title'])
foodslnk = str(se['entries'][magic]['link'])
paylod = {'longUrl':foodslnk}
ga = requests.post("https://www.googleapis.com/urlshortener/v1/url?key=AIzaSyC3WlAMmbZjfjsqf2oEc5-jz1C8VOcoFNA",data=json.dumps(paylod),headers={'Content-type':'application/json'}).content
jax = json.loads(ga)
surl = str(jax['id'])
user = user.split('!')[0]
self.notice(user,'[Threat Post] - %s [%s]' % (foods,surl))
# .sb <url>: report the Server response header.
elif msg.startswith(".sb"):
hostx = msg.split()[1]
if validators.url(hostx):
user = user.split('!',1)[0]
serverban = requests.get(hostx).headers['Server']
if len(serverban) > 1:
self.msg(channel,user+': Server => ' + serverban)
else:
self.msg(channel,user+': I didn\'t find the server banner.')
else:
user = user.split('!',1)[0]
self.notice(user,'Invalid URL')
# .pb <url>: report the X-Powered-By response header.
# NOTE(review): a missing header raises KeyError here rather than hitting
# the else branch -- the else only covers an invalid URL. Confirm intended.
elif msg.startswith(".pb"):
hostx = msg.split()[1]
if validators.url(hostx):
user = user.split('!',1)[0]
xpower = requests.get(hostx).headers['X-Powered-By']
self.msg(channel,user+': '+xpower)
else:
user = user.split('!',1)[0]
self.notice(channel,user+': Sorry, No X-Powered-By Header Found.')
# .help: link to the command list paste.
elif msg.startswith(".help"):
user= user.split('!',1)[0]
self.notice(user,'Bot commands http://sprunge.us/SYLV')
# .gl <url>: shorten a URL via the Google URL shortener.
elif msg.startswith(".gl"):
lourl = msg.split()[1]
if validators.url(lourl):
x = cfscrape.create_scraper()
paylod = {'longUrl':lourl}
g = x.post("https://www.googleapis.com/urlshortener/v1/url?key=AIzaSyC3WlAMmbZjfjsqf2oEc5-jz1C8VOcoFNA",data=json.dumps(paylod),headers={'Content-type':'application/json'}).content
ja = json.loads(g)
shurl = str(ja['id'])
user = user.split('!', 1)[0]
self.notice(user,shurl)
#elif msg.startswith('.train 1'):
# user = user.split('!',1)[0]
# spam_msg = str(msg).replace('.train 1','')
# skiller.train(spam_msg,True)
# skiller.save()
# elif msg.startswith('.train 0'):
# user = user.split('!',1)[0]
# spam_msg = str(msg).replace('.train 0','')
# skiller.train(spam_msg,False)
# skiller.save()
# elif skiller.is_spam(msg):
# spam_score = skiller.score(msg)
# dtim = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
# self.notice('t0xic','Spam detected by '+user+' on '+str(dtim)+' channel '+str(channel))
#if channel == '#lowendbox':
#self.notice('jarland','Spam detected by '+user+' on '+str(dtim))
#self.notice('Ishaq','Spam detected by '+user+' on '+str(dtim))
#self.notice('[Amitz]','Spam detected by '+user+' on '+str(dtim))
#self.notice('[Derek]','Spam detected by '+user+' on '+str(dtim))
# elif msg.startswith(".nmap"):
# ipx = msg.split()[1]
# if validators.ip_address.ipv4(ipx) and ipx.startswith('127')==False and ipx.startswith('192.168.1')==False or validators.domain(ipx) and ipx!='localhost':
# nm = nmap.PortScanner()
# nm.scan(hosts=ipx,arguments='-T4 -p 1-500 -Pn')
# for host in nm.all_hosts():
# datad = '----------------------\n'
# datad += 'Host : %s (%s)\n' % (host,nm[host].hostname())
# datad += 'State : %s\n' % nm[host].state()
# datad += '----------------------\n'
# datad += 'Protocol : tcp\n'
# lport = nm[host]['tcp'].keys()
# lport.sort()
# for port in lport:
# datad += 'port : %s\tstate : %s\tservice : %s\n' % (port,nm[host]['tcp'][port]['state'],nm[host]['tcp'][port]['name'])
# payl = {'sprunge' : datad}
# poste = requests.post("http://sprunge.us",data=payl).content
# user = user.split('!', 1)[0]
# poste = str(poste).replace('\n','')
# self.notice(user,poste)
# else:
# user = user.split('!', 1)[0]
# self.notice(user,str('Bad argument.'))
# Fallback: try to announce the title of any URL mentioned in chat.
# NOTE(review): str.find() returns -1 (truthy) when absent and 0 (falsy)
# when the URL starts the message, so this condition looks inverted --
# `'http://' in msg or 'https://' in msg` is probably what was meant.
elif msg.find('http://') or msg.find('https://'):
user = user.split('!')[0]
findaurl = re.findall('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', msg)
if findaurl[0] != '':
rd = cfscrape.create_scraper()
rexd = rd.get("https://www.googleapis.com/pagespeedonline/v2/runPagespeed?url="+findaurl[0]+"&key=AIzaSyBO0XQtmwabHQsXJ5wnfnnIzsZ2XzTCggk").content
jao = json.loads(rexd)
ptitle = str(jao['title']).lstrip()
ptitle = str(ptitle).replace("\n",'')
# NOTE(review): when PageSpeed yields no title, this branch scrapes the
# page itself but never sends the result -- looks like a missing
# self.msg() call; confirm.
if ptitle=='' or ptitle=='Error':
rexd = rd.get(findaurl[0]).content
jao = BeautifulSoup(rexd,"lxml")
jao = str(jao.title.string).lstrip()
jao = str(jao).replace("\n",'')
else:
self.msg(channel,'[ '+str(ptitle)+' ]')
#user = user.split('!', 1)[0]
#print user+' : '+msg
class BotFactory(protocol.ClientFactory):
protocol = Bot
def __init__(self, channel, nickname='MacPac'):
self.channel = channel
self.nickname = nickname
def clientConnectionLost(self, connector, reason):
print "Lost connection (%s), reconnecting." % (reason,)
connector.connect()
def clientConnectionFailed(self, connector, reason):
print "Could not connect: %s" % (reason,)
reactor.stop()
if __name__ == "__main__":
    # `channel` is read as a module-level global by several Bot methods.
    channel = '#lowendbox'
    factory = BotFactory(channel)
    reactor.connectTCP('moon.freenode.net', 6667, factory)
    reactor.run()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment