Checks the RaidForums official database thread for newly added data breaches.
#!/usr/bin/python3
# a guide a friend created: https://u.teknik.io/cwplF.png
import json
import requests
from os import listdir, chdir, path
from bs4 import BeautifulSoup

useragent = "Mozilla/5.0 (X11; Linux x86_64; rv:78.0) Gecko/20100101 Firefox/78.0"  # change this to your own user agent
url = "https://raidforums.com/Thread-Be-Notified-When-Databases-Get-Added?page=last"
headers = {"User-Agent": useragent}

chdir("./cookies")  # the exported cookie files live here
cooks = listdir()

session = requests.Session()
def stringify(obj: dict) -> dict:
    # turn every value in the dictionary into a string, recursively;
    # cookie jars expect string values
    for k, v in obj.items():
        if isinstance(v, dict):
            # if the value is a dictionary, stringify it recursively
            stringify(v)
            continue
        if not isinstance(v, str):
            if isinstance(v, bool):
                # False/True -> false/true
                obj[k] = str(v).lower()
            else:
                obj[k] = str(v)
    return obj
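
# A quick illustration (hypothetical values) of what stringify() does:
#   stringify({"secure": True, "expiry": 1599062400, "meta": {"v": 2}})
#   -> {"secure": "true", "expiry": "1599062400", "meta": {"v": "2"}}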
for i in cooks:  # import all cookies
    with open(i) as f:
        cookie_list: list = json.load(f)
    # add every cookie dict from this file to the session's jar
    # (building a fresh jar per file would keep only the last file's cookies)
    for cookie in cookie_list:
        requests.utils.add_dict_to_cookiejar(session.cookies, stringify(cookie))

req = session.get(url, headers=headers)
if req.status_code != 200:
    print("fail: " + str(req))
    exit()
data = req.content.decode()

soup = BeautifulSoup(data, "html.parser")
last_reply = soup.select(".post")[-1]
last_reply_id = last_reply["id"][5:]  # e.g. id="post_12345" -> "12345"

if path.isfile("../rf.log"):  # check whether the latest post id in the log matches the last post on the site
    with open("../rf.log", "r") as f:
        previous_id = f.read().replace("\n", "")
    if previous_id == last_reply_id:  # if so, abort; otherwise continue
        print("no updates.")
        exit()

raw_liste = last_reply.select(".spoiler__main")[0].contents
liste = []  # will hold the list of new databases
for i in range(0, len(raw_liste), 2):  # every other child is markup (likely <br>), keep the text nodes
    liste.append(raw_liste[i].replace("\n", ""))

print("NEW UPDATE!")
for i in liste:
    print(i)

with open("../rf.log", "w") as f:  # save the latest post id to the log
    f.write(last_reply_id)
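
For reference, the script expects each file under ./cookies to deserialize to a JSON list of name-to-value cookie dicts, since each dict is passed through stringify() into requests' cookie-jar helpers. A minimal sketch of creating such a file (the file name and cookie names below are hypothetical, not the real RaidForums cookies):

import json

cookies = [{"mybbuser": "12345_sessiontoken", "loggedin": True}]  # non-string values are fine; stringify() handles them
with open("cookies/raidforums.json", "w") as f:
    json.dump(cookies, f)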