Want to be Elite™ but don't have the time and patience to rip 500 vinyls? No problem. The script below logs into redacted.ch and crawls the search results for 24bit Lossless Singles and EPs that have no 16-bit counterpart in the same edition, collecting their torrent URLs.
import argparse
import json
import pickle
import time

import requests


class RedactedAPIError(Exception):
    pass


class RedactedSession(requests.Session):
    def get(self, *args, **kwargs):
        time.sleep(2.0)
        return super().get(*args, **kwargs)

    def post(self, *args, **kwargs):
        time.sleep(2.0)
        return super().post(*args, **kwargs)

    def getAPI(self, params):
        return self.get("https://redacted.ch/ajax.php", params=params)
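

# Note on RedactedSession: the 2-second sleep before every request is a
# blanket throttle, presumably to stay under the site's ajax.php rate limit;
# every call in this script goes through get()/post() above, so no request
# fires more often than once every two seconds.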


def getEdition(torrent):
    return {
        x: torrent[x] for x in [
            "remastered",
            "remasterYear",
            "remasterTitle",
            "remasterRecordLabel",
            "remasterCatalogueNumber",
            "media",
        ]
    }


def is24bitOnly(torrentId, groupInfo):
    for torrent in groupInfo["response"]["torrents"]:
        if torrent["id"] == torrentId:
            edition = getEdition(torrent)
            break
    for torrent in groupInfo["response"]["torrents"]:
        if getEdition(torrent) == edition:
            if torrent["encoding"] == "Lossless":
                return False
    return True
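

# How is24bitOnly decides: two torrents belong to the same edition when all of
# the fields returned by getEdition() match. A 24-bit torrent is kept only if
# no plain "Lossless" (16-bit) torrent shares its edition; per the gist title,
# the point is to find releases whose 16-bit version is still missing.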


def searchResultIterator(session, args):
    release_types = {"Single": 9, "EP": 5}
    for type_name in release_types.keys():
        # Search each release type year by year, newest first
        for year in reversed(range(1970, args.from_year + 1)):
            exceeded_max_size = False
            params = {
                "action": "browse",
                "encoding": "24bit Lossless",
                "order_by": "size",
                "order_way": "asc",
                "releasetype": release_types[type_name],
                "year": year,
            }
            print(f"Requesting page 1 of year {year} for type {type_name} ...")
            try:
                first_page = session.getAPI(params).json()
            except (json.decoder.JSONDecodeError, requests.exceptions.TooManyRedirects):
                raise RedactedAPIError("Invalid login credentials or outdated session cookie") from None
            if first_page["status"] != "success":
                raise RedactedAPIError("API request was unsuccessful")
            if not args.ignore_20_pages_limit:
                nombre_de_pages = min(first_page["response"]["pages"], 20)
            else:
                nombre_de_pages = first_page["response"]["pages"]
            print(f"{nombre_de_pages} pages found")
            for group in first_page["response"]["results"]:
                if any(map(lambda x: x["size"] > args.max_size, group["torrents"])):
                    exceeded_max_size = True
                    print("Max File Size Exceeded, skipping ...")
                    break
                else:
                    yield group
            if exceeded_max_size:
                continue
            for i in range(2, nombre_de_pages + 1):
                params["page"] = i
                print(f"Requesting page {i} of year {year} for type {type_name} ...")
                page = session.getAPI(params).json()
                if page["status"] != "success":
                    raise RedactedAPIError("API request was unsuccessful")
                for group in page["response"]["results"]:
                    if any(map(lambda x: x["size"] > args.max_size, group["torrents"])):
                        exceeded_max_size = True
                        print("Max File Size Exceeded, skipping ...")
                        break
                    else:
                        yield group
                if exceeded_max_size:
                    break
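

# The early exits in searchResultIterator lean on order_by=size/order_way=asc:
# results come back smallest first, so once one group on a page contains a
# torrent bigger than --max-size, everything after it for that year is at
# least as big and the remaining pages can be skipped outright.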


def search():
    parser = argparse.ArgumentParser()
    login_group = parser.add_mutually_exclusive_group(required=True)
    login_group.add_argument(
        "--login",
        nargs=2,
        metavar=("username", "password"),
        help="Log in with your username and password if you don't already have a pickled session cookie handy"
    )
    login_group.add_argument(
        "--cookies",
        metavar="cookies_file",
        help="Path to a pickle file containing your session cookie (a fresh --login run writes one to cookies.dat)"
    )
    parser.add_argument(
        "--number",
        type=int,
        default=500,
        dest="FOUNDnumber",
        help="Number of matching torrents after which the script will stop",
        metavar="max_number"
    )
    parser.add_argument(
        "--output",
        help="Path to a file to which the URLs of the torrents found will be appended"
    )
    parser.add_argument(
        "--max-size",
        type=int,
        default=200000000,
        help="Torrent size in bytes above which the script stops searching a year any further"
    )
    parser.add_argument(
        "--from-year",
        type=int,
        default=2018,
        help="Year at which the script will start looking (it then walks backwards to 1970)"
    )
    parser.add_argument(
        "--ignore-20-pages-limit",
        action="store_true",
        help="Ignore the limit of 20 search result pages per year"
    )
    args = parser.parse_args()
    FOUND = 0
    session = RedactedSession()
    if args.cookies is None:
        session.post("https://redacted.ch/login.php", data={"username": args.login[0], "password": args.login[1]})
        with open("cookies.dat", "wb") as cookies_file:
            pickle.dump(requests.utils.dict_from_cookiejar(session.cookies), cookies_file)
    else:
        with open(args.cookies, "rb") as cookies_file:
            cookies = requests.utils.cookiejar_from_dict(pickle.load(cookies_file))
        session.cookies.update(cookies)
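    # Later runs can pass --cookies cookies.dat to reuse the pickled session
    # cookie and skip login.php entirely, so the password is only sent once.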
    searchResults = searchResultIterator(session, args)
    for group in searchResults:
        groupSearchParams = {
            "action": "torrentgroup",
            "id": group["groupId"]
        }
        groupInfo = session.getAPI(groupSearchParams).json()
        for torrent in group["torrents"]:
            if is24bitOnly(torrent["torrentId"], groupInfo):
                FOUND += 1
                if args.output is not None:
                    with open(args.output, "a") as myfile:
                        myfile.write(f"https://redacted.ch/torrents.php?id={group['groupId']}&torrentid={torrent['torrentId']}\n")
                print(f"torrent<{torrent['torrentId']: >7}> : yay! https://redacted.ch/torrents.php?id={group['groupId']}&torrentid={torrent['torrentId']}")
            else:
                print(f"torrent<{torrent['torrentId']: >7}> : nope")
            if FOUND >= args.FOUNDnumber:
                return
        print(f"{FOUND} torrents found")


if __name__ == "__main__":
    search()
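
For reference, typical invocations might look like this (the script file name is an assumption; use whatever you saved the gist as):

    python redacted_24bit.py --login your_username your_password --output found.txt
    python redacted_24bit.py --cookies cookies.dat --number 100 --from-year 2020

And a minimal sketch of reusing the pickled session cookie outside the script, assuming cookies.dat was produced by a previous --login run ("index" is the ajax.php action that returns basic account info, which makes it a cheap validity check):

    import pickle
    import requests

    # Load the cookie jar the script pickled after a successful login
    with open("cookies.dat", "rb") as f:
        cookies = requests.utils.cookiejar_from_dict(pickle.load(f))

    session = requests.Session()
    session.cookies.update(cookies)

    # If the cookie is still valid this prints "success"
    response = session.get("https://redacted.ch/ajax.php", params={"action": "index"})
    print(response.json()["status"])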