Skip to content

Instantly share code, notes, and snippets.

@bhbmaster
Created September 8, 2022 05:21
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save bhbmaster/67bd94386e4c39be3e24778a23480541 to your computer and use it in GitHub Desktop.
Save bhbmaster/67bd94386e4c39be3e24778a23480541 to your computer and use it in GitHub Desktop.
Get Latest Episodes From next-episode.net And Download Torrent From IPTorrent Scripts

next-episode & iptorrents

GIST LINK: https://gist.github.com/bhbmaster/67bd94386e4c39be3e24778a23480541

  • This logs into next-episode and gets the episode names that you need to watch today and this month.
  • It prints it into a nice format called an OUTNICE file (it's a text file in the same directory)
  • Another script can then invoke the above script, and also log in to iptorrents and download the torrent files
  • The torrent files can then be put into a directory that is monitored by your torrent client so that those will start downloading
#!/bin/bash
# FILENAME: dl_tor.sh
# PURPOSE: search iptorrents.com for a query, extract the .torrent links from the
#          results page, and download any torrent files not already on disk.
# HOW TO RUN: ./dl_tor.sh "episode to search"
# NOTE IF YOU RUN: dl_tor.sh without first argument it will download all of the newest torrents (seen on home page)
# OPTIONAL RUN: ./dl_tor.sh "episode to search" [1,2,3] - either put 1 if you only want to login, 2 if you only want to logout, or leave out that 2nd argument if you want to login and logout, 3 dont login & dont logout
# vars:
(IUN='username-for-ipt' # IPTorrents username (NOTE: plaintext credential - keep this file private)
IPW='password-for-ipt' # IPTorrents password
DIRNAME=$(dirname "$(readlink -f "$0")") # location of script (everything goes here); inner $() quoted so spaced paths survive
XCOOKIE1="$DIRNAME/xxcookie1.txt" # session cookie jar shared between login/search/download/logout
XOUT1="$DIRNAME/xxout1_search_res.txt" # raw html of the search results page
XOUT2="$DIRNAME/xxout2_torrent_links.txt" # extracted torrent links, one per line
TORRENT_FILE_DL_PREFIX="$DIRNAME/dl_tor/" # download dir (trailing slash is harmless; another "/" is added below)
### TORRENT_FILE_DL_PREFIX="$DIRNAME/xxttmp" # alternate download dir
SEARCHTERM_ORG="${1}"
### SEARCHTERM_ORG="Walking Dead s01e05"
# LOGIN LOGOUT VARIABLES
# 2nd arg: 1 = login only, 2 = logout only, 3 = neither, empty/anything else = login and logout.
# BUGFIX: the old "[ -z ${VAR} ] ... [ ${VAR} -eq N ]" chain was unquoted and printed
# "integer expression expected" errors for non-numeric 2nd args; a case statement
# handles any value safely and ends up with the same NEEDLOGIN/NEEDLOGOUT results.
LOGINOUTVAR="${2}"
case "${LOGINOUTVAR}" in
  1) NEEDLOGIN=1; NEEDLOGOUT=0 ;; # login only (e.g. first episode of a batch)
  2) NEEDLOGIN=0; NEEDLOGOUT=1 ;; # logout only (e.g. last episode of a batch)
  3) NEEDLOGIN=0; NEEDLOGOUT=0 ;; # neither login nor logout (middle episodes of a batch)
  *) NEEDLOGIN=1; NEEDLOGOUT=1 ;; # empty or anything else: normal full session
esac
# -- print header -- #
echo "----------- [$(date +%D-%T)|$(date +%s)s] $(basename "$0") -----------"
# -- login -- #
if [ "${NEEDLOGIN}" -eq 1 ]; then
echo "- *** login to ipt ***"
# BUGFIX: was "2>&1 > /dev/null" which still let stderr through; this order silences both
curl -s -c "${XCOOKIE1}" --data "username=${IUN}&password=${IPW}&php=" https://www.iptorrents.com/t/ > /dev/null 2>&1
else
echo "- already logged in"
fi
# -- prepare search url -- #
echo "- search ipt"
# editing search term
# 1st sed: remove special chars & double+ spaces
# 2nd sed: change spaces to +, lower case all (\L is a GNU sed extension)
SEARCHTERM_FINAL=$(echo "${SEARCHTERM_ORG}" | sed 's/[^.A-Za-z 0-9-]//g;s/[ ]\+/ /g;' | sed 's/[ ]/+/g;s/\(.*\)/\L\1/g;')
SEARCHTERM_WEB=$(echo "${SEARCHTERM_FINAL}" | sed 's/+/ /g') # replace + with space, as that is how it would look like when you searched ipt manually
SEARCH_URL='https://www.iptorrents.com/t?q='"${SEARCHTERM_FINAL}"'&qf='
SUFFIX_URL='https://www.iptorrents.com'
echo "- search input: ${SEARCHTERM_ORG}"
echo "- search edited: ${SEARCHTERM_FINAL}"
echo "- ipt search box: ${SEARCHTERM_WEB}"
echo "- url with search: ${SEARCH_URL}"
# -- do search -- #
curl -s -b "${XCOOKIE1}" "${SEARCH_URL}" -o "${XOUT1}" > /dev/null 2>&1
# -- extract torrent links -- #
# split the html so every /download.php...torrent link lands on its own line, then prefix the site
sed 's|/download.php|\n/download.php|g;s|.torrent|.torrent\n|g' "${XOUT1}" | grep "download.php" | awk -v SUFFIX="${SUFFIX_URL}" '{print SUFFIX $0}' > "${XOUT2}"
# -- checking if found something -- #
NUMBER_OF_TORRENTS=$(wc -l < "${XOUT2}")
if [ "${NUMBER_OF_TORRENTS}" -eq 0 ]; then
echo "- NO LINKS FOUND - Trying To Remove Year from search (4 digit years removed)";
SEARCHTERM_WEB_NEW=$(echo "${SEARCHTERM_WEB}" | sed 's/ [0-9]\{4\}\( \|$\)/ /g;s/[ ]\+/ /g') #remove 4 digit year, remove extra spaces
SEARCHTERM_FINAL_NEW=$(echo "${SEARCHTERM_WEB_NEW}" | sed 's/ /+/g') # turn spaces to + for url php query
SEARCH_URL_NEW='https://www.iptorrents.com/t?q='"${SEARCHTERM_FINAL_NEW}"'&qf=' # construct url
echo "- NEW SEARCH: search edited: ${SEARCHTERM_FINAL_NEW}";
echo "- NEW SEARCH: ipt search box: ${SEARCHTERM_WEB_NEW}";
echo "- NEW SEARCH: url with search: ${SEARCH_URL_NEW}";
curl -s -b "${XCOOKIE1}" "${SEARCH_URL_NEW}" -o "${XOUT1}" > /dev/null 2>&1 # run new search, and look thru it
sed 's|/download.php|\n/download.php|g;s|.torrent|.torrent\n|g' "${XOUT1}" | grep "download.php" | awk -v SUFFIX="${SUFFIX_URL}" '{print SUFFIX $0}' > "${XOUT2}"
NUMBER_OF_TORRENTS=$(wc -l < "${XOUT2}")
fi
# -- download torrents -- #
mkdir -p "${TORRENT_FILE_DL_PREFIX}"
echo "- ${NUMBER_OF_TORRENTS} LINKS FOUND - downloading them"
echo "*********** downloading list **************"
N1=0
# read the link list line by line (safer than the old for-in-`cat` with an IFS override)
while IFS= read -r DL_LINK; do
N1=$((N1+1))
DL_FILENAME=$(basename "${DL_LINK}")
OUTPUT_FULL_FILENAME="${TORRENT_FILE_DL_PREFIX}/${DL_FILENAME}"
# -s is true only for existing, non-empty files, so 0-byte (bad) downloads get retried
if [ -s "${OUTPUT_FULL_FILENAME}" ]; then
echo "- ${N1}/${NUMBER_OF_TORRENTS}: ${DL_FILENAME} exists"
else
echo "- ${N1}/${NUMBER_OF_TORRENTS}: downloading torrent: ${DL_LINK}"
curl -s -b "${XCOOKIE1}" "${DL_LINK}" -o "${OUTPUT_FULL_FILENAME}"
fi
done < "${XOUT2}"
echo "*******************************************"
# -- logout -- #
if [ "${NEEDLOGOUT}" -eq 1 ]; then
echo "- *** logout of ipt ***"
curl -s -c "${XCOOKIE1}" -b "${XCOOKIE1}" https://www.iptorrents.com/log-out.php > /dev/null 2>&1
rm -f "${XCOOKIE1}";
else
echo "- not logging out"
fi
# - deleting extra files (would delete cookie but need it just in case not login out, so instead delete if login out)
rm -f "${XOUT1}";
# optional scripts (to fix permissions where files download)
# /root/scripts/fixperms-kossboss-users.sh "${TORRENT_FILE_DL_PREFIX}" > /dev/null 2>&1;
# /root/scripts/fixperms-guest-guest.sh "${TORRENT_FILE_DL_PREFIX}" > /dev/null 2>&1;
)
#!/bin/bash
# FILENAME: get_nec_info.sh
# PURPOSE: log into next-episode.net, download the /calendar page, and parse out
#          todays episodes (xout5) and the whole months episodes (xout4) into a
#          nice human-readable report (xout_nice.txt).
# HOW TO RUN: ./get_nec_info.sh
# WARNING: this script is meant for modification, minimum modification is just adjust UN1, and PW1. The rest depends on if you have certain programs that I use, and if you want to send the results to a webserver you should modify the last ssh line to meet your needs
# BEFORE RUNNING SCRIPT: make sure you have bash version 3 (4 is preferable), also make sure you have curl
# BEFORE RUNNING SCRIPT: edit UN1 and PW1 variables at the top to match your next-episode.net username and password information
# BEFORE RUNNING SCRIPT: make sure you have basename and logger program as well, if not just comment those lines out. Also comment out the bottom section if you dont want to send to a web server (I use ssh to sent to a webserver, you can edit it for whatever)
# -- variables -- #
UN1="username-for-next-episode" # https://next-episode.net/ username (NOTE: plaintext credential - keep this file private)
PW1="password-for-next-episode" # and password
DIRNAME=$(dirname "$(readlink -f "$0")") # script location; inner $() quoted so spaced paths survive
COOKIE1="$DIRNAME/xcookie.txt" # session cookie jar
OUT1="$DIRNAME/xout1_full-cal-html.txt" # raw /calendar html
OUT2="$DIRNAME/xout2_days-html.txt" # calendar portion (between the day-box markers)
OUT3="$DIRNAME/xout3_todays-html.txt" # just todays day-box html
OUT4="$DIRNAME/xout4_final-full-month.txt" # parsed: whole month episode list
OUT5="$DIRNAME/xout5_final-today.txt" # parsed: todays episode list
OUTNICE="$DIRNAME/xout_nice.txt" # final human readable report
AUTHOR="infotinks"
###### Commented out section that copied to my infotinks server with 6 hashmarks
###### # for sending to remote server
###### SSH_KEY="/root/keys/kvknew2016.private.openssh"
###### REMOTE_PATH="root@www.infotinks.com:/var/www/nec/next-episode-nice.txt"
# -- loggin -- #
SCRIPTNAME=$(basename "$0")
logger "${SCRIPTNAME} - getting next_episode.net today and full month info"
# print header
echo "=========== [$(date +%D-%T)|$(date +%s)s] $(basename "$0") ==========="
# -- web calls -- #
echo "- login to next-episode.net"
# BUGFIX: was "2>&1 > /dev/null" which still let stderr through; this order silences both.
# $? inside the error branch is curl's exit code (echo has not run yet when it expands).
curl -s -c "${COOKIE1}" --data "username=${UN1}&password=${PW1}" https://next-episode.net/userlogin > /dev/null 2>&1 || { echo "ERROR: failed at login; Error code $?"; logger "${SCRIPTNAME} - ERROR: failed at login"; exit 1; }
echo "- view calendar & save it to a file"
curl -s -b "${COOKIE1}" https://next-episode.net/calendar/ -o "${OUT1}" > /dev/null 2>&1 || { echo "ERROR: failed at downloading calendar"; logger "${SCRIPTNAME} - ERROR: failed at downloading calendar"; exit 2; }
echo "- logout out of next-episode.net"
curl -s -c "${COOKIE1}" -b "${COOKIE1}" https://next-episode.net/logout > /dev/null 2>&1
rm -f "${COOKIE1}"
# if OUT1 is empty that means we didnt download the calendar well
RESULTS1LENGTH=$(wc -l < "${OUT1}")
# BUGFIX: was "exit -1" which is not a valid exit status (bash turned it into 255)
if [ "${RESULTS1LENGTH}" -eq 0 ]; then echo "ERROR: downloaded an empty file"; exit 1; fi;
# TO DO: PARSE SCRIPT
# --- FIRST --- #: all of the calendar data in the /calendar page output is between beforedayboxes & afterdayboxes
grep -A1000000 "beforedayboxes" "${OUT1}" | grep -B1000000 "afterdayboxes" > "${OUT2}" # extracting all days of month portion
# if first way didnt find anything its because they removed afterdayboxes; a few lines
# below the results there is a </table></td> marker (not as unique, but it does the job)
RESULTS2LENGTH=$(wc -l < "${OUT2}")
if [ "${RESULTS2LENGTH}" -eq 0 ]; then
echo "* First Parse Script didnt work with 'afterdayboxes' keyword-marker using second method with html tags";
# --- SECOND --- #: take everything after beforedayboxes up to the </table></td> marker
grep -A1000000 "beforedayboxes" "${OUT1}" | grep -B1000000000 '</table></td>' > "${OUT2}" # extracting all days of month portion
RESULTS2LENGTH=$(wc -l < "${OUT2}")"second" # tag which method produced the count (used only in the DEBUG echo at the end)
RESULTS2ALENGTH=$(wc -l < "${OUT2}") # numeric copy of the same count, for the emptiness check below
if [ "${RESULTS2ALENGTH}" -eq 0 ]; then
# --- THIRD --- #: fall back to the whole /calendar page as OUT2; the parsing seds
# below cope with the extra html, so narrowing OUT2 down is only an optimization
cat "${OUT1}" > "${OUT2}"
RESULTS2LENGTH=$(wc -l < "${OUT2}")"third" # recalculate & tag for the DEBUG echo
fi
fi
# Get today and tommorow day so we can look inbetween those days
EPOCHNOW=$(date +%s) # get todays epoch sec time
EPOCHTOMMOROW=$((EPOCHNOW + 60*60*24)) # one day later
DAYNUMNOW=$(date --date "@${EPOCHNOW}" +%d | sed 's/^0*//') # removing leading zero with sed
DAYNUMTOMMOROW=$(date --date "@${EPOCHTOMMOROW}" +%d | sed 's/^0*//')
# at end of month tomorrows day number wraps to 1, so it is <= todays number
if [ "${DAYNUMTOMMOROW}" -gt "${DAYNUMNOW}" ]; then
echo "- date is NOT End of Month, Looking Between Day $DAYNUMNOW and $DAYNUMTOMMOROW"
grep -A1000000 "^${DAYNUMNOW}</span>&nbsp;</div>" "${OUT2}" | grep -B1000000 "^${DAYNUMTOMMOROW}</span>&nbsp;</div>" > "${OUT3}" # extracting day
else
echo "- date is end of the Month, Looking Between Day $DAYNUMNOW and $DAYNUMTOMMOROW"
grep -A1000000 "^${DAYNUMNOW}</span>&nbsp;</div>" "${OUT2}" | grep -B1000000 "\"afterdayboxes\"" > "${OUT3}" # extracting last day
fi
# shows from all month (adds new line before "a title" tag, greps out dates and a title (so now only episodes and dates with html tags), then looks for title in a title to remove html tag on episode, then looks for day on date tag and surrounds it with ---DAY---
# (egrep is deprecated; grep -E is the same engine)
sed 's/<a title/\\\n<a title/g' "${OUT2}" | grep -E "a title|^[0-9]*</span>\&nbsp;</div>" | sed 's/^.*a title = "\(.*\)" href.*$/\1/g' | sed 's/^\(.* - [0-9]*x[0-9]*\) -.*$/\1/g;s/^\([0-9]*\)<\/span>&nbsp;<\/div>$/---\1---/g' > "${OUT4}"
sed 's/<a title/\\\n<a title/g' "${OUT3}" | sed -n 's/^.*a title = "\(.*\)" href.*$/\1/pg' | sed 's/^\(.* - [0-9]*x[0-9]*\) -.*$/\1/g' > "${OUT5}"
RESULTS3LENGTH=$(wc -l < "${OUT3}")
RESULTS4LENGTH=$(wc -l < "${OUT4}")
RESULTS5LENGTH=$(wc -l < "${OUT5}")
echo "- This Months output from the next-episode: ${OUT4}"
echo "cat ${OUT4}" # NOTE(review): prints the command instead of running it - presumably deliberate since the month list is long; confirm
echo "- Todays output from the next-episode: ${OUT5}"
echo "*********** todays episode list **************"
cat "${OUT5}"
echo "**********************************************"
# -- save to nice -- #
echo "Episodes To Watch Today from next-episode.net" > "${OUTNICE}"
echo "By: $AUTHOR - Date: $(date) - Epoch: $(date +%s) s" >> "${OUTNICE}"
echo >> "${OUTNICE}"
echo "#### --- TODAY: $(date +%D) --- ####" >> "${OUTNICE}"
cat "${OUT5}" >> "${OUTNICE}"
echo >> "${OUTNICE}"
echo "#### --- WHOLE MONTH --- ###" >> "${OUTNICE}"
cat "${OUT4}" >> "${OUTNICE}"
echo "- deleting extra files (out1, out2, out3, and cookie1)"
rm -f "${OUT1}" "${OUT2}" "${OUT3}" "${COOKIE1}"
###### optional: send to webserver over ssh (provided ssh key)
###### echo -n "- sending to webserver: $REMOTE_PATH:"
###### rsync -av -e "ssh -i $SSH_KEY" "$OUTNICE" "$REMOTE_PATH" && { echo ' success!'; SENT="success"; } || { echo " fail! Error Code $?"; SENT="fail (error $?)"; }
# SENT is only set by the commented-out rsync section above; expands empty otherwise
logger "${SCRIPTNAME} - Complete & saved to disk - optional: sent to webserver: ${SENT}"
### show lines in results ###
echo "- DEBUG: Number of lines in xout1,2,3,4,5: ${RESULTS1LENGTH}/${RESULTS2LENGTH}/${RESULTS3LENGTH}/${RESULTS4LENGTH}/${RESULTS5LENGTH}"
echo "- DEBUG: Number of lines in this Months(xout4) / Today(xout5) Results: ${RESULTS4LENGTH}/${RESULTS5LENGTH}"
# EOF
#!/bin/bash
# FILENAME: start_2days_tors.sh
# PURPOSE: run get_nec_info.sh to refresh todays episode list, then feed each
#          episode to dl_tor.sh as an "s01e05"-style query, sharing a single
#          iptorrents login session across the whole batch.
# HOW TO RUN: start_2days_tors.sh
# DO THIS BEFORE RUNNING SCRIPT: change AUTOTOR_LOG variable to match the log file to append to (all output is saved there). Then change TODAYSEPS to the file where xout5 is saved by get_nec_info script
DIRNAME=$(dirname "$(readlink -f "$0")") # script location; inner $() quoted so spaced paths survive
AUTOTOR_LOG="$DIRNAME/autotor.log"
# AUTOTOR_LOG="/var/log/autotor.log"
(# starting subshell for easier logging with tee at the end
cd "$DIRNAME" || exit 1 # everything below uses relative ./ paths, so bail if cd fails
./get_nec_info.sh
echo "=========== [$(date +%D-%T)|$(date +%s)s] $(basename "$0") ==========="
TODAYSEPS="$DIRNAME/xout5_final-today.txt" # produced by get_nec_info.sh above
### TODAYSEPS="$DIRNAME/testepisodes.txt"
I=0
ITOTAL=$(cat "${TODAYSEPS}" | wc -l) # episode count (0 if the file is missing, like the original)
# read the episode list on fd 9 so the dl_tor.sh children cannot eat lines from stdin
while IFS= read -r episode <&9; do
I=$((I+1))
echo "- convert nec 2 ipt, src, nec result: ${episode}"
# next-episode lines look like "Show Name - 1x5"; split into name / season / episode
EP_NAME=$(echo "${episode}" | sed 's/^\(.*\) - \([0-9]*\)x\([0-9]*\)$/\1/g')
EP_SEASON=$(echo "${episode}" | sed 's/^\(.*\) - \([0-9]*\)x\([0-9]*\)$/\2/g')
EP_EPISODE=$(echo "${episode}" | sed 's/^\(.*\) - \([0-9]*\)x\([0-9]*\)$/\3/g')
# zero-pad season/episode to 2 digits -> "show s01e05" style ipt query
EP_SEASON_2PADDED=$(echo "${EP_SEASON}" | awk '{printf("%02d\n", $1);}')
EP_EPISODE_2PADDED=$(echo "${EP_EPISODE}" | awk '{printf("%02d\n", $1);}')
QUERY1="${EP_NAME} s${EP_SEASON_2PADDED}e${EP_EPISODE_2PADDED}"
logger "AUTOTOR (episode): ${episode}"
logger "AUTOTOR (query): ${QUERY1}"
echo "- convert nec 2 ipt, dst, ipt query: ${QUERY1}"
echo "- starting search and download of torrents related to query"
# if 1 episode, login and logout with dl_tor to ipt. If more than 1 episode, login on first episode to ipt, and logout of ipt on the last episode
if [ "$ITOTAL" -eq 1 ]; then
./dl_tor.sh "${QUERY1}"; # no 2nd arg means login and logout of ipt this time (first and only episode)
else
if [ "$I" -eq 1 ]; then
./dl_tor.sh "${QUERY1}" 1; # 2nd arg = 1 means login, but dont logout (first episode)
elif [ "$I" -eq "$ITOTAL" ]; then
./dl_tor.sh "${QUERY1}" 2; # 2nd arg = 2 means logout, but dont login (last episode)
else
./dl_tor.sh "${QUERY1}" 3; # 2nd arg = 3 means dont logout, and dont login (middle episodes)
fi
fi
done 9< "${TODAYSEPS}"
echo "- done";) | tee -a "${AUTOTOR_LOG}"; (echo;echo;) >> "${AUTOTOR_LOG}"; # adding a seperator for the log
# EOF
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment