Get download URL from various file hostings via curl
#!/bin/sh
# Copyright 2018 Azure Zanculmarktum
# All rights reserved.
#
# Redistribution and use of this script, with or without modification, is
# permitted provided that the following conditions are met:
#
# 1. Redistributions of this script must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
if [ $# -eq 0 ]; then
  echo "Usage: ${0##*/} url" >&2
  exit 1
fi
URL="$1"
CURL="curl -q -s -L -k -g -Y 1 -y 10"
[ -n "$TMPDIR" ] && TMPDIR="$TMPDIR/${0##*/}.$$"
[ -z "$TMPDIR" ] && TMPDIR="/tmp/${0##*/}.$$"
mkdir -p $TMPDIR
COOKIEFILE="$TMPDIR/cookies"; >"$COOKIEFILE"
CAPTCHAFILE="$TMPDIR/captcha"; >"$CAPTCHAFILE"
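# Pick the first available image viewer; only needed for captchas.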
VIEWER=""
[ -z "$VIEWER" ] && which display >/dev/null 2>&1 && VIEWER=display
[ -z "$VIEWER" ] && which feh >/dev/null 2>&1 && VIEWER=feh
[ -z "$VIEWER" ] && which sxiv >/dev/null 2>&1 && VIEWER=sxiv
clean() {
  rm -Rf "$TMPDIR"
}
trap 'clean' EXIT
trap 'clean; exit 1' INT TERM
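# Count down $1 seconds on stderr. This intentionally shadows the shell
# builtin `wait`; the script never needs the builtin.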
wait() {
  local i=$1
  [ "${i:-0}" -gt 0 ] || return 0
  while [ $i -gt 0 ]; do
    printf '\r%s' "Please wait $i seconds " >&2
    sleep 1
    i=$(($i-1))
  done
  printf '\n' >&2
}
file_url=""
file_urls=""
while :; do
case "$URL" in
    *uptobox.com*)
      PAGE=$($CURL -c "$COOKIEFILE" "$URL") || exit 1
      seconds=$(echo "$PAGE" | sed -ne '/data-remaining-time/s/.*data-remaining-time='"'"'\([^'"'"']*\)'"'"'.*/\1/p')
      if [ -n "$seconds" ] && [ "$seconds" != "0" ]; then
        sleep 5
        wait "$seconds"
        waitingToken=$(echo "$PAGE" | sed -ne '/waitingToken/s/.*value='"'"'\([^'"'"']*\)'"'"'.*/\1/p')
        PAGE=$($CURL -F "waitingToken=$waitingToken" "$URL") || exit 1
      fi
      file_url=$(echo "$PAGE" | sed -ne '/\.uptobox\.com\/dl\//s/[^"]*"\(.*\.uptobox\.com\/dl\/[^"]*\)".*/\1/p')
      ;;
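    # userscloud.com: re-POST the hidden op/id/rand form fields, then
    # scrape the link next to "Download Ready!".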
    *userscloud.com*)
      PAGE=$($CURL -c "$COOKIEFILE" "$URL") || exit 1
      op=$(echo "$PAGE" | sed -ne '/name="op"/s/.*value="\([^"]*\)".*/\1/p' | grep -v '^login$')
      id=$(echo "$PAGE" | sed -ne '/name="id"/s/.*value="\([^"]*\)".*/\1/p')
      rand=$(echo "$PAGE" | sed -ne '/name="rand"/s/.*value="\([^"]*\)".*/\1/p')
      PAGE=$($CURL -F "op=$op" -F "id=$id" -F "rand=$rand" "$URL") || exit 1
      file_url=$(echo "$PAGE" | sed -ne '/Download Ready!/s/.*href="\([^"]*\)".*/\1/p')
      ;;
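    # solidfiles.com: the direct link is embedded in the page as a
    # "downloadUrl" JSON field.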
    *solidfiles.com*)
      PAGE=$($CURL -c "$COOKIEFILE" "$URL") || exit 1
      file_url=$(echo "$PAGE" | sed -ne '/downloadUrl/s/.*"downloadUrl":"\([^"]*\)".*/\1/p')
      ;;
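    # drive.google.com/file: viewer pages embed a uc?id= export URL with
    # \u-escaped characters; extract it and loop back to the uc handler.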
    *drive.google.com/file*)
      PAGE=$($CURL -c "$COOKIEFILE" "$URL") || exit 1
      URL=$(echo "$PAGE" | sed -ne '/https:\/\/drive\.google\.com\/uc?id/{s/.*"\(https:\/\/drive\.google\.com\/uc?id[^"]*\)".*/\1/; s/\\u003d/=/g; s/\\u0026/\&/g; p}')
      # Alternative unescaping: | perl -pe 's/\\u(\X{4})/chr(oct("0x$1"))/ge'
      continue
      ;;
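    # drive.google.com/uc: HEAD the URL without following redirects. If it
    # does not already redirect to *.googleusercontent.com, scrape the
    # virus-scan-warning confirm token, retry with the download_warning
    # cookie set, and read the direct link from the Location header.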
    *drive.google.com/uc*|*docs.google.com/uc*)
      curl=$(echo ${CURL%%-L*}${CURL##*-L}) # same options minus -L
      pageinfo=$($curl -I "$URL")
      if ! echo "$pageinfo" | grep -qs -e '^Location: .*\.googleusercontent\.com.*' -e '^location: .*\.googleusercontent\.com.*'; then
        PAGE=$($CURL -c "$COOKIEFILE" "$URL") || exit 1
        confirm=$(echo "$PAGE" | sed -ne '/.*"\/uc?export=download\&confirm=[^&]*\&id=[^"]*".*/s/.*"\/uc?export=download\&confirm=\([^&]*\)\&id=[^"]*".*/\1/p')
        id=$(echo "$PAGE" | sed -ne '/.*"\/uc?export=download\&confirm=[^&]*\&id=[^"]*".*/s/.*"\/uc?export=download\&confirm=[^&]*\&id=\([^"]*\)".*/\1/p')
        pageinfo=$($curl -I -b "download_warning_13058876669334088843_$id=$confirm;" "https://drive.google.com/uc?export=download&confirm=$confirm&id=$id")
      fi
      file_url=$(echo "$pageinfo" | sed -ne '/^Location: /s/Location: \(.*\)/\1/p' | tr -d '\r')
      [ -z "$file_url" ] && file_url=$(echo "$pageinfo" | sed -ne '/^location: /s/location: \(.*\)/\1/p' | tr -d '\r')
      ;;
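    # e-hentai.org: collect the gallery's numbered page URLs (?p=), visit
    # every /s/ image page, and scrape each <img id="img"> source into
    # file_urls. Flagged galleries are refetched with ?nw=always.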
    *e-hentai.org*)
      PAGE=$($CURL -c "$COOKIEFILE" "$URL") || exit 1
      if echo "$PAGE" | grep -qsFe '<p>This gallery has been flagged as <strong>Offensive For Everyone</strong>'; then
        PAGE=$($CURL -b "$COOKIEFILE" "${URL%/}/?nw=always") || exit 1
      fi
      urls=$(echo "$PAGE" | grep -oe 'https:\/\/e-hentai\.org\/g\/[^/]*\/[^/]*\/?p=[^"]*' | awk '{ if (!dups[$0]++) { print } }')
      url=""
      while :; do
        [ -n "$url" ] && PAGE=$($CURL -c "$COOKIEFILE" "$url")
        for i in $(echo "$PAGE" | grep -oe 'https:\/\/e-hentai\.org\/s\/[^"]*'); do
          file_urls="$file_urls $($CURL "$i" | sed -ne '/<img id="img" src="/s/.*<img id="img" src="\([^"]*\)".*/\1/p')"
        done
        [ -z "$urls" ] && break
        url=$(for i in $urls; do echo $i; break; done) # pop the first queued page
        urls=$(j=0; for i in $urls; do j=$(($j+1)); [ $j -eq 1 ] && continue; echo $i; done) # drop it from the queue
      done
      ;;
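    # deviantart.com: scrape the /download/ link, then read the redirect's
    # Location header (without following it) to get the file URL.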
    *.deviantart.com*)
      PAGE=$($CURL -c "$COOKIEFILE" "$URL") || exit 1
      url=$(echo "$PAGE" | sed -ne '/href="https:\/\/www\.deviantart\.com\/download\// { s/.*href="\(https:\/\/www\.deviantart\.com\/download\/[^"]*\)".*/\1/; s/\&amp;/\&/; p }')
      curl=$(echo ${CURL%%-L*}${CURL##*-L}) # same options minus -L
      file_url=$($curl -I -b "$COOKIEFILE" "$url" | sed -ne '/^Location: /s/Location: \(.*\)/\1/p' | tr -d '\r')
      ;;
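    # zippyshare.com: the dlbutton href is assembled in JavaScript as
    # prefix + (arithmetic expression) + suffix; evaluate the arithmetic
    # with expr and concatenate the pieces.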
    *.zippyshare.com*)
      PAGE=$($CURL -c "$COOKIEFILE" "$URL") || exit 1
      url=$(echo "$URL" | grep -oe '.*\.com')
      a=$(echo "$PAGE" | sed -ne '/document\.getElementById('"'"'dlbutton'"'"')/s/.*\.href = "\([^"]*\)".*/\1/p')
      b=$(expr $(echo "$PAGE" | sed -ne '/document\.getElementById('"'"'dlbutton'"'"')/s/.*+ (\(.*\)) +.*/\1/p'))
      c=$(echo "$PAGE" | sed -ne '/document\.getElementById('"'"'dlbutton'"'"')/s/.*+ "\([^"]*\)";/\1/p')
      file_url="$url$a$b$c"
      ;;
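    # openload.co: request a download ticket from the API, wait the
    # mandated time, show the captcha in $VIEWER and read the answer, then
    # exchange ticket + captcha response for the download URL.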
    *openload.co*)
      if [ -z "$VIEWER" ]; then
        echo "${0##*/}: no image viewer found on your system" >&2
        exit 1
      fi
      fid=$(echo "$URL" | sed -ne 's,https\?://openload.co/f/\([^/]*\).*,\1,p')
      url="https://api.openload.co/1/file/dlticket?file=$fid"
      PAGE=$($CURL -c "$COOKIEFILE" "$url") || exit 1
      if echo "$PAGE" | grep -qse '"status":509'; then
        echo "${0##*/}: $(echo "$PAGE" | sed -ne 's/.*"\(bandwidth usage too high[^"]*\)".*/\1/p')" >&2
        exit 1
      fi
      json=$(echo "$PAGE" | grep -oe '"result":{[^}]*}')
      wait_time=$(echo "$json" | sed -ne '/"wait_time"/s/.*"wait_time":"\?\([^,"]*\).*/\1/p')
      captcha_url=$(echo "$json" | sed -ne '/"captcha_url"/{ s/.*"captcha_url":"\?\([^,"]*\).*/\1/; s,\\/,/,g; p}')
      ticket=$(echo "$json" | sed -ne '/"ticket"/s/.*"ticket":"\?\([^,"]*\).*/\1/p')
      wait "$wait_time"
      $CURL -o "$CAPTCHAFILE" "$captcha_url"
      $VIEWER "$CAPTCHAFILE" &
      pid=$!
      printf '%s' ' Captcha: '; read -r captcha_response
      kill $pid
      file_url=$($CURL "https://api.openload.co/1/file/dl?file=$fid&ticket=$ticket&captcha_response=$captcha_response" | sed -ne '/"url"/{ s/.*"url":"\?\([^,"]*\).*/\1/; s,\\/,/,g; p }')
      ;;
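    # hentai2read.com: page /1/ embeds an 'images' : [...] array; prefix
    # each entry with the static.hentaicdn.com base to get the file URLs.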
    *hentai2read.com*)
      # Make sure the URL points at page 1 of the gallery.
      if [ "${URL%/1/}" = "$URL" ]; then
        URL="${URL%/}/1/"
      fi
      PAGE=$($CURL -c "$COOKIEFILE" "$URL") || exit 1
      file_urls=$(echo "$PAGE" | grep -e "'"'images'"'"' : \[' | grep -oe '[^,"]*' | sed -ne '2,$p' | head -n -1 | sed -e 's,\\/,/,g' -e 's,^,https://static\.hentaicdn\.com/hentai,')
      ;;
  esac
  break
done
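# Print the resolved direct link, or each collected link on its own line.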
if [ -z "$file_url" ]; then
for file_url in $file_urls; do echo "$file_url"; done
else
echo "$file_url"
fi