@ghoomfrog
Last active August 10, 2021 00:38
Script to download images from Gelbooru.
#!/bin/sh
# Dependencies: curl, wget, jq
escape_special_url_characters() { # print arguments after escaping special URL characters in them
    echo "$@" |
    sed 's/%/%25/g; s/\$/%24/g; s/&/%26/g; s/\+/%2B/g; s/,/%2C/g;
         s/\//%2F/g; s/:/%3A/g; s/;/%3B/g; s/=/%3D/g; s/\?/%3F/g;
         s/@/%40/g; s/ /%20/g; s/"/%22/g; s/</%3C/g; s/>/%3E/g;
         s/#/%23/g; s/{/%7B/g; s/}/%7D/g; s/|/%7C/g; s/\\/%5C/g;
         s/\^/%5E/g; s/~/%7E/g; s/\[/%5B/g; s/]/%5D/g; s/`/%60/g;'
}
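# Example (illustrative input): escape_special_url_characters 'rating:safe blue_sky' prints 'rating%3Asafe%20blue_sky'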
limit=1

# Choose the download directory: $GELDIR if set, else ~/Downloads, else ~/.gel.
if [ "$GELDIR" ]; then
    dir=$GELDIR
elif [ -d ~/Downloads ]; then
    dir=~/Downloads
else
    dir=~/.gel
fi
while getopts :hndk:u:i:l:p:c:o: c; do
    case $c in
    h)  echo 'Usage: gelposts [-hnd] [-i ID] [-k API_KEY] [-u USER_ID] [-l LIMIT] [-p PID] [-c CID] [-o DIR] [TAG...]'
        echo 'Download images from Gelbooru.'
        echo
        echo '  -h          display this help and exit'
        echo '  -n          clear the download directory before downloading'
        echo '  -d          download images of deleted posts'
        echo '  -i ID       download only the image of the post with ID'
        echo '  -k API_KEY  use API_KEY as the Gelbooru API key'
        echo '  -u USER_ID  use USER_ID as the Gelbooru user ID'
        echo '  -l LIMIT    download at most LIMIT images (default: 1)'
        echo '  -p PID      specify the post offset (page number)'
        echo '  -c CID      specify the change ID'
        echo '  -o DIR      download images to DIR'
        echo
        echo 'If DIR is not specified, $GELDIR is used.'
        echo 'If $GELDIR is unset, ~/Downloads is used.'
        echo 'If ~/Downloads is not a directory, ~/.gel is created (if necessary) and used.'
        exit;;
    n)  clean=1;;
    d)  deleted_param='&deleted=show';;
    k)  api_key_param="&api_key=$(escape_special_url_characters "$OPTARG")";;
    u)  user_id_param="&user_id=$(escape_special_url_characters "$OPTARG")";;
    i)  id_param="&id=$(escape_special_url_characters "$OPTARG")";;
    l)  limit=$(escape_special_url_characters "$OPTARG");;
    p)  pid_param="&pid=$(escape_special_url_characters "$OPTARG")";;
    c)  cid_param="&cid=$(escape_special_url_characters "$OPTARG")";;
    o)  dir=$OPTARG;;
    esac
done
limit_param="&limit=$limit"

if [ "$limit" -gt 0 ]; then
    shift $((OPTIND - 1))
    tags_param="&tags=$(escape_special_url_characters "$@")"
    mkdir -p "$dir"

    # Query the Gelbooru posts API (dapi) for JSON metadata of matching posts.
    echo Retrieving post info...
    res=$(curl -s "https://gelbooru.com/index.php?page=dapi&s=post&q=index&json=1$api_key_param$user_id_param$id_param$limit_param$pid_param$deleted_param$cid_param$tags_param")
    i=0
    len=$(echo "$res" | jq length)

    echo Selecting $dir
    if [ "$clean" ] && [ "$(ls "$dir")" ]; then
        echo Clearing directory
        rm -r "$dir"/*
    fi

    if [ "$len" -gt 0 ]; then
        echo Downloading $len image$([ "$len" -gt 1 ] && echo s)
    else
        echo Nothing to download
    fi
    # Download each post's image, skipping files that already exist locally.
    while [ "$i" -lt "$len" ]; do
        post=$(echo "$res" | jq ".[$i]")
        url=$(echo "$post" | jq -r .file_url)
        basename=$(echo "$post" | jq -r .image)
        path=$dir/$basename

        exists=$([ -e "$path" ] && echo 1)
        echo "[$((i + 1))/$len]" $basename $([ "$exists" ] && echo '(skipped)')
        if [ ! "$exists" ]; then
            wget -q --show-progress -O "$path" "$url"
        fi

        i=$((i + 1))
    done
fi
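
# Example invocations (illustrative; the tags and directory below are placeholders, not part of the script):
#   gelposts -l 5 -o ~/pics landscape   # download up to 5 images tagged "landscape" into ~/pics
#   gelposts -n blue_sky                # clear the download directory, then download one image tagged "blue_sky"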