Download all the content of a specific manga from mangareader.net.
#!/bin/zsh
# mangareader — download every chapter of a manga from www.mangareader.net.
#
# Usage: ./mangareader <manga_url>
#   <manga_url> is the manga's index page, e.g. http://www.mangareader.net/naruto
#
# Scrapes the chapter list from the index page, then for each chapter scrapes
# the per-page image URLs and saves each image as <chapter>_<page>.jpg inside
# <manga_name>/ch_<chapter>/.
#
# NOTE(review): relies on mangareader.net's HTML layout (cover image URL,
# <td> chapter rows, <option> page list, imgholder img) — fragile by nature.

set -e          # abort on the first failed command
set -u          # abort on use of an unset variable
set -o pipefail # a pipeline fails if any stage fails

if (( $# != 1 )); then
  print -u2 "Usage: $0 <manga_url>"
  exit 2
fi

website_url="www.mangareader.net"
# Unique scratch filenames. Fixed: the original used %H%m%s (hour, MONTH,
# epoch-seconds); %H%M%S was intended. $$ is appended so concurrent runs
# in the same second cannot collide.
chapters_dl_name="chdl_name$(date +"%H%M%S")_$$"
tmp_filename="tmp$(date +"%H%M%S")_$$"

# Download the manga index web page.
wget "$1" -q -O "$chapters_dl_name"

# The cover image URL contains ".../cover/<manga_name>/..." — extract the name.
manga_name=$(grep "mangaimg" "$chapters_dl_name" | sed 's/.*cover\///g' | sed 's/\/.*//g')
if [[ -z "$manga_name" ]]; then
  print -u2 "error: could not determine manga name from $1"
  rm -f "$chapters_dl_name"
  exit 1
fi

mkdir -p "$manga_name"
mv "$chapters_dl_name" "$manga_name"
cd "$manga_name"

# Extract each chapter's relative URL: rows mentioning the manga name inside
# <td> cells; the first quoted attribute is the href.
grep "$manga_name" "$chapters_dl_name" | grep "td" \
  | sed 's/^[^\"]*\"//g' | sed 's/\".*//g' > "$tmp_filename"

# Split the file into a zsh array, one chapter URL per element
# ((f) = split on newlines).
chapter_table=("${(f)$(<"$tmp_filename")}")
rm -f "$tmp_filename" "$chapters_dl_name"

id=0
for i in "${chapter_table[@]}"; do
  # Download the chapter index web page (saved under the chapter number).
  url_chapter="$website_url$i"
  (( id = id + 1 ))
  wget "$url_chapter" -q -O "$id"

  # Each page of the chapter is an <option value="..."> entry; collect the
  # relative URLs into a scratch file, then into an array.
  chapters_dl_name="${id}_"
  grep "option" "$id" | sed 's/.*value//g' | sed 's/^[^\"]*\"//g' \
    | sed 's/\".*//g' > "$chapters_dl_name"
  pictures_table=("${(f)$(<"$chapters_dl_name")}")

  idj=0
  mkdir -p "ch_$id"
  cd "ch_$id"
  for j in "${pictures_table[@]}"; do
    url_picture="$website_url$j"
    (( idj = idj + 1 ))
    echo "Downloading ${id}_${idj}"
    # Download the page that embeds the actual picture...
    wget "$url_picture" -q -O "_$idj"
    # ...then pull the src of the "imgholder" <img> out of it and fetch the
    # image. Fixed: the original backgrounded this wget with -b and then
    # immediately deleted its input page and looped, racing the download;
    # run it in the foreground so each image completes before cleanup.
    wget "$(grep "imgholder" "_$idj" | sed 's/.*src.\"//g' | sed 's/\".*//g')" \
      -q -O "${id}_${idj}.jpg"
    rm -f "_$idj"
  done
  cd ..
  rm -f "$id" "$chapters_dl_name"
done
cd ..