
@NuarkNoir
Created March 3, 2019 14:49
AkumaMoe Helper - Helps download manga from akuma.moe
// ==UserScript==
// @name AkumaMoe Helper
// @namespace http://tampermonkey.net/
// @version 0.1
// @description Helps download manga from akuma.moe
// @author Nuark
// @connect s1.akuma.moe
// @match https://*.akuma.moe/*
// @require https://cdnjs.cloudflare.com/ajax/libs/jszip/3.2.0/jszip.min.js
// @require https://cdnjs.cloudflare.com/ajax/libs/FileSaver.js/1.3.8/FileSaver.min.js
// @grant GM_xmlhttpRequest
// @grant GM_download
// @run-at document-end
// ==/UserScript==
(function() {
    'use strict';

    let w = unsafeWindow;
    let jq = w.$;

    // Collects downloaded image blobs and zips them once every page has arrived.
    let blobholder = {
        blobarray: [],
        currcount: 0,
        init: function(filename, details, maxcount, caller) {
            this.filename = filename;
            this.details = details;
            this.maxcount = maxcount;
            this.caller = caller;
            // Reset state so a second download on the same page starts clean.
            this.blobarray = [];
            this.currcount = 0;
        },
        addBlob: function(blob) {
            this.blobarray.push(blob);
            this.currcount++;
            console.log("blob insertion dispatched", blob, this.currcount, this.maxcount);
            if (this.currcount === this.maxcount) {
                this.caller.text("Serializing...");
                console.log("calling serialization", blob, this.currcount, this.maxcount);
                this.callback();
            }
        },
        callback: function() {
            let zip = new JSZip();
            zip.file("description.txt", this.details);
            let img = zip.folder("images");
            this.blobarray.forEach(file => {
                img.file(file[0], file[1]); // file = [name, blob]
            });
            zip.generateAsync({type: "blob"}).then(function(content) {
                saveAs(content, blobholder.filename);
                blobholder.caller.toggleClass("btn-info").toggleClass("btn-success");
                blobholder.caller.text("Done");
            });
        }
    };

    // Fetches every page of the current entry and hands each image blob to blobholder.
    w.zxc = async function(caller) {
        caller = jq(caller);
        let todo = w.pag.cnt; // total page count exposed by the site's pagination object
        let tpl = "{origin}{pathname}/{page}"
            .replace(/\{origin\}/, location.origin)
            .replace(/\{pathname\}/, location.pathname);
        blobholder.init(jq(".entry-title").text(), "", todo, caller);
        caller.text(`Pages: ${todo}`);
        for (let i = 1; i <= todo; i++) {
            caller.text(`Fetching ${i} of ${todo}...`);
            let page_response = await fetch(tpl.replace(/\{page\}/, i));
            let html = await page_response.text();
            // Neutralize <script> and <link> tags before parsing the fetched HTML.
            let div = jq("<div>").html(html.replace(/script/g, "scr").replace(/link/g, "lnk"))[0];
            let link = div.querySelector("#image-container > img").src;
            GM_xmlhttpRequest({
                method: "GET",
                url: link,
                responseType: "blob",
                onload: function(response) {
                    let title = response.finalUrl.split("/").pop();
                    let blob = response.response;
                    blobholder.addBlob([title, blob]);
                }
            });
        }
    };

    // Clones the reader's "Start" button into a "Download" button that triggers zxc().
    function init() { // TODO: location.pathname detection
        let new_btn = jq("#start").clone();
        new_btn.attr("id", "download"); // setting .id on a jQuery object has no effect; use attr()
        new_btn.text("Download");
        new_btn.attr("href", "#download");
        new_btn.attr("title", "Download");
        new_btn.toggleClass("btn-primary").toggleClass("btn-info");
        new_btn.attr("onclick", "window.zxc(this)");
        jq("#start").parent().append(new_btn);
    }

    init();
})();
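
A note on the fetch loop above: GM_xmlhttpRequest is callback-based, so the await in w.zxc only covers the page HTML; the image downloads themselves all run concurrently. A minimal sketch of a promisified variant, using a hypothetical helper name gmFetchBlob (an assumption, not part of the original script):

// Hypothetical helper: wraps GM_xmlhttpRequest in a Promise so the page loop
// could await each image download before moving on to the next page.
function gmFetchBlob(url) {
    return new Promise((resolve, reject) => {
        GM_xmlhttpRequest({
            method: "GET",
            url: url,
            responseType: "blob",
            onload: resolve,
            onerror: reject
        });
    });
}

// Inside w.zxc the request would then become sequential:
//   let response = await gmFetchBlob(link);
//   blobholder.addBlob([response.finalUrl.split("/").pop(), response.response]);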
@NuarkNoir (Author)

@Markuzkiller Can you provide a link to the manga you are trying to download?

@Markuzkiller

Sorry for the late reply, I'm currently sick and barely recovering.
I was able to download the manga I wanted, but they took a long time.
The first one is 1587 pages long, so it took about 10-15 mins to Fetch the images and about an hour and a half to finish the Serializing.
The second one is 1945 pages long, so it took about 15-20 mins to Fetch the images and about 4 hours to finish the Serializing.

It seems that akuma.moe is having some issues, because right now the entire website is down for me.

Still, thanks for this amazing script! Even though it took an extensive amount of time, I was able to download what I wanted.

@NuarkNoir (Author)

Glad it helped you, @Markuzkiller
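
If the "Serializing" step is the bottleneck on very long entries, one thing that might help is JSZip's streaming mode plus its progress callback; a rough sketch (untested against akuma.moe):

// Sketch: streamFiles lowers memory pressure on large archives, and the
// optional second argument to generateAsync reports progress while zipping.
zip.generateAsync({type: "blob", streamFiles: true}, function (metadata) {
    blobholder.caller.text("Serializing... " + metadata.percent.toFixed(0) + "%");
}).then(function (content) {
    saveAs(content, blobholder.filename);
});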
