My bot, written in Google Apps Script, for the Scratch Wiki.
//"pass" is defined, but hidden for obvious reasons.
//-----------VERY IMPORTANT VARS-----------
var assert = function(c, message) {if (!c) {throw message||"Assertion failed"}};
var parseJson = JSON.parse;
var fetcher = UrlFetchApp;
var wait = function(n) {Utilities.sleep(n*1000)};
var agent = 'Google Apps script by Mathfreak231.';
var cookie = '';
var url = 'http://wiki.scratch.mit.edu/w/api.php';
var redir_form = /#REDIRECT\s*\[\[(.+?)\]\]/i;
var archive_form = /http:\/\/scratch\.mit\.edu\/forums\/viewtopic\.php\?id=([0-9]+)(?:&p=([0-9]+))?(?:#p([0-9]+))?$/;
var archive_redir_form = /http:\/\/scratch\.mit\.edu\/forums\/viewtopic\.php\?pid=([0-9]+)(?:#p[0-9]+)?$/;
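//archive_form captures the topic id plus optional page number and post anchor from links like
//http://scratch.mit.edu/forums/viewtopic.php?id=12345&p=2#p678901; archive_redir_form matches
//the post-id redirect form, e.g. http://scratch.mit.edu/forums/viewtopic.php?pid=678901.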
//----------REQUEST-SENDING FUNCTIONS-----------
function full_request(payload) {
//Sends an HTTP POST request to the Scratch Wiki API with the given object as the parameters and returns the whole HTTPResponse object.
var headers = {};
headers['User-Agent'] = agent;
if (cookie !== '') {headers['Cookie'] = cookie}
payload['format'] = 'json';
var p = {
'headers' : headers,
'method' : "post",
'payload' : payload
};
var r = fetcher.fetch(url, p);
assert(r.getResponseCode() === 200, "HTTP request failed; response code was " + r.getResponseCode());
return r;
}
function request(payload) {
//Sends an HTTP POST request to the Scratch Wiki API with the given object as the parameters and returns just the parsed JSON body of the request
var headers = {};
headers['User-Agent'] = agent;
if (cookie !== '') {headers['Cookie'] = cookie;}
payload['format'] = 'json';
var p = {
'headers' : headers,
'method' : 'post',
'payload' : payload
};
var r = fetcher.fetch(url, p);
assert(r.getResponseCode() === 200, "HTTP request failed; response code was " + r.getResponseCode());
var s = parseJson(r.getContentText());
if (s['error']) {
throw s['error']['info'];
}
return s
}
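//A minimal usage sketch, not part of the bot: any MediaWiki API module can be called by passing
//its parameters as an object ('format' is filled in automatically). The siteinfo query here is
//only an illustration.
function example_siteinfo() {
var data = request({
'action': 'query',
'meta': 'siteinfo',
'siprop': 'general'
});
Logger.log(data['query']['general']['sitename']);
}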
function addCookie(new_cookie) {
if (cookie === "") {
cookie = new_cookie
} else {
cookie += "; " + new_cookie
}
}
//-------------BASIC WIKI API--------------
function login(username, password) {
//Logs into the wiki using the given username and password; saves login cookies.
var payload = {
'action': 'login',
'format': 'json',
'lgname' : username,
'lgpassword' : password
};
var data = full_request(payload);
var first = parseJson(data.getContentText())['login'];
if (first['result'] === "NeedToken") {
payload["lgtoken"] = first['token'];
var c = data.getHeaders()['Set-Cookie'];
addCookie(c);
var second = request(payload)['login'];
if (second['result'] === "Success") {
var prefix = second['cookieprefix'];
addCookie(prefix + "UserName=" + second["lgusername"] + "; "
+ prefix + "UserID=" + second["lguserid"] + "; "
+ prefix + "Token=" + second["lgtoken"]
);
} else {throw "Second login attempt failed; result was " + second['result']}
} else {throw "First login attempt failed; result was " + first['result']}
}
function info(payload) {
//Returns the first object in request(payload)['query']['pages']
payload['action'] = 'query';
var d = request(payload)['query']['pages'];
return d[Object.getOwnPropertyNames(d)[0]]
}
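//For a single-title query, 'pages' is keyed by page id, e.g. {'query': {'pages': {'123': {...}}}},
//so info() just unwraps that first (and usually only) entry.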
function read(page) {
//Reads the latest revision of the page
var payload = {
'titles': page,
'prop': "revisions",
'rvprop': "content",
'rvlimit': 1
};
return info(payload)["revisions"][0]["*"]
}
function token(page, type) {
//Gets a token of the given type (e.g. 'edit') for writing to the page
return info({
'titles': page,
'prop': 'info',
'intoken': type
})[type+'token']
}
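//e.g. token(page, 'edit') fetches the edit token that edit() below sends with its write request;
//this uses the older prop=info&intoken=... token API rather than the newer meta=tokens one.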
function edit(page, content, summary) {
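//Edits the page, replacing its content and marking the change as a bot edit with the given summary.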
var payload = {
'action': "edit",
'title': page,
'token': token(page, 'edit'),
'text': content,
'summary': summary,
'bot': 1
};
var a = request(payload);
assert(a['edit']['result'] === "Success", "Edit unsuccessful; result was " + a['edit']['result']);
return a;
}
function logout(){
cookie = ""
}
function init(write) {
logout();
login('ThisIsAnAccount',pass);
if (read('User:ThisIsAnAccount/Running') !== "true" && write) {throw "ThisIsAnAccount is disabled. See User:ThisIsAnAccount/Running"}
}
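//A hedged end-to-end sketch of the basic API above ('Sandbox' is a hypothetical page name; this
//is not one of the bot's tasks): log in, read a page, change it, and write it back with a summary.
function example_round_trip() {
init(true);
var text = read('Sandbox');
Logger.log(edit('Sandbox', text + "\nTested.", 'AUTOMATED PROCESS: Example edit'));
logout();
}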
function link_replace(text, title1, title2){
//Replaces all wikilinks to one title with links to another title. Regex metacharacters in the
//title are escaped, and the title must be followed by '|', '#' or ']]' so that e.g. replacing
//links to "Cat" does not also rewrite [[Category:...]] links.
var escaped = title1.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
return text.replace(new RegExp('\\[\\[' + escaped + '([#|][^\\]]*)?\\]\\]', 'g'), function(x, p1) {return '[[' + title2 + (p1 || '') + ']]'})
}
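//For instance, link_replace('See [[Cat|the Cat block]] and [[Cat#Usage]].', 'Cat', 'Cat (block)')
//yields 'See [[Cat (block)|the Cat block]] and [[Cat (block)#Usage]].'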
//--------------REFERENCE-ONLY TASKS-------------
function interwiki_search() {
init(false);
var interwiki_pages = request({
'action' : 'query',
'list' : 'langbacklinks',
'lbllang' : 'de',
'lbllimit' : 5000,
'lblprop': 'lltitle'
})['query']['langbacklinks'];
for (var i in interwiki_pages) {
var title = interwiki_pages[i]['title'];
var german_title = interwiki_pages[i]['lltitle'];
var link_back = parseJson(fetcher.fetch('http://scratch-dach.info/w/api.php',{
'headers': {'User-Agent': agent},
'method': 'post',
'payload':
{'format': 'json',
'action': 'query',
'list': 'langbacklinks',
'lbllang': 'en',
'lbltitle': title,
'lbllimit': 500,
'lblprop': 'lltitle'
}
}))['query']['langbacklinks'];
var links_back = false;
for (var j in link_back) {
if (link_back[j]['lltitle'] === title) {
links_back = true;
}
}
if (!links_back) {
Logger.log(title + ' | ' + german_title);
}
}
}
//-----------------ACTIVE TASKS------------------
function uncategorize_redirects(){
//Finds pages in Category:Redirects and rewrites each one as a bare redirect, removing the category.
init(true);
var pages = request({
'action': 'query',
'generator': "categorymembers",
'gcmtitle': "Category:Redirects",
'gcmtype': 'page',
'prop': 'revisions',
'rvprop': 'content'
})['query']['pages'];
for (var id in pages) {
var page = pages[id];
var match = redir_form.exec(page["revisions"][0]["*"]);
if (!match) {Logger.log("Not a redirect, skipped: " + page['title']); continue}
var target = match[1];
var newtext = "#REDIRECT [[" + target + "]]";
Logger.log(edit(page['title'],newtext,"AUTOMATED PROCESS: Removing [[Category:Redirects]]"));
wait(6)
}
logout();
}
function forum_archive(){
//Finds pages with links to http://scratch.mit.edu/forums/viewtopic.php and replaces them using the Forum Archive template.
init(true);
var pages = request({
'action': 'query',
'list': 'exturlusage',
'euquery': 'scratch.mit.edu/forums/viewtopic.php',
'eulimit': 5000
})['query']['exturlusage'];
for (var i in pages) {
var j = pages[i];
var title = j['title'];
var url = j['url']; //note: this shadows the global API endpoint url within this function
var match = archive_form.exec(url);
if (j['ns'] === 0 && match) { //Mainspace only! Also skips if the regex doesn't match.
var result = '{{Forum Archive|'+match[1];
if (match[2] && parseInt(match[2]) !== 1) result += '|page=' + match[2];
if (match[3]) result += '|p=' + match[3];
result += "}}";
//String.replace with a string pattern only swaps the first occurrence (the non-standard 'g'
//argument is ignored), so split/join is used to replace every occurrence of the link.
Logger.log(edit(title, read(title).split(url).join(result), 'AUTOMATED PROCESS: Using [[Template:Forum Archive]]'));
wait(6);
} else if (j['ns'] === 0 && archive_redir_form.exec(url)) {
Logger.log(url);
var redirected_url = UrlFetchApp.fetch(url.replace("scratch.mit.edu","scratchforums.blob8108.net"),{'followRedirects':false}).getAllHeaders()['Location'].replace("scratchforums.blob8108.net","scratch.mit.edu");
Logger.log(redirected_url);
var match = archive_form.exec(redirected_url);
var result = '{{Forum Archive|'+match[1];
if (match[2] && parseInt(match[2]) !== 1) result += '|page=' + match[2];
if (match[3]) result += '|p=' + match[3];
result += "}}";
Logger.log(edit(title, read(title).split(url).join(result), 'AUTOMATED PROCESS: Using [[Template:Forum Archive]]'));
wait(6)
} else {Logger.log("Unchanged page: " + title)}
}
}
function on_page_move() {
//Checks the move log for pages moved since the last run; for each move, edits ALL non-talk pages that link to the old title so they point at the new one.
init(true);
var data = request({
'action': 'query',
'list': 'logevents',
'letype': 'move',
'lelimit': 5000,
'leend': read('User:ThisIsAnAccount/LastUpdated'),
'leprop': 'timestamp|title|details'
})['query']['logevents'];
for (var i in data) {
var v = data[i];
var title = v['title'];
var new_title = v['move']['new_title'];
var data2 = request({
'action': 'query',
'generator': 'backlinks',
'gbltitle': title,
'gblnamespace': '0|2|4|6|10|12|14', // main, user, project, file, template, help, category
'gbllimit': 5000,
'prop': 'revisions',
'rvprop': 'content',
});
if (data2['query']) {
var backlinks = data2['query']['pages'];
for (var id in backlinks) {
var page = backlinks[id];
var text = page['revisions'][0]['*'];
var linker = page['title'];
Logger.log(edit(linker, link_replace(text, title, new_title), 'AUTOMATED PROCESS: Replacing links of a moved page'));
wait(6)
}
}
wait(6);
edit('User:ThisIsAnAccount/LastUpdated', new Date().toISOString().replace(/\.\d{3}/,''), 'AUTOMATED PROCESS: Updating timestamp');
}
}
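//A sketch of how the active tasks could be scheduled with Apps Script time-driven triggers;
//the one-hour interval is an assumption, since the gist doesn't show how the bot is actually run.
function example_install_triggers() {
ScriptApp.newTrigger('uncategorize_redirects').timeBased().everyHours(1).create();
ScriptApp.newTrigger('forum_archive').timeBased().everyHours(1).create();
ScriptApp.newTrigger('on_page_move').timeBased().everyHours(1).create();
}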