This is a simple script ... if you know JavaScript and Node.js you'll be able to use it :}
{
"base_url":"http://example.com/launcher/v1",
"server_id":"",
"name": "MagiTech Madness",
"news":"http://example.com/launcher/v1/index.html",
"address":"",
"revision":"0.0.1",
"version":"1.6.4"
}
"use strict";
/**
* Config Options:
*/
// Default config values; overridden below by the JSON file given on the command line
var config = {
base_url: "http://dl.dropbox.com/"
, server_id: ""
, news: ""
, address: ""
, revision: ""
, version: ""
}
/**
* Base Imports
*/
var fs = require('fs')
, path = require('path')
, crypto = require('crypto')
/**
* Userspace libs
*/
var async = require('async')
, mu = require('minstache')
, _ = require('./lib')
, makeModules = require('./modules')
;
/**
* Precompiled templates
*/
var templates = {
main: mu.compile(fs.readFileSync("main.mustache.html").toString())
, module: mu.compile(fs.readFileSync("module.mustache.html").toString())
}
var args = process.argv.splice(2);
if (args.length < 2) {
console.log([
"needed params: "
, " [target] input directory"
, " [config] configuration in json format"
, ""
, "ex: node " + process.argv[1] + " pack config.json"
].join("\n"))
process.exit();
}
var target = makeModules.basedir = args[0]
, config = require(path.resolve(args[1]))
;
console.log("Using config: \n%s", JSON.stringify(config))
/**
* Write the output XML
*/
function writeXML(modules, target) {
var out = templates.main({
base_url: config.base_url
, server_id: config.server_id
, server_name: config.name
, news_url: config.news
, server_address: config.address
, revision: config.revision
, version: config.version
, modules: modules.join('\n')
});
fs.writeFileSync(target + "/ServerPack.xml", out);
}
function getConfig(target, cb) {
console.log("Zipping Config files to: " + target + "/config.zip");
_.zipDir(target, "config.zip", "./config/")
_.md5sum(target + "/config.zip", function (err, md5) {
cb(err,
templates.module({
id: "config"
, name: "Config Files"
, url: config.base_url + "/config.zip"
, type: "Extract"
, inroot: true
, required: "true"
, md5: md5
})
);
});
}
makeModules(target, function (err, modules) {
if (!! err) throw err; // Handle errors...
modules = modules.map(function (m) {
return templates.module({
id: m.id
, name: m.name
, url: config.base_url + m.url
, type: m.type
, required: m.required.toString()
, md5: m.md5
})
});
getConfig(target, function (err, config_tpl) {
if (err) throw err;
modules.push(config_tpl)
console.log("creating ServerPack.xml...")
writeXML(modules, target);
console.log("Seems all nice and fancy...")
process.exit(0);
});
})
"use strict";
var fs = require("fs")
, crypto = require("crypto")
, path = require("path")
, AdmZip = require('adm-zip')
, async = require("async")
, log = console.log
/**
* Small Utilities
*/
var cleanText = module.exports.cleanText = function (str) { return str.replace(/[\s\-\_\.]+/g, '') }
, isArray = module.exports.isArray = function (blob) { return Object.prototype.toString.call(blob) === '[object Array]' }
, isString = module.exports.isString = function (blob) { return typeof blob === "string" }
module.exports.extend = function extend (origin, add) {
// Don't do anything if add isn't an object
if (!add || typeof add !== 'object') return origin;
var keys = Object.keys(add);
var i = keys.length;
while (i--) {
origin[keys[i]] = add[keys[i]];
}
return origin;
}
/**
* Find mod info
*/
module.exports.find_mod_info = function find_mod_info (file, cb) {
var zip = new AdmZip(file);
zip.readAsTextAsync("mcmod.info", function (info) {
// if (!! err) return cb(err);
if (!info || info.length == 0) {
// try with the darn pack.mcmeta
info = zip.readAsText("pack.mcmeta");
}
try {
info = JSON.parse(info);
} catch(e) {
info = '';
}
if (info.length == 0) {
var basename = path.basename(file)
, name = path.basename(file).replace(path.extname(file), '').replace(/[\s\[\]\.]+/g, '')
console.log("Mod %s does not have an mcmod.info", path.basename(file))
console.log("falling back to name: %s", name)
return cb(null, cleanText(name), name);
}
if (isArray(info))
info = info[0];
if (!! info["modlist"])
info = info["modlist"][0];
if (!! info["name"])
return cb(null, cleanText(info["modid"] || info["name"]), info["name"]);
if (!! info["pack"])
return cb(null, cleanText(info["pack"]["description"]), info["pack"]["description"]);
});
}
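/**
 * For reference, rough shapes of the metadata the branches above can handle
 * (real mcmod.info files vary a lot; these are illustrative, not exhaustive):
 *
 *   mcmod.info, array form:      [ { "modid": "examplemod", "name": "Example Mod" } ]
 *   mcmod.info, wrapped form:    { "modlist": [ { "modid": "examplemod", "name": "Example Mod" } ] }
 *   pack.mcmeta (resource pack): { "pack": { "pack_format": 1, "description": "Example Pack" } }
 *
 * Anything else falls back to a name derived from the jar's filename.
 */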
/**
* Zip a directory inside a zip file
*/
module.exports.zipDir = function zipDir(cwd, filename, target_dir) {
var prev_cwd = process.cwd();
process.chdir(cwd);
var zip = new AdmZip();
zip.addLocalFolder(target_dir, target_dir);
zip.writeZip(filename);
process.chdir(prev_cwd);
}
/**
* MD5 sum of files
*/
module.exports.md5sum = function md5sum(file, cb) {
var hasher = crypto.createHash('md5');
var s = fs.createReadStream(file);
s.on('data', function(d) {
hasher.update(d);
});
s.on('end', function() {
var d = hasher.digest('hex');
cb(null, d);
});
s.on('error', function(err){
cb(err);
});
}
/**
* Iterate each file in directory
*
* WARN! ASYNC operation. use a queue to process the files
*/
module.exports.eachFile = function eachFile (dir, cb){
log("eachFile: " + dir)
fs.readdir(dir, function (err, files) {
if (!! err) throw err;
files = files.filter(function(el) { return (/\.(zip|jar)$/i).test(el) && !(/^idfix.*/i).test(el) })
async.each(files, function (file, next) {
log("loking for "+ dir + '/' + file);
fs.stat( dir + '/' + file, function (err, f) {
if (!! err) return next(err);
log("sending: "+ file)
if (! f.isDirectory()) {
log("sending: "+ file)
cb(err, file);
return next();
}
})
})
})
};
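/**
 * A small usage sketch of the queue pattern the WARN above refers to: throttle
 * the per-file work by pushing each filename from eachFile into an async.queue.
 * "./pack/mods" and the md5 printout are placeholder choices, only run when this
 * file is executed directly.
 */
if (require.main === module) {
  var sampleQ = async.queue(function (file, next) {
    module.exports.md5sum("./pack/mods/" + file, function (err, md5) {
      if (!! err) return next(err);
      log(file + " => " + md5);
      next();
    });
  }, 4);
  module.exports.eachFile("./pack/mods", function (err, file) {
    if (!! err) throw err;
    sampleQ.push(file);
  });
}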
<ServerPack version="3.0">
<Server id="{{server_id}}" name="{{server_name}}" newsUrl="{{news_url}}" version="{{version}}" serverAddress="{{server_address}}" revision="{{revision}}" mainClass="net.minecraft.launchwrapper.Launch">
<Import url="{{base_url}}/forge.xml">forge</Import>
{{!modules}}
</Server>
</ServerPack>
// Standalone helper: collects the mod names under <dir>/mods and writes them to <dir>/mods.json
var args = process.argv.splice(2);
if (args.length < 1) {
console.log("Wrong number of arguments: " + process.argv[1] + " fooBar");
console.log(" to write 'fooBar/mods.json'")
process.exit(1);
}
var baseDir = args[0];
var _ = require("./lib")
, async = require("async")
, fs = require("fs");
var modInfo = [];
var Q = async.queue(function(filename, next) {
_.find_mod_info(baseDir + '/mods/' + filename , function(err, cleanedName, name) {
if (!! err) {
console.log("Error processing :" + filename);
process.exit();
}
modInfo.push(name);
next();
});
}, 8);
Q.drain = function() {
console.log("Mod list length: " + modInfo.length);
fs.writeFileSync("./" + baseDir + "/mods.json", JSON.stringify(modInfo));
process.exit();
};
_.eachFile("./" + baseDir + "/mods", function (err, file) {
if (!! err ) throw err;
Q.push(file);
});
<Module id="{{id}}" name="{{name}}">
<URL>{{url}}</URL>
<Required>{{required}}</Required>
<ModType {{#inroot}}inRoot="true"{{/inroot}}>{{type}}</ModType>
<MD5>{{md5}}</MD5>
</Module>
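For reference, a minimal sketch of how index.js feeds the template above through minstache to produce one Module entry; the id, name, url and md5 values here are made-up placeholders:

var fs = require('fs')
  , mu = require('minstache');

var renderModule = mu.compile(fs.readFileSync("module.mustache.html").toString());

console.log(renderModule({
    id: "examplemod"
  , name: "Example Mod"
  , url: "http://example.com/launcher/v1/mods/ExampleMod.jar"
  , type: "Regular"
  , required: "true"
  , md5: "d41d8cd98f00b204e9800998ecf8427e"
}));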
"use strict";
/**
* Base Imports
*/
var fs = require('fs')
, path = require('path')
/**
* Userspace libs
*/
var async = require('async')
, AdmZip = require('adm-zip')
, _ = require('./lib')
, log = console.log
var modules = [], basedir;
var Q = async.queue(function(data, next){
_.md5sum( data.basedir + data.dirname + "/" + data.filename, function (err, md5) {
if (!! err) { console.log("err..." + err); return next(err); }
_.find_mod_info(data.basedir + data.dirname + "/" + data.filename, function (err, id, name){
if (!! err) {
console.log('ERR >> ' + data.filename)
throw err;
}
console.log("pushing: " + id);
modules.push({
id: id
, name: name
, url: data.dirname + "/" + encodeURIComponent(data.filename) // escape() is deprecated and mangles non-ASCII names
, required: data.required
, type: "Regular"
, md5: md5
});
next();
})
})
}, 8);
// Default drain handler, only relevant when this file is run directly (see bottom)
Q.drain = function () {
log('Q drained')
fs.writeFileSync("modules.json", JSON.stringify(modules));
process.exit();
}
function enqueue (basedir) {
// Enqueue files
// first add optionals so I don't have to scroll down
_.eachFile(basedir +'/mods-client', function (err, filename) {
if (!! err) throw err;
Q.push({
dirname: "/mods-client"
, filename: filename
, required: false
, basedir: basedir
})
})
//
_.eachFile( basedir + '/mods', function (err, filename) {
if (!! err) throw err;
log('Enqueue : ' + filename)
Q.push({
dirname: "/mods"
, filename: filename
, required: true
, basedir: basedir
})
})
}
module.exports = function (basedir, callback) {
enqueue(basedir);
Q.drain = function () {
callback(null, modules)
}
}
// Run and save a modules.json if running locally
if (require.main === module) {
enqueue("./pack");
}
{
"name": "mcu_maker",
"version": "0.0.1",
"description": "just needed a way to quickly generate MCU xml files",
"main": "index.js",
"dependencies": {
"archiver": "~0.4.10",
"async": "~0.2.9",
"adm-zip": "~0.4.3",
"minstache": "~1.1.0"
},
"devDependencies": {},
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "aliem",
"license": "BSD-2-Clause"
}
#!/bin/sh
## Bonus script!!
## for you linux users
WORKDIR="v2"
DESTDIR="~/launcher/$WORKDIR"
SERVERDIR="~/server_root"
SSH_SERVER="ssh_server"
EXCLUDES="\
--exclude='*/*.log' \
--exclude='*/*.log.*' \
--exclude='*/*.dat' \
--exclude='*/crash-reports' \
--exclude='*/saves' \
--exclude='*/stats' \
--exclude='*/hats' \
--exclude='*/openperipheral' \
--exclude='1.6.4' \
--exclude='ic2' \
--exclude='logging.properties' \
"
_rsync='rsync --verbose --progress --human-readable --compress --archive --hard-links --one-file-system --update'
_synchronize="$_rsync --delete "
SYNC=false
SERV=false
NOOP=false
while getopts ":sen" opt; do
case $opt in
s) SYNC=true
;;
e) SERV=true
;;
n) NOOP=true
;;
\?)
echo "$0 -[sen]"
echo " -s Sync the launcher files"
echo " -e Sync the server files"
echo " -n Do not run the package maker"
exit
;;
esac
done
if ! $NOOP
then
node index $WORKDIR ./config.json
rc=$?
# capture node's exit status before any other command overwrites $?
if [ "$rc" -gt 0 ]; then
echo "ERROR from NodeJS: $rc"
exit 4
fi
fi
if $SYNC
then
$_synchronize -e ssh $EXCLUDES $WORKDIR/ masters:$DESTDIR
#cp $WORKDIR/ServerPack.xml /home/lor/Dropbox/Public/1.6.4/
fi
if $SERV
then
ssh $SSH_SERVER "$_synchronize $EXCLUDES $DESTDIR/mods/ $SERVERDIR/mods/ && $_rsync $EXCLUDES $DESTDIR/config/ $SERVERDIR/config/"
ssh $SSH_SERVER "chmod -R u+rwX,go+rX,go-w $DESTDIR"
fi