@maximilianschmitt
Created September 3, 2014 23:47
Automated MySQL backups to S3 with node.js
The entry point requires the backup module and uses node-schedule to run it every day at 22:00:

'use strict';

var mysqlBackup = require('./mysql-backup');
var schedule = require('node-schedule');

// Run the backup job daily at 22:00.
schedule.scheduleJob({ hour: 22, minute: 0 }, mysqlBackup);
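node-schedule also accepts cron-style strings, so the same nightly schedule could equivalently be written as this sketch:

// "0 22 * * *" = minute 0 of hour 22, every day.
schedule.scheduleJob('0 22 * * *', mysqlBackup);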
mysql-backup.js streams the output of mysqldump straight into an S3 upload and resolves a promise when the upload finishes:

'use strict';

var spawn = require('child_process').spawn;
var s3Upload = require('s3-stream-upload');
var config = require('../config');
var Promise = require('bluebird');
var moment = require('moment');

var mysqlBackup = function() {
  // Configure the upload with the credentials and bucket from config.
  var upload = s3Upload({
    accessKeyId: config.aws.accessKey,
    secretAccessKey: config.aws.secretKey,
    Bucket: config.aws.buckets.backup.name,
    region: config.aws.buckets.backup.region
  });

  // Name the dump after the current timestamp, e.g. mysql-backup-2014-09-03-23-47-00.sql.
  var s3 = upload({ Key: 'mysql-backup-' + moment().format('YYYY-MM-DD-HH-mm-ss') + '.sql' });

  // Dump the configured database; note there is intentionally no space after -p.
  var mysqldump = spawn('mysqldump', [
    '-u', config.db.connection.user,
    '-p' + config.db.connection.password,
    config.db.connection.database
  ]);

  // Pipe the dump into the S3 upload stream and settle the promise on its outcome.
  return new Promise(function(resolve, reject) {
    mysqldump
      .stdout
      .pipe(s3)
      .on('finish', function() {
        resolve();
      })
      .on('error', function(err) {
        reject(err);
      });
  });
};

module.exports = mysqlBackup;
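Because mysqlBackup() returns a promise, the module can also be invoked on demand outside the scheduler. A minimal sketch (the log messages are just illustrative):

'use strict';

var mysqlBackup = require('./mysql-backup');

// Trigger a single backup immediately and report the outcome.
mysqlBackup()
  .then(function() {
    console.log('MySQL dump uploaded to S3');
  })
  .catch(function(err) {
    console.error('Backup failed:', err);
  });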
@AndreiTelteu

Thank you. Very useful.

This also works with other S3-compatible servers such as DigitalOcean Spaces. Check out the s3-stream-upload documentation for that.
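For instance, newer versions of s3-stream-upload take a configured aws-sdk S3 client, so an S3-compatible endpoint can be set on that client. A rough sketch, reusing config and moment from the backup module above (the Spaces endpoint URL is only an example, and the exact call shape depends on the s3-stream-upload version, so check its README):

var AWS = require('aws-sdk');
var UploadStream = require('s3-stream-upload');

// S3 client pointed at a DigitalOcean Spaces region instead of AWS.
var s3Client = new AWS.S3({
  endpoint: 'https://nyc3.digitaloceanspaces.com',
  accessKeyId: config.aws.accessKey,
  secretAccessKey: config.aws.secretKey
});

// Writable stream that uploads whatever is piped into it.
var s3 = UploadStream(s3Client, {
  Bucket: config.aws.buckets.backup.name,
  Key: 'mysql-backup-' + moment().format('YYYY-MM-DD-HH-mm-ss') + '.sql'
});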
