@magnusdahlstrand
Created July 19, 2013 11:43
Quick and dirty Node.js S3 sync from a Gruntfile. Not 100% yet.
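The task reads its settings from a project.yaml next to the Gruntfile. The keys below are the ones the script actually looks up (build-dir plus the s3 block); the values are placeholders to swap for your own bucket and build output:

build-dir: build
s3:
  region: eu-west-1
  bucket-name: example-bucket
  access-key: YOUR-ACCESS-KEY-ID
  secret: YOUR-SECRET-ACCESS-KEY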
// Older (pre-3.0) versions of js-yaml register a require() handler, which is
// what lets project.yaml be require()'d directly further down
require('js-yaml');
var _ = require('lodash');
var fs = require('fs');
var aws = require('aws-sdk');
var path = require('path');
var exec = require('exec-sync');

module.exports = function (grunt) {

  grunt.initConfig({
    pkg: grunt.file.readJSON('package.json')
  });
  // Deploy
  grunt.registerTask('deploy', 'Deploy staging', function() {
    var done = this.async();
    var current_sha = exec('git rev-parse HEAD'); // SHA of HEAD (not used yet)
    var config;
    try {
      config = require('./project.yaml');
    }
    catch (error) {
      console.log(error);
      console.warn('Unable to continue with deploy.');
      return done(false); // fail the task instead of leaving grunt hanging
    }
    aws.config.update({
      region: config.s3['region'],
      accessKeyId: config.s3['access-key'],
      secretAccessKey: config.s3['secret']
    });
    // Walk the build directory and collect file paths. Entries are classified
    // by name alone: anything containing a dot is treated as a file, names
    // without a dot as directories, and dotfiles are skipped.
    var all_paths = [];
    var traverse = function(basepath) {
      _.each(fs.readdirSync(basepath), function(filepath) {
        if(filepath.match(/^.+\..+$/)) {
          all_paths.push(basepath + path.sep + filepath);
        }
        else if(filepath.match(/^[^\.]+$/)) {
          // Folder, traverse deeper
          traverse(basepath + path.sep + filepath);
        }
        else {
          // Dotfiles
        }
      });
    };
    traverse(config['build-dir']);
    // Map each bucket key (path relative to the build dir) to the file's size on disk
    var files = {};
    _.each(all_paths, function(filepath) {
      files[filepath.replace(config['build-dir'] + path.sep, '')] = fs.statSync(filepath).size;
    });
    var s3 = new aws.S3({
      apiVersion: '2006-03-01'
    });
    // Compare the bucket listing against the local sizes and queue every key
    // that differs. Note: files that don't exist in the bucket yet are never
    // queued, and listObjects only returns the first 1000 keys.
    var upload_files = [];
    s3.listObjects({Bucket: config.s3['bucket-name']}, function(error, data) {
      if(error) {
        console.error('Unable to list bucket "%s"', config.s3['bucket-name']);
      }
      else {
        _.each(data.Contents, function(obj) {
          if(files[obj.Key] !== undefined) {
            if(files[obj.Key] !== obj.Size) {
              upload_files.push(obj.Key);
            }
          }
        });
      }
      _.each(upload_files, function(remote_filename) {
        var local_filename = config['build-dir'] + path.sep + remote_filename;
        var file_data;
        try {
          // Note: a utf-8 read will mangle binary files such as images
          file_data = fs.readFileSync(local_filename, 'utf-8');
        }
        catch(error) {
          console.log(error);
          console.error('Upload of %s failed', local_filename);
          return;
        }
        console.log('Uploading %s', local_filename);
        // TODO: Make sure the content type is set correctly
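        // One possible approach to the TODO above (an untested sketch; the
        // extension map is an assumption covering only a few types - the
        // `mime` npm package would be a more complete choice):
        //   var content_types = {'.html': 'text/html', '.css': 'text/css',
        //     '.js': 'application/javascript', '.png': 'image/png'};
        //   var content_type = content_types[path.extname(remote_filename)]
        //     || 'application/octet-stream';
        //   // ...then pass ContentType: content_type along with Body below.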
        s3.putObject({
          Bucket: config.s3['bucket-name'],
          Key: remote_filename,
          Body: file_data
        }, function(error, data) {
          if(error) {
            console.log(error);
            console.error('Upload of %s failed', local_filename);
          }
          else {
            console.log('Successfully uploaded %s', local_filename);
          }
        });
      });
      // TODO: Make this work without a timeout - it should call done() once all files are uploaded
      setTimeout(function() {
        done();
      }, 4000);
    });
  });
};
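With a project.yaml in place, the task runs as `grunt deploy`. The 4-second setTimeout above is only a stopgap: a slow upload can still be in flight when grunt finishes. One way to address that TODO (a sketch only, reusing the gist's own upload_files, config, s3 and done rather than being runnable on its own) is to count outstanding putObject callbacks and call done() once the last one returns; reading with fs.readFile and no encoding also sidesteps the utf-8 issue, since putObject accepts a Buffer as Body:

var pending = upload_files.length;
if(pending === 0) {
  return done();
}
_.each(upload_files, function(remote_filename) {
  var local_filename = config['build-dir'] + path.sep + remote_filename;
  fs.readFile(local_filename, function(read_error, file_data) {
    if(read_error) {
      console.error('Upload of %s failed', local_filename);
      if(--pending === 0) done();
      return;
    }
    s3.putObject({
      Bucket: config.s3['bucket-name'],
      Key: remote_filename,
      Body: file_data
    }, function(error) {
      if(error) {
        console.error('Upload of %s failed', local_filename);
      }
      else {
        console.log('Successfully uploaded %s', local_filename);
      }
      // Only the last callback to finish triggers done()
      if(--pending === 0) done();
    });
  });
});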