# How to upload images to S3 using GraphicsMagick and the Knox S3 npm module
At the client, just use a normal form post. I use jQuery and do an AJAX post.

In your HTML:

```html
<form id="uploadForm" autocomplete="off" action="" method="post" enctype="multipart/form-data">
  <input id="fileId" type="file" accept="image/*">
</form>
```

In your JS:

```js
var $form = $('#uploadForm');
var $file = $('#fileId');

var formData = new FormData($form[0]);
formData.append('imageFilename', $file[0].files[0].name);
formData.append('imageType', $file[0].files[0].type);
formData.append('imageSize', $file[0].files[0].size);
formData.append('imageFile', $file[0].files[0]);
```
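The gist stops at building the `FormData`; a minimal sketch of the actual jQuery AJAX post could look like this (the `/images/add` URL is an assumption, point it at your own route):

```js
// Minimal sketch: POST the FormData built above. The '/images/add' URL is
// an assumption; use whatever path your hapi route is registered on.
$form.on('submit', function(e) {
  e.preventDefault();
  $.ajax({
    url: '/images/add',
    type: 'POST',
    data: formData,
    processData: false, // do not let jQuery serialize the FormData
    contentType: false  // let the browser set the multipart boundary
  }).done(function(res) {
    console.log('upload response:', res);
  }).fail(function(xhr) {
    console.log('upload failed:', xhr.status);
  });
});
```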
How to create a route using hapi (this is the `config` section of the route):

```js
config: {
    handler: internals.add,
    timeout: {
        socket: 13 * 60 * 1000, // 13 mins
        server: 12 * 60 * 1000  // 12 mins
    },
    payload: {
        parse: true,      // parse the multipart payload; with the default output ('data')
                          // the uploaded file arrives as a Buffer. Set output: 'stream'
                          // if you want a readable stream instead.
        maxBytes: 2388608 // about 2 MB
    }
}
```
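For context, here is a sketch of how that `config` block could sit inside a full route registration using the hapi API of that era (the `/images/add` path is an assumption):

```js
// Sketch of the full route registration (hapi ~8 style, matching this gist's era).
// The path is an assumption; choose whatever fits your application.
server.route({
    method: 'POST',
    path: '/images/add',
    config: {
        handler: internals.add,
        timeout: {
            socket: 13 * 60 * 1000,
            server: 12 * 60 * 1000
        },
        payload: {
            parse: true,
            maxBytes: 2388608
        }
    }
});
```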
## On Windows:
``http://www.graphicsmagick.org/INSTALL-windows.html``
## On the Mac:
``brew install graphicsmagick``
## On the Joyent box:
```
pkgin search GraphicsMagick
pkgin install GraphicsMagick
```
## On a Linux production server
```
wget ftp://ftp.graphicsmagick.org/pub/GraphicsMagick/1.3/GraphicsMagick-1.3.19.tar.gz
tar -xvf GraphicsMagick-1.3.19.tar.gz
cd GraphicsMagick-1.3.19
./configure
make
sudo make install
```
## On AWS RedHat
```
sudo yum install GraphicsMagick GraphicsMagick-devel
```
### Troubleshooting
* [https://forums.aws.amazon.com/message.jspa?messageID=572527](https://forums.aws.amazon.com/message.jspa?messageID=572527)
* [http://serverfault.com/questions/569824/how-to-properly-set-up-imagemagick-on-aws-server-using-nodejs-and-express](http://serverfault.com/questions/569824/how-to-properly-set-up-imagemagick-on-aws-server-using-nodejs-and-express)
* [http://stackoverflow.com/questions/16094875/how-do-i-configure-a-jpeg-delegate-for-graphicsmagick](http://stackoverflow.com/questions/16094875/how-do-i-configure-a-jpeg-delegate-for-graphicsmagick)
* Change to 1.3.18: [https://gist.github.com/pzaich/3997914](https://gist.github.com/pzaich/3997914)
* [http://blog.ericlamb.net/2008/11/fix-for-convert-no-decode-delegate-for-this-image-format/](http://blog.ericlamb.net/2008/11/fix-for-convert-no-decode-delegate-for-this-image-format/)
Link to GraphicsMagick: [GraphicsMagick homepage](http://www.graphicsmagick.org/index.html)
### Checking PNG support:
[PNG support check](http://stackoverflow.com/questions/11196562/how-to-install-graphicsmagick-with-png-support-on-amazon-ec2)
### Verify Build
[Verify build](http://www.graphicsmagick.org/INSTALL-unix.html#verifying-the-build)
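As a quick sanity check from Node itself, here is a minimal sketch (assuming the `gm` binary is on the PATH; the exact `gm version` output varies a little between versions) that confirms GraphicsMagick is installed and was built with PNG and JPEG support:

```js
// Minimal sketch: run `gm version` and check that the PNG and JPEG delegates
// were compiled in. Assumes the gm binary is on the PATH.
var execFile = require('child_process').execFile;

execFile('gm', ['version'], function(err, stdout) {
    if (err) {
        console.log('GraphicsMagick does not appear to be installed:', err.message);
        return;
    }
    console.log('PNG support: ', /PNG\s+yes/.test(stdout));
    console.log('JPEG support:', /JPEG\s+yes/.test(stdout));
});
```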
```js
// Route handler
var Gm = require('gm'); // the gm npm module, which drives GraphicsMagick
// S3 is the knox wrapper module shown further down; require it and call init() first.

internals.add = function(request, reply) {
    var srcFile = request.payload.imageFile; // the uploaded file (a Buffer, since payload.parse is true)
    var width = request.payload.width ? request.payload.width : 1600;
    var height = request.payload.height ? request.payload.height : 1200;
    var filename = request.payload.imageFilename;
    var imageType = request.payload.imageType;

    Gm(srcFile)
        .resize(width, height, ">") // ">" only shrinks images larger than width x height
        .toBuffer(function(err, buffer) {
            if (err) {
                console.log("GM error at image on image.js");
                console.dir(err);
                reply({ response: 'NOTOK' });
            } else {
                var tempFilename = "timg/" + filename; // your image name (key) at S3
                try {
                    S3.addBuffer(buffer, tempFilename, imageType, function(err) {
                        if (err)
                            reply({ response: 'NOTOK' });
                        else
                            reply({ response: 'OK' });
                    });
                } catch (ex) {
                    reply({ response: 'NOTOK' });
                }
            }
        });
};
```
```js
// S3 handling using knox
var _ = require('lodash');
var Domains = require('hd').domains;
var Knox = require('knox');

// serverConfig is assumed to be your application's configuration object,
// holding the S3 credentials and bucket name used below.

// S3 AWS
var internals = { active: false, enabled: false, client: null };

internals.init = function() {
    var self = this;
    if (self.active)
        return;
    if (self.client == null) {
        self.client = Knox.createClient({
            key: serverConfig.s3.accessKeyId,
            secret: serverConfig.s3.secretAccessKey,
            bucket: serverConfig.s3.bucket
        });
    }
    self.active = true;
};

// Upload a file from disk by path
internals.add = function(srcFile, filename, cb) {
    var self = this;
    var client = self.client;
    client.putFile(srcFile, filename, function(err, res) {
        // Always either do something with `res` or at least call `res.resume()`.
        if (!err)
            res.resume();
        else {
            console.log('S3 add --> ');
            console.log(err);
        }
        cb(err);
    });
};

// Upload from a readable stream (e.g. the stdout of a gm pipe)
internals.addStream = function(stdout, filename, contentType, cb) {
    var self = this;
    var client = self.client;
    var chunks = [];
    stdout.on('data', function(chunk) {
        chunks.push(chunk);
    });
    stdout.on('end', function() {
        var buf = Buffer.concat(chunks);
        var headers = {
            'Content-Length': buf.length,
            'Content-Type': contentType
        };
        client.putBuffer(buf, filename, headers, function(err, res) {
            if (res != null)
                res.resume();
            if (err || res.statusCode !== 200) {
                console.log('S3 addStream --> ');
                if (err)
                    console.log(err);
                else {
                    err = {
                        code: 400,
                        message: res.statusMessage
                    };
                    console.log(res.statusCode);
                    console.log(res.statusMessage);
                }
            }
            cb(err);
        });
    });
};

// Upload from an in-memory buffer
internals.addBuffer = function(buffer, filename, contentType, cb) {
    var self = this;
    var client = self.client;
    var headers = {
        'Content-Length': buffer.length,
        'Content-Type': contentType
    };
    client.putBuffer(buffer, filename, headers, function(err, res) {
        if (res != null)
            res.resume();
        if (err || res.statusCode !== 200) {
            console.log('S3 addBuffer --> ');
            if (err)
                console.log(err);
            else {
                err = {
                    code: 400,
                    message: res.statusMessage
                };
                console.log(res.statusCode);
                console.log(res.statusMessage);
            }
        }
        cb(err);
    });
};

// Delete an object
internals.remove = function(filename, cb) {
    var self = this;
    var client = self.client;
    client.del(filename).on('response', function(res) {
        res.resume();
        cb(null);
    }).end();
};

// Copy an object within the bucket
internals.copy = function(sourceFilename, destFilename, cb) {
    var self = this;
    var client = self.client;
    // We are assuming that all files being copied are under the temp/ prefix;
    // set an object lifecycle rule at S3 for the temp/ prefix so they expire.
    client.copyFile(sourceFilename, destFilename, function(err, res) {
        // Always either do something with `res` or at least call `res.resume()`.
        if (!err)
            res.resume();
        else {
            console.log('S3 copy --> ');
            console.log(err);
        }
        cb(err);
    });
};

module.exports = {
    init: internals.init,
    add: internals.add,
    addStream: internals.addStream,
    addBuffer: internals.addBuffer,
    remove: internals.remove,
    copy: internals.copy
};
```
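To tie the pieces together, a minimal sketch of how the route handler's `S3` reference could be wired up (the `./s3` path is an assumption, point it at wherever the module above lives):

```js
// Minimal sketch of wiring the knox wrapper into the route handler file.
// The './s3' path is an assumption.
var S3 = require('./s3');

// Create the knox client once, before any route handler runs.
S3.init();

// The handler can then call:
//   S3.addBuffer(buffer, 'timg/photo.jpg', 'image/jpeg', function(err) { ... });
// and cleanup code can call S3.remove(key, cb) or S3.copy(src, dest, cb).
```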