Skip to content

Instantly share code, notes, and snippets.

@bpceee
Last active August 12, 2017 23:54
Show Gist options
  • Save bpceee/0542f1ae2245779de1d9 to your computer and use it in GitHub Desktop.
aws ec2 nodejs
// AWS S3 examples (SDK v2): 1) download an object, 2) generate a pre-signed
// GET URL, 3) upload an object. Only example 2 is active; 1 and 3 are kept
// commented out as reference variants.
var AWS = require('aws-sdk');
// Credentials come from ./config.json; region is overridden explicitly.
AWS.config.loadFromPath('./config.json');
AWS.config.region = 'ap-southeast-1';
//1. download
// var s3 = new AWS.S3();
// var file = require('fs').createWriteStream('./docConv.html');
// var params = {Bucket: 'bpctest', Key: 'docConv.html'};
// s3.getObject(params).
// on('httpData', function(chunk) { console.log("wtf");file.write(chunk); }).
// on('httpDone', function() { console.log("wtf?");file.end(); }).
// send();
//2. get url
var s3 = new AWS.S3();
var params = {Bucket: 'bpctest', Key: 'test.html'};
// getSignedUrl with a callback may report an error (e.g. missing/invalid
// credentials); check it instead of printing "undefined" as the URL.
s3.getSignedUrl('getObject', params, function (err, url) {
  if (err) {
    console.error('Failed to generate signed URL:', err);
    return;
  }
  console.log("The URL is", url);
});
// //3. upload
// var s3 = new AWS.S3();
// var file = require('fs').createReadStream('./test.html');
// var s3 = new AWS.S3({params: {Bucket: 'bpctest', Key: 'test.html'}});
// s3.createBucket(function() {
// s3.putObject({Body: file}, function() {
// console.log("Successfully uploaded data to myBucket/myKey");
// });
// });
@bpceee
Copy link
Author

bpceee commented Sep 11, 2014

// 3. upload — stream a local file to S3 via the s3-streaming-upload package.
// NOTE(review): credentials are inline placeholders ('***'); in real code
// load them from config/environment, never hard-code them.
var Uploader = require('s3-streaming-upload').Uploader,
    upload = null,
    stream = require('fs').createReadStream('./test.html');

upload = new Uploader({
  accessKey:  '***',
  secretKey:  "***",
  bucket:     "***",
  objectName: "myUploadedFile",
  region: "us-west-2",

  stream:     stream
});

// The 'completed' handler is passed (err, res); previously err was ignored
// and success was logged unconditionally — check it before reporting success.
upload.on('completed', function (err, res) {
  if (err) {
    console.error('upload completed with error', err);
    return;
  }
  console.log('upload completed');
});

upload.on('failed', function (err) {
  console.log('upload failed with error', err);
});

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment