Skip to content

Instantly share code, notes, and snippets.

@ppshein
Forked from magegu/multipart.js
Created October 27, 2019 03:16
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save ppshein/663c97bb785cdfa5a3d56595813a451f to your computer and use it in GitHub Desktop.
Save ppshein/663c97bb785cdfa5a3d56595813a451f to your computer and use it in GitHub Desktop.
Multipart upload for AWS S3 with Node.js, based on the async lib, including retries for part uploads.
/*
by Martin Güther @magegu
just call it:
uploadFile(absoluteFilePath, callback);
*/
var path = require('path');
var async = require('async');
var fs = require('fs');
var AWS = require('aws-sdk');
// Credentials/region are loaded from a local JSON file; see
// https://docs.aws.amazon.com/sdk-for-javascript/ for the expected shape.
AWS.config.loadFromPath('./aws.json');
var s3 = new AWS.S3();
// Target bucket for all uploads — replace before use.
var bucketName = "YOUR BUCKET NAME";
/**
 * Uploads a file to S3 using the multipart API, one 5 MB part at a time,
 * retrying each part (async.retry, default 5 attempts) before failing.
 *
 * @param {string} absoluteFilePath - local path of the file to upload
 * @param {string} fileName - S3 object key
 * @param {Function} uploadCb - callback(err, data) invoked once on completion
 */
function uploadMultipart(absoluteFilePath, fileName, uploadCb) {
  s3.createMultipartUpload({ Bucket: bucketName, Key: fileName }, (mpErr, multipart) => {
    if (mpErr) {
      uploadCb(mpErr);
      return;
    }
    fs.readFile(absoluteFilePath, (readErr, fileData) => {
      // Previously ignored: a read failure crashed on fileData.length below.
      if (readErr) {
        uploadCb(readErr);
        return;
      }
      var partSize = 1024 * 1024 * 5; // S3 minimum part size (all parts but the last)
      var parts = Math.ceil(fileData.length / partSize);
      async.timesSeries(parts, (partIndex, next) => {
        var rangeStart = partIndex * partSize;
        var end = Math.min(rangeStart + partSize, fileData.length);
        var partNumber = partIndex + 1; // S3 part numbers are 1-based
        console.log("uploading ", fileName, " % ", (partIndex / parts).toFixed(2));
        async.retry((retryCb) => {
          s3.uploadPart({
            Body: fileData.slice(rangeStart, end),
            Bucket: bucketName,
            Key: fileName,
            PartNumber: partNumber,
            UploadId: multipart.UploadId
          }, retryCb);
        }, (err, mData) => {
          // Previously dereferenced data.ETag even when err was set,
          // throwing a TypeError that masked the real upload error.
          if (err) {
            next(err);
            return;
          }
          next(null, { ETag: mData.ETag, PartNumber: partNumber });
        });
      }, (err, dataPacks) => {
        if (err) {
          // Previously ignored: completion ran with a corrupt parts list.
          // Abort so S3 does not keep charging for orphaned parts; report
          // the original error regardless of the abort's own outcome.
          s3.abortMultipartUpload({
            Bucket: bucketName,
            Key: fileName,
            UploadId: multipart.UploadId
          }, () => uploadCb(err));
          return;
        }
        s3.completeMultipartUpload({
          Bucket: bucketName,
          Key: fileName,
          MultipartUpload: {
            Parts: dataPacks
          },
          UploadId: multipart.UploadId
        }, uploadCb);
      });
    });
  });
}
/**
 * Uploads a local file to S3. Files under the 5 MB multipart minimum go up
 * as a single retried PUT; larger files are delegated to uploadMultipart.
 *
 * @param {string} absoluteFilePath - local path of the file to upload
 * @param {Function} uploadCb - callback(err, data) invoked once on completion
 */
function uploadFile(absoluteFilePath, uploadCb) {
  var fileName = path.basename(absoluteFilePath);
  var stats = fs.statSync(absoluteFilePath);
  var fileSizeInBytes = stats.size;
  if (fileSizeInBytes < (1024 * 1024 * 5)) {
    async.retry((retryCb) => {
      fs.readFile(absoluteFilePath, (readErr, fileData) => {
        // Previously ignored: a read failure sent Body: undefined to S3,
        // surfacing as a confusing remote error instead of the local one.
        if (readErr) {
          retryCb(readErr);
          return;
        }
        s3.putObject({
          Bucket: bucketName,
          Key: fileName,
          Body: fileData
        }, retryCb);
      });
    }, uploadCb);
  } else {
    uploadMultipart(absoluteFilePath, fileName, uploadCb);
  }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment