AWS S3 multipart upload using the new checksum algorithms (SHA-256)
//
// https://aws.amazon.com/blogs/aws/new-additional-checksum-algorithms-for-amazon-s3/
//
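// Expects a .env file next to this script providing the variables read below:
//   AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, AWS_SESSION_TOKEN, BUCKET, REGION
//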
require('dotenv').config();
const s3 = require('@aws-sdk/client-s3');
const fs = require('fs');
const crypto = require('crypto');
const accessKeyId = process.env.AWS_ACCESS_KEY_ID;
const secretAccessKey = process.env.AWS_SECRET_ACCESS_KEY;
const sessionToken = process.env.AWS_SESSION_TOKEN;
const BUCKET_NAME = process.env.BUCKET;
const BUCKET_REGION = process.env.REGION;
const CHUNK_SIZE = 1024 * 1024 * 10; // 10 MB per part (S3 requires at least 5 MB for every part except the last)
const fileKey = 'short.mp4';
const filePathKey = `./${fileKey}`;
const s3Client = new s3.S3Client({
  credentials: {
    accessKeyId: accessKeyId,
    secretAccessKey: secretAccessKey,
    sessionToken: sessionToken,
  },
  // Note: the client config has no `bucket` option; the bucket is passed on each command below.
  region: BUCKET_REGION,
});
// Computes the base64-encoded SHA-256 of a part; only needed if you pass
// ChecksumSHA256 on each part yourself instead of letting the SDK compute it.
const getHashSha256 = (data) => {
  return crypto.createHash('sha256').update(data).digest('base64');
};
async function uploadFile() {
  // Step 1: start the multipart upload and declare SHA-256 as the checksum algorithm.
  let multipart = await s3Client.send(new s3.CreateMultipartUploadCommand({
    Bucket: BUCKET_NAME,
    Key: fileKey,
    ACL: 'private',
    ContentType: 'video/mp4',
    StorageClass: 'STANDARD',
    ChecksumAlgorithm: s3.ChecksumAlgorithm.SHA256,
  }));
console.log("multipart ====>", multipart);
let uploadPartResults = []
const buffer = fs.readFileSync(filePathKey);
let partNum = 0;
const partSize = CHUNK_SIZE;
const multipartParams = {
Bucket: BUCKET_NAME,
Key: fileKey,
ContentType: 'video/mp4'
};
let hashSHA256 = null;
  for (let start = 0; start < buffer.length; start += partSize) {
    partNum++;
    const end = Math.min(start + partSize, buffer.length);
    const data = buffer.slice(start, end);
    // hashSHA256 = getHashSha256(data);
    // console.log('hashSHA256 =>', hashSHA256);
    const partParams = {
      Body: data,
      Bucket: multipartParams.Bucket,
      Key: multipartParams.Key,
      PartNumber: partNum, // PartNumber is numeric in the v3 SDK
      UploadId: multipart.UploadId,
      // With ChecksumAlgorithm set, the SDK calculates the part's SHA-256 checksum for you.
      ChecksumAlgorithm: s3.ChecksumAlgorithm.SHA256,
      // ChecksumSHA256: hashSHA256, // Pass this if you want to calculate the checksum of each part manually
    };
    console.log('Uploading part: #', partParams.PartNumber, ', Start:', start);
    let uploadPromiseResult = await s3Client.send(new s3.UploadPartCommand(partParams));
    console.log('uploadPromiseResult ==>', uploadPromiseResult);
    uploadPartResults.push({
      PartNumber: partNum,
      ETag: uploadPromiseResult.ETag,
      ChecksumSHA256: uploadPromiseResult.ChecksumSHA256,
    });
  }
  console.log('uploadPartResults ==>', uploadPartResults);

  // Step 3: complete the upload, returning each part's ETag and SHA-256 checksum to S3.
  let completeUploadResponse = null;
  try {
    completeUploadResponse = await s3Client.send(new s3.CompleteMultipartUploadCommand({
      Bucket: BUCKET_NAME,
      Key: fileKey,
      MultipartUpload: {
        Parts: uploadPartResults
      },
      UploadId: multipart.UploadId,
    }));
  } catch (e) {
    console.error('ERROR!!!!! =====>', e);
  }
  console.log("completeUploadResponse ====>", completeUploadResponse);
  console.log("MULTIPART UPLOAD COMPLETED!!!!!");
  console.log('\n\n\n\nCHECKING: Show uploaded object meta:');

  // Step 4: read back the object attributes, including the stored checksum, to verify the upload.
  const attributes = await s3Client.send(new s3.GetObjectAttributesCommand({
    Bucket: BUCKET_NAME,
    Key: fileKey,
    ObjectAttributes: [
      'ETag',
      'Checksum',
      'ObjectParts',
      'StorageClass',
      'ObjectSize',
    ],
    ChecksumMode: 'ENABLED',
  }));
  console.log('attributes ==>', attributes);
}
uploadFile();
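
// OPTIONAL (sketch): recompute the composite checksum locally and compare it against
// Checksum.ChecksumSHA256 from GetObjectAttributesCommand. Per the AWS blog post linked
// above, the object-level checksum of a multipart upload is a "checksum of checksums":
// the SHA-256 of the concatenated binary SHA-256 digests of every part. This helper
// assumes the same part size (CHUNK_SIZE) was used for the whole upload.
const getCompositeSha256 = (fileBuffer, partSize) => {
  const partDigests = [];
  for (let start = 0; start < fileBuffer.length; start += partSize) {
    const end = Math.min(start + partSize, fileBuffer.length);
    partDigests.push(crypto.createHash('sha256').update(fileBuffer.slice(start, end)).digest());
  }
  return crypto.createHash('sha256').update(Buffer.concat(partDigests)).digest('base64');
};
// Example: console.log('local composite ==>', getCompositeSha256(fs.readFileSync(filePathKey), CHUNK_SIZE));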