Skip to content

Instantly share code, notes, and snippets.

@nickwild-999
Last active December 30, 2023 19:41
Show Gist options
  • Star 6 You must be signed in to star a gist
  • Fork 3 You must be signed in to fork a gist
  • Save nickwild-999/c89cfdc3b9edf5f9a8175381ffd79943 to your computer and use it in GitHub Desktop.
Save nickwild-999/c89cfdc3b9edf5f9a8175381ffd79943 to your computer and use it in GitHub Desktop.
S3 Multipart upload with presigned url and progress in React
// AWS serverless function: Express app exposing S3 multipart-upload helper routes.
const serverless = require('serverless-http');
const express = require('express');
const AWS = require('aws-sdk');

const app = express();
app.use(express.urlencoded({ extended: true }));
app.use(express.json());

const BUCKET_NAME = "your-bucket-name";
const s3 = new AWS.S3({ params: { Bucket: BUCKET_NAME } });

// CORS: allow the front-end origin and the headers axios sends on these requests.
app.use(function (req, res, next) {
  res.header("Access-Control-Allow-Origin", "https://yoururl.com");
  // Fixed: the original list began with a stray comma (an empty header name,
  // presumably where "Origin" was meant to be).
  res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
  next();
});

app.get('/', (req, res) => {
  // The Express `req` object is circular, so JSON.stringify(req) throws
  // ("Converting circular structure to JSON"). Serialize a safe subset instead.
  res.send(JSON.stringify({ method: req.method, url: req.url, query: req.query }));
});

// Simple liveness probe.
app.get('/test', (req, res) => {
  res.send('Welcome test!');
});
// GET /start-upload?fileName=&fileType=&basecampProjectID=
// Initiates an S3 multipart upload for the given key and returns its UploadId.
app.get('/start-upload', async (req, res) => {
  try {
    const params = {
      Bucket: BUCKET_NAME,
      Key: req.query.fileName,
      ContentType: req.query.fileType,
      // NOTE(review): S3 `Tagging` expects a URL-encoded "key=value" string;
      // the client sends the raw project id here — confirm the intended tag format.
      Tagging: req.query.basecampProjectID,
    };
    // Use the SDK's promise API instead of wrapping the callback in `new Promise`.
    const uploadData = await s3.createMultipartUpload(params).promise();
    res.send({ uploadId: uploadData.UploadId });
  } catch (err) {
    // Previously the rejection was returned from the handler and never reached
    // the client; report it as a server error instead.
    console.log(err);
    res.status(500).send({ error: err.message });
  }
});
// GET /get-upload-url?fileName=&partNumber=&uploadId=
// Returns a presigned URL the client can PUT one part of the upload to.
app.get('/get-upload-url', async (req, res) => {
  try {
    const params = {
      Bucket: BUCKET_NAME,
      Key: req.query.fileName,
      // Query-string values are strings; S3 expects a numeric part number.
      PartNumber: Number(req.query.partNumber),
      UploadId: req.query.uploadId,
    };
    // getSignedUrlPromise replaces the callback + `new Promise` wrapper.
    const presignedUrl = await s3.getSignedUrlPromise('uploadPart', params);
    res.send({ presignedUrl });
  } catch (err) {
    console.log(err);
    // Surface the failure to the client rather than returning it from the handler.
    res.status(500).send({ error: err.message });
  }
});
// POST /complete-upload  body: { params: { fileName, parts, uploadId } }
// Tells S3 to assemble the uploaded parts (each { ETag, PartNumber }) into the
// final object.
app.post('/complete-upload', async (req, res) => {
  try {
    const { fileName, parts, uploadId } = req.body.params;
    console.log('parts:', req.body.params);
    const params = {
      Bucket: BUCKET_NAME,
      Key: fileName,
      MultipartUpload: {
        Parts: parts,
      },
      UploadId: uploadId,
    };
    // Promise API instead of the callback wrapped in `new Promise`.
    const data = await s3.completeMultipartUpload(params).promise();
    res.send({ data });
  } catch (err) {
    console.log(err);
    // Previously the error was swallowed (returned from the handler); send a 500.
    res.status(500).send({ error: err.message });
  }
});
// Wrap the Express app as an AWS Lambda handler for the serverless runtime.
module.exports.handler = serverless(app);
// In the main (React) file
// In my code, startUpload was called from another file via useImperativeHandle(ref, () => ({
async startUpload(data) {
const fileName = `${projectID}-${basecampProjectID}/${data.Surname}${data.Firstname.charAt(
0
)}/${data.Surname}${data.Firstname.charAt(0)}-${fileattach}-${filedate}.${fileExtension}`
setFileName(fileName)
const params = {
fileName,
fileType,
basecampProjectID,
}
if (fileName.length > 1) {
const resp = await axios.get(`${backendUrl}/start-upload`, { params })
const { uploadId } = resp.data
setuploadId(uploadId)
} else {
setSubmitSuccess(99)
}
},
// Uploads `fileSelected` to S3 in 10 MB parts via presigned URLs fetched from
// the backend, reporting aggregate progress, then asks the backend to complete
// the multipart upload.
const uploadMultipartFile = async () => {
  try {
    const CHUNK_SIZE = 10000000 // 10 MB
    const fileSize = fileSelected.size
    // ceil avoids the extra empty part the old floor(...)+1 produced when
    // fileSize was an exact multiple of CHUNK_SIZE; `|| 1` keeps one part for
    // an empty file.
    const CHUNKS_COUNT = Math.ceil(fileSize / CHUNK_SIZE) || 1

    // Hoisted out of the loop so the handler is defined once; closes over
    // CHUNKS_COUNT for the aggregate-percentage calculation.
    const uploadProgressHandler = (progressEvent, index) => {
      // Skip the final 100% event for a part; overall completion is reported
      // explicitly after complete-upload succeeds.
      if (progressEvent.loaded >= progressEvent.total) return
      const currentProgress = Math.round(
        (progressEvent.loaded * 100) / progressEvent.total
      )
      setProgressArray(prev => {
        // Copy before writing: mutating the previous state array in place and
        // returning the same reference can suppress React re-renders.
        const next = [...prev]
        next[index - 1] = currentProgress
        const sum = next.reduce((acc, curr) => acc + curr, 0)
        setUploadProgress(Math.round(sum / CHUNKS_COUNT))
        return next
      })
    }

    const promisesArray = []
    for (let index = 1; index <= CHUNKS_COUNT; index++) {
      const start = (index - 1) * CHUNK_SIZE
      const blob =
        index < CHUNKS_COUNT
          ? fileSelected.slice(start, index * CHUNK_SIZE)
          : fileSelected.slice(start)

      // One presigned URL per part (fetched sequentially, uploaded in parallel).
      const getUploadUrlResp = await axios.get(`${backendUrl}/get-upload-url`, {
        params: {
          fileName,
          partNumber: index,
          uploadId,
          Tagging: `basecamp_project_id=${basecampProjectID}`,
        },
      })
      const { presignedUrl } = getUploadUrlResp.data
      console.log(`Presigned URL ${index}: ${presignedUrl} filetype ${fileSelected.type}`)

      // PUT the part directly to S3; keep the promise so all parts upload
      // concurrently. (The old code passed CHUNKS_COUNT into the handler's
      // unused `blob` parameter — the handler now takes only what it uses.)
      promisesArray.push(
        axios.put(presignedUrl, blob, {
          onUploadProgress: e => uploadProgressHandler(e, index),
          headers: {
            'Content-Type': fileSelected.type,
          },
        })
      )
    }

    const resolvedArray = await Promise.all(promisesArray)
    console.log(resolvedArray, ' resolvedArray')

    // S3 needs each part's ETag (from the PUT response headers) to assemble
    // the final object.
    const uploadPartsArray = resolvedArray.map((resp, i) => ({
      ETag: resp.headers.etag,
      PartNumber: i + 1,
    }))

    // CompleteMultipartUpload in the backend server.
    const completeUploadResp = await axios.post(`${backendUrl}/complete-upload`, {
      params: {
        fileName,
        parts: uploadPartsArray,
        uploadId,
      },
    })
    setUploadProgress(100)
    setUploadSuccess(1)
    setSubmitStatus(oldArray => [...oldArray, fileattach])
    console.log(completeUploadResp.data, 'upload response complete ')
  } catch (err) {
    console.log(err)
  }
}
@nickwild-999
Copy link
Author

nickwild-999 commented Jun 10, 2021

upload.js calls aws slf.

The code is a little ugly and untidy and probably needs a tidy-up to remove redundant code, but it works for me.

@davidtran
Copy link

awesome, I am gonna use this code in my project.

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment