@santteegt
Last active September 8, 2020 02:30
Example code on how to upload files to an S3 bucket using the AWS SDK
#!/usr/bin/env node
// How to set up a Node.js project and configure access credentials:
// https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/getting-started-nodejs.html
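//
// The script expects the following variables, loaded from a local .env file
// by dotenv (names taken from the code below):
//   AWS_REGION            - AWS region of the target buckets
//   AWS_BUCKET_OUTPUT     - bucket for regular output files
//   AWS_BUCKET_ADMINLOGS  - bucket for admin-zone log files
//
// Example invocation (the script filename is illustrative):
//   node upload-to-s3.js --path ./outputs --verbose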
require('dotenv').config()
const program = require('commander')
const mime = require('mime-types')
const AWS = require('aws-sdk')
const fs = require('fs')
console.log('AWS_REGION', process.env.AWS_REGION)
console.log('AWS_BUCKET_OUTPUT', process.env.AWS_BUCKET_OUTPUT)
console.log('AWS_BUCKET_ADMINLOGS', process.env.AWS_BUCKET_ADMINLOGS)
program
  .option('-l, --path <path>', 'Volume path')
  .option('-v, --verbose', 'Enables verbose mode')
  .action(() => {
    const { path, verbose } = program
    const config = { path, verbose }
    main(config)
      .then(() => {
        if (verbose) {
          console.log('Finished!')
        }
        process.exit(0)
      })
      .catch(e => {
        console.error(e)
        process.exit(1) // signal failure instead of exiting 0
      })
  })
  .parse(process.argv)
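// NOTE: reading options directly off `program` (as in the action above) works
// with commander v6 and earlier; commander v7+ requires program.opts().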
async function main({ path, verbose }) {
  const log = (...args) => (verbose ? console.log(...args) : undefined)
  AWS.config.update({ region: process.env.AWS_REGION })
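  // Credentials are resolved by the SDK's default provider chain:
  // AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY env vars, the shared
  // ~/.aws/credentials file, or an attached IAM role.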
  const outputfiles = await getdir(path)
  const workflowid = '' // placeholder: prepended to each S3 object key
  log('OutputFiles:', outputfiles)
  // Do processing on the array and add options to each file
  for (let i = 0; i < outputfiles.length; i++) {
    outputfiles[i].column = null
    outputfiles[i].isoutput = true
    outputfiles[i].shouldpublish = false
    outputfiles[i].uploadadminzone = false
    const uploadUrl = await uploadthisfile(outputfiles[i], workflowid)
    /* eslint-disable-next-line */
    outputfiles[i].url = uploadUrl
  }
  log('outputfiles:', outputfiles)
  console.log('Everything is OK')
} // end main
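
// Each entry of outputfiles ends up shaped like (fields from the code above):
// { name, path, contentType, contentLength, column, isoutput,
//   shouldpublish, uploadadminzone, url }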
async function getdir(folder) {
  const retfiles = []
  // readdirSync is synchronous; use the promise-based API so the await is real
  const files = await fs.promises.readdir(folder, { withFileTypes: true })
  for (const file of files) {
    if (file.isFile()) {
      const filepath = `${folder}/${file.name}`
      // build a plain object per file (not an array with properties bolted on)
      retfiles.push({
        name: file.name,
        path: filepath,
        contentType: mime.lookup(file.name) || undefined,
        contentLength: String(fs.statSync(filepath).size),
        isoutput: false
      })
    }
  }
  return retfiles
}
async function uploadthisfile(filearr, workflowid) {
  const url = await uploadtos3(filearr, workflowid)
  console.log(`Got ${url}`)
  return url
}
async function uploadtos3(filearr, workflowid) {
  // Admin-zone files go to the admin logs bucket; everything else to the output bucket
  let bucketName
  if (filearr.uploadadminzone === true) bucketName = process.env.AWS_BUCKET_ADMINLOGS
  else bucketName = process.env.AWS_BUCKET_OUTPUT
  const s3 = new AWS.S3({ apiVersion: '2006-03-01' })
  const uploadParams = {
    Bucket: bucketName,
    Key: '',
    Body: '',
    ACL: 'public-read' // uploaded objects are publicly readable
  }
  const fileStream = fs.createReadStream(filearr.path)
  // TO DO - check for null
  uploadParams.Body = fileStream
  // The object key is the workflow id followed by the local file path
  uploadParams.Key = workflowid + filearr.path
  try {
    console.log('uploading:')
    console.log(uploadParams)
    const putObjectPromise = await s3.upload(uploadParams).promise()
    return putObjectPromise.Location
  } catch (e) {
    console.error('error:', e)
    return null
  }
}