Skip to content

Instantly share code, notes, and snippets.

@dsoike
Created April 12, 2019 21:18
Show Gist options
  • Save dsoike/fa34dc1fcfc7b965d3a3732246a6d207 to your computer and use it in GitHub Desktop.
Deploy Static Website to AWS S3
const AWS = require('aws-sdk')
const fs = require('fs')
const path = require('path')
const mime = require('mime')
const chalk = require('chalk')
// Shared S3 client used for every upload in this script
const s3 = new AWS.S3()
// Destination bucket — change this to your target bucket name
const s3Bucket = 'my-s3-bucket.com'
// Local directory (relative to the project root) whose contents are uploaded
const directoryName = 'build'
// Prints the bold, underlined banner shown before uploads begin.
function printTitle() {
  const banner = chalk.underline('Uploading Files to AWS S3')
  console.log(chalk.bold(banner))
}
// Loads AWS credentials from the shared ini file (~/.aws/credentials),
// honoring the AWS_PROFILE environment variable (falls back to 'default').
// Exits the process when no credentials are found for the chosen profile.
function configureAWSCredentials() {
  const profile = process.env.AWS_PROFILE || 'default'
  const credentials = new AWS.SharedIniFileCredentials({ profile })
  if (!credentials.accessKeyId) {
    console.log(chalk.red('Warning! You need to configure AWS before uploading files to S3'))
    console.log(chalk.blue('\n Install the aws cli and run `aws configure` in your terminal\n'))
    // BUG FIX: exit with a non-zero status so CI/automation detects the failure
    // (the original process.exit() reported success)
    process.exit(1)
  }
  AWS.config.credentials = credentials
}
// Uploads every file found under the build directory to the S3 bucket,
// all in parallel. Returns the aggregate promise so callers may await
// completion (the original returned undefined, so this is backward compatible).
function uploadFiles() {
  const buildDirectory = `${getProjectDirectory()}/${directoryName}`
  const files = getFilePaths(buildDirectory).map(transformToFileObjects)
  const uploadPromises = files.map((file) => uploadFile(s3Bucket, file.key, file.path, file.type))
  return Promise.all(uploadPromises)
    .then(() => {
      // BUG FIX: the success message started with '\F' — an invalid escape that
      // silently collapses to 'F'; the intended leading newline is restored.
      console.log(chalk.bold(chalk.green('\nFiles Successfully Uploaded to AWS S3!\n')))
    })
    .catch((err) => {
      console.log(chalk.red(`${chalk.bold('\nError Uploading Files to AWS S3')}\n${err.message}`))
      console.log(err)
    })
}
// Recursively collects absolute paths of all non-hidden files beneath
// `directory`; subdirectories are walked depth-first, files of a directory
// are listed before the files of its subdirectories.
function getFilePaths(directory) {
  const entries = fs.readdirSync(directory)
    .filter(isNotHiddenFileOrDirectory)
    .map(getFullPath(directory))
  const collected = entries.filter(isFile)
  for (const subdirectory of entries.filter(isDirectory)) {
    collected.push(...getFilePaths(subdirectory))
  }
  return collected
}
// The project root: the parent of the directory containing this script.
function getProjectDirectory() {
  const scriptDirectory = __dirname
  return path.dirname(scriptDirectory)
}
// True when the entry name does not begin with a dot, i.e. is not a
// hidden file or directory.
function isNotHiddenFileOrDirectory(fileOrDirectoryName) {
  return !fileOrDirectoryName.startsWith('.')
}
// Returns a mapper suitable for Array#map that resolves a directory-entry
// name into an absolute path rooted at the project directory.
function getFullPath(directory) {
  return (name) => path.resolve(getProjectDirectory(), directory, name)
}
// True when the given path refers to a regular file. Uses lstat, so
// symbolic links are reported as links rather than their targets.
function isFile(path) {
  const stats = fs.lstatSync(path)
  return stats.isFile()
}
// True when the given path refers to a directory. Uses lstat, so a
// symlink to a directory is NOT reported as a directory.
function isDirectory(path) {
  const stats = fs.lstatSync(path)
  return stats.isDirectory()
}
// Reducer that concatenates arrays of file paths. When no accumulator has
// been produced yet (falsy `result`), the current array seeds the result.
function flattenFiles(result, files) {
  if (!result) {
    return files
  }
  return result.concat(files)
}
// Maps an absolute file path to an S3 upload descriptor:
//   key  - the path relative to the build directory, normalized to
//          forward slashes (handles both POSIX and Windows separators)
//   path - the original absolute path on disk
//   type - MIME type guessed from the file extension
function transformToFileObjects(filePath) {
  const afterPosixRoot = filePath.split(`/${directoryName}/`).pop()
  const relativePath = afterPosixRoot.split(`\\${directoryName}\\`).pop()
  return {
    key: relativePath.replace(/\\/g, '/'),
    path: filePath,
    type: mime.getType(filePath)
  }
}
// Uploads a single file to S3 with a public-read ACL.
// Resolves when the upload completes; rejects with a descriptive error
// when the file cannot be read or the S3 upload fails.
const uploadFile = (bucket, key, path, type) => {
  return new Promise((resolve, reject) => {
    fs.readFile(path, (err, data) => {
      if (err) {
        err.message = `unable to read file at path ${path} - ${err.message}`
        reject(err)
      } else {
        const params = {
          Bucket: bucket,
          Key: key,
          Body: data,
          ContentType: type,
          ACL: 'public-read'
        }
        s3.upload(params, function(s3Err, s3Data) {
          if (s3Err) {
            // BUG FIX: the original assigned to `err.message`, but `err` is
            // null in this branch (readFile succeeded), which threw a
            // TypeError instead of rejecting. Annotate the actual S3 error.
            s3Err.message = `unable to upload file to aws s3 (${key}) - ${s3Err.message}`
            reject(s3Err)
          } else {
            // `s3Data` also renamed from `data`, which shadowed the file buffer
            console.log(` ${chalk.underline(key)} -> ${s3Data.Location}`)
            resolve()
          }
        })
      }
    })
  })
}
// Run the deployment only when this file is executed directly
// (`node <script>`), not when it is require()d as a module.
if (require.main === module) {
printTitle()
configureAWSCredentials()
uploadFiles()
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment