@mykeels · Created July 27, 2020 17:50
Script to deploy Storybook to an S3 bucket and maintain an index of up to 50 past versions
#!/usr/bin/env node
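// Deploys a static Storybook build (expected at ../storybook-static relative to
// this script) to an S3 bucket, then regenerates a root index.html linking to
// the most recent deployments (up to MAX_DEPLOYMENTS).
//
// Required env variables: AWS_DEFAULT_REGION, AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY.
// Optional: BITBUCKET_BRANCH (slugified and appended to the package version to
// name the remote folder). An explicit remote folder name can also be passed as
// the first CLI argument (process.argv[2]).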
const AWS = require("aws-sdk");
const path = require("path");
const fs = require("fs");
const mime = require("mime-types");
const { version } = require("../package.json");
const quit = message => {
  console.error(message);
  process.exit(1);
};
if (!process.env.AWS_DEFAULT_REGION)
  quit("Please provide a [AWS_DEFAULT_REGION] env variable");
if (!process.env.AWS_ACCESS_KEY_ID)
  quit("Please provide a [AWS_ACCESS_KEY_ID] env variable");
if (!process.env.AWS_SECRET_ACCESS_KEY)
  quit("Please provide a [AWS_SECRET_ACCESS_KEY] env variable");
AWS.config.update({ region: process.env.AWS_DEFAULT_REGION });
function slugify(text) {
  return text
    .toLowerCase()
    .replace(/ /g, "-")
    .replace(/[^\w-]+/g, "-")
    .replace(/-+/g, "-");
}
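// Deployment configuration: target bucket and key prefix, the local Storybook
// build directory, and the remote folder name (package version plus slugified
// branch, unless overridden by the first CLI argument).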
const S3_BUCKET = "s3-bucket-name";
const S3_PREFIX = "./sub-directory";
const LOCAL_FOLDER_NAME = "../storybook-static";
const BRANCH_NAME =
  process.env.BITBUCKET_BRANCH && slugify(process.env.BITBUCKET_BRANCH);
const DEFAULT_REMOTE_FOLDER_NAME = [version, BRANCH_NAME]
  .filter(Boolean)
  .join("-");
const REMOTE_FOLDER_NAME = process.argv[2] || DEFAULT_REMOTE_FOLDER_NAME;
const MAX_DEPLOYMENTS = 50;
const s3 = new AWS.S3({
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
});
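// Lists the immediate "sub-folders" (CommonPrefixes) under a prefix, i.e. the
// names of previously deployed versions.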
async function getSubFolders(Prefix, Bucket = S3_BUCKET) {
  const objects = await s3
    .listObjectsV2({
      Delimiter: "/",
      Prefix,
      Bucket
    })
    .promise();
  return objects.CommonPrefixes.map(({ Prefix }) =>
    Prefix.replace(S3_PREFIX, "").replace(/\/$/g, "")
  );
}
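// Uploads a single file to the bucket, normalising Windows path separators in
// the key and setting Content-Type from the file extension when it can be detected.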
async function upload(filePath, bucketName, bucketPrefix) {
  try {
    const bucketPath = bucketPrefix.replace(/\\/g, "/");
    const Body = fs.readFileSync(filePath);
    const ContentType = mime.lookup(filePath);
    const params = {
      Bucket: bucketName,
      Key: bucketPath,
      Body,
      ...(ContentType ? { ContentType } : {})
    };
    await s3.putObject(params).promise();
    console.log("Uploaded " + bucketPath + " to " + bucketName);
  } catch (err) {
    console.error(err);
  }
}
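// Recursively walks the local folder and uploads every file, preserving the
// directory structure under bucketPrefix.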
async function uploadDirectory(folder, bucketName, bucketPrefix) {
  async function walkSync(currentDirPath) {
    for (const name of fs.readdirSync(currentDirPath)) {
      const filePath = path.join(currentDirPath, name);
      const stat = fs.statSync(filePath);
      if (stat.isFile()) {
        // Await each upload so the returned promise only resolves once the
        // whole directory has been uploaded.
        await upload(
          filePath,
          bucketName,
          path.join(bucketPrefix, filePath.substring(folder.length + 1))
        );
      } else if (stat.isDirectory()) {
        await walkSync(filePath);
      }
    }
  }
  await walkSync(folder);
}
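// Main flow: upload the new build, then rebuild the root index.html so it links
// to the latest MAX_DEPLOYMENTS deployments (newest first).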
(async () => {
  const folders = await getSubFolders(S3_PREFIX);
  const localFolder = path.join(__dirname, LOCAL_FOLDER_NAME);
  const localHtml = path.join(__dirname, "index.html");
  const remoteFolder = path.join(S3_PREFIX, REMOTE_FOLDER_NAME);
  console.log(`Attempting to deploy storybook`);
  console.log(`Local: ${localFolder}`);
  console.log(`Remote: ${remoteFolder}`);
  await uploadDirectory(localFolder, S3_BUCKET, remoteFolder);
  const html = [
    "<div style='text-align:center;font-size:1.8rem'>",
    folders
      .concat(REMOTE_FOLDER_NAME)
      .slice(0 - MAX_DEPLOYMENTS)
      .sort()
      .reverse()
      .map(
        name => `
          <p style='padding: 0.5rem; '>
            <a href='./${name}/index.html'>${name}</a>
          </p>
        `
      )
      .join(""),
    "</div>"
  ].join("\n");
  fs.writeFileSync(localHtml, html, { encoding: "utf8" });
  await upload(localHtml, S3_BUCKET, path.join(S3_PREFIX, "index.html"));
  fs.unlinkSync(localHtml);
})();