Skip to content

Instantly share code, notes, and snippets.

@kristelteng
Last active July 27, 2016 20:22
Show Gist options
  • Save kristelteng/e51b4e552f26a1fa40f5e20a9e0c481f to your computer and use it in GitHub Desktop.
upload to s3 with creds
let tar = require('tar-stream');
let _ = require('lodash');
let assert = require('assert');
let fs = require('mz/fs');
let path = require('path');
let recursiveReadSync = require('recursive-readdir-sync');
let zlib = require('zlib');
let rootdir = require('app-root-dir');
let aws = require('aws-sdk');
let client = require('taskcluster-client');
let S3UploadStream = require('s3-upload-stream');
/**
 * Build a gzipped tarball of service documentation (metadata, JSON schemas,
 * API references, and the files in a docs folder) and — when publishing is
 * enabled — upload it to the `taskcluster-raw-docs` S3 bucket using
 * temporary credentials obtained from the Taskcluster auth service.
 *
 * @param {Object} options
 * @param {Object} [options.credentials] - Taskcluster credentials used to
 *   request temporary S3 credentials (required to publish).
 * @param {Array}  [options.scopes] - Scopes to authorize the auth client with.
 * @param {string} options.tier - Either 'core' or 'platform' (required).
 * @param {Object} [options.schemas={}] - Map of schema file name -> schema object.
 * @param {number} [options.menuIndex=10] - Position of this service in the docs menu.
 * @param {string} [options.docsFolder] - Folder of doc files to include; missing
 *   folder is tolerated (logged, not fatal).
 * @param {Array}  [options.references=[]] - Reference objects, each with a `name`.
 * @param {boolean} [options.publish] - Whether to upload; defaults to true only
 *   when NODE_ENV === 'production'.
 * @returns {Promise<{tgz: stream.Readable}>} the gzipped tarball stream.
 */
async function documenter(options) {
  options = _.defaults({}, options, {
    tier: null,
    schemas: {},
    menuIndex: 10,
    docsFolder: rootdir.get() + '/docs',
    references: [],
    publish: process.env.NODE_ENV === 'production',
  });
  assert(options.schemas, 'options.schemas must be given');
  assert(options.tier, 'options.tier must be given');
  assert(['core', 'platform'].includes(options.tier),
    'options.tier is either core or platform');

  // Tarball layout: metadata.json, schema/<name>, references/<name>.json,
  // docs/<basename>.
  let tarball = tar.pack();

  let metadata = {version: 1, tier: options.tier, menuIndex: options.menuIndex};
  tarball.entry({name: 'metadata.json'}, JSON.stringify(metadata, null, 2));

  // BUG FIX: lodash's forEach callback receives (value, key), so the original
  // `(name, schema)` ordering had them swapped — the schema object was being
  // used as the entry name. Correct order is (schema, name).
  _.forEach(options.schemas, (schema, name) => {
    tarball.entry({name: 'schema/' + name}, JSON.stringify(schema, null, 2));
  });

  options.references.forEach(reference => {
    let data = JSON.stringify(reference, null, 2);
    tarball.entry({name: 'references/' + reference.name + '.json'}, data);
  });

  if (options.docsFolder) {
    try {
      let files = recursiveReadSync(options.docsFolder);
      await Promise.all(files.map(async (file) => {
        // The tree is flattened: only the basename survives in the tarball.
        let relativePath = path.basename(file);
        let data = await fs.readFile(file, {encoding: 'utf8'});
        tarball.entry({name: 'docs/' + relativePath}, data);
      }));
    } catch (err) {
      // A missing docs folder is a soft failure; anything else is real.
      if (err.code === 'ENOENT') {
        console.log('Docs folder does not exist');
      } else {
        throw err;
      }
    }
  }

  // All entries are in; no more may be added after finalize().
  tarball.finalize();
  let tgz = tarball.pipe(zlib.createGzip());

  // BUG FIX: options.publish was computed in the defaults but never
  // consulted, so every invocation uploaded to S3. Gate the upload on it.
  if (options.publish) {
    try {
      let auth = new client.Auth({
        credentials: options.credentials,
        scopes: options.scopes,
      });
      // Temporary, scoped AWS credentials from the Taskcluster auth service.
      let creds = await auth.awsS3Credentials('read-write', 'taskcluster-raw-docs', 'testing/');
      let s3 = new aws.S3(creds.credentials);
      let s3Stream = S3UploadStream(s3);
      let upload = s3Stream.upload({
        Bucket: 'taskcluster-raw-docs',
        Key: 'testing/latest.tar.gz',
      });
      // Log multipart-upload progress.
      upload.on('part', details => console.log(details));
      let uploadPromise = new Promise((resolve, reject) => {
        upload.on('uploaded', details => {
          console.log(details);
          resolve(details);
        });
        upload.on('error', error => {
          console.log(error);
          reject(error);
        });
      });
      // Pipe the compressed tarball up to S3 and wait for completion.
      tgz.pipe(upload);
      await uploadPromise;
    } catch (e) {
      // Best-effort publish: log the failure but still return the stream.
      console.error(e);
    }
  }

  return {tgz};
}
module.exports = documenter;
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment