Last active
March 29, 2020 20:26
-
-
Save hades2510/40584c8fc27adf47933ac2086f2b2437 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Dependencies: archiver builds the zip stream, stream supplies the
// pass-through pipe, and aws-sdk (v2) talks to S3.
const Archiver = require('archiver');
const Stream = require('stream');
const AWS = require("aws-sdk");

// One S3 client, reused for listing, downloading and uploading.
const s3 = new AWS.S3({ apiVersion: '2006-03-01' });
/**
 * Zip every object under `data.source` in `data.bucket` and upload the
 * archive to `data.destination` in the same bucket, streaming end-to-end
 * (no local temp file).
 *
 * @param {Object} data
 * @param {string} data.bucket - S3 bucket holding both source and destination.
 * @param {string} data.source - Key prefix ("folder") to archive.
 * @param {string} data.destination - Key of the resulting zip object.
 * @param {string[]|null} [data.exclude] - Regex fragments; keys matching any are skipped.
 * @returns {Promise<void>} resolves when the upload finishes (errors are
 *   logged, not rethrown — see the catch below).
 */
const archiveFolder = async (data) => {
  // List every object under the prefix. listObjectsV2 returns at most
  // 1000 keys per call, so follow ContinuationToken until exhausted
  // (the original silently truncated large folders).
  const contents = [];
  let continuationToken;
  do {
    const page = await s3.listObjectsV2({
      Bucket: data.bucket,
      Prefix: data.source,
      ContinuationToken: continuationToken,
    }).promise();
    contents.push(...page.Contents);
    continuationToken = page.NextContinuationToken;
  } while (continuationToken);

  const excludeRegExp = data.exclude ? new RegExp(data.exclude.join('|')) : null;

  const s3DownloadStreams = contents
    // Skip folder-placeholder keys and anything matching the exclude list.
    .filter((item) => !item.Key.endsWith('/') && (!excludeRegExp || !excludeRegExp.test(item.Key)))
    .map((item) => ({
      stream: s3.getObject({ Bucket: data.bucket, Key: item.Key }).createReadStream(),
      // BUG FIX: the original stripped `data.key`, which no caller supplies
      // (replace(undefined, '') is a no-op). Strip the source prefix so
      // archive entries are relative to the folder being zipped.
      filename: item.Key.replace(data.source, ''),
    }));

  const streamPassThrough = new Stream.PassThrough();

  // Managed upload: resolves only once S3 has the complete object.
  // Using .promise() (instead of the callback form that threw from the
  // callback) routes failures into the single await below.
  const s3Upload = s3.upload({
    ACL: 'private',
    Body: streamPassThrough,
    Bucket: data.bucket,
    ContentType: 'application/zip',
    Key: data.destination,
    StorageClass: 'STANDARD_IA',
  });
  s3Upload.on('httpUploadProgress', (progress) => {
    console.log(progress);
  });
  const uploadPromise = s3Upload.promise();

  const archive = Archiver('zip', { store: true });
  // Propagate archiver failures into the upload stream so the awaited
  // upload promise rejects (throwing from the event handler, as the
  // original did, produces an uncatchable emitter exception instead).
  archive.on('error', (error) => {
    streamPassThrough.destroy(error);
  });
  archive.pipe(streamPassThrough);
  s3DownloadStreams.forEach(({ stream, filename }) => archive.append(stream, { name: filename }));

  try {
    console.log('Starting upload');
    // finalize() flushes the archive; once streamPassThrough ends, the
    // managed upload completes. The original wrapped this in a Promise
    // that called resolve() synchronously, so it never actually waited.
    archive.finalize();
    await uploadPromise;
  } catch (error) {
    // Best-effort job, matching the original's intent not to crash the
    // caller — but never swallow silently.
    console.error(`Archiving ${data.source} failed: ${error.name} ${error.message}`, error.stack);
    // TODO: register this as a failed job
  }
};
// Entry point: read the job description from the environment and run it.
// EXCLUDE is an optional comma-separated list of regex fragments.
(async () => {
  const { BUCKET, SOURCE, DESTINATION, EXCLUDE } = process.env;
  await archiveFolder({
    bucket: BUCKET,
    source: SOURCE,
    destination: DESTINATION,
    exclude: EXCLUDE ? EXCLUDE.split(',') : null,
  });
})();
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment