Last active
July 10, 2018 19:06
-
-
Save AlecTroemel/7e146d55e6dfc0edaea38551bcb9bebf to your computer and use it in GitHub Desktop.
add S3 Bucket Cache Control to existing Objects, with pagination
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/* global require */
// This script iterates through every object under the given prefix in the
// bucket and adds a "CacheControl" header to each PNG that lacks one.
// Any keys that fail are written out to "failed.json" at the end of the crawl.
const fs = require('fs');
const _ = require('lodash');
const AWS = require('aws-sdk');
const Promise = require('bluebird');

const s3 = new AWS.S3();
// Adds promise-returning *Async variants (headObjectAsync, copyObjectAsync,
// listObjectsV2Async) used throughout this script.
Promise.promisifyAll(s3);

// Fill these in before running.
const Bucket = '<bucket-name>';
const Prefix = '<bucket-prefix>';
const CacheControl = 'max-age=2592000'; // 30 days, in seconds

// Keys whose head/copy calls failed; mutated via push(), never reassigned,
// so declare it const rather than let.
const failed = [];
/**
 * Adds the CacheControl header to every PNG object in `objs`.
 * Skips objects that are not PNGs or that already carry a CacheControl
 * header; keys that fail either the head or the copy call are collected
 * in the module-level `failed` array.
 *
 * NOTE(review): this top-level function shadows Node's global `process`
 * object for the rest of the file; renaming would be safer but is kept
 * for caller compatibility.
 *
 * @param {Array<{Key: string}>} objs - entries from listObjectsV2 Contents
 * @returns {Promise} resolves when every object has been handled
 */
async function process(objs) {
  // Promise.map runs the per-object work concurrently — good for throughput.
  return Promise.map(objs, async obj => {
    let Key = obj.Key;
    try {
      let headData = await s3.headObjectAsync({ Bucket, Key });
      if (headData.ContentType !== 'image/png') {
        console.log(`skip-not-image: ${Key}`);
        return;
      }
      if (_.has(headData, 'CacheControl')) {
        console.log(`skip-processed: ${Key}`);
        return;
      }
      try {
        // Copy the object onto itself with MetadataDirective REPLACE — the
        // only way S3 allows metadata edits on an existing object. The
        // result was previously bound to an unused `result` variable.
        await s3.copyObjectAsync({
          Bucket,
          Key,
          CopySource: `/${Bucket}/${Key}`,
          CacheControl,
          MetadataDirective: 'REPLACE',
          // REPLACE wipes all metadata, so the content type must be re-set.
          ContentType: headData.ContentType
        });
        console.log(`completed: ${Key}`);
      } catch (err) {
        console.error(`copyObject Error ${Key}`);
        console.log(err);
        failed.push(Key);
      }
    } catch (err) {
      console.error(`headObject Error ${Key}`);
      console.log(err);
      failed.push(Key);
    }
  });
}
/**
 * Crawls one page of the bucket listing and recurses into the next page.
 * The recursive call is started before the current page is processed, so
 * the next listObjectsV2 request overlaps with the copy work.
 *
 * @param {string|undefined} ContinuationToken - pagination cursor; omit on
 *   the first call
 */
async function run(ContinuationToken = undefined) {
  try {
    // Drop nil entries so the very first request carries no ContinuationToken.
    const params = _.omitBy({ Bucket, Prefix, ContinuationToken }, _.isNil);
    const page = await s3.listObjectsV2Async(params);
    console.log('paginate: ', page.IsTruncated);

    const pending = [];
    // Recursive pagination: kick off the next page fetch first so it runs
    // while this page's objects are being processed.
    if (page.IsTruncated) {
      pending.push(run(page.NextContinuationToken));
    }
    pending.push(process(page.Contents));
    await Promise.all(pending);
  } catch (err) {
    console.error('listObjects Error');
    console.log(err);
  }
}
// Kick off the crawl. run() catches its own errors, so this .then always
// fires; afterwards report how many keys failed and persist them.
run().then(() => {
  console.log(`${failed.length}: failed`);
  if (failed.length > 0) {
    // Write first, then report — the previous order logged success before
    // the file was written, which lied if writeFileSync threw.
    fs.writeFileSync('failed.json', JSON.stringify(failed));
    console.log('wrote list of failed keys to "failed.json"');
  }
});
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment