Skip to content

Instantly share code, notes, and snippets.

@NSBum
Created March 22, 2016 16:00
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save NSBum/0c379edcb9cf778ce6dd to your computer and use it in GitHub Desktop.
Fixing AWS S3 caching metadata
#!/usr/bin/python
"""Fix AWS S3 caching metadata for a static-site bucket.

Re-copies objects onto themselves with ``--metadata-directive REPLACE`` so
that JPEG images get a two-week cache lifetime and ``index.html`` gets a
one-hour lifetime. Commands are built as argv lists and executed without a
shell, so include patterns like ``*.jpg`` cannot be glob-expanded by the
local shell before reaching the AWS CLI.
"""
import datetime
import subprocess

# Single source of truth for the bucket (the original repeated it 5 times).
BUCKET = "ojisanseiuchi.com"


def _expiry(delta):
    """Return an ISO-8601 timestamp `delta` from now, for --expires."""
    # stdlib timedelta supports weeks= and hours= directly, so the
    # third-party dateutil.relativedelta dependency is unnecessary here.
    return (datetime.datetime.now() + delta).isoformat()


def build_image_sync_command(bucket, weeks=2):
    """Build the `aws s3 cp` argv that refreshes cache headers on images.

    Parameters:
        bucket: S3 bucket name (copied onto itself in place).
        weeks:  cache lifetime; drives both --expires and max-age.

    Returns the command as a list of arguments (shell=False style).
    """
    seconds = weeks * 7 * 24 * 60 * 60
    return [
        "aws", "s3", "cp",
        "s3://{0}/".format(bucket), "s3://{0}/".format(bucket),
        "--exclude", "*",
        # The original script listed this include twice; once suffices.
        "--include", "*.jpg",
        "--recursive",
        "--metadata-directive", "REPLACE",
        "--expires", _expiry(datetime.timedelta(weeks=weeks)),
        "--acl", "public-read",
        "--content-encoding", "gzip",
        "--cache-control", "public, max-age={0}".format(seconds),
    ]


def build_index_command(bucket, hours=1):
    """Build the `aws s3api copy-object` argv that refreshes index.html.

    index.html gets a much shorter (1 hour) cache lifetime than images so
    content updates propagate quickly.
    """
    seconds = hours * 60 * 60
    return [
        "aws", "s3api", "copy-object",
        "--copy-source", "{0}/index.html".format(bucket),
        "--key", "index.html",
        "--bucket", bucket,
        "--metadata-directive", "REPLACE",
        "--expires", _expiry(datetime.timedelta(hours=hours)),
        "--acl", "public-read",
        "--content-type", "text/html; charset=UTF-8",
        "--content-encoding", "gzip",
        "--cache-control", "public, max-age={0}".format(seconds),
    ]


def main():
    """Run both metadata-fixing commands against the live bucket."""
    subprocess.call(build_image_sync_command(BUCKET))
    subprocess.call(build_index_command(BUCKET))


if __name__ == "__main__":
    # Guarded so importing this module does not fire live AWS calls.
    main()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment