Skip to content

Instantly share code, notes, and snippets.

@aljiwala
Created February 24, 2017 21:02
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save aljiwala/f8f761df5d0ca96e43cd1969f6d50d9f to your computer and use it in GitHub Desktop.
Save aljiwala/f8f761df5d0ca96e43cd1969f6d50d9f to your computer and use it in GitHub Desktop.
Script to update the metadata on Amazon S3
"""
To run the script
s3_meta.py --access-key=AWS_ACCESS_KEY_ID
--secret-key=AWS_SECRET_ACCESS_KEY
--bucket-name=BUCKET_NAME
-d=METADATA
e.g.
python s3_meta.py --access-key=AKIAIOSFODNN7EXAMPLE --secret-key=wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY --bucket-name=test-bucket -d '{"Cache-Control": "max-age=3600"}'
"""
# For Python2 Only
# General
import os, sys, json
import optparse
import mimetypes
from argparse import ArgumentParser
# Boto Imports
import boto
from boto.s3.connection import S3Connection
def get_input():
    """Parse and validate the command-line options for the script.

    Mandatory options
    -----------------
    --access-key   AWS access key id.
    --secret-key   AWS secret access key.
    --bucket-name  Name of the target S3 bucket.
    -d/--metadata  Metadata to set, as a JSON string,
                   e.g. '{"Cache-Control": "max-age=3600"}'.

    Returns the parsed ``optparse`` options object; exits the process
    with status 1 when any mandatory option is missing.
    """
    parser = optparse.OptionParser(
        # NOTE: the usage string previously advertised a --max-age option
        # that was never actually defined; it has been removed.
        usage=" %prog --access-key=AWS_ACCESS_KEY_ID "
        "--secret-key=AWS_SECRET_ACCESS_KEY "
        "--bucket-name=BUCKET_NAME "
        "-d METADATA "
    )
    parser.add_option(
        '--access-key', dest='access_key', help="Access Key for AWS S3."
    )
    parser.add_option(
        '--secret-key', dest='secret_key', help="Secret Key for AWS S3."
    )
    parser.add_option(
        '--bucket-name', dest='bucket_name', help="AWS S3 Bucket Name."
    )
    parser.add_option(
        '-d', '--metadata', dest='metadata', default={},
        help="""Metadata to update. (Provide values in json format)"""
        """ e.g. '{"Cache-Control": "max-age=3600"}' """,
    )
    (options, _args) = parser.parse_args()
    # Sanity check: every one of these must be supplied on the command line.
    # The empty-dict default for metadata is falsy, so it is caught here too.
    mandatories = ('access_key', 'secret_key', 'bucket_name', 'metadata')
    for item in mandatories:
        if not getattr(options, item):
            print("\n--{} option is mandatory.\n".format(item.replace("_", "-")))
            # sys.exit(1) instead of exit(-1): -1 becomes shell status 255,
            # and the exit() builtin is meant for interactive use only.
            sys.exit(1)
    return options
def start_execution(options):
    """Connect to S3 and apply ``options.metadata`` to matching keys.

    Walks every key in the bucket named by ``options.bucket_name`` and,
    for keys whose guessed MIME type is png or jpeg, merges the JSON
    metadata from ``options.metadata`` into the remote object's metadata.
    Any failure (bad JSON, connection error, missing bucket) is reported
    on stdout and aborts the run.
    """
    ALLOWED_CONTENT_TYPES = ('image/png', 'image/jpeg', )
    try:
        metadata_to_update = json.loads(options.metadata)
    except ValueError:
        # -d value was not valid JSON; nothing sensible to do.
        print("\nInvalid data: {}\n".format(options.metadata))
        return
    try:
        connection = S3Connection(options.access_key, options.secret_key)
    except Exception as e:
        sys.stdout.write("Unable to connect to S3. An error occurred: {}\n".format(e))
        sys.stdout.flush()
        return
    try:
        bucket = connection.get_bucket(options.bucket_name)
    except boto.exception.S3ResponseError as e:
        sys.stdout.write("An error occurred: {}\n".format(e))
        sys.stdout.flush()
        return
    for key in bucket:
        # Content type is guessed from the key name's extension only.
        content_type, _encoding = mimetypes.guess_type(key.name)
        if content_type in ALLOWED_CONTENT_TYPES:
            sys.stdout.write("Updating... {}\n".format(key.name))
            # (metadata_plus, metadata_minus, preserve_acl): add the new
            # headers, remove none, and keep the key's existing ACL.
            key.set_remote_metadata(metadata_to_update, {}, True)
    # Trailing newline so the final status line isn't glued to the prompt.
    sys.stdout.write("\nCaching completed.\n")
if __name__ == "__main__":
    # Parse the CLI options and hand them straight to the updater.
    start_execution(get_input())
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment