Skip to content

Instantly share code, notes, and snippets.

@aioue
Created April 29, 2015 20:40
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save aioue/cd3a20c7bee7411146b1 to your computer and use it in GitHub Desktop.
Save aioue/cd3a20c7bee7411146b1 to your computer and use it in GitHub Desktop.
Create an AWS S3 bucket, configure it for website hosting, then create a Route53 ALIAS DNS record pointing to it.
import time
import argparse
import json
import boto.ec2
import boto.s3
import boto.route53
from boto.route53 import record
from boto.s3 import connection
from boto.s3.connection import Location
# Create an S3 bucket named after the branch and switch it to website hosting,
# then create a bucketname.foo.com 'A' record that ALIASes to the bucket,
# all via boto. This was not as trivial as it should have been: the boto
# docs for ACLs and bucket policies are slim, and so are online examples.
def main(branchname):
    """Provision an S3 website bucket for *branchname* and alias DNS to it.

    Creates the bucket ``<branchname>.test.foo.com`` (if missing), applies an
    ACL and a public-read bucket policy, enables S3 website hosting, then
    creates a Route53 'A' ALIAS record of the same name pointing at the
    eu-west-1 S3 website endpoint (if missing).

    :param branchname: branch name used as the bucket/DNS label.
    :returns: None. Prints progress to stdout; returns early if the
        Route53 zone cannot be found.
    """
    new_bucket_name = branchname
    # AWS DNS zone to edit
    zone_name = "foo.com"
    # branchname.test.foo.com
    desired_fqdn = new_bucket_name + ".test." + zone_name

    # region must be specified due to boto bug
    # https://github.com/boto/boto/issues/2836
    # OrdinaryCallingFormat avoids SSL-hostname problems with dotted bucket names.
    s3 = boto.s3.connect_to_region(
        "eu-west-1", calling_format=connection.OrdinaryCallingFormat())
    print(s3.get_all_buckets())

    index_html = """
<html>
<head><title>S3 Webpage</title></head>
<body><h2>S3-based website</h2></body>
</html>"""

    a_bucket = s3.lookup(desired_fqdn)
    if a_bucket is None:
        print("bucket " + desired_fqdn + " does not exist, creating.")
        # create a new bucket. Buckets must have a globally unique name (not
        # just unique to your account)
        new_bucket = s3.create_bucket(desired_fqdn,
                                      location=Location.EU,
                                      policy='authenticated-read')
        # print the ACL of a known-good bucket for comparison/debugging only;
        # the value is not otherwise used below
        good_bucket = s3.lookup("somebucket")
        good_acl = good_bucket.get_acl()
        print(good_acl)
        a_bucket = s3.lookup(desired_fqdn)
        # ACL XML copied from a working bucket as the boto docs are rubbish —
        # grants "Authenticated Users" READ/WRITE on top of the owner grants.
        # <Policy: foo-aws (owner) = READ, foo-aws (owner) = WRITE, foo-aws (owner) = READ_ACP, foo-aws (owner) = WRITE_ACP, http://acs.amazonaws.com/groups/global/AuthenticatedUsers = READ, http://acs.amazonaws.com/groups/global/AuthenticatedUsers = WRITE>
        xml_acl = '''<?xml version="1.0" encoding="UTF-8"?>
<AccessControlPolicy xmlns="http://s3.amazonaws.com/doc/2006-03-01/"><Owner><ID>foohex</ID><DisplayName>foo-aws</DisplayName></Owner><AccessControlList><Grant><Grantee xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="CanonicalUser"><ID>foohex</ID><DisplayName>foo-aws</DisplayName></Grantee><Permission>READ</Permission></Grant><Grant><Grantee xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="CanonicalUser"><ID>foohex</ID><DisplayName>foo-aws</DisplayName></Grantee><Permission>WRITE</Permission></Grant><Grant><Grantee xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="CanonicalUser"><ID>foohex</ID><DisplayName>foo-aws</DisplayName></Grantee><Permission>READ_ACP</Permission></Grant><Grant><Grantee xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="CanonicalUser"><ID>foohex</ID><DisplayName>foo-aws</DisplayName></Grantee><Permission>WRITE_ACP</Permission></Grant><Grant><Grantee xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="Group"><URI>http://acs.amazonaws.com/groups/global/AuthenticatedUsers</URI></Grantee><Permission>READ</Permission></Grant><Grant><Grantee xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="Group"><URI>http://acs.amazonaws.com/groups/global/AuthenticatedUsers</URI></Grantee><Permission>WRITE</Permission></Grant></AccessControlList></AccessControlPolicy>'''
        a_bucket.set_xml_acl(xml_acl)

        # set bucket permissions - public readable policy so the website
        # endpoint can serve objects anonymously
        json_policy = """{
"Version": "2008-10-17",
"Id": "Policy1426684310446",
"Statement": [
{
"Sid": "Stmt1426186873595",
"Effect": "Allow",
"Principal": "*",
"Action": "s3:GetObject",
"Resource": "arn:aws:s3:::""" + desired_fqdn + """/*"
}
]
}"""
        # round-trip through json to validate the document before sending it
        loaded = json.loads(json_policy)
        json_d = json.dumps(loaded)
        a_bucket.set_policy(json_d)

        # upload HTML pages and make sure they are publicly readable
        # index_key = new_bucket.new_key('index.html')
        # index_key.content_type = 'text/html'
        # index_key.set_contents_from_string(index_html, policy='public-read')

        # set website configuration for bucket (turn it on)
        new_bucket.configure_website('index.html', 'index.html')
        # S3 takes a short while to become consistent
        time.sleep(5)
        # now get the website configuration, just to check it
        # print new_bucket.get_website_configuration()
    else:
        print("bucket " + desired_fqdn + " exists, skipping.")
    print(s3.get_all_buckets())

    # create the ALIAS DNS record in Route53
    route53conn = boto.route53.connect_to_region('eu-west-1')
    a_zone = route53conn.get_zone(zone_name)
    if a_zone is None:
        # without the zone there is nothing more to do; was exit(), but a
        # plain return has the same effect since main() is the script's
        # final action and avoids the interactive-only exit() builtin
        print("zone " + zone_name + " does not exist, stopping.")
        return
    print("zone " + zone_name + " found, editing records.")
    if a_zone.find_records(name=desired_fqdn, type="A") is None:
        print("alias entry does not exist for " + desired_fqdn + ", creating.")
        records = record.ResourceRecordSets(route53conn, a_zone.id)
        # http://stackoverflow.com/questions/18524941/aws-boto-route53-adding-an-alias-for-a-bucket
        # Z1BKCTXD74EZPE is the fixed hosted-zone id of the eu-west-1 S3
        # website endpoint (AWS-published constant, not account-specific)
        change = records.add_change(action="CREATE", name=desired_fqdn, type="A",
                                    alias_hosted_zone_id='Z1BKCTXD74EZPE',
                                    alias_dns_name='s3-website-eu-west-1.amazonaws.com',
                                    alias_evaluate_target_health=False)
        change.add_value('ALIAS s3-website-eu-west-1.amazonaws.com (Z1BKCTXD74EZPE)')
        records.commit()
    else:
        print("alias entry already exists for " + desired_fqdn + ", skipping.")
    print("Done.")
if __name__ == "__main__":
    # Parse the branch name from the command line and run the provisioning
    # flow; argparse exits with a usage message if the argument is missing.
    parser = argparse.ArgumentParser()
    parser.add_argument("branchname",
                        help="name of the branch to create buckets and DNS entries for")
    args = parser.parse_args()
    main(args.branchname)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment