#aws #s3 #amazon
# example of usage:
#
# $ ansible-playbook --extra-vars instance_rds_name=rds-sandbox-api --extra-vars instance_ec2_name=ec2-sandbox-api provision/playbook_sandbox.yml
#
- hosts: localhost
  tasks:
    # This subnet group is necessary for the DB; I don't know where it is in the panel
    - name: sandbox subnet
      rds_subnet_group:
        state: present
        name: subnet-db-sandbox
        description: I need a subnet for the DB
        subnets:
          - subnet-c19fdcb6
          - subnet-92d982f7
        region: eu-west-1

    - name: create database
      rds:
        command: create
        instance_name: "{{ instance_rds_name }}"
        db_engine: postgres
        size: 5
        wait: true
        instance_type: db.t2.micro
        subnet: subnet-db-sandbox
        region: eu-west-1
        username: real_user
        password: 1nsecure
        tags:
          # the rds module expects a dict of tags; the key name here is assumed
          Environment: sandbox

    - name: retrieve RDS info
      rds:
        command: facts
        instance_name: "{{ instance_rds_name }}"
        region: eu-west-1
      register: rds_facts

    - name: create EC2 instance (wait a bit while it boots)
      ec2:
        instance_tags:
          Name: "{{ instance_ec2_name }}"
        instance_type: t2.micro
        image: ami-f95ef58a
        region: eu-west-1
        exact_count: 1
        count_tag:
          Name: "{{ instance_ec2_name }}"
        wait: true
        key_name: kp_01
        vpc_subnet_id: subnet-c19fdcb6
        group: sg01
        assign_public_ip: yes

    - name: retrieve EC2 info
      ec2_remote_facts:
        region: eu-west-1
        filters:
          instance-state-name: running
          "tag:Name": "{{ instance_ec2_name }}"
      register: ec2_facts

    - name: summary
      debug:
        msg: "EC2 public ip: {{ ec2_facts.instances[0].public_ip_address }} RDS endpoint: {{ rds_facts.instance.endpoint }}"
#!/usr/bin/env python
'''
Utility scripts to interact with S3.
'''
import logging
import sys

import boto3
import botocore

stream = logging.StreamHandler()
formatter = logging.Formatter('%(levelname)s - %(filename)s:%(lineno)d - %(message)s')
stream.setFormatter(formatter)
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
logger.addHandler(stream)


class S3Deployer(object):

    def __init__(self, bucket_name):
        self.bucket_name = bucket_name
        self.initialize_s3_client()

    def initialize_s3_client(self):
        self.s3 = boto3.resource('s3')

    def check_bucket_exists(self):
        exists = True
        try:
            self.s3.meta.client.head_bucket(Bucket=self.bucket_name)
        except botocore.exceptions.ClientError as e:
            # If a client error is thrown, check whether it was a 404 error.
            # A 404 means the bucket does not exist.
            error_code = int(e.response['Error']['Code'])
            if error_code == 404:
                exists = False
            else:
                logger.exception("checking existence for '%s' failed", self.bucket_name)
                raise
        return exists

    def create_bucket(self):
        self.s3.create_bucket(Bucket=self.bucket_name)
        assert self.check_bucket_exists()
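
    # Possible extension (not in the original script): a hypothetical upload
    # helper, sketched here because a deployer usually needs to push objects
    # into the bucket. The method name and arguments are assumptions; it relies
    # on boto3's Bucket.upload_file, which streams a local file to the given key.
    def upload_file(self, local_path, key):
        self.s3.Bucket(self.bucket_name).upload_file(local_path, key)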


if __name__ == '__main__':
    # Create the named bucket when run directly (behaviour assumed from context).
    bucket_name = sys.argv[1]
    S3Deployer(bucket_name).create_bucket()
#!/usr/bin/env python
from unittest import TestCase

from moto import mock_s3

from s3 import S3Deployer


@mock_s3
class S3DeployerTests(TestCase):

    def test_bucket_does_not_exist(self):
        s3d = S3Deployer('miao')
        self.assertFalse(s3d.check_bucket_exists())
        s3d.create_bucket()
        self.assertTrue(s3d.check_bucket_exists())
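As a possible extra check (not part of the original tests), the same moto mock can confirm that a bucket created through S3Deployer is visible to a plain boto3 client; the bucket name and the pinned region below are assumptions for illustration.

import boto3
from moto import mock_s3
from unittest import TestCase

from s3 import S3Deployer


@mock_s3
class S3DeployerListingTests(TestCase):

    def test_created_bucket_is_listed(self):
        S3Deployer('miao').create_bucket()
        # Region is pinned so the client does not depend on local AWS config.
        client = boto3.client('s3', region_name='us-east-1')
        names = [b['Name'] for b in client.list_buckets()['Buckets']]
        self.assertIn('miao', names)

With moto installed, both test classes run under the standard unittest runner, e.g. python -m unittest.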