@niranjan94 · Last active December 21, 2019 12:17
#!/usr/bin/env python
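"""
Post-process a `cdk synth` cloud assembly: zip every asset.* directory, upload the zips
to an S3 staging bucket, and rewrite the stack template so that asset-backed resources
reference those S3 objects directly.
"""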
import json
import os
from argparse import ArgumentParser
from contextlib import suppress
from shutil import make_archive

import boto3
from botocore.exceptions import ClientError

s3 = boto3.client('s3')

def inline_deploy(args):
    """
    Upload all the CDK assets to the bucket and inline the key & bucket name into the template.

    :param args: parsed command-line arguments (stack, bucket, input_dir, output_file)
    :return: None; the rewritten template is written to args.output_file
    """
    # Load the CDK cloud assembly manifest to locate the stack's template file.
    with open(os.path.join(args.input_dir, 'manifest.json'), 'r') as fd:
        json_data = fd.read()
    manifest = json.loads(json_data)
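    # Shape of the manifest fields read below (assumption: a CDK v1-era cloud assembly;
    # the real manifest.json contains many more keys than shown here):
    #
    #   {
    #     "artifacts": {
    #       "<stack-name>": {
    #         "properties": { "templateFile": "<stack-name>.template.json" }
    #       }
    #     }
    #   }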
    stack_info = manifest['artifacts'][args.stack]
    with open(os.path.join(args.input_dir, stack_info['properties']['templateFile']), 'rb') as fd:
        template = json.load(fd)

    print('archiving assets')
    # Zip every cdk-generated asset.* directory into <input_dir>/packages/<asset>.zip,
    # skipping archives that already exist.
    for asset_folder in os.listdir(args.input_dir):
        if not asset_folder.startswith('asset.'):
            continue
        asset_folder_path = os.path.join(args.input_dir, asset_folder)
        if not os.path.isdir(asset_folder_path):
            continue
        zip_asset_path = os.path.join(args.input_dir, 'packages', asset_folder)
        if not os.path.exists(zip_asset_path + '.zip'):
            make_archive(zip_asset_path, 'zip', asset_folder_path)

    print('uploading assets to s3')
    for name, resource in template['Resources'].items():
        if 'aws:asset:path' in resource.get('Metadata', {}):
            asset_id = resource['Metadata']['aws:asset:path']
            # Lambda functions take their code via the `Code` property; other asset-backed
            # resources (e.g. layer versions) use `Content`.
            resource_key = 'Code' if resource['Type'] == 'AWS::Lambda::Function' else 'Content'
            path = os.path.join('packages', asset_id + '.zip')
            try:
                try:
                    # Skip the upload if the object already exists in the bucket.
                    s3.head_object(Bucket=args.bucket, Key=path)
                except ClientError:
                    s3.upload_file(
                        os.path.join(args.input_dir, path),
                        args.bucket,
                        path
                    )
            except Exception as e:
                # Dump directory listings to make failures easier to debug, then re-raise.
                with suppress(Exception):
                    packages_dir = os.path.join(args.input_dir, 'packages')
                    print(f'Files at {packages_dir}:', os.listdir(packages_dir))
                with suppress(Exception):
                    print(f'Files at {args.input_dir}:', os.listdir(args.input_dir))
                with suppress(Exception):
                    print(f'Files at {os.getcwd()}:', os.listdir(os.getcwd()))
                print(f'Error processing assets for {name}: {e}')
                raise e
            # Point the resource at the uploaded archive and drop the CDK asset metadata.
            resource['Properties'][resource_key].update({
                'S3Bucket': args.bucket,
                'S3Key': path
            })
            del resource['Metadata']['aws:asset:path']
            del resource['Metadata']['aws:asset:property']
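
    # After the loop, a function resource ends up looking roughly like this
    # (bucket and key values are illustrative):
    #
    #   "MyFunction": {
    #     "Type": "AWS::Lambda::Function",
    #     "Properties": {
    #       "Code": { "S3Bucket": "<bucket>", "S3Key": "packages/asset.<hash>.zip" },
    #       ...
    #     }
    #   }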

    # Keep only the whitelisted parameters; anything else (e.g. the CDK-generated asset
    # parameters) is no longer needed once the asset locations have been inlined above.
    allowed_parameters = ['Environment']
    template['Parameters'] = {
        name: value for (name, value) in template['Parameters'].items() if name in allowed_parameters
    }

    with open(args.output_file, 'w') as fd:
        json.dump(template, fd, indent=2)

if __name__ == '__main__':
    parser = ArgumentParser(description='Upload CDK assets to S3 and inline their locations into the template.')
    parser.add_argument(
        '-s', '--stack', dest='stack', action='store', required=True,
        help='Name of the stack'
    )
    parser.add_argument(
        '-b', '--bucket', dest='bucket', action='store', required=False,
        help='Name of the bucket', default='cdk-stack-staging-bucket'
    )
    parser.add_argument(
        '-i', '--input', dest='input_dir', action='store', required=False,
        help='Name of the cdk-generated input dir', default='cdk.out'
    )
    parser.add_argument(
        '-o', '--output', dest='output_file', action='store', required=False,
        help='Location to store the output file at', default='inline-template.json'
    )
    inline_deploy(parser.parse_args())
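
# A minimal usage sketch (assumptions: the script is saved as inline_deploy.py, run from
# the project root after `cdk synth`, and the stack/bucket names below are hypothetical):
#
#   python inline_deploy.py --stack my-stack --bucket my-staging-bucket
#
# This reads cdk.out/manifest.json, zips every asset.* directory into cdk.out/packages/,
# uploads the zips to s3://my-staging-bucket/packages/, and writes the rewritten template
# to inline-template.json.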