Stacker hook for uploading lambda functions
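The hook reads its settings from keyword arguments supplied by the stacker config. A minimal sketch of the shape this code expects (the bucket name, function name, path, and patterns below are illustrative, and how hook arguments are declared depends on the stacker version):

    # Illustrative only: the keyword arguments upload_lambda_functions() reads.
    hook_kwargs = {
        'bucket': 'my-artifact-bucket',          # optional; falls back to context.bucket_name
        'functions': {
            'MyFunction': {
                'path': './lambda_src',          # required; resolved relative to the config directory
                'include': ['*.py'],             # optional; defaults to ['**']
                'exclude': ['*.pyc', 'tests/*'], # optional; defaults to []
            },
        },
    }

The module itself follows.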
import os
import os.path
import fnmatch
import stat
import logging
import hashlib
from StringIO import StringIO
from zipfile import ZipFile, ZIP_DEFLATED
import boto3
import botocore
from troposphere.awslambda import Code
from stacker.util import get_config_directory
logger = logging.getLogger(__name__)
# UNIX file attributes are stored in the upper 16 bits in the external
# attributes field of a ZIP entry
ZIP_PERMS_MASK = (stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) << 16


def _zip_files(files, root):
    zip_data = StringIO()
    with ZipFile(zip_data, 'w', ZIP_DEFLATED) as zip_file:
        for fname in files:
            zip_file.write(os.path.join(root, fname), fname)

        # Fix file permissions to avoid any issues - only care whether a file
        # is executable or not, choosing between modes 755 and 644 accordingly.
        for zip_entry in zip_file.filelist:
            perms = (zip_entry.external_attr & ZIP_PERMS_MASK) >> 16
            if perms & stat.S_IXUSR != 0:
                new_perms = 0755
            else:
                new_perms = 0644

            if new_perms != perms:
                logger.debug("lambda: fixing perms: %s: %o => %o",
                             zip_entry.filename, perms, new_perms)
                new_attr = ((zip_entry.external_attr & ~ZIP_PERMS_MASK) |
                            (new_perms << 16))
                zip_entry.external_attr = new_attr

    contents = zip_data.getvalue()
    zip_data.close()
    return contents


def _fnmatch_filter_relative(names, root, includes, excludes):
    # Match the complete relative path if the pattern contains slashes, and
    # only the basename otherwise.
    for name in names:
        path = os.path.join(root, name)

        for pattern in includes:
            if fnmatch.fnmatch(path if '/' in pattern else name, pattern):
                break
        else:
            continue

        for pattern in excludes:
            if fnmatch.fnmatch(path if '/' in pattern else name, pattern):
                break
        else:
            yield name


def _find_files(root, includes, excludes):
    root = os.path.abspath(root)

    for base, dirs, files in os.walk(root):
        rel_base = os.path.relpath(base, root)
        if rel_base == '.':
            rel_base = ''

        dirs[:] = _fnmatch_filter_relative(dirs, rel_base, ['*'], excludes)
        files = _fnmatch_filter_relative(files, rel_base, includes, excludes)
        for f in files:
            yield os.path.join(rel_base, f)


def _zip_from_file_patterns(root, includes, excludes):
    logger.info('lambda: base directory: %s', root)

    files = list(_find_files(root, includes, excludes))
    logger.info('lambda: adding %d files:', len(files))
    for fname in files:
        logger.info('lambda: + %s', fname)

    return _zip_files(files, root)


def _head_object(s3_conn, bucket, key):
    try:
        return s3_conn.head_object(Bucket=bucket, Key=key)
    except botocore.exceptions.ClientError as e:
        if e.response['Error']['Code'] == '404':
            return None
        else:
            raise


def _ensure_bucket(s3_conn, bucket):
    try:
        s3_conn.head_bucket(Bucket=bucket)
    except botocore.exceptions.ClientError as e:
        if e.response['Error']['Code'] == '404':
            logger.info('Creating bucket %s.', bucket)
            s3_conn.create_bucket(Bucket=bucket)
        elif e.response['Error']['Code'] in ('401', '403'):
            logger.exception('Access denied for bucket %s.', bucket)
            raise
        else:
            logger.exception('Error checking bucket %s. Error %s', bucket,
                             e.response)
            raise


def _upload_code(s3_conn, bucket, name, contents):
    hsh = hashlib.md5(contents)
    logger.debug('lambda: ZIP hash: %s', hsh.hexdigest())

    key = 'lambda-{}-{}.zip'.format(name, hsh.hexdigest())
    info = _head_object(s3_conn, bucket, key)
    expected_etag = '"{}"'.format(hsh.hexdigest())

    if info and info['ETag'] == expected_etag:
        logger.info('lambda: object %s already exists, not uploading', key)
    else:
        logger.info('lambda: uploading object %s', key)
        s3_conn.put_object(Bucket=bucket, Key=key, Body=contents,
                           ContentType='application/zip',
                           ACL='aws-exec-read')

    return Code(S3Bucket=bucket, S3Key=key)


def _check_pattern_list(patterns, key, default=None):
    if not patterns:
        return default

    if isinstance(patterns, basestring):
        return [patterns]

    if isinstance(patterns, list):
        if all(isinstance(p, basestring) for p in patterns):
            return patterns

    raise ValueError('Invalid file patterns in key %s: must be a string or '
                     'list of strings' % key)


def _upload_function(s3_conn, bucket, name, options):
    try:
        root = options['path']
    except KeyError as e:
        logger.error('lambda: missing required property in function '
                     'configuration %s: %s', name, e.args[0])
        raise

    includes = _check_pattern_list(options.get('include'), 'include',
                                   default=['**'])
    excludes = _check_pattern_list(options.get('exclude'), 'exclude',
                                   default=[])

    logger.info('lambda: processing function %s', name)

    # os.path.join will ignore other parameters if the right-most one is an
    # absolute path, which is exactly what we want.
    root = os.path.abspath(os.path.join(get_config_directory(), root))
    zip_contents = _zip_from_file_patterns(root, includes, excludes)

    return _upload_code(s3_conn, bucket, name, zip_contents)


def upload_lambda_functions(region, namespace, mappings, parameters,
                            context=None, **kwargs):
    """Build a ZIP archive for each configured function, upload it to S3 and
    store the resulting troposphere Code objects in
    context.hook_data['upload_lambda_functions'].
    """
    if not context:
        logger.error('lambda: context not received in hook, '
                     'check if recent version of stacker is being used')
        return False

    bucket = kwargs.get('bucket')
    if not bucket:
        bucket = context.bucket_name
        logger.info('lambda: using default bucket from stacker: %s', bucket)
    else:
        logger.info('lambda: using custom bucket: %s', bucket)

    session = boto3.Session(region_name=region)
    s3_conn = session.client('s3')

    _ensure_bucket(s3_conn, bucket)

    results = {}
    for name, options in kwargs['functions'].items():
        results[name] = _upload_function(s3_conn, bucket, name, options)

    # TODO: use a non-hacky mechanism to pass hook data back to blueprints
    if not hasattr(context, 'hook_data'):
        context.hook_data = {}
    context.hook_data['upload_lambda_functions'] = results

    return True
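
The Code objects stored in context.hook_data can then be consumed from a blueprint when defining the Lambda resources. A minimal sketch, assuming a stacker Blueprint subclass has access to the shared context as self.context; the class name, function name, handler, and role ARN below are hypothetical:

    from troposphere import awslambda
    from stacker.blueprints.base import Blueprint

    class MyLambda(Blueprint):
        def create_template(self):
            # Code object produced by the hook for the function named 'MyFunction'
            code = self.context.hook_data['upload_lambda_functions']['MyFunction']
            self.template.add_resource(awslambda.Function(
                'MyFunction',
                Code=code,
                Handler='index.handler',  # hypothetical handler
                Role='<role-arn>',        # hypothetical IAM role ARN
                Runtime='python2.7',
            ))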