Skip to content

Instantly share code, notes, and snippets.

Embed
What would you like to do?
import json,os,boto3
def lambda_handler(event, context):
    """Deploy a CodePipeline input artifact to an EC2 instance via SSM.

    Invoked by AWS CodePipeline as a Lambda invoke action. Runs shell
    commands on the target instance (via SSM RunShellScript) that download
    the job's first input artifact from S3, unzip it, and move its bin/
    contents into the payload directory, then reports the job result back
    to CodePipeline.

    Required environment variables:
        INSTANCE_ID -- id of the EC2 instance to run the SSM commands on.
        PAYLOAD_DIR -- destination directory for the artifact's bin/ files.

    Parameters:
        event   -- CodePipeline Lambda invoke event; must contain
                   event['CodePipeline.job'] with id and inputArtifacts.
        context -- Lambda context object (unused).
    """
    pipeline = boto3.client('codepipeline')
    job_id = event['CodePipeline.job']['id']

    instance_id = os.getenv('INSTANCE_ID')
    payload_dir = os.getenv('PAYLOAD_DIR')
    if instance_id is None or payload_dir is None:
        pipeline.put_job_failure_result(
            jobId=job_id,
            failureDetails={
                'type': 'ConfigurationError',
                'message': 'missing environment variables in lambda',
            },
        )
        # Bug fix: the original fell through after reporting failure and
        # then crashed interpolating None into the shell commands below.
        # A failed job must terminate the handler here.
        return

    # S3 location of the pipeline's first (and only expected) input artifact.
    artifact = event['CodePipeline.job']['data']['inputArtifacts'][0]['location']['s3Location']
    s3_url = 's3://%s/%s' % (artifact['bucketName'], artifact['objectKey'])
    zip_name = artifact['objectKey'].split('/')[-1]

    # Commands executed on the instance. NOTE(review): objectKey is
    # interpolated unquoted into shell lines; it originates from the
    # pipeline itself, but quote it if artifact keys may contain spaces.
    commands = [
        'aws s3 cp %s %s.zip' % (s3_url, zip_name),
        'unzip %s.zip -d %s' % (zip_name, zip_name),
        'sudo mv %s/bin/* %s' % (zip_name, payload_dir),
    ]

    ssm = boto3.client('ssm')
    response = ssm.send_command(
        InstanceIds=[instance_id],
        DocumentName='AWS-RunShellScript',
        Parameters={
            'commands': commands,
            'workingDirectory': ['/tmp'],
        },
    )
    # Command id of the asynchronous SSM invocation. The original stored it
    # without using it; kept here for logging/debugging parity. Note that
    # success is reported without waiting for the command to finish.
    command_id = response['Command']['CommandId']

    pipeline.put_job_success_result(
        jobId=job_id,
        currentRevision={
            'revision': '1',
            'changeIdentifier': '1',
        },
    )
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment