@alexcasalboni
Last active March 6, 2021 13:23
AWS Lambda Coding Session - clda.co/webinar-lambda
def lambda_handler(event, context):
    name = event.get('name') or 'World'
    print("Name: %s" % name)
    return "Hello %s!" % name
{
    "Effect": "Allow",
    "Action": [
        "s3:GetObject",
        "s3:PutObject"
    ],
    "Resource": "arn:aws:s3:::*"
}
from __future__ import print_function
import os, urllib, json
import zipfile, StringIO
import boto3

s3 = boto3.client('s3')

def compress(body, key):
    # build an in-memory zip archive containing the object body
    data = StringIO.StringIO()
    with zipfile.ZipFile(data, 'w', zipfile.ZIP_DEFLATED) as f:
        f.writestr(os.path.basename(key), body)
    data.seek(0)
    return data.read()

def lambda_handler(event, context):
    # read bucket and key from event data (object keys arrive URL-encoded)
    bucket = event['Records'][0]['s3']['bucket']['name']
    key = urllib.unquote_plus(event['Records'][0]['s3']['object']['key'].encode('utf8'))
    # generate new key name
    new_key = "compressed/%s.zip" % os.path.basename(key)
    # read the source obj content
    body = s3.get_object(Bucket=bucket, Key=key)['Body'].read()
    # create new obj with compressed data
    s3.put_object(
        Body=compress(body, key),
        Key=new_key,
        Bucket=bucket,
    )
    return "OK"
{
    "Effect": "Allow",
    "Action": ["dynamodb:*"],
    "Resource": ["arn:aws:dynamodb:eu-central-1:582636008125:table/notifications"]
}
import boto3

DDB = boto3.resource('dynamodb').Table('notifications')

def lambda_handler(event, context):
    # read message from event data
    sns = event['Records'][0]['Sns']
    message = sns['Message']
    message_id = sns['MessageId']
    topic_arn = sns['TopicArn']
    subject = sns['Subject']
    # build new DDB item
    item = {
        'Item': {
            'MessageId': message_id,
            'Message': message,
            'Topic': topic_arn,
            'Subject': subject,
        }
    }
    # log it
    print("Item: %s" % item)
    # write into db
    DDB.put_item(**item)
    return item
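To try this handler without publishing to a real topic, a minimal SNS event with just the fields it reads is enough (a sketch: the id, ARN, and texts are made up; the write still requires AWS credentials and the 'notifications' table).

# hypothetical minimal SNS event: only the fields the handler reads
test_event = {
    'Records': [{
        'Sns': {
            'MessageId': '00000000-0000-0000-0000-000000000000',  # made-up id
            'Message': 'Hello from SNS',
            'TopicArn': 'arn:aws:sns:eu-central-1:123456789012:example-topic',  # made-up ARN
            'Subject': 'Test subject',
        }
    }]
}
# lambda_handler(test_event, None)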
import time
from datetime import datetime
import boto3

DDB = boto3.resource('dynamodb').Table('logtime')

def lambda_handler(event, context):
    # generate current timestamp (UTC)
    now = datetime.utcnow()
    timestamp = int(time.mktime(now.timetuple()))
    # build new DDB item
    item = {
        'Item': {
            'timestamp': timestamp,
            'datetime': now.isoformat(),
        }
    }
    # log it
    print("Item: %s" % item)
    # write into db
    DDB.put_item(**item)
    return item
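This function is presumably fired on a schedule and never looks at the event, so a local run is trivial (a sketch; it assumes AWS credentials and a 'logtime' table whose partition key is the numeric 'timestamp' attribute).

# the event is ignored, so any dict works; each call writes one item
result = lambda_handler({}, None)
print(result['Item']['timestamp'], result['Item']['datetime'])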
{
    "Effect": "Allow",
    "Action": ["dynamodb:*"],
    "Resource": ["arn:aws:dynamodb:eu-central-1:582636008125:table/logtime"]
}
import boto3

DDB = boto3.resource('dynamodb').Table('logtime')

def lambda_handler(event, context):
    # read records from event data (batch)
    records = event['Records']
    for record in records:
        # log each record
        print(record)
        event_name = record['eventName'].upper()
        # do something only if INSERT (new record)
        if event_name == 'INSERT':
            # retrieve the primary key
            timestamp = record['dynamodb']['Keys']['timestamp']['N']
            # update the item by adding a new field (static value)
            DDB.update_item(
                Key={'timestamp': int(timestamp)},
                UpdateExpression='SET newfield = :val',
                ExpressionAttributeValues={':val': "hello"},
            )
    # log how many records have been processed
    print("Processed %s records" % len(records))