@goduncan
Created January 7, 2017 15:33

import json
import gzip
import time
from StringIO import StringIO

import boto3

S3 = boto3.client('s3')


def lambda_handler(event, context):
    """Default Lambda entry point."""
    # Set the destination S3 bucket, folder, and file prefix
    bucket_s3 = 'your.s3.bucket.com'
    folder_s3 = 'Flow'
    prefix_s3 = 'FlowLogs_'
    # Capture the CloudWatch Logs payload (base64-encoded, gzipped JSON)
    out_events = str(event['awslogs']['data'])
    # Decode and unzip the log data
    out_events = gzip.GzipFile(
        fileobj=StringIO(out_events.decode('base64', 'strict'))).read()
    # Parse the JSON log data into a dictionary
    clean_events = json.loads(out_events)
    # Create a temp file (/tmp is the only writable path in Lambda)
    temp_file = open('/tmp/file', 'w+')
    # Create the S3 file key from the folder, prefix, and a timestamp
    key = folder_s3 + '/' + prefix_s3 + str(int(time.time())) + '.log'
    # Transform each log event into a CSV line and store it in the temp file
    fields = ('srcaddr', 'srcport', 'dstaddr', 'dstport',
              'protocol', 'start', 'end', 'bytes')
    for log_event in clean_events['logEvents']:
        extracted = log_event['extractedFields']
        temp_file.write(','.join(str(extracted[f]) for f in fields) + '\n')
    # Close the temp file
    temp_file.close()
    # Upload the file to S3
    S3.upload_file('/tmp/file', bucket_s3, key)
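
Note that the handler relies on extractedFields, which CloudWatch Logs populates only when the subscription that triggers the function uses a space-delimited filter pattern naming each flow log field. A minimal sketch of creating such a subscription with boto3; the log group name, filter name, and function ARN below are placeholders, not values from this gist:

import boto3

logs = boto3.client('logs')

# Hypothetical names; substitute your own log group and Lambda ARN.
logs.put_subscription_filter(
    logGroupName='/vpc/flowlogs',
    filterName='FlowLogsToLambda',
    # Space-delimited pattern: CloudWatch Logs maps each position to a
    # named field, which then appears under 'extractedFields'.
    filterPattern='[version, account_id, interface_id, srcaddr, dstaddr, '
                  'srcport, dstport, protocol, packets, bytes, start, end, '
                  'action, log_status]',
    destinationArn='arn:aws:lambda:us-east-1:123456789012:function:FlowLogsToS3',
)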
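
For a quick local check under the Python 2.7 runtime this gist targets, one way to build a synthetic event is to gzip and base64-encode a JSON document shaped like a CloudWatch Logs delivery. The record values below are made up for illustration, and the final upload step will only succeed if AWS credentials and the destination bucket actually exist:

import gzip
import json
from StringIO import StringIO

# Made-up flow log record for illustration only.
sample = {
    'logEvents': [{
        'extractedFields': {
            'srcaddr': '10.0.0.1', 'srcport': '443',
            'dstaddr': '10.0.0.2', 'dstport': '49152',
            'protocol': '6', 'start': '1483800000',
            'end': '1483800060', 'bytes': '840',
        },
    }],
}

# Gzip then base64-encode, mirroring what CloudWatch Logs delivers.
buf = StringIO()
gz = gzip.GzipFile(fileobj=buf, mode='w')
gz.write(json.dumps(sample))
gz.close()

lambda_handler({'awslogs': {'data': buf.getvalue().encode('base64')}}, None)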
@ShadySQL

ShadySQL commented Feb 9, 2017

Did you get this working with Lambda or thru EC2?
