Skip to content

Instantly share code, notes, and snippets.

@mostafa6765
Created November 1, 2023 15:31
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save mostafa6765/0d4dd408fef72184b87b0e732df5c15b to your computer and use it in GitHub Desktop.
Save mostafa6765/0d4dd408fef72184b87b0e732df5c15b to your computer and use it in GitHub Desktop.
lambda-image-resize
import os
from io import BytesIO
from urllib.parse import unquote_plus

import boto3
from PIL import Image
# Initialize AWS clients once at module load so warm Lambda invocations
# reuse the same connections.
s3 = boto3.client('s3')
lambda_client = boto3.client('lambda')


def lambda_handler(event, context):
    """Entry point for the S3-triggered image-resize Lambda.

    Reads the uploaded object's bucket and key from the first S3 event
    record and writes one resized copy per target width to the output
    bucket under ``resized/<width>px/<basename>``.

    Args:
        event: S3 notification event (``Records[0].s3`` shape).
        context: Lambda context object (unused).
    """
    bucket = event['Records'][0]['s3']['bucket']['name']
    # S3 event keys are URL-encoded (spaces arrive as '+', unicode as
    # %XX escapes); decode so get_object receives the real key.
    key = unquote_plus(event['Records'][0]['s3']['object']['key'])
    # Destination bucket; overridable via env var, defaulting to the
    # original hard-coded value for backward compatibility.
    output_bucket = os.environ.get('OUTPUT_BUCKET', 'your-output-bucket')
    # Target widths in pixels.
    image_sizes = [200, 400, 800]
    # Produce and upload one resized variant per width.
    for size in image_sizes:
        output_key = f'resized/{size}px/' + os.path.basename(key)
        resize_and_upload_image(bucket, key, output_bucket, output_key, size)
def resize_and_upload_image(input_bucket, input_key, output_bucket, output_key, new_width):
    """Download an image from S3, resize it, and upload the result.

    Args:
        input_bucket: Source S3 bucket name.
        input_key: Source object key.
        output_bucket: Destination S3 bucket name.
        output_key: Destination object key.
        new_width: Target width in pixels (height scales proportionally).
    """
    # Download the original object.
    response = s3.get_object(Bucket=input_bucket, Key=input_key)
    image_data = response['Body'].read()
    # Resize (resize_image always emits JPEG bytes).
    resized_image = resize_image(image_data, new_width)
    # Tag the object as JPEG so it is served with the right Content-Type
    # instead of the S3 default binary/octet-stream.
    s3.put_object(
        Bucket=output_bucket,
        Key=output_key,
        Body=resized_image,
        ContentType='image/jpeg',
    )
def resize_image(image_data, new_width):
    """Resize raw image bytes to ``new_width``, preserving aspect ratio.

    Args:
        image_data: Encoded image bytes (any format Pillow can open).
        new_width: Target width in pixels.

    Returns:
        JPEG-encoded bytes of the resized image.
    """
    with Image.open(BytesIO(image_data)) as img:
        # Scale height to keep the original aspect ratio.
        # (Original code was missing a closing parenthesis here.)
        new_height = int(new_width * img.size[1] / img.size[0])
        resized = img.resize((new_width, new_height), Image.LANCZOS)
        # JPEG cannot store alpha or palette modes (RGBA/P would raise
        # OSError on save); normalize to RGB first.
        if resized.mode != 'RGB':
            resized = resized.convert('RGB')
        output_buffer = BytesIO()
        resized.save(output_buffer, format='JPEG')
        return output_buffer.getvalue()
# Serverless Framework config for the S3-triggered image-resize Lambda.
# (Re-indented: YAML nesting had been flattened and was invalid as pasted.)
org: my_org
app: image-resize-serverless
service: image-resize-serverless
frameworkVersion: '3'

provider:
  name: aws
  runtime: python3.9
  region: us-west-2

functions:
  cloud-cma-extract-stage:
    handler: handler.lambda_handler
    events:
      - s3:
          bucket: name
          event: s3:ObjectCreated:*
          rules:
            - prefix: folder/
          # Attach to an already-existing bucket instead of creating one.
          existing: true
          forceDeploy: true
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment