Skip to content

Instantly share code, notes, and snippets.

Embed
What would you like to do?
Save and load Keras models to and from AWS S3
import logging
import os
import tempfile
import zipfile
from pathlib import Path

import numpy as np
import s3fs
from tensorflow import keras
# Placeholder AWS credentials and target bucket.
# NOTE(review): never commit real credentials to source control; prefer
# environment variables or a shared AWS credentials/config profile.
AWS_ACCESS_KEY="aws_access_key"
AWS_SECRET_KEY="aws_secret_key"
BUCKET_NAME="bucket_name"
def get_s3fs():
    """Build an S3 filesystem handle using the module-level AWS credentials."""
    filesystem = s3fs.S3FileSystem(key=AWS_ACCESS_KEY, secret=AWS_SECRET_KEY)
    return filesystem
def zipdir(path, ziph):
    """Recursively add every file under *path* to the open ZipFile *ziph*.

    Entries are stored relative to *path*, so unzipping does not recreate
    the (temporary) parent directory tree that contained the model.

    Args:
        path: directory whose contents should be archived.
        ziph: an already-open ``zipfile.ZipFile`` in write mode.
    """
    for root, _dirs, files in os.walk(path):
        # relpath keeps archive names relative; the original prefix-slicing
        # left a leading os.sep on subdirectory entries.
        folder = os.path.relpath(root, path)
        if folder == ".":
            folder = ""
        for file in files:
            ziph.write(os.path.join(root, file), os.path.join(folder, file))
def s3_save_keras_model(model, model_name):
    """Save a Keras model to ``s3://BUCKET_NAME/<model_name>.zip``.

    The model is serialized into a temporary directory, zipped without the
    temp parent folders, and uploaded with s3fs.

    Args:
        model: the ``keras.Model`` to persist.
        model_name: base name used for both the local files and the S3 key.
    """
    with tempfile.TemporaryDirectory() as tempdir:
        model.save(f"{tempdir}/{model_name}")
        zip_path = f"{tempdir}/{model_name}.zip"
        # ZIP_STORED (no compression): model weight files rarely compress well.
        # Context manager guarantees the archive is finalized even on error.
        with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_STORED) as zipf:
            zipdir(f"{tempdir}/{model_name}", zipf)
        # Renamed local: the original `s3fs = get_s3fs()` shadowed the
        # imported s3fs module inside this function.
        fs = get_s3fs()
        fs.put(zip_path, f"{BUCKET_NAME}/{model_name}.zip")
        logging.info(f"Saved zipped model at path s3://{BUCKET_NAME}/{model_name}.zip")
def s3_get_keras_model(model_name: str) -> keras.Model:
    """Download and load a Keras model from ``s3://BUCKET_NAME/<model_name>.zip``.

    Args:
        model_name: base name of the model (the S3 key minus ``.zip``).

    Returns:
        The deserialized ``keras.Model``.
    """
    with tempfile.TemporaryDirectory() as tempdir:
        # Renamed local: the original `s3fs = get_s3fs()` shadowed the
        # imported s3fs module inside this function.
        fs = get_s3fs()
        # Fetch the zipped model into the temporary directory.
        fs.get(f"{BUCKET_NAME}/{model_name}.zip", f"{tempdir}/{model_name}.zip")
        # Extract the archive next to it, then load from the extracted folder.
        with zipfile.ZipFile(f"{tempdir}/{model_name}.zip") as zip_ref:
            zip_ref.extractall(f"{tempdir}/{model_name}")
        return keras.models.load_model(f"{tempdir}/{model_name}")
# Demo: build a trivial single-Dense-layer model, round-trip it through S3.
inputs = keras.Input(shape=(32,))
outputs = keras.layers.Dense(1)(inputs)
model = keras.Model(inputs, outputs)
model.compile(optimizer="adam", loss="mean_squared_error")
# Save the model to S3
s3_save_keras_model(model, "my_model")
# Load the model from S3
loaded_model = s3_get_keras_model("my_model")
@kunalsharmagnome

This comment has been minimized.

Copy link

@kunalsharmagnome kunalsharmagnome commented May 24, 2021

How were you able to load TensorFlow and s3fs in a Lambda layer? The size will be higher than 250 MB. I think TensorFlow includes tf.keras, and its size is big.
Can you help me here? I am a little lost.

@ramdesh

This comment has been minimized.

Copy link
Owner Author

@ramdesh ramdesh commented May 24, 2021

So I didn't do this in a Lambda function; this was a plain Python function that I ran on my machine. Yes, loading TensorFlow in Lambdas is a pain, but you could try using Lambda containers.

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment