Gist ramdesh/f00ec1f5d01f03114264e8f3d0c226e8 — save and load Keras SavedModels to/from AWS S3 by zipping the model directory.
import logging
import os
import tempfile
import zipfile
from pathlib import Path

import numpy as np
import s3fs
from tensorflow import keras
# Placeholder AWS credentials and target bucket — replace with real values.
# NOTE(review): prefer loading these from environment variables or an IAM
# role instead of hard-coding secrets in source.
AWS_ACCESS_KEY = "aws_access_key"
AWS_SECRET_KEY = "aws_secret_key"
BUCKET_NAME = "bucket_name"
def get_s3fs():
    """Return an s3fs filesystem handle authenticated with the module-level credentials."""
    return s3fs.S3FileSystem(key=AWS_ACCESS_KEY, secret=AWS_SECRET_KEY)
def zipdir(path, ziph):
    """Recursively add every file under *path* to the open ZipFile *ziph*.

    Archive names are stored relative to *path*, so the zip does not embed
    the parent directory tree (zipfile's ``write`` would otherwise keep the
    full on-disk path).

    Args:
        path: root directory whose contents should be archived.
        ziph: an open ``zipfile.ZipFile`` in write mode.
    """
    for root, _dirs, files in os.walk(path):
        for file in files:
            full = os.path.join(root, file)
            # relpath replaces the original fragile prefix-slicing and can
            # never produce a leading separator in the archive name.
            ziph.write(full, os.path.relpath(full, path))
def s3_save_keras_model(model, model_name):
    """Zip a saved Keras model directory and upload it to ``s3://BUCKET_NAME/<model_name>.zip``.

    Args:
        model: a ``keras.Model`` to persist.
        model_name: base name used for both the SavedModel folder and the zip key.
    """
    with tempfile.TemporaryDirectory() as tempdir:
        # Keras writes a SavedModel *directory*, which cannot be uploaded as a
        # single S3 object — so zip it first.
        model.save(f"{tempdir}/{model_name}")
        zip_path = f"{tempdir}/{model_name}.zip"
        # Context manager guarantees the archive is finalized even on error
        # (the original left the ZipFile open if zipdir raised).
        with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_STORED) as zipf:
            zipdir(f"{tempdir}/{model_name}", zipf)
        fs = get_s3fs()  # renamed local: `s3fs` shadowed the imported module
        fs.put(zip_path, f"{BUCKET_NAME}/{model_name}.zip")
        logging.info(f"Saved zipped model at path s3://{BUCKET_NAME}/{model_name}.zip")
def s3_get_keras_model(model_name: str) -> keras.Model:
    """Download ``s3://BUCKET_NAME/<model_name>.zip``, extract it, and load the Keras model.

    Args:
        model_name: base name used when the model was saved with ``s3_save_keras_model``.

    Returns:
        The restored ``keras.Model``.
    """
    with tempfile.TemporaryDirectory() as tempdir:
        fs = get_s3fs()  # renamed local: `s3fs` shadowed the imported module
        zip_path = f"{tempdir}/{model_name}.zip"
        # Fetch the zipped SavedModel into the temporary directory.
        fs.get(f"{BUCKET_NAME}/{model_name}.zip", zip_path)
        # Extract it; load_model must find saved_model.pb directly inside this
        # folder, which holds as long as the archive was built by zipdir with
        # paths relative to the model directory.
        with zipfile.ZipFile(zip_path) as zip_ref:
            zip_ref.extractall(f"{tempdir}/{model_name}")
        return keras.models.load_model(f"{tempdir}/{model_name}")
# Demo: build a trivial single-layer regression model and round-trip it via S3.
x_in = keras.Input(shape=(32,))
y_out = keras.layers.Dense(1)(x_in)
model = keras.Model(x_in, y_out)
model.compile(optimizer="adam", loss="mean_squared_error")

s3_save_keras_model(model, "my_model")         # upload the zipped SavedModel
loaded_model = s3_get_keras_model("my_model")  # download and restore it
When I implement this, I get the following error:
"SavedModel file does not exist at: /var/folders/cb/ns18k3051f35p2jr32r2t4vr0000gp/T/tmpbv0lstar/yolo_v3/{saved_model.pbtxt|saved_model.pb}"
Any ideas on what can be done?
@rahuja23 This seems to be a Keras error related to the YOLO model you appear to be using. I'd suggest printing out the files at the temp folder path to see what was actually extracted. You could also get some help from this question: https://stackoverflow.com/questions/60071818/savedmodel-file-does-not-exist-at-model-h5-saved-model-pbtxtsaved-model-pb
@rahuja23
I encountered the same problem too — the zip file extraction doesn't work.
Have you figured it out?
@ramdesh this worked for saving/loading my keras SavedModel from S3 when everything I tried before wasn't working. Thanks so much!
This worked perfectly for me. Thanks
So I didn't do this on a lambda function, this was a plain Python function that I ran on my machine. Yes, loading tf in lambdas is a pain, but you could try using Lambda containers.