@MathiasGruber
Last active April 15, 2021 13:26
Training MNIST with mlflow logging on Databricks
"""
Keras MNIST example from: https://keras.io/examples/vision/mnist_convnet/
Adapted to add mlflow logging
"""
import mlflow
import mlflow.tensorflow  # needed for mlflow.tensorflow.autolog() below
import numpy as np
from tensorflow import keras
from tensorflow.keras import layers
# Model / data parameters
num_classes = 10
input_shape = (28, 28, 1)
# the data, split between train and test sets
(x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()
# Scale images to the [0, 1] range
x_train = x_train.astype("float32") / 255
x_test = x_test.astype("float32") / 255
# Make sure images have shape (28, 28, 1)
x_train = np.expand_dims(x_train, -1)
x_test = np.expand_dims(x_test, -1)
# Convert class vectors to binary class matrices
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
# Build the model
model = keras.Sequential(
    [
        keras.Input(shape=input_shape),
        layers.Conv2D(32, kernel_size=(3, 3), activation="relu"),
        layers.MaxPooling2D(pool_size=(2, 2)),
        layers.Conv2D(64, kernel_size=(3, 3), activation="relu"),
        layers.MaxPooling2D(pool_size=(2, 2)),
        layers.Flatten(),
        layers.Dropout(0.5),
        layers.Dense(num_classes, activation="softmax"),
    ]
)
model.compile(loss="categorical_crossentropy", optimizer="adam", metrics=["accuracy"])
# Enable automatic mlflow logging (autolog hooks into Keras fit() and logs params, per-epoch metrics, and the model)
mlflow.tensorflow.autolog()
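# Optional sketch (assumption, not part of the original gist): on Databricks, runs from a
# notebook are logged to that notebook's default experiment. To log to a named experiment
# instead, it could be set explicitly before training, e.g.:
# mlflow.set_experiment("/Users/<your-user>/mnist-convnet")  # hypothetical experiment path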
# Train model
model.fit(x_train, y_train, batch_size=128, epochs=15, validation_split=0.1)
# Fine-tune for another epoch
model.fit(x_train, y_train, batch_size=128, epochs=1, validation_split=0.1)
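# A minimal follow-up sketch (not part of the original gist): evaluate on the held-out
# test set and log the scores to mlflow under an explicit run, since autolog manages its
# own runs around fit(). model.evaluate, mlflow.start_run, and mlflow.log_metric are
# standard Keras/mlflow APIs; the run and metric names below are illustrative.
test_loss, test_acc = model.evaluate(x_test, y_test, verbose=0)
with mlflow.start_run(run_name="mnist-test-eval"):
    mlflow.log_metric("test_loss", test_loss)
    mlflow.log_metric("test_accuracy", test_acc)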