import matplotlib.pyplot as plt
from keras import regularizers, optimizers
from keras.layers import Dense, Dropout, Input
from keras.models import Sequential, load_model
from keras.callbacks import ModelCheckpoint
from keras.datasets import boston_housing
from keras.losses import MeanSquaredError
#Load Boston Housing Dataset
(X_train, y_train), (X_test, y_test) = boston_housing.load_data()
#Build model
model = Sequential()
model.add(Input(shape=(13,)))
model.add(Dense(32, activation='tanh',
                kernel_regularizer=regularizers.l1_l2(l1=1e-5, l2=1e-4)))
model.add(Dense(1, activation='relu'))
mse = MeanSquaredError()
adam = optimizers.Adam(learning_rate=0.1, decay=1e-3)
model.compile(optimizer=adam, loss=mse, metrics=None)
model.summary()
#create callback
filepath = 'my_best_model.hdf5'
checkpoint = ModelCheckpoint(filepath=filepath,
                             monitor='val_loss',
                             verbose=1,
                             save_best_only=True,
                             mode='min')
callbacks = [checkpoint]
#fit the model
#X_train from boston_housing.load_data() is already a NumPy array
history = model.fit(X_train,
                    y_train,
                    batch_size=10,
                    epochs=100,
                    validation_split=0.2,
                    callbacks=callbacks)
#plot the training history
plt.plot(history.history['loss'], label='Training Loss')
plt.plot(history.history['val_loss'], label='Validation Loss')
plt.legend()
plt.xlabel('Epochs')
plt.ylabel('Mean Squared Error')
plt.savefig('model_training_history')
plt.show()
#Load and evaluate the best model version
model = load_model(filepath)
yhat = model.predict(X_test)
print('Model MSE on test data = ', mse(y_test, yhat).numpy())
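#Optional: a minimal sanity check (sketch only); model.evaluate reports the
#compiled loss directly, so it should roughly match the MSE printed above.
test_loss = model.evaluate(X_test, y_test, verbose=0)
print('Model loss from model.evaluate = ', test_loss)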