
@lettergram
Last active January 3, 2019 22:01
# model_from_json is needed below to rebuild the model from its saved architecture
from keras.models import model_from_json

if save_model_flag:
    # Add the optimization method, loss function, and evaluation metric
    model.compile(loss='categorical_crossentropy',
                  optimizer='adam', metrics=['accuracy'])
    # "Fit the model" (train the model), using the training data (80% of the dataset)
    model.fit(x_train, y_train, batch_size=batch_size,
              epochs=epochs, validation_data=(x_test, y_test))
    # Save the model architecture to a JSON file
    model_json = model.to_json()
    with open(model_name + ".json", "w") as json_file:
        json_file.write(model_json)
    # Serialize the weights to HDF5
    model.save_weights(model_name + ".h5")
else:
    # Load the JSON file and recreate the model architecture
    with open(model_name + '.json', 'r') as json_file:
        loaded_model_json = json_file.read()
    model = model_from_json(loaded_model_json)
    # Load the saved weights into the recreated model
    model.load_weights(model_name + ".h5")
    # Compile the loaded model so it can be evaluated on the test data
    model.compile(loss='categorical_crossentropy',
                  optimizer='adam', metrics=['accuracy'])

# Evaluate the trained (or loaded) model, using the test data (20% of the dataset)
score = model.evaluate(x_test, y_test, batch_size=batch_size)
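
The snippet above assumes that save_model_flag, model_name, batch_size, epochs, the model itself, and the x_train/y_train/x_test/y_test arrays are already defined earlier in the script. A minimal, hypothetical setup that could precede it is sketched below; every value, shape, and the two-layer architecture here are illustrative assumptions, not taken from the original gist.

import numpy as np
from keras.models import Sequential
from keras.layers import Dense

# Hypothetical configuration values (not from the original gist)
save_model_flag = True
model_name = "example_model"
batch_size = 32
epochs = 2
num_classes = 5
num_features = 100

# Synthetic data standing in for the 80% / 20% train/test split,
# with one-hot labels to match the categorical_crossentropy loss
rng = np.random.default_rng(0)
x_train = rng.normal(size=(800, num_features))
x_test = rng.normal(size=(200, num_features))
y_train = np.eye(num_classes)[rng.integers(0, num_classes, size=800)]
y_test = np.eye(num_classes)[rng.integers(0, num_classes, size=200)]

# Illustrative architecture; the real gist builds its own model upstream
model = Sequential([
    Dense(64, activation='relu', input_shape=(num_features,)),
    Dense(num_classes, activation='softmax'),
])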