# Train model and get metrics.
from keras.callbacks import Callback

model.compile(
    optimizer='adam',
    loss='binary_crossentropy',
    metrics=['accuracy'],
)

# Custom callback that records per-batch and per-epoch metrics during training.
class Histories(Callback):
    def on_train_begin(self, logs=None):
        self.batch_loss = []
        self.batch_accuracy = []
        self.loss = []
        self.accuracy = []
        self.val_loss = []
        self.val_accuracy = []

    def on_train_batch_end(self, batch, logs=None):
        logs = logs or {}
        self.batch_loss.append(logs.get('loss', -1))
        self.batch_accuracy.append(logs.get('accuracy', -1))

    def on_epoch_end(self, epoch, logs=None):
        logs = logs or {}
        self.loss.append(logs.get('loss', -1))
        self.accuracy.append(logs.get('accuracy', -1))
        self.val_loss.append(logs.get('val_loss', -1))
        self.val_accuracy.append(logs.get('val_accuracy', -1))

h_cb = Histories()

# Fit model for 3 epochs. Epoch-level metrics are also saved to `history`.
history = model.fit(train_ds, validation_data=val_ds, epochs=3, callbacks=[h_cb])
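
# After training, the lists on h_cb hold the metrics collected by the callback.
# A minimal plotting sketch, assuming matplotlib is installed; the figure layout
# below is illustrative and not part of the original gist.
import matplotlib.pyplot as plt

fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(10, 4))

# Per-batch training loss collected in on_train_batch_end.
ax1.plot(h_cb.batch_loss)
ax1.set_xlabel('batch')
ax1.set_ylabel('loss')
ax1.set_title('Training loss per batch')

# Per-epoch training vs. validation accuracy collected in on_epoch_end.
ax2.plot(h_cb.accuracy, label='train')
ax2.plot(h_cb.val_accuracy, label='val')
ax2.set_xlabel('epoch')
ax2.set_ylabel('accuracy')
ax2.set_title('Accuracy per epoch')
ax2.legend()

plt.tight_layout()
plt.show()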