Last active
August 15, 2023 21:52
-
-
Save erenon/91f526302cd8e9d21b73f24c0f9c4bb8 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# This example shows how to use the Keras TensorBoard callback
# with model.train_on_batch (model.fit drives callbacks for you;
# train_on_batch does not, so we invoke the callback manually).
import tensorflow.keras as keras

# Setup the model
model = keras.models.Sequential()
model.add(...) # Add your layers
model.compile(...) # Compile as usual

# Number of samples per manual training batch; also passed to the
# TensorBoard callback below.
batch_size=256

# Create the TensorBoard callback,
# which we will drive manually via set_model / on_epoch_end /
# on_train_end (see the loop further down).
# NOTE(review): the batch_size and write_grads arguments were removed
# from tf.keras.callbacks.TensorBoard in later TF2 releases — this
# snippet assumes TF1.x/early TF2; confirm against your TF version.
tensorboard = keras.callbacks.TensorBoard(
    log_dir='/tmp/my_tf_logs',  # directory where event files are written
    histogram_freq=0,           # 0 disables histogram computation
    batch_size=batch_size,
    write_graph=True,
    write_grads=True
)
# Attach the model so the callback can read it when its hooks fire.
tensorboard.set_model(model)
# Transform the flat list returned by model.train_on_batch
# into the {metric_name: value} dict expected by the
# TensorBoard callback's on_epoch_end hook.
def named_logs(model, logs):
    """Pair each entry of ``model.metrics_names`` with the
    corresponding value in ``logs`` and return the mapping.

    Args:
        model: object exposing a ``metrics_names`` sequence
            (e.g. a compiled Keras model).
        logs: sequence of metric values, in the same order as
            ``model.metrics_names``.

    Returns:
        dict mapping metric name -> metric value. Extra entries
        on either side are dropped (zip stops at the shorter).
    """
    return dict(zip(model.metrics_names, logs))
# Run training batches; after each batch, drive the callback so
# TensorBoard receives the logs immediately.
# NOTE(review): calling on_epoch_end (rather than on_batch_end) per
# batch is deliberate here — with the callback's default
# update_freq='epoch' only epoch-end events flush scalars, so each
# batch is reported to TensorBoard as an "epoch". To use
# on_batch_end instead, construct the callback with
# update_freq='batch' — confirm against your TF version.
# create_training_data(batch_size) is assumed to be defined
# elsewhere and to return an (x, y) batch — TODO confirm.
for batch_id in range(1000):
    x_train,y_train = create_training_data(batch_size)
    logs = model.train_on_batch(x_train, y_train)
    tensorboard.on_epoch_end(batch_id, named_logs(model, logs))
# Signal end of training so the callback flushes and closes its writer.
tensorboard.on_train_end(None)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
I was also uncomfortable using tensorboard.on_epoch_end() instead of tensorboard.on_batch_end(). However, I couldn't get the correct result by simply switching the call: I also needed to change the TensorBoard callback's update_freq argument from its default of "epoch" to "batch", like this:
tensorboard = keras.callbacks.TensorBoard(
log_dir='/tmp/my_tf_logs',
histogram_freq=0,
batch_size=batch_size,
write_graph=True,
write_grads=True,
update_freq="batch"
)