TensorFlow eager mode histogram and scalar logging helpers
import tensorflow.contrib as tfc  # the helpers use the TF 1.x contrib summary API in eager mode


def tf_clean_variable_name(var_name):
    """Cleans a TensorFlow variable name for use as a summary tag.

    Usage example (and an example of how to log a histogram in eager mode):

        with gradients_writer.as_default(), tfc.summary.always_record_summaries():
            for g, var_name in zip(gradients,
                                   [tf_clean_variable_name(v.name) for v in model.trainable_variables]):
                tfc.summary.histogram(f'gradient_{var_name}', g, step=epoch)
    """
    # example of a var_name: 'inf_dense_0_23/kernel:0' -> 'inf_dense_0/kernel'
    parts = var_name.split('/')
    parts[0] = '_'.join(parts[0].split('_')[:-1])  # drop the trailing counter TensorFlow appends to layer names
    parts[-1] = parts[-1][:-len(':x')]             # strip the ':0' output index, which is not needed either
    return '/'.join(parts)
def tf_write_scalars(writer, scalars, step):
    """Writes multiple scalar summaries at once.

    Usage example:

        _recorded_scalars_ = ['loss', 'recon', 'kl']
        tf_write_scalars(train_summary_writer, zip(_recorded_scalars_, train_reports[-1]), step=epoch)
    """
    with writer.as_default(), tfc.summary.always_record_summaries():
        for n, v in scalars:
            tfc.summary.scalar(n, v, step=step)
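
Putting the two helpers together, a training loop might look roughly like the sketch below. This assumes TF 1.x with eager execution enabled and `tfc` bound to `tensorflow.contrib`; the `model`, `loss_fn`, `batch` and `n_epochs` names are placeholders for your own training code, not part of this gist.

# A minimal end-to-end sketch (assumptions: TF 1.x eager execution; `model`,
# `loss_fn`, `batch` and `n_epochs` are hypothetical stand-ins).
import tensorflow as tf
import tensorflow.contrib as tfc

tf.enable_eager_execution()

train_summary_writer = tfc.summary.create_file_writer('logs/train', flush_millis=10000)
gradients_writer = tfc.summary.create_file_writer('logs/gradients', flush_millis=10000)
optimizer = tf.train.AdamOptimizer()

for epoch in range(n_epochs):
    with tf.GradientTape() as tape:
        loss, recon, kl = loss_fn(model, batch)  # hypothetical loss terms
    gradients = tape.gradient(loss, model.trainable_variables)
    optimizer.apply_gradients(zip(gradients, model.trainable_variables))

    # scalar summaries via the helper above
    tf_write_scalars(train_summary_writer,
                     zip(['loss', 'recon', 'kl'], [loss, recon, kl]),
                     step=epoch)

    # gradient histograms, keyed by the cleaned variable names
    with gradients_writer.as_default(), tfc.summary.always_record_summaries():
        for g, var_name in zip(gradients,
                               (tf_clean_variable_name(v.name) for v in model.trainable_variables)):
            tfc.summary.histogram(f'gradient_{var_name}', g, step=epoch)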