Skip to content

Instantly share code, notes, and snippets.

@random-forests random-forests/ Secret
Created Mar 5, 2019

What would you like to do?
def train_one_step(model, optimizer, x, y):
    """Run one optimization step on a single batch.

    Records the forward pass on a ``tf.GradientTape``, computes the loss,
    applies gradients to all trainable variables, and updates the running
    accuracy metric as a side effect.

    Args:
        model: Callable Keras model mapping a batch ``x`` to logits.
        optimizer: ``tf.keras.optimizers`` instance used to apply gradients.
        x: Input batch (e.g. images).
        y: Ground-truth labels for ``x``.

    Returns:
        The scalar loss tensor for this batch.
    """
    with tf.GradientTape() as tape:
        logits = model(x)
        loss = compute_loss(y, logits)
    # Gradients are taken outside the tape context; recording is complete.
    grads = tape.gradient(loss, model.trainable_variables)
    optimizer.apply_gradients(zip(grads, model.trainable_variables))
    # Side effect only: updates the stateful accuracy metric.
    compute_accuracy(y, logits)
    return loss
def train(model, optimizer):
    """Train ``model`` for one pass over the MNIST dataset.

    Iterates the dataset returned by ``mnist_dataset()``, running one
    optimization step per batch and logging loss/accuracy every 10 steps.

    Args:
        model: Callable Keras model passed through to ``train_one_step``.
        optimizer: Optimizer passed through to ``train_one_step``.

    Returns:
        Tuple ``(step, loss, accuracy)``: the number of steps taken, the
        loss of the final batch, and the final accumulated accuracy from
        the ``compute_accuracy`` metric.
    """
    train_ds = mnist_dataset()
    step = 0
    loss = 0.0
    for x, y in train_ds:
        step += 1
        loss = train_one_step(model, optimizer, x, y)
        if tf.equal(step % 10, 0):
            tf.print('Step', step, ': loss',
                     loss, '; accuracy', compute_accuracy.result())
    # BUG FIX: the original returned an unbound name ``accuracy`` (NameError).
    # Bind it from the stateful metric, consistent with the logging above.
    accuracy = compute_accuracy.result()
    return step, loss, accuracy
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
You can’t perform that action at this time.