TensorFlow Eager Execution
import tensorflow as tf
import numpy as np

tfe = tf.contrib.eager
tf.enable_eager_execution()

# N is batch size; D_in is input dimension; H is hidden dimension; D_out is output dimension.
N, D_in, H, D_out = 64, 1000, 100, 10

# Random input and target data.
x = tf.constant(np.random.rand(N, D_in))
y = tf.constant(np.random.rand(N, D_out))

# Randomly initialized weights for a two-layer ReLU network.
w1 = tfe.Variable(np.random.rand(D_in, H))
w2 = tfe.Variable(np.random.rand(H, D_out))

learning_rate = 1e-6
for i in range(500):
    # Record the forward pass so gradients can be computed afterwards.
    with tf.GradientTape() as tape:
        y_pred = tf.matmul(tf.nn.relu(tf.matmul(x, w1)), w2)
        loss = tf.reduce_mean(tf.reduce_sum((y_pred - y) ** 2, axis=1))
    # Differentiate the loss with respect to the weights and take a gradient descent step.
    dw1, dw2 = tape.gradient(loss, [w1, w2])
    w1.assign_sub(dw1 * learning_rate)
    w2.assign_sub(dw2 * learning_rate)
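
For comparison, the same training loop can be written with one of TensorFlow's built-in optimizers instead of the manual assign_sub updates. This is a minimal sketch under the same TF 1.x eager setup as above, not part of the original gist; it assumes tf.train.GradientDescentOptimizer is available in the installed version.

optimizer = tf.train.GradientDescentOptimizer(learning_rate)

for i in range(500):
    with tf.GradientTape() as tape:
        y_pred = tf.matmul(tf.nn.relu(tf.matmul(x, w1)), w2)
        loss = tf.reduce_mean(tf.reduce_sum((y_pred - y) ** 2, axis=1))
    # apply_gradients pairs each gradient with its variable and performs the descent step.
    grads = tape.gradient(loss, [w1, w2])
    optimizer.apply_gradients(zip(grads, [w1, w2]))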