Skip to content

Instantly share code, notes, and snippets.

What would you like to do?
How to use autograd inside TensorFlow
import tensorflow as tf
import autograd.numpy as np
from autograd import grad
from tensorflow.python.framework import function
# Reproducible test input: a fixed-seed 4x4 float32 matrix.
rng = np.random.RandomState(42)
x_np = rng.randn(4, 4).astype(np.float32)

# Keep the variable on CPU; the py_func ops used below execute the
# wrapped Python functions on the CPU as well.
with tf.device('/cpu:0'):
    x = tf.Variable(x_np)
def tf_loss(a):
    """Reference TensorFlow loss: sum of squared entries, sum(a**2)."""
    return tf.reduce_sum(tf.square(a))
def np_loss(a):
    """Numpy version of the loss, scaled by 2: returns 2 * sum(a**2).

    Written with autograd.numpy primitives so `autograd.grad` can
    differentiate it; the extra factor of 2 makes its value (and
    gradient) visibly different from `tf_loss` when the two are
    compared side by side.
    """
    return np.array(2.).astype(np.float32) * np.square(a).sum()
# Autograd differentiates the numpy loss: d/da [2*sum(a^2)] = 4*a.
grad_np_loss = grad(np_loss)

# Reference TF loss and its symbolic gradient (d/dx sum(x^2) = 2*x).
l = tf_loss(x)
g = tf.gradients(l, x)

# Wrap the numpy loss and its autograd gradient as graph ops via
# py_func, so both can be evaluated inside a TF session.
with tf.device('/cpu:0'):
    np_in_tf = tf.py_func(np_loss, [x], tf.float32)
    npgrad_in_tf = tf.py_func(grad_np_loss, [x], tf.float32)
def op_grad(x, grad):
    """Custom gradient hook: returns the autograd gradient of the numpy
    loss, evaluated through py_func.

    NOTE(review): the `grad` parameter shadows the `grad` imported from
    autograd, and the incoming upstream gradient is ignored rather than
    chained — presumably intentional for this demo; confirm if reused.
    """
    return [tf.py_func(grad_np_loss, [x], tf.float32)]
def tf_replaced_grad_loss(a):
    """TF loss intended to carry the numpy/autograd gradient.

    NOTE(review): a `@function.Defun(tf.float32, python_grad_func=op_grad)`
    decorator appears to have been lost in this paste (the `function`
    import on the import block and `op_grad` above are otherwise unused)
    — without it this is identical to `tf_loss`. TODO: confirm against
    the original gist.
    """
    return tf_loss(a)
# Gradient of the (intended) replaced-gradient loss w.r.t. x.
with tf.device('/cpu:0'):
    tf_np_grad = tf.gradients(tf_replaced_grad_loss(x), x)

with tf.Session() as sess:
    # NOTE(review): the sess.run(...) calls that initialized the
    # variable and evaluated each gradient appear to have been lost in
    # this paste — only the print labels remain. TODO: restore
    # sess.run(tf.global_variables_initializer()) and a sess.run(...)
    # after each label (g, grad via py_func, npgrad_in_tf, tf_np_grad).
    print("Tensorflow gradient:\n")
    print("\nNumpy gradient (should be 2 times tf version):\n")
    print("\nNumpy gradient evaluated in Tensorflow:\n")
    print("\nNumpy gradient put in Tensorflow graph:\n")
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment