L operator and R operator in TensorFlow 2
"""This is a conversion of https://gist.github.com/yang-song/07392ed7d57a92a87968e774aef96762
to Tensorflow 2 using GradientTape
"""
import tensorflow as tf
@tf.function
def gradients(f, x, tape, grad_ys=None):
    '''
    An easier way of computing gradients in TensorFlow. The differences from tf.gradients are:
    * If f is not connected to x in the graph, it outputs zeros instead of Nones. This is more meaningful
      for computing higher-order gradients.
    * The output has the same shape and type as x. If x is a list, the output is a list. If x is a Tensor,
      the output is a tensor as well.
    :param f: A `Tensor` or a list of tensors to be differentiated
    :param x: A `Tensor` or a list of tensors to be used for differentiation
    :param tape: A `GradientTape` object that has recorded the computation of `f`, with `x` watched
    :param grad_ys: Optional. A `Tensor` or a list of tensors having exactly the same shape and type as `f`,
        holding the gradients computed for each of `f`.
    :return: A `Tensor` or a list of tensors having the same shape and type as `x`
    '''
    if isinstance(x, list):
        grad = tape.gradient(f, x, output_gradients=grad_ys)
        for i in range(len(x)):
            if grad[i] is None:
                grad[i] = tf.zeros_like(x[i])
        return grad
    else:
        grad = tape.gradient(f, x, output_gradients=grad_ys)
        if grad is None:
            return tf.zeros_like(x)
        else:
            return grad
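
def _sketch_zeros_for_disconnected():
    # A minimal, self-contained sketch (not part of the original gist) of the
    # behaviour `gradients` wraps: with a plain tape, the gradient w.r.t. an
    # input that does not influence `f` is None, and replacing it with zeros
    # keeps shapes usable for higher-order work. The toy values are illustrative.
    x = tf.constant(1.0)
    y = tf.constant(2.0)
    with tf.GradientTape() as tape:
        tape.watch([x, y])
        f = x * x  # f does not depend on y
    dx, dy = tape.gradient(f, [x, y])  # dy comes back as None
    dy = tf.zeros_like(y) if dy is None else dy
    # Note: tf.GradientTape.gradient also accepts
    # unconnected_gradients=tf.UnconnectedGradients.ZERO for the same effect.
    return dx, dy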
@tf.function
def Lop(f, x, v, tape):
    '''
    Compute the vector-Jacobian product. The result is v^T @ J_x.
    :param f: A `Tensor` or a list of tensors for computing the Jacobian J_x
    :param x: A `Tensor` or a list of tensors with respect to which the Jacobian is computed
    :param v: A `Tensor` or a list of tensors having the same shape and type as `f`
    :param tape: A `GradientTape` object that has recorded the computation of `f`, with `x` watched
    :return: A `Tensor` or a list of tensors having the same shape and type as `x`
    '''
    assert not isinstance(f, list) or isinstance(v, list), "f and v should be of the same type"
    return gradients(f, x, tape=tape, grad_ys=v)
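
def _sketch_vjp_with_plain_tape():
    # A minimal sketch of the vector-Jacobian product that Lop computes, written
    # directly with a GradientTape rather than through Lop; the function name and
    # the toy computation below are illustrative only.
    x = tf.constant([1.0, 2.0, 3.0])
    with tf.GradientTape() as tape:
        tape.watch(x)
        f = tf.sin(x) * x  # element-wise, so J_x = diag(x * cos(x) + sin(x))
    v = tf.constant([0.1, 0.2, 0.3])  # same shape as f
    # Seeding the backward pass with v yields v^T @ J_x.
    return tape.gradient(f, x, output_gradients=v)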
@tf.function
def Rop(f, x, v):
    '''
    Compute the Jacobian-vector product. The result is J_x @ v.
    The method is inspired by [this blog post](https://j-towns.github.io/2017/06/12/A-new-trick.html).
    :param f: A `Tensor` or a list of tensors for computing the Jacobian J_x
    :param x: A `Tensor` or a list of tensors with respect to which the Jacobian is computed
    :param v: A `Tensor` or a list of tensors having the same shape and type as `x`
    :return: A `Tensor` or a list of tensors having the same shape and type as `f`
    '''
    assert not isinstance(x, list) or isinstance(v, list), "x and v should be of the same type"
    with tf.GradientTape(persistent=True) as g:
        if isinstance(f, list):
            w = [tf.ones_like(_) for _ in f]
        else:
            w = tf.ones_like(f)
        g.watch(w)
        g.watch(x)
        # w^T @ J_x is linear in w, so differentiating it with respect to w and
        # seeding with v yields J_x @ v.
        return gradients(Lop(f, x, w, tape=g), w, tape=g, grad_ys=v)
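
def _sketch_jvp_with_nested_tapes():
    # A minimal sketch of the double-backward trick Rop relies on, written with
    # nested GradientTapes rather than through Rop; the function name and the toy
    # computation below are illustrative only.
    x = tf.constant([1.0, 2.0, 3.0])
    v = tf.constant([0.1, 0.2, 0.3])  # same shape as x
    with tf.GradientTape() as outer:
        with tf.GradientTape() as inner:
            inner.watch(x)
            f = tf.sin(x) * x
        w = tf.ones_like(f)  # dummy cotangent, same shape as f
        outer.watch(w)
        # The backward pass seeded with w computes w^T @ J_x; it is linear in w
        # and its ops are recorded on the outer tape.
        vjp = inner.gradient(f, x, output_gradients=w)
    # Differentiating w^T @ J_x with respect to w, seeded with v, gives J_x @ v.
    return outer.gradient(vjp, w, output_gradients=v)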