@jiangnanhugo
Forked from aam-at/fast_jac.py
Created May 17, 2016 14:21
Fast Jacobian computation with scan chunking
# See https://github.com/Theano/Theano/issues/4087 for details and discussion.
import theano
from theano import tensor as T, ifelse
from theano.gof import Variable


def fast_jacobian(expr, wrt, chunk_size=16, func=None):
    """Compute the Jacobian of `expr` with respect to `wrt`, scanning over
    chunks of `chunk_size` rows at a time instead of one row per scan step."""
    assert isinstance(expr, Variable), \
        "tensor.jacobian expects a Variable as `expr`"
    assert expr.ndim < 2, \
        ("tensor.jacobian expects a 1 dimensional variable as "
         "`expr`. If not, use flatten to make it a vector")

    num_chunks = T.ceil(1.0 * expr.shape[0] / chunk_size)
    num_chunks = T.cast(num_chunks, 'int32')
    steps = T.arange(num_chunks)
    remainder = expr.shape[0] % chunk_size

    def chunk_grad(i):
        # Replicate `wrt` so that chunk_size rows of the Jacobian can be
        # obtained from a single call to T.grad with one-hot known_grads.
        wrt_rep = T.tile(wrt, (chunk_size, 1))
        if func is not None:
            expr_rep = func(wrt_rep)
        else:
            expr_rep, _ = theano.scan(
                fn=lambda wrt_: theano.clone(expr, {wrt: wrt_}),
                sequences=wrt_rep)
        # Rolled identity: the k-th replicated row receives the one-hot
        # upstream gradient for output component i * chunk_size + k.
        chunk_expr_grad = T.roll(
            T.identity_like(expr_rep),
            i * chunk_size,
            axis=1)
        return T.grad(cost=None,
                      wrt=wrt_rep,
                      known_grads={expr_rep: chunk_expr_grad})

    grads, _ = theano.scan(chunk_grad, sequences=steps)
    grads = grads.reshape((chunk_size * grads.shape[0], wrt.shape[0]))
    # Trim the extra rows produced when expr.shape[0] is not a multiple
    # of chunk_size.
    jac = ifelse.ifelse(T.eq(remainder, 0), grads, grads[:expr.shape[0], :])
    return jac


def symbolic_func(x):
    """Function should work for a single example and for a batch of examples."""
    return x * x


x = T.vector('x')
y = symbolic_func(x)
jac = fast_jacobian(y, x, func=symbolic_func)
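A minimal usage sketch, not part of the original gist: it compiles the chunked Jacobian above into a callable and evaluates it on a concrete vector, assuming a legacy Theano install (e.g. 1.0.x) where theano.clone and theano.gof are still available. For symbolic_func(x) = x * x the Jacobian is diagonal, so the result should match np.diag(2 * x_val).

# Usage sketch (assumption: legacy Theano, not part of the original gist).
import numpy as np

compute_jac = theano.function([x], jac)          # compile the symbolic Jacobian
x_val = np.arange(1, 6, dtype=theano.config.floatX)
print(compute_jac(x_val))                        # expected: np.diag(2 * x_val)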