jacobian_hessian.py — @Bigpig4396, forked from apaszke/jacobian_hessian.py, created September 19, 2021
import torch


def jacobian(y, x, create_graph=False):
    """Compute dy/dx, returned with shape y.shape + x.shape."""
    jac = []
    flat_y = y.reshape(-1)
    grad_y = torch.zeros_like(flat_y)
    for i in range(len(flat_y)):
        # Back-propagate a one-hot seed vector to recover one row of the Jacobian at a time.
        grad_y[i] = 1.
        grad_x, = torch.autograd.grad(flat_y, x, grad_y, retain_graph=True,
                                      create_graph=create_graph)
        jac.append(grad_x.reshape(x.shape))
        grad_y[i] = 0.
    return torch.stack(jac).reshape(y.shape + x.shape)


def hessian(y, x):
    # The Hessian is the Jacobian of the Jacobian; create_graph=True keeps the
    # first-order graph so it can be differentiated a second time.
    return jacobian(jacobian(y, x, create_graph=True), x)


def f(x):
    return x * x * torch.arange(4, dtype=torch.float)


x = torch.ones(4, requires_grad=True)
print(jacobian(f(x), x))
print(hessian(f(x), x))
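
As a quick sanity check (an illustrative sketch, not part of the original gist: it assumes the jacobian, hessian, f, and x defined above, and the names idx, expected_jac, and expected_hess are introduced here): for this f we have f_i(x) = i * x_i**2, so at x = ones the Jacobian is diag([0, 2, 4, 6]) and the Hessian carries 2*i at its (i, i, i) entries.

# Analytic check against the autograd results (assumes the definitions above).
idx = torch.arange(4, dtype=torch.float)
expected_jac = torch.diag(2.0 * idx * x.detach())        # dF_i/dx_i = 2 * i * x_i on the diagonal
expected_hess = torch.zeros(4, 4, 4)
d = torch.arange(4)
expected_hess[d, d, d] = 2.0 * idx                       # d2F_i/dx_i^2 = 2 * i on the triple diagonal
assert torch.allclose(jacobian(f(x), x), expected_jac)
assert torch.allclose(hessian(f(x), x), expected_hess)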