@cosmic-cortex · Created Oct 23, 2019
class Loss(Function):
    def forward(self, X, Y):
        """
        Computes the loss of X with respect to Y.

        Args:
            X: numpy.ndarray of shape (n_batch, n_dim).
            Y: numpy.ndarray of shape (n_batch, n_dim).

        Returns:
            loss: numpy.float.
        """
        pass

    def backward(self):
        """
        Backward pass for the loss function. Since it should be the final layer
        of an architecture, no input is needed for the backward pass.

        Returns:
            gradX: numpy.ndarray of shape (n_batch, n_dim). Local gradient of the loss.
        """
        return self.grad['X']

    def local_grad(self, X, Y):
        """
        Local gradient with respect to X at (X, Y).

        Args:
            X: numpy.ndarray of shape (n_batch, n_dim).
            Y: numpy.ndarray of shape (n_batch, n_dim).

        Returns:
            gradX: numpy.ndarray of shape (n_batch, n_dim).
        """
        pass
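
To make the interface concrete, here is a minimal sketch of a subclass. The Function base class is not part of this gist, so the stub below only assumes one contract: calling the object runs forward() and caches the dictionary returned by local_grad() in self.grad, which is what backward() reads. Both the Function stub and MeanSquaredLoss are illustrative assumptions, not the project's actual implementation; in the real code, Function would be defined (or imported) before the Loss class above.

import numpy as np

class Function:
    """Illustrative stand-in for the base class used above (assumption, not shown
    in this gist). Calling the object runs the forward pass and caches the local
    gradients so that backward() can return them."""
    def __init__(self):
        self.grad = {}

    def __call__(self, *args, **kwargs):
        output = self.forward(*args, **kwargs)
        self.grad = self.local_grad(*args, **kwargs)
        return output

class MeanSquaredLoss(Loss):
    """Hypothetical concrete loss: squared error summed over dimensions,
    averaged over the batch."""
    def forward(self, X, Y):
        # Per-sample sum of squared errors, averaged over the batch -> scalar.
        return np.mean(np.sum((X - Y) ** 2, axis=1))

    def local_grad(self, X, Y):
        # Gradient of the batch-averaged squared error with respect to X,
        # shape (n_batch, n_dim), matching the contract of Loss.local_grad.
        return {'X': 2 * (X - Y) / X.shape[0]}

Usage sketch:

X = np.random.rand(16, 4)     # predictions, shape (n_batch, n_dim)
Y = np.random.rand(16, 4)     # targets, same shape
loss_fn = MeanSquaredLoss()
loss = loss_fn(X, Y)          # forward pass; local gradients are cached
gradX = loss_fn.backward()    # gradient of the loss w.r.t. X, shape (16, 4)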