@cosmic-cortex, created October 23, 2019 09:20
import numpy as np


class CrossEntropyLoss(Loss):
    def forward(self, X, y):
        """
        Computes the cross-entropy loss of X with respect to y.

        Args:
            X: numpy.ndarray of shape (n_batch, n_dim).
            y: numpy.ndarray of shape (n_batch, 1). Should contain class labels
                for each data point in X.

        Returns:
            crossentropy_loss: numpy.float64. Cross-entropy loss of X with
                respect to y.
        """
        # softmax of X; subtracting the row-wise max keeps np.exp from overflowing
        exp_x = np.exp(X - np.max(X, axis=1, keepdims=True))
        probs = exp_x / np.sum(exp_x, axis=1, keepdims=True)
        # negative log-probability of the true class for each data point
        log_probs = -np.log([probs[i, y[i]] for i in range(len(probs))])
        crossentropy_loss = np.mean(log_probs)
        # caching for backprop
        self.cache['probs'] = probs
        self.cache['y'] = y
        return crossentropy_loss

    def local_grad(self, X, Y):
        # gradient of softmax + cross-entropy w.r.t. X: (probs - one_hot) / n_batch
        probs = self.cache['probs']
        # one-hot encode the true class labels
        ones = np.zeros_like(probs)
        for row_idx, col_idx in enumerate(Y):
            ones[row_idx, col_idx] = 1.0
        grads = {'X': (probs - ones) / float(len(X))}
        return grads
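
The gist does not include the Loss base class that CrossEntropyLoss inherits from. Below is a hypothetical minimal stand-in (the real base class in the author's repository may differ), assumed here to do nothing more than initialize the self.cache dictionary used above, followed by a usage sketch with made-up data. Note that Loss would need to be defined before CrossEntropyLoss.

# Hypothetical minimal base class; assumed, not taken from the gist.
class Loss:
    def __init__(self):
        self.cache = {}  # holds values saved in forward() for use in local_grad()


# Usage sketch with made-up data: 4 samples, 3 classes.
loss_fn = CrossEntropyLoss()
X = np.random.randn(4, 3)                 # raw scores (logits), shape (n_batch, n_dim)
y = np.random.randint(0, 3, size=(4, 1))  # integer class labels, shape (n_batch, 1)
loss = loss_fn.forward(X, y)              # scalar cross-entropy loss
grads = loss_fn.local_grad(X, y)          # {'X': (probs - one_hot) / n_batch}
print(loss, grads['X'].shape)             # e.g. 1.23... (4, 3)

Caching probs in forward is what lets local_grad reuse the softmax output during the backward pass instead of recomputing it.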