@emilemathieu
Created August 9, 2018 14:52
import numpy as np

class CrossEntropyLoss(object):
    """Softmax cross-entropy loss computed directly from raw class scores (logits)."""

    def __call__(self, Y, labels):
        # Mean negative log-likelihood: -y[c] + log(sum(exp(y))) for each sample
        loss = 0
        for i, y in enumerate(Y):
            loss += -y[labels[i]] + np.log(np.sum(np.exp(y)))
        return loss / len(labels)

    def grad(self, Y, labels):
        # Gradient w.r.t. the logits: softmax(y) minus the one-hot target.
        # Note: unlike __call__, this is not averaged over the batch.
        output_grad = np.empty_like(Y)
        for i, y in enumerate(Y):
            output_grad[i, :] = np.exp(y) / np.sum(np.exp(y))
            output_grad[i, labels[i]] -= 1
        return output_grad
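A minimal usage sketch (not part of the original gist), assuming Y holds one row of raw class scores per sample and labels holds the integer class index of each sample:

criterion = CrossEntropyLoss()
Y = np.array([[2.0, 1.0, 0.1],
              [0.5, 2.5, 0.3]])    # raw scores for 2 samples, 3 classes
labels = np.array([0, 1])          # ground-truth class index per sample
loss = criterion(Y, labels)        # scalar mean cross-entropy
dY = criterion.grad(Y, labels)     # same shape as Y: softmax(Y) - one_hot(labels)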