@vene · Created May 23, 2019 19:16
Relationship between (soft)max and (soft)argmax
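max and argmax each have a smooth counterpart: logsumexp is a smooth approximation of max, and softmax is a smooth approximation of the one-hot argmax indicator (a "soft" argmax). Differentiation ties the pairs together: wherever the max is unique, the gradient of max(x) is exactly the one-hot indicator of the argmax, and the gradient of logsumexp(x) is exactly softmax(x). The script below verifies both gradients with central finite differences.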
import numpy as np


def numeric_grad(f, x, eps=1e-3):
    """Central finite-difference approximation of the gradient of f at x."""
    grad = np.zeros_like(x)
    for i in range(x.shape[0]):
        v = np.zeros_like(x)
        v[i] = 1
        grad[i] = f(x + eps * v) - f(x - eps * v)
        grad[i] /= 2 * eps
    return grad


def logsumexp(x):
    """Smooth approximation of max(x)."""
    return np.log(np.sum(np.exp(x)))


def softmax(x):
    """Smooth approximation of the one-hot argmax; the gradient of logsumexp."""
    return np.exp(x) / np.sum(np.exp(x))


def main():
    np.set_printoptions(precision=3, suppress=True)
    x = np.random.randn(5)
    print("x:", x)
    print("max(x):", np.max(x))
    print("nabla max(x):", numeric_grad(np.max, x))            # one-hot argmax indicator
    print("logsumexp(x):", logsumexp(x))
    print("nabla logsumexp(x):", numeric_grad(logsumexp, x))   # matches softmax(x)
    print("softmax(x):", softmax(x))


if __name__ == '__main__':
    main()
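
As a quick sanity check (not part of the original gist; a sketch assuming the definitions above are in scope), both gradient identities can be asserted with np.allclose. The one-hot comparison is exact only when the largest entry of x is separated from the runner-up by more than eps:

# Hypothetical follow-up check; reuses numeric_grad, logsumexp, softmax from above.
x = np.random.randn(5)
onehot = np.eye(x.shape[0])[np.argmax(x)]  # one-hot indicator of the argmax
# Exact as long as the max is unique and its gap to the runner-up exceeds eps.
assert np.allclose(numeric_grad(np.max, x), onehot, atol=1e-4)
# The finite-difference gradient of logsumexp matches softmax up to O(eps^2).
assert np.allclose(numeric_grad(logsumexp, x), softmax(x), atol=1e-4)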