Skip to content

Instantly share code, notes, and snippets.

@OneRaynyDay
Created February 8, 2016 19:21
Show Gist options
  • Save OneRaynyDay/77f184f0316050cf4d48 to your computer and use it in GitHub Desktop.
"""Softmax."""
scores = [2.0, 1.0, 0.1]
import numpy as np
def softmax(x):
    """Compute softmax values for each set of scores in x.

    For a 1-D input, returns the softmax of the whole vector.  For a
    2-D input, each column is treated as one set of scores and softmax
    is taken along axis 0 (so every column of the result sums to 1).

    Subtracting the per-column maximum before exponentiating leaves the
    result mathematically unchanged but prevents np.exp from
    overflowing for large scores.

    Args:
        x: array-like of scores (1-D vector or 2-D column-wise matrix).

    Returns:
        np.ndarray of the same shape as x, entries in (0, 1), with each
        column (or the whole vector, if 1-D) summing to 1.
    """
    x = np.asarray(x, dtype=float)
    # Numerical-stability shift: exp(x - max) never overflows.
    e = np.exp(x - np.max(x, axis=0))
    return e / np.sum(e, axis=0)
print(softmax(scores))

# Plot softmax curves: sweep the first score from -2 to 6 while the
# other two stay fixed at 1.0 and 0.2, showing how the three
# probabilities trade off against each other.
import matplotlib.pyplot as plt

x = np.arange(-2.0, 6.0, 0.1)
print(x)
ones = np.ones_like(x)
scores = np.vstack([x, ones, 0.2 * ones])
plt.plot(x, softmax(scores).T, linewidth=2)
plt.show()
@OneRaynyDay
Copy link
Author

A little softmax function for ML lecture.

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment