import numpy as np
import matplotlib.pyplot as plt
y = np.array([2.85, 1.5, .49, 1.57, 1.9, 0.6, 0.38, 2.33, 1.65, 0.3])
x = np.arange(0, 20, 2)
b = np.ones(3) # 3 parameters, b0-b2
eta = .02 # learning rate
g = lambda x: np.array([1, np.sin(x), np.cos(x)])  # basis functions: h(x) = b0 + b1*sin(x) + b2*cos(x)
print(x)
print(y)
X = np.array([g(a) for a in x])
costs = []
num_params = len(b)
for i in range(100):
    # Prediction: h = X @ b
    h = np.dot(X, b)
    diff = h - y
    # Track the residual sum of squares so convergence can be checked later
    rss = np.sum(diff ** 2)
    costs.append(rss)
    # Gradient of the RSS with respect to b (the constant factor of 2 is absorbed into eta)
    gradient = np.dot(X.T, diff)
    b -= eta * gradient
plt.plot(x, y, label='actual')
plt.plot(x, X.dot(b), label='predicted')
plt.legend()
plt.show()
# Make sure error goes down every iteration
plt.plot(costs, label='costs')
plt.legend()
plt.show()
print(' '.join([f'b{i} = {a}' for i, a in enumerate(b)]))
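# Optional sanity check (an addition, not part of the original gist): the
# closed-form least-squares solution should land close to the coefficients
# found by gradient descent above if the loop has converged.
b_exact, *_ = np.linalg.lstsq(X, y, rcond=None)
print(' '.join([f'b{i} (closed form) = {a}' for i, a in enumerate(b_exact)]))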