# toy example from https://www.kdnuggets.com/2017/04/simple-understand-gradient-descent-algorithm.html
import numpy as np
import random
import matplotlib.pyplot as plt

# Separate learning rates: the gradient for m is scaled by x (square footage,
# roughly 1000-2500), so it needs a much smaller step than the gradient for b.
LEARNING_RATE_M = 1e-8
LEARNING_RATE_B = 1e-2

# sq ft, price
HISTORIC_DATA = [
    [1400, 245000],
    [1600, 312000],
    [1700, 279000],
    [1875, 308000],
    [1100, 199000],
    [1550, 219000],
    [2350, 405000],
    [2450, 324000],
    [1425, 319000],
    [1700, 255000]]

N = len(HISTORIC_DATA)  # number of training examples

def determine_error(m, b):
    """Mean squared error (with a 1/2 factor) of the fit y_hat = m*x + b."""
    error_sum = 0
    for x, y in HISTORIC_DATA:
        y_hat = m * x + b
        error_sum += .5 * (y_hat - y) ** 2
    return error_sum / float(N)

def calculate_gradients(m, b):
    """Average gradients of the error with respect to m and b."""
    grad_m = 0
    grad_b = 0
    for x, y in HISTORIC_DATA:
        y_hat = m * x + b
        grad_b += y_hat - y        # d/db of .5 * (y_hat - y) ** 2
        grad_m += (y_hat - y) * x  # d/dm of .5 * (y_hat - y) ** 2
    grad_m = grad_m / N
    grad_b = grad_b / N
    return grad_m, grad_b
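
# Worked derivation of the gradients accumulated above:
#   E(m, b) = (1/N) * sum_i 0.5 * (m*x_i + b - y_i)^2
#   dE/dm   = (1/N) * sum_i (m*x_i + b - y_i) * x_i
#   dE/db   = (1/N) * sum_i (m*x_i + b - y_i)
# calculate_gradients sums the per-example terms and divides by N at the end.
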
# f(x) = mx + b
def main():
    m = random.randint(1, 100)  # initial slope
    b = random.randint(20, 25)  # initial bias (intercept)

    plt.figure(figsize=(6, 10))  # tall figure; snapshots go into a 10x1 grid below
    plot = 1

    hd = np.array(HISTORIC_DATA)
    x = hd[:, 0]
    y = hd[:, 1]

    for i in range(40000):
        total_error = determine_error(m, b)
        # print("total error: {}. m:{}, b:{}".format(total_error, m, b))
        grad_m, grad_b = calculate_gradients(m, b)
        # print("{},{}".format(grad_m, grad_b))

        # gradient-descent step for each parameter
        b = b - LEARNING_RATE_B * grad_b
        m = m - LEARNING_RATE_M * grad_m

        # snapshot the fit every 4000 iterations (10 panels in total)
        if i % 4000 == 0:
            plt.subplot(10, 1, plot)
            plt.scatter(x, y)
            # plots ground-truth line of best fit (closed-form least squares):
            plt.plot(np.unique(x), np.poly1d(np.polyfit(x, y, 1))(np.unique(x)))
            # plots current estimated line of best fit:
            plt.plot([1000, 3000], [m * 1000 + b, m * 3000 + b])
            plot += 1

    print("m: {}, b: {}".format(m, b))
    plt.show()
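
# For reference, a minimal sketch that prints numpy's closed-form least-squares
# fit for the same HISTORIC_DATA, so the m and b printed by main() can be
# sanity-checked against it. np.polyfit(x, y, 1) returns coefficients highest
# degree first, i.e. [slope, intercept]; the function name below is illustrative.
def print_polyfit_baseline():
    hd = np.array(HISTORIC_DATA)
    slope, intercept = np.polyfit(hd[:, 0], hd[:, 1], 1)
    print("np.polyfit baseline -> m: {}, b: {}".format(slope, intercept))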

if __name__ == '__main__':
    main()