Skip to content

Instantly share code, notes, and snippets.

@rj76
Created August 7, 2021 14:59
Show Gist options
  • Save rj76/82fab3d4505f32d33cbf8fa28835bed0 to your computer and use it in GitHub Desktop.
gradient descent
# Model parameters for the line h(x) = theta0 + theta1 * x, and the
# learning rate used by the gradient-descent updates below.
theta0 = 0
theta1 = 0
alpha = 0.2

# Training points; each is a {'x': ..., 'y': ...} sample (here from y = x / 2).
dataset = [
    {'x': x, 'y': y}
    for x, y in [(1, 0.5), (2, 1), (4, 2), (0, 0)]
]
def sum_theta0(t0=None, t1=None, data=None):
    """Return the summed prediction error used for the theta0 (intercept) gradient.

    Computes sum_i ((t0 + t1 * x_i) - y_i) over every point in *data*.

    Args:
        t0: Intercept; defaults to the module-level ``theta0``.
        t1: Slope; defaults to the module-level ``theta1``.
        data: List of ``{'x': ..., 'y': ...}`` points; defaults to ``dataset``.

    Returns:
        The sum of residuals over all points (0 for empty data).

    Bug fix: the original ``range(0, len(dataset) - 1)`` silently skipped the
    last data point — ``range``'s stop is already exclusive — even though the
    update step divides by the full ``len(dataset)``. The gradient now sums
    over every point.
    """
    if t0 is None:
        t0 = theta0
    if t1 is None:
        t1 = theta1
    if data is None:
        data = dataset
    return sum((t0 + t1 * p['x']) - p['y'] for p in data)
def sum_theta1(t0=None, t1=None, data=None):
    """Return the x-weighted summed error used for the theta1 (slope) gradient.

    Computes sum_i (((t0 + t1 * x_i) - y_i) * x_i) over every point in *data*.

    Args:
        t0: Intercept; defaults to the module-level ``theta0``.
        t1: Slope; defaults to the module-level ``theta1``.
        data: List of ``{'x': ..., 'y': ...}`` points; defaults to ``dataset``.

    Returns:
        The sum of x-weighted residuals over all points (0 for empty data).

    Bug fix: the original ``range(0, len(dataset) - 1)`` silently skipped the
    last data point — ``range``'s stop is already exclusive — even though the
    update step divides by the full ``len(dataset)``. The gradient now sums
    over every point.
    """
    if t0 is None:
        t0 = theta0
    if t1 is None:
        t1 = theta1
    if data is None:
        data = dataset
    return sum(((t0 + t1 * p['x']) - p['y']) * p['x'] for p in data)
# Batch gradient descent: 99 passes over the data. Note the sequential
# update — theta1's gradient is computed with the freshly updated theta0,
# matching the original script's behavior.
for _ in range(1, 100):
    step = alpha * 1 / (len(dataset))  # learning rate scaled by dataset size
    theta0 -= step * sum_theta0()
    theta1 -= step * sum_theta1()
    print('theta0: %s, theta1: %s' % (theta0, theta1))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment