Skip to content

Instantly share code, notes, and snippets.

@nithyadurai87
Created February 11, 2019 09:52
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save nithyadurai87/43664cacd625e7c290c8812894dca659 to your computer and use it in GitHub Desktop.
# Toy training data for a one-variable linear regression demo.
x = list(range(1, 4))   # inputs: 1, 2, 3
y = list(range(1, 4))   # targets: identical to the inputs
m = len(y)              # number of training samples

# Initial model parameters and learning rate for gradient descent.
theta0 = 1              # intercept
theta1 = 1.5            # slope (the only parameter that gets updated below)
alpha = 0.01            # learning rate
def cost_function(theta0, theta1, xs=None, ys=None):
    """Return the mean-squared-error cost J for the linear hypothesis
    h(x) = theta0 + theta1 * x.

    Parameters
    ----------
    theta0 : float
        Intercept of the hypothesis.
    theta1 : float
        Slope of the hypothesis.
    xs, ys : sequence of numbers, optional
        Inputs and targets. Default to the module-level ``x`` and ``y``
        so existing callers (``cost_function(theta0, theta1)``) keep
        working unchanged.

    Returns
    -------
    float
        J = (1 / (2 * len(ys))) * sum((h(x_i) - y_i) ** 2).
    """
    # Backward-compatible fallback to the module-level data.
    if xs is None:
        xs = x
    if ys is None:
        ys = y
    n = len(ys)
    # Predict from the actual inputs — the original hard-coded 1, 2, 3 here,
    # which gave wrong costs for any other data.
    predicted = [theta0 + theta1 * xi for xi in xs]
    # Use the builtin sum() instead of shadowing it with a local accumulator.
    total = sum((p - t) ** 2 for p, t in zip(predicted, ys))
    return total / (2 * n)
def gradientDescent(x, y, theta1, alpha, theta0=1, iterations=50):
    """Fit the slope ``theta1`` by per-sample gradient descent.

    After each full pass over the data, the current cost J is recorded;
    the smallest cost seen is printed and returned.

    Parameters
    ----------
    x, y : sequence of numbers
        Training inputs and targets.
    theta1 : float
        Initial slope.
    alpha : float
        Learning rate.
    theta0 : float, optional
        Fixed intercept (never updated, as in the original). Defaults to 1,
        the module-level value the original read implicitly.
    iterations : int, optional
        Number of passes over the data (originally hard-coded to 50).

    Returns
    -------
    float
        The minimum cost observed across all passes.
    """
    m = len(y)  # original read the module-level m, which equals len(y)
    J_history = []
    # The original used `i` for both the epoch counter and the sample,
    # shadowing the outer index; use distinct names.
    for _ in range(iterations):
        for xi, yi in zip(x, y):
            # Per-sample gradient of the squared error w.r.t. theta1 only
            # (theta0 is never updated — same as the original).
            delta = (xi * xi * theta1 - xi * yi) / m
            theta1 = theta1 - alpha * delta
        # Cost with the current parameters, computed over the ACTUAL inputs.
        # The original called cost_function(), which hard-coded x = 1, 2, 3.
        predicted = [theta0 + theta1 * xi for xi in x]
        J = sum((p - t) ** 2 for p, t in zip(predicted, y)) / (2 * m)
        J_history.append(J)
    best = min(J_history)
    print(best)   # keep the original side effect
    return best   # new: original returned None; returning the value is backward-compatible
# Run the demo: fit theta1 on the toy data above and print the best cost found.
gradientDescent(x, y, theta1, alpha)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment