Skip to content

Instantly share code, notes, and snippets.

@adityathakker
Last active May 8, 2017 14:58
Show Gist options
  • Save adityathakker/eef193500e7b15bf0e33a4dbc3f88bb4 to your computer and use it in GitHub Desktop.
from __future__ import division
import numpy as np
import matplotlib.pyplot as plt
import math
# Load the comma-separated training set: column 0 = feature, column 1 = target.
data = np.genfromtxt('input.txt', delimiter=',')
y = data[:, 1]
m = y.shape[0]  # number of training examples
# Design matrix: feature column plus a bias column of ones -> shape (m, 2).
x = np.column_stack((data[:, 0], np.ones(m)))
def cost(x, y, theta=None):
    """Return the linear-regression cost J = 1/(2m) * sum((x @ theta - y)^2).

    Parameters
    ----------
    x : ndarray, shape (m, 2)
        Design matrix; column 0 is the feature, column 1 is the bias (ones).
    y : ndarray, shape (m,)
        Target values.
    theta : ndarray, shape (2, 1), optional
        Parameter column vector; defaults to zeros.

    Returns
    -------
    numpy float scalar
        The mean-squared-error cost.
    """
    # np.zeros((2, 1)) was a mutable default argument (one shared array for
    # every call); build a fresh one per call instead.
    if theta is None:
        theta = np.zeros((2, 1))
    # Derive the sample count locally rather than reading the module-global
    # `m`, so the function also works on data of any length.
    m = y.shape[0]
    residuals = x.dot(theta).flatten() - y
    return np.sum(residuals ** 2) / (2 * m)
def gradient_desc(x, y, theta=None, alpha=.01, iterations=15000):
    """Fit linear-regression parameters by batch gradient descent.

    Parameters
    ----------
    x : ndarray, shape (m, 2)
        Design matrix; column 0 is the feature, column 1 is the bias (ones).
    y : ndarray, shape (m,)
        Target values.
    theta : ndarray, shape (2, 1), optional
        Starting parameters; defaults to zeros. Updated in place and returned.
    alpha : float
        Learning rate.
    iterations : int
        Number of full-batch update steps.

    Returns
    -------
    ndarray, shape (2, 1)
        The fitted parameter vector (same array object as `theta` if given).
    """
    # np.zeros((2, 1)) was a mutable default argument AND was mutated in
    # place below, so every default-argument call silently resumed from the
    # previous call's converged parameters. Create fresh zeros per call.
    if theta is None:
        theta = np.zeros((2, 1))
    # Local sample count instead of the module-global `m`.
    m = y.shape[0]
    for step in range(iterations):
        errors = x.dot(theta).flatten() - y
        # Simultaneous update: compute both new values before assigning.
        a = theta[0][0] - alpha * (1 / m) * np.sum(errors * x[:, 0])
        b = theta[1][0] - alpha * (1 / m) * np.sum(errors * x[:, 1])
        theta[0][0], theta[1][0] = a, b
        if step % 1000 == 0:
            # Cost after the update, same value/format the original printed
            # via cost(); inlined so this block is self-contained.
            j = np.sum((x.dot(theta).flatten() - y) ** 2) / (2 * m)
            print('Iteration: %s Cost: %s' % (step, j))
    return theta
# Fit the model on the loaded data and report the learned parameters:
# theta is a (2, 1) column vector [slope; intercept].
theta = gradient_desc(x, y)
print(theta)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment