Multivariable linear regression
# -*- coding: utf-8 -*-
"""
MIT License
Created on Wed Feb 11 13:05:36 2015
@author: xevaquor
"""
import numpy as np
# Design matrix: each row is [1 (bias term), x1, x2]
TrainingData = np.array([
    [1., 1.,  4.5],
    [1., 1.,  3. ],
    [1., 1.,  4. ],
    [1., 2.,  3.5],
    [1., 2.,  5. ],
    [1., 2.,  3. ],
    [1., 3.,  3. ],
    [1., 3.,  4.5],
    [1., 3.,  4. ],
    [1., 4.,  3. ],
    [1., 4.,  3.5],
    [1., 4.,  5. ],
    [1., 5.,  4. ],
    [1., 5.,  3. ],
    [1., 5.,  3.5],
    [1., 6.,  4.5],
    [1., 6.,  4.5],
    [1., 6.,  4.5],
    [1., 7.,  4. ],
    [1., 7.,  3. ],
    [1., 7.,  5. ],
    [1., 8.,  3. ],
    [1., 8.,  4. ],
    [1., 8.,  3.5],
    [1., 9.,  3. ],
    [1., 9.,  4. ],
    [1., 10., 4.5],
    [1., 10., 5. ],
    [1., 10., 3. ]])
m, n = TrainingData.shape  # m training examples, n features (including the bias column)
alpha = 0.01               # learning rate
epsilon = 0.001            # convergence threshold on the gradient
# Target value for each training example (one label per row of TrainingData)
Y = [8, 5, 7, 6, 8, 6, 4, 6, 7, 5, 6, 7, 5, 3, 4, 7, 6, 8, 6, 4, 6, 1, 4, 3, 2, 5, 5, 6, 0]
def h(Theta, X):
    """Hypothesis: linear combination of the features, h(x) = theta^T x."""
    return Theta.transpose().dot(X)


def Jpartial(j, Theta):
    """Partial derivative of the mean squared error cost with respect to theta_j."""
    total = 0
    for i in range(m):
        total += (h(Theta, TrainingData[i, :]) - Y[i]) * TrainingData[i][j]
    return 2 * total / m


def gradient_descent(startTheta):
    theta = startTheta.copy()  # copy so the caller's array is not modified in place
    converge = False
    i = 0
    while not converge:
        # Compute the full gradient before updating any parameter
        differentials = np.zeros(n)
        for j in range(n):
            differentials[j] = Jpartial(j, theta)
        for j in range(n):
            theta[j] -= alpha * differentials[j]
        if i % 500 == 0:
            print(np.amax(differentials))
        # Converged when every gradient component is small in absolute value
        converge = np.amax(np.abs(differentials)) < epsilon
        i += 1
    return theta
startT = np.array([1., 1., 1.])
t = gradient_descent(startT)
print(t)
# Predictions for a few inputs of the form [1 (bias), x1, x2]
print(h(t, [1., 0., 5.]))
print(h(t, [1., 10., 3.]))
print(h(t, [1., 2., 3.5]))
print(h(t, [1., 5., 3.5]))
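
As a quick cross-check of the gradient-descent result (an addition, not part of the original gist), the same least-squares coefficients can be obtained in closed form; a minimal sketch reusing TrainingData and Y from above:

# Closed-form least-squares fit for comparison (solves the normal equations via lstsq).
# Assumes the TrainingData matrix and Y list defined above.
X = TrainingData
y = np.array(Y, dtype=float)
theta_exact, *_ = np.linalg.lstsq(X, y, rcond=None)
print(theta_exact)                    # should be close to the theta found by gradient_descent
print(theta_exact.dot([1., 0., 5.]))  # same sample prediction as above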