@jogonba2
Created June 22, 2015 11:35
Gradient descent for linear regression (One variable)
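For reference (this summary is not part of the original gist): the script fits the hypothesis h(x) = theta0 + theta1*x to m training pairs (x, y) by minimizing the mean squared error J(theta0, theta1) = (1/(2m)) * sum((h(x) - y)^2). Each iteration applies the simultaneous updates theta0 <- theta0 - alpha*(1/m)*sum(h(x) - y) and theta1 <- theta1 - alpha*(1/m)*sum((h(x) - y)*x), which is exactly what derivative_0 and derivative_1 below compute.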
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# GradientDescent.py
#
# Copyright 2015 Overxflow
# Hypothesis: a straight line h(x) = theta0 + theta1*x.
def h0(sample,theta0,theta1): return theta0+sample*theta1

# Partial derivative of the MSE cost with respect to theta0.
def derivative_0(train_samples,theta0,theta1): return (1.0/len(train_samples))*sum([h0(sample[0],theta0,theta1)-sample[1] for sample in train_samples])

# Partial derivative of the MSE cost with respect to theta1.
def derivative_1(train_samples,theta0,theta1): return (1.0/len(train_samples))*sum([(h0(sample[0],theta0,theta1)-sample[1])*sample[0] for sample in train_samples])
def train(train_samples,theta0,theta1,alpha,it):
    """Batch gradient descent: stop when the parameters stop changing or after `it` iterations."""
    ant_theta0,ant_theta1,i = float('inf'),float('inf'),0
    while (ant_theta0!=theta0 or ant_theta1!=theta1) and i<it:
        # Remember the previous parameters before updating, so the convergence
        # test above compares old and new values.
        ant_theta0,ant_theta1 = theta0,theta1
        # Simultaneous update of both parameters.
        temp0,temp1 = theta0 - alpha*derivative_0(train_samples,theta0,theta1),theta1 - alpha*derivative_1(train_samples,theta0,theta1)
        theta0,theta1 = temp0,temp1
        i += 1
    return (theta0,theta1)

def predict(test_sample,theta0,theta1): return h0(test_sample,theta0,theta1)
"""if __name__ == '__main__':
train_samples = [(1,127),(2,325),(3,459),(4,720)]
(theta0,theta1) = train(train_samples,900,-0.1,0.1,100000)
print predict(2.5,theta0,theta1)
"""