@marcelcaraciolo
Created October 28, 2011 03:57
multivariate linear regression
from numpy import loadtxt, zeros, ones, array, linspace, logspace, mean, std, arange
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
from pylab import plot, show, xlabel, ylabel
#Evaluate the linear regression
def feature_normalize(X):
    '''
    Returns a normalized version of X where
    the mean value of each feature is 0 and the standard deviation
    is 1. This is often a good preprocessing step to do when
    working with learning algorithms.
    '''
    mean_r = []
    std_r = []
    X_norm = X
    n_c = X.shape[1]
    for i in range(n_c):
        m = mean(X[:, i])
        s = std(X[:, i])
        mean_r.append(m)
        std_r.append(s)
        X_norm[:, i] = (X_norm[:, i] - m) / s
    return X_norm, mean_r, std_r

def compute_cost(X, y, theta):
    '''
    Compute cost for linear regression
    '''
    #Number of training samples
    m = y.size
    predictions = X.dot(theta)
    sqErrors = (predictions - y)
    J = (1.0 / (2 * m)) * sqErrors.T.dot(sqErrors)
    return J

def gradient_descent(X, y, theta, alpha, num_iters):
    '''
    Performs gradient descent to learn theta
    by taking num_iters gradient steps with learning
    rate alpha
    '''
    m = y.size
    J_history = zeros(shape=(num_iters, 1))
    for i in range(num_iters):
        predictions = X.dot(theta)
        theta_size = theta.size
        for it in range(theta_size):
            temp = X[:, it]
            temp.shape = (m, 1)
            errors_x1 = (predictions - y) * temp
            theta[it][0] = theta[it][0] - alpha * (1.0 / m) * errors_x1.sum()
        J_history[i, 0] = compute_cost(X, y, theta)
    return theta, J_history

#Load the dataset
data = loadtxt('ex1data2.txt', delimiter=',')
#Plot the data
'''
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
n = 100
for c, m, zl, zh in [('r', 'o', -50, -25)]:
    xs = data[:, 0]
    ys = data[:, 1]
    zs = data[:, 2]
    ax.scatter(xs, ys, zs, c=c, marker=m)
ax.set_xlabel('Size of the House')
ax.set_ylabel('Number of Bedrooms')
ax.set_zlabel('Price of the House')
plt.show()
'''
X = data[:, :2]
y = data[:, 2]
#number of training samples
m = y.size
y.shape = (m, 1)
#Scale features and set them to zero mean
x, mean_r, std_r = feature_normalize(X)
#Add a column of ones to X (intercept term)
it = ones(shape=(m, 3))
it[:, 1:3] = x
#Some gradient descent settings
iterations = 100
alpha = 0.01
#Init Theta and Run Gradient Descent
theta = zeros(shape=(3, 1))
theta, J_history = gradient_descent(it, y, theta, alpha, iterations)
print theta, J_history
plot(arange(iterations), J_history)
xlabel('Iterations')
ylabel('Cost Function')
show()
#Predict price of a 1650 sq-ft 3 br house
price = array([1.0, ((1650.0 - mean_r[0]) / std_r[0]), ((3 - mean_r[1]) / std_r[1])]).dot(theta)
print 'Predicted price of a 1650 sq-ft, 3 br house: %f' % (price)
@vladimircape

Your algorithm gives a wrong predicted price; test it on the training set and you will see the error.
The settings you used are not right:
iterations = 100
alpha = 0.01

That many iterations is not enough with that alpha to reach the optimum. For example, these work better:
iterations = 10000
alpha = 0.001
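
A quick way to check this is to run the gist's gradient_descent with a few (alpha, iterations) pairs and compare the final cost. A minimal sketch, assuming the variables it and y and the functions defined in the gist above are already in scope:

# Compare the final cost for a few learning-rate / iteration settings
# (assumes `it`, `y`, `gradient_descent` and numpy's `zeros` from the gist are in scope)
for alpha_try, iters_try in [(0.01, 100), (0.001, 10000), (0.1, 400)]:
    theta_try, hist = gradient_descent(it, y, zeros(shape=(3, 1)), alpha_try, iters_try)
    print 'alpha=%g, iters=%d -> final cost: %f' % (alpha_try, iters_try, hist[-1, 0])

If the cost is still dropping sharply at the last iteration, the run has not converged yet.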

@redcho

redcho commented Apr 2, 2016

You should flatten your predictions in the compute_cost function. Otherwise it returns an array.
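
One way to do that, as a sketch of the fix being suggested (not the author's code), using the same imports as the gist:

def compute_cost(X, y, theta):
    '''Compute cost for linear regression, returning a plain scalar.'''
    m = y.size
    predictions = X.dot(theta).flatten()
    errors = predictions - y.flatten()
    return (1.0 / (2 * m)) * errors.dot(errors)

Flattening both predictions and y keeps the arithmetic in 1-D, so J comes back as a float instead of a 1x1 array.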

@snitchjinx

I don't get the point of obtaining the size with m = y.size and then setting y.shape = (m, 1).
Does anybody have any idea what the latter does?
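
loadtxt gives y as a 1-D array of shape (m,), while X.dot(theta) has shape (m, 1). m = y.size just counts the training samples; setting y.shape = (m, 1) then reshapes y in place into a column vector, so the subtraction (predictions - y) lines up element by element instead of broadcasting to an (m, m) array. A tiny illustration with made-up values (not from the dataset):

from numpy import array
y = array([399900.0, 329900.0, 369000.0])
print y.shape     # (3,)  -- a 1-D array
y.shape = (3, 1)  # reshape in place into a column vector
print y.shape     # (3, 1)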

@dinumarathe

I need help with this gradient, please!
