@gorlum0
Created October 31, 2011 09:11
ml-class - ex1_multi (python)
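A Python/NumPy version of the multivariate linear regression exercise (ex1_multi) from the Stanford ml-class: feature normalization, batch gradient descent with several learning rates, and the closed-form normal equation, applied to the ex1data2.txt housing data.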
#!/usr/bin/env python
import numpy as np
import matplotlib.pyplot as plt
from numpy import r_, c_, mat
from numpy.linalg import pinv
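
# Feature scaling: each column of X is rescaled to zero mean and unit
# variance, x_norm = (x - mu) / sigma.  mu and sigma are returned so the
# same scaling can be applied to new examples at prediction time.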
def featureNormalize(X):
    X_norm = X.A
    m = X.shape[0]
    mu = X_norm.mean(axis=0)
    #X_norm -= np.tile(mu, (m, 1))
    X_norm -= mu  # broadcasting
    sigma = X_norm.std(axis=0)
    #X_norm /= np.tile(sigma, (m, 1))
    X_norm /= sigma
    return mat(X_norm), mu, sigma
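
# Cost for multivariate linear regression (vectorized):
#   J(theta) = 1/(2m) * sum((X*theta - y)^2)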
def computeCostMulti(X, y, theta):
    m = X.shape[0]
    predictions = X * theta
    sqrErrors = (predictions - y).A ** 2
    return 1. / (2*m) * sqrErrors.sum()
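
# Batch gradient descent: each iteration applies the vectorized update
#   theta := theta - alpha/m * X'*(X*theta - y)
# and records the cost so convergence can be plotted.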
def gradientDescentMulti(X, y, theta, alpha, num_iters):
    m = X.shape[0]
    J_history = np.zeros(num_iters)
    for i in xrange(num_iters):
        theta = theta - alpha/m * X.T * (X*theta - y)
        J_history[i] = computeCostMulti(X, y, theta)
    return theta, J_history
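
# Closed-form least-squares solution:
#   theta = pinv(X'*X) * X' * y
# pinv is used instead of inv so the expression stays defined even when
# X'*X is singular (e.g. redundant or linearly dependent features).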
def normalEqn(X, y):
    theta = pinv(X.T * X) * X.T * y
    return theta
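
# ex1data2.txt: one row per house -- living area (sq ft), number of
# bedrooms, and price; the first two columns are the features, the
# third is the target.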
if __name__ == '__main__':
    data = np.loadtxt('ex1data2.txt', delimiter=',')
    X = mat(data[:, :2])
    y = c_[data[:, 2]]
    m = X.shape[0]

    # =================== Part 1: Feature Normalization
    print 'Normalizing Features ...'
    X, mu, sigma = featureNormalize(X)
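
    # Run gradient descent for several learning rates and overlay the
    # convergence curves on one figure so they can be compared.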
    # =================== Part 2: Gradient descent
    X = c_[np.ones(m), X]

    iterations = 100
    for alpha in [0.01, 0.03, 0.1, 0.3, 1.]:  # divergence at 3.0
        theta = c_[np.zeros(3)]
        theta, J_history = gradientDescentMulti(X, y, theta, alpha, iterations)

        # Plot the convergence graph
        plt.plot(r_[:iterations], J_history, linewidth=1)

    plt.xlabel('Number of iterations')
    plt.ylabel('Cost J')
    plt.show()
    print 'Theta (last) computed from gradient descent:'
    print theta
    print
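
    # Predict the price of a new example: scale it with the training-set
    # mu and sigma, then prepend the intercept term 1.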
    house = [1650, 3]
    house = (house - mu) / sigma
    price = r_[1, house].dot(theta)
    print 'Predicted price of a 1650 sq-ft, 3 br house ' \
        '(using gradient descent):\n $%f' % price

    raw_input('Press any key to continue\n')
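
    # The normal equation needs neither feature scaling nor a learning
    # rate, so the raw data is reloaded and only the intercept column of
    # ones is added.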
    # =================== Part 3: Normal equations
    data = np.loadtxt('ex1data2.txt', delimiter=',')
    X = mat(data[:, :2])
    y = c_[data[:, 2]]
    m = X.shape[0]
    X = c_[np.ones(m), X]

    theta = normalEqn(X, y)
    print 'Theta computed from the normal equations:'
    print theta
    print

    house = [1650, 3]
    price = r_[1, house].dot(theta)
    print 'Predicted price of a 1650 sq-ft, 3 br house ' \
        '(using normal equations):\n $%f' % price
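
Note: the script targets Python 2 (print statements, xrange, raw_input) and the old numpy.matrix API, and expects ex1data2.txt from the course materials in the working directory. Assuming it is saved as ex1_multi.py, it can be run with: python ex1_multi.py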