@gorlum0
Created October 31, 2011 09:08
ml-class - ex1 (python)
#!/usr/bin/env python
import numpy as np
import matplotlib.pyplot as plt
from numpy import newaxis, r_, c_, mat
from numpy.linalg import *
def plotData(X, y):
    """Plot the training data as red crosses."""
    plt.plot(X, y, 'rx', markersize=7)
    plt.ylabel('Profit in $10,000s')
    plt.xlabel('Population of City in 10,000s')
def computeCost(X, y, theta):
    """Linear-regression cost: J(theta) = 1/(2m) * sum((X*theta - y)**2)."""
    m = X.shape[0]
    predictions = X*theta
    sqrErrors = (predictions - y).A ** 2
    return 1./(2*m) * sqrErrors.sum()
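# Quick hand-check of computeCost on assumed toy data (not part of the
# original gist): with X = [[1, 1], [1, 2]], y = [1, 2] and theta = 0,
# every prediction is 0, the squared errors are 1 and 4, so
# J = (1 + 4) / (2*2) = 1.25.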
def gradientDescent(X, y, theta, alpha, num_iters):
    """Run batch gradient descent for num_iters steps, recording the cost."""
    m = X.shape[0]
    J_history = np.zeros(num_iters)
    for i in xrange(num_iters):
        # Vectorized batch update: theta := theta - alpha/m * X'(X*theta - y)
        theta = theta - alpha/m * X.T * (X*theta - y)
        J_history[i] = computeCost(X, y, theta)
    return theta, J_history
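# Closed-form cross-check (an addition, not part of the original gist): for a
# problem this small the normal equation gives the least-squares theta
# directly, which is handy for verifying what gradient descent converges to,
# e.g. normalEqn(X, y) after the intercept column has been added to X.
def normalEqn(X, y):
    """Least-squares theta via the normal equation: (X'X)^-1 X'y."""
    return pinv(X.T * X) * X.T * y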
if __name__ == '__main__':
    data = np.loadtxt('ex1data1.txt', delimiter=',')  # columns: population, profit
    X = mat(data[:, 0][:, newaxis])
    y = data[:, 1][:, newaxis]
    m = X.shape[0]
    # =================== Part 2: Plotting
    plotData(X, y)
    plt.show()
    raw_input('Press any key to continue\n')
    # =================== Part 3: Gradient descent
    X = c_[np.ones(m), X]            # add the intercept column of ones
    theta = np.zeros(2)[:, newaxis]  # initialize fitting parameters to zero
    iterations = 1500
    alpha = 0.01
    print 'Initial cost:', computeCost(X, y, theta)
    theta, J_history = gradientDescent(X, y, theta, alpha, iterations)
    print 'Theta found by gradient descent:\n', theta
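    # Hypothetical extension (not in the original gist): predict profits for
    # populations of 35,000 and 70,000 using the fitted theta.
    predict1 = (mat([1, 3.5]) * theta)[0, 0]
    predict2 = (mat([1, 7.0]) * theta)[0, 0]
    print 'For population = 35,000, predicted profit: %f' % (predict1 * 10000)
    print 'For population = 70,000, predicted profit: %f' % (predict2 * 10000)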
    plotData(X[:, 1], y)
    plt.plot(X[:, 1], X*theta, '-')
    plt.legend(['Training data', 'Linear regression'])
    plt.show()
    raw_input('Press any key to continue\n')
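    # Hypothetical extension (not in the original gist): plot the cost history
    # returned by gradientDescent to confirm that J decreases over iterations.
    plt.figure()
    plt.plot(np.arange(1, iterations + 1), J_history, '-b')
    plt.xlabel('Iteration'); plt.ylabel(r'Cost $J(\theta)$')
    plt.show()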
    # ============= Part 4: Visualizing J(theta_0, theta_1)
    theta0_vals = np.linspace(-10, 10, 100)
    theta1_vals = np.linspace(-1, 4, 100)
    J_vals = np.zeros((len(theta0_vals), len(theta1_vals)))
    for i in xrange(len(theta0_vals)):
        for j in xrange(len(theta1_vals)):
            t = r_[theta0_vals[i], theta1_vals[j]][:, newaxis]
            J_vals[i, j] = computeCost(X, y, t)
    # Transpose so rows follow theta1 and columns theta0, as contour expects
    J_vals = J_vals.T
    plt.contour(theta0_vals, theta1_vals, J_vals, np.logspace(-2, 3, 20))
    plt.plot(theta[0], theta[1], 'rx', markersize=10, linewidth=5)
    plt.xlabel(r'$\Theta_0$'); plt.ylabel(r'$\Theta_1$')
    plt.show()
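    # Hypothetical extension (not in the original gist): a 3-D surface of the
    # same cost grid, assuming mpl_toolkits.mplot3d is available.
    from mpl_toolkits.mplot3d import Axes3D  # noqa: F401 (registers the 3-D projection)
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    T0, T1 = np.meshgrid(theta0_vals, theta1_vals)
    ax.plot_surface(T0, T1, J_vals)
    ax.set_xlabel(r'$\Theta_0$'); ax.set_ylabel(r'$\Theta_1$')
    plt.show()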