Skip to content

Instantly share code, notes, and snippets.

@ageek
Forked from marcelcaraciolo/log_regression.py
Created December 26, 2013 18:16
Show Gist options
  • Save ageek/8136916 to your computer and use it in GitHub Desktop.
def sigmoid(X):
    """Compute the logistic sigmoid 1 / (1 + e**(-X)) element-wise.

    Parameters
    ----------
    X : scalar or ndarray
        Input value(s); arrays are handled element-wise.

    Returns
    -------
    Scalar or ndarray of the same shape as X, with values in (0, 1).
    """
    # `e` is numpy's Euler constant (the file relies on a star-import of numpy).
    den = 1.0 + e ** (-1.0 * X)
    return 1.0 / den
def compute_cost(theta, X, y):
    """Return the logistic-regression cost J(theta) as a plain scalar.

    Parameters
    ----------
    theta : array-like, shape (n,) or (n, 1)
        Model parameters; reshaped internally to a column vector.
    X : ndarray, shape (m, n)
        Design matrix (m training examples, n features).
    y : ndarray
        Labels in {0, 1}; assumed shaped compatibly with (m, 1) —
        TODO confirm against callers.

    Returns
    -------
    float
        The scalar cost; scipy.optimize.fmin expects a single value,
        which is why only J[0][0] is returned (no gradient).
    """
    m = X.shape[0]  # number of training examples
    theta = reshape(theta, (len(theta), 1))
    # Hoist the hypothesis: the original recomputed sigmoid(X.dot(theta))
    # for every term, and also built a gradient it then threw away.
    h = sigmoid(X.dot(theta))
    J = (1. / m) * (-transpose(y).dot(log(h)) - transpose(1 - y).dot(log(1 - h)))
    return J[0][0]
def compute_grad(theta, X, y):
    """Return the gradient of the logistic-regression cost w.r.t. theta.

    Parameters
    ----------
    theta : ndarray, shape (n,)
        Parameter vector; temporarily reshaped to (1, n) in place for
        the dot product and restored before returning.
    X : ndarray, shape (m, n)
        Design matrix (m training examples, n features).
    y : ndarray, shape (m, 1)
        Labels in {0, 1} — presumably a column vector; verify against caller.

    Returns
    -------
    ndarray, shape (n,)
        One gradient component per parameter.
    """
    n = theta.size          # generalized: the original hard-coded 3 features
    m = X.shape[0]          # BUG FIX: `m` was read from an undefined global
    theta.shape = (1, n)
    grad = zeros(n)
    h = sigmoid(X.dot(theta.T))
    delta = h - y
    for i in range(n):
        sumdelta = delta.T.dot(X[:, i])
        # NOTE(review): the trailing * -1 negates the textbook gradient
        # (1/m) * X^T (h - y); preserved as-is since callers may rely on it.
        grad[i] = (1.0 / m) * sumdelta * -1
    theta.shape = (n,)      # restore the caller's 1-D view
    return grad
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment