@redcho
Created April 5, 2016 13:03
Logistic Regression
import numpy as np
import scipy.optimize as op

def sigmoid(X):
    # Logistic function; applies elementwise to scalars and numpy arrays.
    return 1. / (1. + np.exp(-X))
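A quick sanity check of the sigmoid above (not part of the original gist): it maps 0 to 0.5 and works elementwise on arrays.

# Hypothetical spot-check; expected values follow directly from the definition above.
print(sigmoid(0))                           # 0.5
print(sigmoid(np.array([-10., 0., 10.])))   # approx. [4.54e-05, 0.5, 0.99995]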
def gradient(theta, x, y):
    # Gradient of the logistic cost: one partial derivative per feature column.
    grad = np.zeros(theta.shape).flatten()
    m = y.size
    h = sigmoid(x.dot(theta))
    for jth in range(x.shape[1]):
        xjth = x[:, jth]
        grad[jth] = (1. / m) * ((h - y) * xjth).sum()
    return grad
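The per-feature loop in gradient can be collapsed into a single matrix product. The sketch below is a drop-in alternative (not from the original gist) that returns the same vector:

def gradient_vectorized(theta, x, y):
    # Same gradient, computed as x^T (h - y) / m in one step.
    m = y.size
    h = sigmoid(x.dot(theta))
    return (1. / m) * x.T.dot(h - y)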
def costFunction(theta, x, y):
    # Average binary cross-entropy over the m training examples.
    m = y.size
    h = sigmoid(x.dot(theta))
    J = (1. / m) * (-y.dot(np.log(h)) - (1 - y).dot(np.log(1 - h)))
    return J
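Written out, costFunction computes the average binary cross-entropy, with h_theta the sigmoid of the linear score:

J(\theta) = -\frac{1}{m} \sum_{i=1}^{m} \Bigl[ y^{(i)} \log h_\theta(x^{(i)}) + \bigl(1 - y^{(i)}\bigr) \log\bigl(1 - h_\theta(x^{(i)})\bigr) \Bigr], \qquad h_\theta(x) = \frac{1}{1 + e^{-\theta^\top x}}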
# Optimize theta with scipy's truncated Newton (TNC) algorithm.
Result = op.minimize(fun=costFunction, x0=theta, args=(x, y), method='TNC', jac=gradient)
opt_theta = Result.x
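Once opt_theta is available, class predictions come from thresholding the fitted probabilities at 0.5. A minimal sketch, assuming x already includes the intercept column used during training (the names here are illustrative, not from the original gist):

def predict(theta, x_new):
    # Predicted class labels: 1 where the estimated probability is >= 0.5.
    return (sigmoid(x_new.dot(theta)) >= 0.5).astype(int)

predictions = predict(opt_theta, x)
train_accuracy = (predictions == y).mean()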