@marcelcaraciolo
Created November 15, 2011 01:39
logistic_reg.py
from numpy import loadtxt, where, zeros, e, array, log, ones, append, linspace
from pylab import scatter, show, legend, xlabel, ylabel, contour, title
from scipy.optimize import fmin_bfgs


def sigmoid(X):
    '''Compute the sigmoid function element-wise.'''
    den = 1.0 + e ** (-1.0 * X)
    d = 1.0 / den
    return d
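# Illustrative sanity check (not part of the original gist): sigmoid(0)
# is 0.5, and the output saturates toward 0 and 1 at the extremes.
#print sigmoid(array([-10.0, 0.0, 10.0]))  # -> ~[0.0000454, 0.5, 0.9999546]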
def cost_function_reg(theta, X, y, l):
    '''Compute the regularized cost and the partial derivatives (grads).'''
    m = y.shape[0]  # was an implicit global in the original; computed here
    h = sigmoid(X.dot(theta))
    thetaR = theta[1:, 0]  # the intercept theta[0] is not regularized
    J = (1.0 / m) * ((-y.T.dot(log(h))) - ((1 - y.T).dot(log(1.0 - h)))) \
        + (l / (2.0 * m)) * (thetaR.T.dot(thetaR))
    delta = h - y
    # Gradient for the intercept term, using the bias column (no regularization)
    sumdelta = delta.T.dot(X[:, 0])
    grad1 = (1.0 / m) * sumdelta
    # Regularized gradient for the remaining parameters
    XR = X[:, 1:X.shape[1]]
    sumdelta = delta.T.dot(XR)
    grad = (1.0 / m) * (sumdelta + l * thetaR)
    out = zeros(shape=(grad.shape[0], grad.shape[1] + 1))
    out[:, 0] = grad1
    out[:, 1:] = grad
    return J.flatten(), out.T.flatten()
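# For reference, the quantity being minimized (with theta[0] excluded from
# the penalty) is:
#   J(theta) = (1/m) * sum(-y*log(h) - (1-y)*log(1-h))
#              + (l/(2m)) * sum(theta[j]**2 for j >= 1)
# cost_function_reg returns this J together with its gradient vector.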
# X (two input features) and y (labels) are assumed to be loaded beforehand,
# e.g. with loadtxt on the exercise data file
m, n = X.shape
y.shape = (m, 1)

# Map the features to polynomial terms (map_feature is not included in this
# gist; see the sketch below)
it = map_feature(X[:, 0], X[:, 1])

#Initialize theta parameters
initial_theta = zeros(shape=(it.shape[1], 1))

#Set regularization parameter lambda to 1
l = 1

# Compute and display initial cost and gradient for regularized logistic
# regression
cost, grad = cost_function_reg(initial_theta, it, y, l)


def decorated_cost(theta):
    # fmin_bfgs passes theta as a flat array and expects a scalar cost,
    # so reshape it and return only the cost term
    J, g = cost_function_reg(theta.reshape(it.shape[1], 1), it, y, l)
    return J[0]

# fmin_bfgs takes maxiter, not maxfun
print fmin_bfgs(decorated_cost, initial_theta, maxiter=400)
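The snippet above calls map_feature, which is not included in this gist. As a rough sketch (assuming the degree-6 polynomial expansion used in the original exercise, with a leading column of ones), it could look like this:

def map_feature(x1, x2, degree=6):
    '''Map two features to all polynomial terms of x1 and x2 up to
    the given degree, including a bias column of ones.
    For degree 6 this returns an (m, 28) array.'''
    x1 = x1.reshape(x1.size, 1)
    x2 = x2.reshape(x2.size, 1)
    out = ones(shape=(x1.size, 1))  # bias column
    for i in range(1, degree + 1):
        for j in range(i + 1):
            r = (x1 ** (i - j)) * (x2 ** j)
            out = append(out, r, axis=1)
    return out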
mugnaio commented Nov 18, 2016

Hi, nice post, it helped me a lot.
I just fixed some functions, and this is working for me:
def fcost(theta, X, y, l):
    m = y.size
    h = sigmoid(X.dot(theta))
    theta = theta.copy()  # copy before zeroing so the optimizer's iterate is untouched
    theta[0] = 0  # exclude the intercept from the regularization term
    sreg = theta.T.dot(theta)
    reg = (l / (2.0 * m)) * sreg  # 2.0 avoids Python 2 integer division
    left = -y.T.dot(log(h))
    right = (1 - y).T.dot(log(1 - h))
    diff = left - right
    J = (diff / m) + reg
    return J

def fgradient(theta, X, y, l):
    m = y.size
    h = sigmoid(X.dot(theta))
    h.shape = (m, 1)
    theta = theta.copy()  # again, avoid mutating the optimizer's iterate
    theta[0] = 0  # no regularization term in the intercept gradient
    hx = X.T.dot(h - y)
    reg = theta * (float(l) / m)  # float() avoids Python 2 integer division
    reg.shape = hx.shape
    grad = (hx / m) + reg
    return grad.flatten()

...
it = map_feature(X[:, 0], X[:, 1])
X = it
initial_theta = zeros(shape=(it.shape[1], 1))
l = 1
myargs = (X, y, l)
theta = fmin_bfgs(fcost, x0=initial_theta, fprime=fgradient, args=myargs, maxiter=400)
...
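With the theta returned by fmin_bfgs, a quick way to check the fit is to threshold the hypothesis at 0.5 and measure training accuracy. This is just a sketch; the predict helper is illustrative and not part of the gist or the comment above:

def predict(theta, X):
    '''Predict label 1 where the hypothesis is at least 0.5.'''
    return where(sigmoid(X.dot(theta)) >= 0.5, 1, 0)

p = predict(theta, X)
print 'Train accuracy: %f%%' % ((p == y.flatten()).mean() * 100)

Passing the analytic gradient via fprime, as done above, also keeps fmin_bfgs from falling back to slower finite-difference gradient estimates.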
