@mshivers
Created February 3, 2012 15:46
L2 Regularized Non-negative logistic regression
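
In symbols (m samples, design matrix X, labels y in {0, 1}, coefficients theta, regularization strength C, sigma the logistic function), the code below solves

    \min_{\theta \ge 0} \; \frac{1}{m}\Big[ -y^\top \log \sigma(X\theta) \;-\; (1-y)^\top \log\big(1 - \sigma(X\theta)\big) \;+\; \frac{C}{2}\,\theta^\top\theta \Big],
    \qquad \sigma(z) = \frac{1}{1 + e^{-z}}.
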
import numpy as np
from scipy import optimize as opt


def nnlr(X, y, C):
    """
    Non-negative logistic regression with an L2 regularizer.

    X : (m, N) design matrix
    y : (m,) array of 0/1 labels
    C : L2 regularization strength
    """
    def sigmoid(z):
        return 1.0 / (1.0 + np.exp(-z))

    def lr_cost(X, y, theta, C):
        # Mean negative log-likelihood plus (C/2) * ||theta||^2,
        # which matches the C * theta term in the gradient below.
        m = len(y)
        h = sigmoid(np.dot(X, theta))
        return (1. / m) * (np.dot(-y, np.log(h))
                           - np.dot(1 - y, np.log(1 - h))
                           + 0.5 * C * np.dot(theta, theta))

    def lr_grad(X, y, theta, C):
        m = len(y)
        return (1. / m) * (np.dot(X.T, sigmoid(np.dot(X, theta)) - y)
                           + C * theta)

    N = X.shape[1]
    J = lambda theta: lr_cost(X, y, theta, C)
    J_grad = lambda theta: lr_grad(X, y, theta, C)
    theta0 = 0.02 * np.ones(N)
    # The (0, None) box constraint on every coefficient enforces theta >= 0.
    x, nfeval, rc = opt.fmin_tnc(J, theta0, fprime=J_grad,
                                 bounds=[(0, None)] * N, disp=0)
    return x
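
A quick usage sketch (the synthetic data below is hypothetical; nnlr is the function above). Note that y must be a flat (m,) array of 0/1 labels:

import numpy as np

rng = np.random.default_rng(0)
X = rng.normal(size=(400, 19))
theta_true = np.abs(rng.normal(size=19))   # non-negative ground truth
y = (1.0 / (1.0 + np.exp(-X @ theta_true)) > 0.5).astype(float)

theta_hat = nnlr(X, y, C=0.1)
assert theta_hat.min() >= 0.0              # bounds keep every coefficient non-negative
print(theta_hat)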
@cryax

cryax commented Nov 29, 2016

Got an error when I tried X = (400, 19), y = (400, 1), C = 0.1.
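
The likely cause, assuming those shapes, is that y has shape (400, 1): np.dot(-y, np.log(h)) in lr_cost then attempts a misaligned matrix product instead of a scalar inner product. Flattening y before the call should fix it:

theta = nnlr(X, y.ravel(), C=0.1)   # pass y as a flat (400,) array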
