@Alrecenk
Created May 21, 2014 12:38
Hessian (matrix of second derivatives) of the error for a logistic regression model.
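Judging from the coefficient 2*(ds*ds + dds*(s - output[k])) accumulated in the code below, the error being differentiated appears to be a sum of squared residuals on the sigmoid output (this is inferred from the code, not stated in the gist). Under that assumption the Hessian entries are:

E(w) = \sum_k \bigl(s(w \cdot x_k) - y_k\bigr)^2
\frac{\partial^2 E}{\partial w_j\,\partial w_l} = \sum_k 2\Bigl(s'(w \cdot x_k)^2 + s''(w \cdot x_k)\bigl(s(w \cdot x_k) - y_k\bigr)\Bigr)\, x_{kj}\, x_{kl}

which is exactly the outer product x x^T scaled by that per-sample coefficient.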
// Returns the Hessian (matrix of second partial derivatives) of the error with respect
// to the weights, for a logistic regression with weights w on the given input and output.
// Output should be in the form 0 for negative, 1 for positive.
public double[][] hessian(double w[]) {
    heval++; // keep track of how many times this has been called
    double h[][] = new double[w.length][];
    // second derivative matrices are always symmetric, so we only need the lower triangular portion
    for (int j = 0; j < h.length; j++) {
        h[j] = new double[j + 1];
    }
    for (int k = 0; k < input.length; k++) {
        // calculate the coefficient for this data point
        double dot = dot(w, input[k]);
        double ds = ds(dot);
        double coef = 2 * (ds * ds + dds(dot) * (s(dot) - output[k]));
        // add x * x^T * coef to the Hessian
        for (int j = 0; j < h.length; j++) {
            for (int l = 0; l <= j; l++) {
                h[j][l] += input[k][j] * input[k][l] * coef;
            }
        }
    }
    return h;
}
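The method above calls several helpers (s, ds, dds, dot) that are not included in this gist. A minimal sketch, assuming s is the standard logistic sigmoid and ds/dds are its first and second derivatives:

// Sketch of the helpers assumed by hessian(w); these definitions are assumptions
// based on a standard logistic sigmoid, not part of the original gist.
double s(double x) {                 // logistic sigmoid
    return 1.0 / (1.0 + Math.exp(-x));
}
double ds(double x) {                // first derivative: s(x) * (1 - s(x))
    double sx = s(x);
    return sx * (1 - sx);
}
double dds(double x) {               // second derivative: s(x) * (1 - s(x)) * (1 - 2 * s(x))
    double sx = s(x);
    return sx * (1 - sx) * (1 - 2 * sx);
}
double dot(double a[], double b[]) { // standard dot product
    double total = 0;
    for (int i = 0; i < a.length; i++) {
        total += a[i] * b[i];
    }
    return total;
}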
// Returns a numerically calculated Hessian via central differences - an approximation to the above.
// Used only for unit testing hessian, not called in the final version.
public double[][] numericalHessian(double w[], double epsilon) {
    double h[][] = new double[w.length][];
    for (int j = 0; j < h.length; j++) {
        w[j] += epsilon;
        h[j] = gradient(w);                 // gradient at w + epsilon * e_j
        w[j] -= 2 * epsilon;
        h[j] = subtract(h[j], gradient(w)); // minus gradient at w - epsilon * e_j
        w[j] += epsilon;                    // restore the original weight
        for (int k = 0; k < w.length; k++) {
            h[j][k] /= 2 * epsilon;         // divide by the step to get the central difference
        }
    }
    return h;
}
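numericalHessian relies on gradient(w) and subtract(a, b), which are also not shown in the gist. A sketch consistent with the squared-error objective assumed above:

// Sketch of the remaining helpers used by numericalHessian; assumed, not part of the original gist.
double[] gradient(double w[]) {             // gradient of E(w) = sum_k (s(w . x_k) - y_k)^2
    double g[] = new double[w.length];
    for (int k = 0; k < input.length; k++) {
        double dot = dot(w, input[k]);
        double coef = 2 * (s(dot) - output[k]) * ds(dot);
        for (int j = 0; j < g.length; j++) {
            g[j] += coef * input[k][j];
        }
    }
    return g;
}
double[] subtract(double a[], double b[]) { // element-wise a - b
    double c[] = new double[a.length];
    for (int i = 0; i < a.length; i++) {
        c[i] = a[i] - b[i];
    }
    return c;
}

With these in place, a unit test can compare hessian(w) against numericalHessian(w, epsilon) for a small epsilon (e.g. 1e-5), entry by entry, keeping in mind that the analytic version only stores the lower triangular portion while the numerical version fills full rows.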