Maximum likelihood logistic regression using NLopt and ReverseDiff in Julia
using ReverseDiff: GradientConfig, gradient!
using Distributions
using NLopt
using StatsFuns

# Simulate data: 1000 observations of two independent standard-normal predictors
X = rand(MvNormal([1.0 0.0; 0.0 1.0]), 1000)'

# True parameters: intercept α and coefficients β
α = [0.0]
β = [1.0, 1.0]

# Bernoulli responses with a logistic link
y = rand.(Bernoulli.(logistic.(α[1] .+ X * β)))

# Numerically stable log of the logistic function: log σ(x) = -log(1 + exp(-x))
log_logistic(x) = -log1pexp(-x)
# Build an NLopt-style objective for logistic regression.
# θ = [α; β]; the returned closure f(θ, grad) fills grad in place via
# reverse-mode AD and returns the Bernoulli log-likelihood.
function logreg(X, y, θ0 = zeros(1 + size(X, 2)), cfg = GradientConfig(θ0))
    function val(θ)
        α = θ[1]
        β = θ[2:end]
        logp = log_logistic.(α .+ X * β)    # log P(y = 1)
        logq = log_logistic.(-α .- X * β)   # log P(y = 0)
        sum(y .* logp .+ (1 .- y) .* logq)
    end
    function f(θ, grad)
        if length(grad) > 0
            gradient!(grad, val, θ, cfg)    # gradient written in place for NLopt
        end
        val(θ)
    end
    f
end
# Maximize the log-likelihood over θ = [α, β₁, β₂] with L-BFGS
opt = Opt(:LD_LBFGS, 3)
max_objective!(opt, logreg(X, y))
(maxf, maxx, ret) = optimize(opt, [0.0, 0.0, 0.0])
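
# A quick sanity check (a sketch, not part of the original gist): the fitted
# parameters in maxx should land near the true values α = 0, β = (1, 1)
# used to simulate the data above.
α̂ = maxx[1]
β̂ = maxx[2:end]
println("return code:    ", ret)
println("log-likelihood: ", maxf)
println("α̂ = ", α̂, "   (true value 0.0)")
println("β̂ = ", β̂, "   (true values [1.0, 1.0])")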