@tyleransom
Created March 6, 2017 21:27
Evaluate memory usage in Julia's JuMP optimization modeling language
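The script below generates a simulated linear-regression dataset, estimates the model by OLS as a benchmark, and then re-estimates it by maximum likelihood in JuMP with the Ipopt solver, recovering standard errors from the Hessian of the log-likelihood. The @time calls around the solver and Hessian steps are what report the memory allocations of interest.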
using JuMP
using Ipopt
using MathProgBase  # assumed necessary: genHess below calls MathProgBase functions directly
function datagen(N::Int64,T::Int64)
    ## Generate data for a linear model to test optimization
    srand(1234)
    # N and T must be integers: functions such as ones() require integer sizes
    n = N*T

    # generate the Xs
    X = cat(2, ones(N*T,1), 5+3*randn(N*T,1), rand(N*T,1),
               2.5+2*randn(N*T,1), 15+3*randn(N*T,1),
               .7-.1*randn(N*T,1), 5+3*randn(N*T,1),
               rand(N*T,1), 2.5+2*randn(N*T,1),
               15+3*randn(N*T,1), .7-.1*randn(N*T,1),
               5+3*randn(N*T,1), rand(N*T,1), 2.5+2*randn(N*T,1),
               15+3*randn(N*T,1), .7-.1*randn(N*T,1))

    # generate the betas (X coefficients)
    ßans = [ 2.15; 0.10; 0.50; 0.10; 0.75; 1.2; 0.10; 0.50; 0.10; 0.75; 1.2; 0.10; 0.50; 0.10; 0.75; 1.2 ]

    # generate the std dev of the errors
    σans = 0.3

    # generate the Ys from the Xs, betas, and error draws
    draw = 0 + σans*randn(N*T,1)
    y = X*ßans+draw

    # return generated data so that other functions (below) have access
    return X,y,ßans,σans,n
end
# Generate the data: N = 10,000 cross-sectional units, T = 3 time periods
X,y,ßans,σans,n = datagen(convert(Int64,1e4),3)

# Estimate via OLS as a benchmark
ßhat = (X'*X)\(X'*y)
σhat = sqrt((y-X*ßhat)'*(y-X*ßhat)/(n-size(X,2)))

# Compare OLS estimates (left column) against the true values (right column)
[round([ßhat;σhat],3) [ßans;σans]]
# Construct the Hessian of the objective at the current solution of a JuMP model,
# using the model's NLPEvaluator; each step is wrapped in @time so allocations are printed
function genHess(m)
    this_par = m.colVal
    m_eval = JuMP.NLPEvaluator(m)
    println("MathProgBase initialize")
    @time MathProgBase.initialize(m_eval, [:ExprGraph, :Grad, :Hess])
    println("MathProgBase.hesslag_structure")
    @time hess_struct = MathProgBase.hesslag_structure(m_eval)
    hess_vec = zeros(length(hess_struct[1]))
    numconstr = length(m_eval.m.linconstr) + length(m_eval.m.quadconstr) + length(m_eval.m.nlpdata.nlconstr)
    dimension = length(m.colVal)
    println("MathProgBase.eval_hesslag")
    @time MathProgBase.eval_hesslag(m_eval, hess_vec, this_par, 1.0, zeros(numconstr))
    # eval_hesslag fills only one triangle; symmetrize without double-counting the diagonal
    this_hess_ld = sparse(hess_struct[1], hess_struct[2], hess_vec, dimension, dimension)
    hOpt = this_hess_ld + this_hess_ld' - sparse(diagm(diag(this_hess_ld)))
    hOpt = -full(hOpt) # since we are maximizing
    return hOpt
end
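# Aside (my own illustration, not from the original script): genHess is used by jumpMLE
# below, but it can be applied to any solved JuMP model with a nonlinear objective.
# As a quick sanity check, maximizing -(z-2)^2 has second derivative -2, so genHess,
# which flips the sign because we maximize, should return a 1x1 matrix of about 2.
mtoy = Model(solver=IpoptSolver(print_level=0))
@variable(mtoy, z, start = 1.0)
@NLobjective(mtoy, Max, -(z-2)^2)
solve(mtoy)
Htoy = genHess(mtoy)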
# Estimate via JuMP optimization package
function jumpMLE(y::Array{Float64,2},X::Array{Float64,2},startval::Array{Float64,1})
    ## MLE of classical linear regression model
    # Declare the name of your model and the optimizer you will use
    myMLE = Model(solver=IpoptSolver(tol=1e-6,print_level=0))
    # Declare the variables you are optimizing over
    @variable(myMLE, ß[i=1:size(X,2)], start = startval[i])
    @variable(myMLE, σ>=0.0, start = startval[end])
    # Write your objective function: the normal log-likelihood
    # (note that n is the global sample size returned by datagen above)
    @NLobjective(myMLE, Max, (n/2)*log(1/(2*π*σ^2))-sum((y[i]-sum(X[i,k]*ß[k]
                 for k=1:size(X,2)))^2 for i=1:size(X,1))/(2σ^2))
    # Solve the objective function
    println("Solver allocations")
    @time status = solve(myMLE)
    # Generate Hessian
    println("Hessian function")
    hOpt = genHess(myMLE)
    # Calculate standard errors from the inverse Hessian
    seOpt = sqrt(diag(full(hOpt)\eye(size(hOpt,1))))
    # Save estimates
    ßopt = getvalue(ß[:])
    σopt = getvalue(σ)
    return ßopt,σopt,hOpt,seOpt
end
# Random starting values for [ß; σ]
startval = [rand(size(X,2));rand(1)]
println(size(startval))
# Run the MLE (jumpMLE returns four values: estimates, Hessian, and standard errors)
@time ßJump,σJump,hJump,seJump=jumpMLE(y,X,startval)
# Compare the JuMP estimates (left column) against the true values (right column)
[[ßJump;σJump] [ßans;σans]]
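# Addition (illustrative, not from the original script): two ways to quantify memory usage
# directly. @allocated is a built-in macro that returns the bytes allocated while evaluating
# an expression; alternatively, starting Julia with --track-allocation=user writes per-line
# allocation counts to .mem files for a finer-grained breakdown.
bytes = @allocated jumpMLE(y,X,startval)
println("one call to jumpMLE allocated ", round(bytes/1e6,1), " MB")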