%% MILP model
% Minimize a weighted sum of the sorted elements of R*w, plus a 1-norm
% regularization toward w0, over a box. With a general (unsorted,
% mixed-sign) weight vector p the sorted term is nonconvex, and YALMIP
% models sort() of a decision variable using binary variables, i.e., a MILP.
n = 4;
m = 10;
p = randn(m,1);
R = randn(m,n);
w0 = randn(n,1);
w = sdpvar(n,1);
objective = sort(R*w,'descend')'*p + norm(w-w0,1);
Model = [norm(w,inf) <= 1];
optimize(Model,objective);
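% A minimal sketch of inspecting the result, assuming the solve succeeded;
% value() is the standard YALMIP accessor for solved decision variables
% and expressions.
disp(value(w))
disp(value(objective))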
%% LP model
% When p is nonnegative and sorted in descending order, the weighted sum
% of sorted values is convex, and sumk (sum-of-k-largest, here in its
% weighted form) yields an LP model instead of a MILP. Note the sort
% direction flag is 'descend'; 'descending' is not valid MATLAB.
p = rand(m,1); p = sort(p,'descend');
objective = sumk(R*w,m,p) + norm(w-w0,1);
Model = [norm(w,inf) <= 1];
optimize(Model,objective);
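% Sanity-check sketch (plain MATLAB, no decision variables): for p >= 0
% sorted descending, the weighted ordered sum telescopes into a nonnegative
% combination of sum-of-k-largest terms, which is what makes the model
% convex. wtest, dp, lhs, rhs are illustrative names, not part of the model.
wtest = randn(n,1);
s = sort(R*wtest,'descend');
lhs = s'*p;                          % weighted ordered sum
dp = [p(1:end-1)-p(2:end); p(end)];  % telescoping weights, all >= 0 here
rhs = 0;
for k = 1:m
    rhs = rhs + dp(k)*sum(s(1:k));   % dp(k) * (sum of k largest)
end
disp([lhs rhs])                      % should agree up to rounding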
%% NLP model without gradient
% blackbox wraps a generic MATLAB function handle so it can appear in a
% YALMIP objective; without a supplied gradient, fmincon resorts to finite
% differences. Note the objective is only piecewise differentiable (kinks
% where elements of R*w tie).
f = @(w)(sort(R*w,'descend')'*p);
objective = blackbox(f,w) + norm(w-w0,1);
ops = sdpsettings('solver','fmincon','fmincon.algorithm','active-set');
optimize(Model,objective,ops)
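% Hedged sanity check: the objective at the returned point should match a
% direct computation in plain MATLAB (this assumes value() evaluates the
% blackbox callback at the current solution, which is how YALMIP's value
% propagation normally behaves).
wsol = value(w);
direct = f(wsol) + norm(wsol - w0, 1);
disp([value(objective) direct])      % should agree up to solver tolerance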
%% NLP model with gradient
% Same model, but a handle computing the (sub)gradient is supplied to
% blackbox, so fmincon no longer needs finite differences.
f = @(w)(sort(R*w,'descend')'*p);
df = @(w)myderivative(w,R,p);
objective = blackbox(f,w,df) + norm(w-w0,1);
optimize(Model,objective,ops)
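% Finite-difference check of the supplied gradient (a sketch; away from
% ties in R*w the sorting permutation is locally constant, so the analytic
% expression is exact there). wtest, g, gfd are illustrative names.
wtest = randn(n,1);
g  = myderivative(wtest,R,p);
h  = 1e-6;
f0 = sort(R*wtest,'descend')'*p;
gfd = zeros(n,1);
for k = 1:n
    e = zeros(n,1); e(k) = h;
    gfd(k) = (sort(R*(wtest+e),'descend')'*p - f0)/h;
end
disp(norm(g - gfd))                  % should be on the order of h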
%% Experimental plot
% Plot the sorted-sum objective over a 2-D slice: the first two coordinates
% of w sweep [-1,1] while the last two are fixed at z. The surface is
% piecewise linear (linear wherever the sorting order of R*w is fixed).
p = sort(rand(10,1),'descend');
R = randn(10,4);
z = rand(2,1);                 % column vector; rand(1,2) breaks the concatenation below
x = -1:0.01:1;
y = -1:0.01:1;
J = zeros(length(x),length(y));
for i = 1:length(x)
    for j = 1:length(y)
        w = [x(i);y(j);z];
        s = sort(R*w,'descend');
        J(i,j) = s'*p;         % already a scalar, no sum() needed
    end
end
mesh(x,y,J')                   % transpose: mesh expects rows of Z to follow y
function df = myderivative(w,R,p)
% Gradient of w -> sort(R*w,'descend')'*p. Where the sorting permutation of
% R*w is locally constant, the map is linear, f(w) = p'*R(order,:)*w, so the
% gradient is R(order,:)'*p (one valid generalized-gradient choice at ties).
[~,order] = sort(R*w,'descend');
df = R(order,:)'*p;
end