% hw5_steepdes.m
%
% Math 571 HW 5 Q1.
%
% Script to minimize
%   f(x) = e'*((e + A_1*(D_1*x + d_1).^2 + A_2*(D_2*x + d_2).^2).^(1/2))
% using the steepest descent method with backtracking line search, and to
% compute and plot the objective value, the norm of the gradient of f(x),
% the number of function and gradient evaluations, and the runtime.
load 'MinSurfProb.mat'
tic;
x_0 = xlr; % starting point
%x_k = x_0;
toliter = 1000; % maximum number of iterations
tolfun = 1e-10; % tolerance for ending steepest descent when ||f(x_k+1) - f(x_k)|| < tolfun
tolopt = 1e-10; % tolerance for ending steepest descent when || df(x_k) || < tolopt
%alpha = 1; % initial step size
% store results of intermediate iterations; columns of res are:
% 1: |f(x_current) - f(x_previous)|
% 2: objective function value f(x_current)
% 3: norm of gradient ||df(x_current)||
% 4: cumulative number of f(x) evaluations
% 5: cumulative number of df(x) evaluations
% 6: step size alpha
res = zeros(toliter,6);
% store intermediate points
xpoints = zeros(toliter,size(xlr,1));
iter = 1;
feval_iter = 1; % count f(x) evaluation
dfeval_iter = 1; % count df(x) evaluation
n = size(A1,1); % 16384
sfun = @(x) ones(n,1) + A1*((D1*x + d1).^2) + A2*((D2*x + d2).^2); % e + A1*(D1*x+d1).^2 + A2*(D2*x+d2).^2
f = @(x) sum(sqrt(sfun(x))); % objective f(x) = e'*sfun(x).^(1/2)
df = @(x) D1'*((D1*x + d1).*(A1'*(1./sqrt(sfun(x))))) + D2'*((D2*x + d2).*(A2'*(1./sqrt(sfun(x))))); % gradient of f; equal to (D1'*diag(D1*x+d1)*A1' + D2'*diag(D2*x+d2)*A2')*(1./sqrt(sfun(x))) but avoids forming dense n-by-n diagonal matrices
%[fval,dfval] = funwithgrad(x_0); %too slow
fval = f(xlr); % evaluate f(x_current) and df(x_current)
dfval = df(xlr);
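% Optional sanity check (a minimal sketch added here): compare the analytic
% gradient against a central-difference approximation along one random
% direction. Costs two extra f evaluations, not counted in feval_iter, and
% can be commented out for timing runs.
vdir = randn(size(x_0)); vdir = vdir/norm(vdir); % random unit direction (introduced for this check)
hstep = 1e-6; % finite-difference step (assumed value)
fd = (f(x_0 + hstep*vdir) - f(x_0 - hstep*vdir))/(2*hstep);
fprintf('gradient check: |df(x_0)''*v - finite diff| = %0.3e\n', abs(dfval'*vdir - fd));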
fval_prev = fval;
% update result
res(1,1) = norm(fval - fval_prev);
res(1,2) = fval;
res(1,3) = norm(dfval);
res(1,4) = feval_iter;
res(1,5) = dfeval_iter;
res(1,6) = 1;
xpoints(1,:) = xlr;
iter = iter + 1; % 1st iteration occurred before the steepest descent loop
while iter < toliter
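% linesearch is a separate backtracking routine (a sketch is appended at the end of this file)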
[alpha, feval_iter_LS] = linesearch(x_0,fval,dfval,1,-dfval,f); % line search, initial step size = 1, p = -df(x_current)
x_k = x_0 + alpha*(-dfval); % x_k = x_0 + alpha*p, p = -df(x_current) for steepest descent
% [fval,dfval] = funwithgrad(x_k);
% update f(x_current), df(x_current) with point from steepest descent
fval = f(x_k);
dfval = df(x_k);
feval_iter = feval_iter + feval_iter_LS + 1; % update number of f(x) evaluation
dfeval_iter = dfeval_iter + 1;
res(iter,1) = norm(fval - fval_prev); % store || f(x_current) - f(x_previous) ||
res(iter,2) = fval; % f(x_current)
res(iter,3) = norm(dfval); % ||df(x_current)||
res(iter,4) = feval_iter;
res(iter,5) = dfeval_iter;
res(iter,6) = alpha;
xpoints(iter,:) = x_k;
if mod(iter,10) == 0
fprintf('iteration %d fval %0.5e norm(fval-fprev) %0.5e norm(dfval) %0.5e alpha %0.5e\n', iter, fval, res(iter,1), res(iter,3), alpha);
save('hw5_steepdescent_intermediates_1000.mat');
end
if res(iter,1) < tolfun
disp('norm(f(x_current)-f(x_previous)) < 1e-10');
break
end
if res(iter,3) < tolopt
disp('norm(df(x_current)) < 1e-10');
break
end
x_0 = x_k; % update x_previous to x_current
fval_prev = fval; % update f(x_previous)
iter = iter + 1;
end
toc;
save('hw5_steepdescent_1000.mat')
fig = semilogy(1:iter, res(1:iter,:)); % plot the six res columns on a log y-scale
xlabel('Iteration') % x-axis label
ylabel('')
%legend('starting point a = [-1;0]','starting point b = [1;1]','Location','southeast')
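% name the six res columns plotted above
legend('|f_k - f_{k-1}|','f(x_k)','||df(x_k)||','# f evals','# df evals','step size \alpha','Location','best')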
th = title('Min f(x) using Steepest Descent Method');
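% --- Backtracking line search ---------------------------------------------
% The linesearch routine called in the loop above lives in its own file and
% is not included in this gist. The local function below is a minimal sketch
% of a backtracking (Armijo) search assuming the call signature
%   [alpha, nfev] = linesearch(x, fx, dfx, alpha0, p, fun)
% used above; the parameter values c and rho are assumptions, and this sketch
% can be deleted if the real linesearch.m is on the path. (Local functions in
% scripts require MATLAB R2016b or newer.)
function [alpha, nfev] = linesearch(x, fx, dfx, alpha0, p, fun)
c = 1e-4;   % Armijo sufficient-decrease parameter (assumed)
rho = 0.5;  % step-shrink factor (assumed)
alpha = alpha0;
nfev = 0;
slope = dfx'*p; % directional derivative along p; negative for a descent direction
% shrink alpha until fun(x + alpha*p) <= fx + c*alpha*slope
while true
    fnew = fun(x + alpha*p);
    nfev = nfev + 1;
    if fnew <= fx + c*alpha*slope || alpha < 1e-16
        break
    end
    alpha = rho*alpha;
end
end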