@fabianp
Created March 19, 2018 18:40
Python implementation of the Frank-Wolfe algorithm
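
For reference, the script below applies Frank-Wolfe to the ℓ1-constrained least-squares (Lasso) problem

$$\min_{\|x\|_1 \le \alpha} \tfrac{1}{2}\,\|Ax - b\|^2 .$$

On the ℓ1 ball the linear minimization oracle has a closed form: it returns the vertex $s_t = -\alpha\,\mathrm{sign}(\nabla f(x_t)_i)\,e_i$, where $i$ is the coordinate of largest gradient magnitude, and the next iterate is the convex combination $x_{t+1} = (1 - \gamma_t)\,x_t + \gamma_t\,s_t$.
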
import numpy as np
from scipy import sparse
# .. for plotting ..
import matplotlib.pyplot as plt
# .. to generate a synthetic dataset ..
from sklearn import datasets

n_samples, n_features = 1000, 10000
A, b = datasets.make_regression(n_samples, n_features)
def FW(alpha, max_iter=200, tol=1e-8):
    # .. initial estimate, could be any feasible point ..
    x_t = sparse.dok_matrix((n_features, 1))
    trace = []  # to keep track of the gap

    # .. some quantities can be precomputed ..
    Atb = A.T.dot(b)
    for it in range(max_iter):
        # .. compute gradient. Slightly more involved than usual because ..
        # .. of the use of sparse matrices ..
        Ax = x_t.T.dot(A.T).ravel()
        grad = A.T.dot(Ax) - Atb

        # .. the LMO results in a vector that is zero everywhere except for ..
        # .. a single index. Of this vector we only store its index and magnitude ..
        idx_oracle = np.argmax(np.abs(grad))
        mag_oracle = alpha * np.sign(-grad[idx_oracle])

        # .. the FW gap is an upper bound on the suboptimality and doubles ..
        # .. as stopping criterion ..
        g_t = x_t.T.dot(grad).ravel()[0] - grad[idx_oracle] * mag_oracle
        trace.append(g_t)
        if g_t <= tol:
            break

        # .. exact line search: the objective is quadratic, so the optimal ..
        # .. step size along the FW direction has a closed form ..
        q_t = A[:, idx_oracle] * mag_oracle - Ax
        step_size = min(q_t.dot(b - Ax) / q_t.dot(q_t), 1.)

        # .. convex combination of the previous iterate and the LMO vertex ..
        x_t = (1. - step_size) * x_t
        x_t[idx_oracle, 0] += step_size * mag_oracle
    return x_t, np.array(trace)
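
# .. a minimal variant sketch, not part of the original gist: the same ..
# .. iteration with the standard "oblivious" step size 2 / (t + 2) instead ..
# .. of exact line search (the name FW_oblivious is ours) ..
def FW_oblivious(alpha, max_iter=200, tol=1e-8):
    x_t = sparse.dok_matrix((n_features, 1))
    trace = []
    Atb = A.T.dot(b)
    for it in range(max_iter):
        Ax = x_t.T.dot(A.T).ravel()
        grad = A.T.dot(Ax) - Atb
        idx_oracle = np.argmax(np.abs(grad))
        mag_oracle = alpha * np.sign(-grad[idx_oracle])
        g_t = x_t.T.dot(grad).ravel()[0] - grad[idx_oracle] * mag_oracle
        trace.append(g_t)
        if g_t <= tol:
            break
        # .. decreasing schedule, gives O(1/t) convergence without any ..
        # .. extra matrix-vector products per iteration ..
        step_size = 2. / (it + 2.)
        x_t = (1. - step_size) * x_t
        x_t[idx_oracle, 0] += step_size * mag_oracle
    return x_t, np.array(trace)
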
# .. plot evolution of the FW gap ..
sol, trace = FW(.5 * n_features)
plt.plot(trace)
plt.yscale('log')
plt.xlabel('Number of iterations')
plt.ylabel('FW gap')
plt.title('FW on a Lasso problem')
plt.grid()
plt.show()

# .. fraction of nonzero coefficients in the solution ..
density = np.mean(sol.toarray().ravel() != 0)
print('Fraction of nonzero coefficients: %.2f%%' % (density * 100))
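
# .. quick sanity check, not part of the original gist: every FW iterate is ..
# .. a convex combination of points in the l1 ball of radius alpha, so the ..
# .. solution must remain feasible up to floating-point error ..
alpha = .5 * n_features
assert np.abs(sol.toarray()).sum() <= alpha * (1 + 1e-10)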
@sergeymsui

Great)