Create a gist now

Instantly share code, notes, and snippets.

Ridge coefficients as a function of the regularization parameter
"""
Ridge coefficients as a function of the regularization parameter
----------------------------------------------------------------
And highlight in dashed lines the optimal value by cross-validation.
Author: Fabian Pedregosa -- <fabian@fseoane.net>
"""
# Python 2 `print __doc__` is a SyntaxError on Python 3; use the function form.
print(__doc__)
import numpy as np
import pylab as pl
from scipy import linalg
from sklearn import linear_model
# ..
# .. Local variables ..
n, p = 20, 10  # n samples, p features
# X is a Hilbert-like matrix, X[i, j] = 1 / (i + j + 1) -- severely
# ill-conditioned, which makes the coefficient path sensitive to alpha.
X = 1. / (np.arange(1, p + 1) + np.arange(0, n)[:, np.newaxis])
y = np.ones(n)
n_alphas = 200
alphas = np.logspace(-14, -7, n_alphas)
# ..
# .. Ridge path using an SVD ..
# With X = U diag(s) Vt, the ridge solution for a given alpha is
#   w(alpha) = V diag(s / (s**2 + alpha)) U.T y
U, s, Vt = linalg.svd(X, full_matrices=False)
# Broadcasting: rows index alphas, columns index singular values,
# so d has shape (n_alphas, p).  (The original `s[:, np.newaxis].T ** 2`
# was an unnecessary reshape -- identical to `s ** 2` here.)
d = s / (s ** 2 + alphas[:, np.newaxis])
Uy = U.T.dot(y)
coefs = np.dot(d * Uy, Vt)  # one row of ridge coefficients per alpha
# ..
# .. Best cross-validation score ..
# RidgeCV.fit returns the fitted estimator itself, so build and fit in one step.
ridge_cv = linear_model.RidgeCV(alphas=alphas, fit_intercept=False).fit(X, y)
# ..
# .. Display results ..
ax = pl.gca()
# Dash-dot vertical line marks the alpha selected by cross-validation.
ax.vlines(ridge_cv.alpha_, np.min(coefs), np.max(coefs), linestyle='dashdot')
ax.plot(alphas, coefs)
ax.set_xscale('log')
ax.set_xlim(ax.get_xlim()[::-1])  # reverse axis: strongest regularization first
ax.set_xlabel('alpha')
ax.set_ylabel('weights')
ax.set_title('Ridge coefficients as a function of the regularization parameter')
ax.axis('tight')
pl.show()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment