Skip to content

Instantly share code, notes, and snippets.

@arose13
Last active September 27, 2019 15:53
Show Gist options
  • Save arose13/bd1efb7a31493274d5e0dd93bd32c712 to your computer and use it in GitHub Desktop.
Save arose13/bd1efb7a31493274d5e0dd93bd32c712 to your computer and use it in GitHub Desktop.
Compute Lasso and optimize lambda without needing cross validation
import numpy as np
import sklearn.linear_model as lm
class FastLasso:
    """
    Lasso regression that selects the regularization strength (lambda/alpha)
    on a held-out validation set instead of cross validation.

    It computes the full Lasso coefficient path once with
    ``sklearn.linear_model.lasso_path`` and then picks the point on the path
    with the best R^2 score on the validation data.

    Attributes
    ----------
    alphas : ndarray or None
        The alpha values along the regularization path (set by ``fit``).
    coefs : ndarray or None
        Coefficients along the path, shape ``(n_features, n_alphas)``.
    score_path_ : list or None
        Validation R^2 at each point of the path.
    best_iteration_ : int
        Index into the path with the highest validation R^2.
    best_score_ : float
        The best validation R^2 found.
    """

    def __init__(self, verbose=False):
        # Path results are filled in by fit(); None until then.
        self.alphas = None
        self.coefs = None
        self.score_path_ = None
        self.best_iteration_ = -1
        self.best_score_ = -np.inf
        self.verbose = verbose

    def fit(self, X, y, x_val, y_val):
        """
        Fit the Lasso path on (X, y) and choose the best alpha using a
        validation set — no cross validation required.

        :param X: training feature matrix, shape (n_samples, n_features)
        :param y: training targets, shape (n_samples,)
        :param x_val: validation feature matrix
        :param y_val: validation targets
        :return: self
        """
        from sklearn.metrics import r2_score

        # Compute the whole regularization path in one call.
        # lasso_path returns (alphas, coefs, dual_gaps, ...); we keep the
        # first two. NOTE(review): lasso_path fits with no intercept term,
        # so predictions below are plain X @ coef — confirm inputs are
        # centered if an intercept matters.
        self.alphas, self.coefs, *_ = lm.lasso_path(X, y, verbose=self.verbose)

        # Score every point on the path against the validation set and
        # remember the best one.
        self.score_path_ = []
        for i in range(self.coefs.shape[1]):
            coef_i = self.coefs[:, i]
            score_i = r2_score(y_val, x_val @ coef_i)
            if score_i > self.best_score_:
                self.best_iteration_ = i
                self.best_score_ = score_i
            self.score_path_.append(score_i)
        return self

    def predict(self, X):
        """Predict using the coefficients at the best validation iteration."""
        return X @ self.coefs[:, self.best_iteration_]

    def score(self, X, y):
        """Return the R^2 of ``self.predict(X)`` against ``y``."""
        from sklearn.metrics import r2_score
        return r2_score(y, self.predict(X))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment