sschnug / lasso_init_sklearn_demo.py (last active October 3, 2018)
lasso init in sklearn

import numpy as np
from sklearn.datasets import make_regression  # the samples_generator module was removed from sklearn
from sklearn.linear_model import Lasso

X, y = make_regression(n_samples=200, n_features=5000, random_state=0)

# cold start: coordinate descent initialized at the zero vector
dense_lasso = Lasso(alpha=1, fit_intercept=False, max_iter=1000)
dense_lasso.fit(X, y)
print('iterations needed: ', dense_lasso.n_iter_)

# warm_start=True makes fit() reuse an existing coef_ as its initialization
init_lasso = Lasso(alpha=1, fit_intercept=False, max_iter=1000, warm_start=True)
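# (sketch, not in the original gist) a plausible way the demo continues:
# seed the warm-started estimator with the dense solution and refit; this
# should need fewer coordinate-descent iterations than the cold start above.
init_lasso.coef_ = dense_lasso.coef_.copy()  # picked up by fit() since warm_start=True
init_lasso.fit(X, y)
print('iterations needed (warm start): ', init_lasso.n_iter_)
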
import numpy as np
import cvxpy as cvx  # replaces the original star-import; matches the cvx alias used in the last snippet below
from scipy.spatial.distance import pdist, squareform

# Based on the formulation described
# @ https://en.wikipedia.org/wiki/Travelling_salesman_problem (February 2016)
np.random.seed(1)
N = 5
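# (sketch, not in the original gist) a minimal completion of the model in
# cvxpy 1.x syntax, assuming the Miller-Tucker-Zemlin (MTZ) subtour-elimination
# variant of the Wikipedia formulation referenced above.
points = np.random.rand(N, 2)
D = squareform(pdist(points))                  # pairwise distance matrix

X = cvx.Variable((N, N), boolean=True)         # X[i, j] = 1 iff the tour goes i -> j
u = cvx.Variable(N)                            # MTZ ordering variables

constraints = [cvx.sum(X, axis=0) == 1,        # enter every city exactly once
               cvx.sum(X, axis=1) == 1,        # leave every city exactly once
               cvx.diag(X) == 0]               # no self-loops
for i in range(1, N):                          # MTZ constraints forbid subtours
    for j in range(1, N):
        if i != j:
            constraints.append(u[i] - u[j] + N * X[i, j] <= N - 1)

prob = cvx.Problem(cvx.Minimize(cvx.sum(cvx.multiply(D, X))), constraints)
prob.solve()                                   # requires a MIP-capable solver, e.g. GLPK_MI
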
sschnug / scipy.py (created October 25, 2017)
example callback change

import numpy as np
from scipy import optimize

### Setup inputs to the optimizer
def power_curve(x, beta1, beta2):
    return beta1 * x**beta2

def mm_curve(x, beta1, beta2):
    """Michaelis-Menten-style saturation curve; beta2 >= 0."""
    return beta1 * x / (beta2 + x)  # body truncated in the gist; assumed from the name
import itertools
import networkx as nx
""" DATA """
S = ('S', [0, 1, 2]) # ranking: good to bad
K = ('K', [1, 2, 0])
L = ('L', [1, 2, 0])
D = ('D', [0, 2, 1])
A = ('A', [0, 2, 1])
J = ('J', [2, 1, 0])
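# (sketch, not in the original gist) one plausible continuation given the
# itertools + networkx imports: build the pairwise-majority digraph over the
# three items and read off an aggregate ranking.
voters = [S, K, L, D, A, J]
G = nx.DiGraph()
for a, b in itertools.combinations(range(3), 2):
    wins_a = sum(v[1].index(a) < v[1].index(b) for v in voters)  # voters preferring a to b
    if wins_a * 2 > len(voters):
        G.add_edge(a, b)
    elif wins_a * 2 < len(voters):
        G.add_edge(b, a)
print(list(nx.topological_sort(G)))  # aggregate order, good to bad (if acyclic)
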
import numpy as np
from scipy.optimize import minimize_scalar, minimize
import matplotlib.pyplot as plt
""" EXAMPLE PROB """
N_EVAL = 5
N_BOUNDS = (0., 1.2)
grid1d = np.linspace(*N_BOUNDS, N_EVAL)
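# (sketch, not in the original gist) a plausible continuation: evaluate a
# made-up objective on the coarse grid, then polish the best grid point with
# a bounded local minimizer.
def f(x):
    x = float(np.ravel(x)[0])         # minimize() passes a length-1 array
    return np.sin(3.0 * x) + 0.5 * x  # hypothetical example objective

y_grid = [f(x) for x in grid1d]
x0 = grid1d[int(np.argmin(y_grid))]          # best coarse grid point
res = minimize(f, x0=x0, bounds=[N_BOUNDS])  # L-BFGS-B is chosen when bounds are given
plt.plot(grid1d, y_grid, 'o-')
plt.axvline(res.x[0], color='r')
plt.show()
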
""" IMPORTANT REMARK:
This is an IP-approach.
Simple examples, like the one given in the question, result in a model for which it SEEMS, produce integral basic feasible solutions.
This MIGHT indicate that the constraint matrix is totally unimodular and no IP/branching is necessary as an pure LP-solver will do.
To try this, change the following:
x = cvx.Variable((N_BLOCKS, N_SLOTS)) #, boolean=True) = now continuous variables
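# (sketch, not in the original gist) a toy instance of the remark above: an
# assignment-style model whose LP relaxation already returns 0/1 values; the
# sizes N_BLOCKS/N_SLOTS and cost matrix C are made up for illustration.
import numpy as np
import cvxpy as cvx

np.random.seed(0)
N_BLOCKS, N_SLOTS = 4, 4
C = np.random.rand(N_BLOCKS, N_SLOTS)

x = cvx.Variable((N_BLOCKS, N_SLOTS))      # continuous, no boolean=True
constraints = [x >= 0, x <= 1,
               cvx.sum(x, axis=1) == 1,    # each block assigned to one slot
               cvx.sum(x, axis=0) <= 1]    # each slot used at most once
prob = cvx.Problem(cvx.Minimize(cvx.sum(cvx.multiply(C, x))), constraints)
prob.solve()
print(np.round(x.value, 3))                # integral: assignment constraint matrices are totally unimodular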