@kyleziegler
Created March 18, 2022 17:39
from skopt import BayesSearchCV
from sklearn.ensemble import GradientBoostingClassifier
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

plt.rcParams["figure.dpi"] = 100
plt.rcParams["figure.figsize"] = [10, 4]
def bayes_search_cv_example():
    # Expects X_train, X_test, y_train, y_test to be defined in the enclosing scope.
    model = GradientBoostingClassifier(random_state=0, n_estimators=50)

    # Param n_iter: number of parameter settings that are sampled;
    # a tradeoff between runtime and quality of the solution.
    # Param cv: number of cross-validation folds, default is 3.
    # To list the available scoring strings: sklearn.metrics.get_scorer_names()
    opt = BayesSearchCV(
        model,
        {
            'learning_rate': list(np.arange(0.1, 0.4, 0.1)),
            'n_estimators': list(range(50, 200, 50)),
            'min_samples_split': list(range(2, 10, 2))
        },
        n_iter=10,
        cv=5,
        n_jobs=-1,
        pre_dispatch="2*n_jobs",
        scoring='accuracy',  # classification metric; 'r2' is a regression score
        random_state=0
    )
    opt.fit(X_train, y_train)

    # Plot the mean validation score of each sampled parameter setting.
    results = pd.DataFrame(opt.cv_results_)
    plt.plot(results.index, results['mean_test_score'])
    plt.xlabel("Iteration")
    plt.ylabel("Mean validation score")
    plt.show()

    print("Best Validation Score:", opt.best_score_)
    print("Test Score:", opt.score(X_test, y_test))