Last active
November 19, 2021 13:56
-
-
Save chrisliatas/47d970a7cd287f823d49684ef9290b66 to your computer and use it in GitHub Desktop.
LightGBM parameters for classification problem.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Production parameters for a LightGBM binary classifier (LGBMClassifier kwargs).
lgbm_params = {
    'boosting_type': 'goss',     # Gradient-based One-Side Sampling (goss)
    'learning_rate': 0.05,       # Boosting learning rate, default = 0.1
    'n_estimators': 10000,       # Number of boosted trees to fit, default = 100
    'objective': 'binary',       # the learning task and the corresponding learning objective, default = None
    'class_weight': 'balanced',  # Weights associated with classes, the 'balanced' mode*
    'reg_alpha': 0.1,            # L1 regularization term on weights (L1 penalty), default = 0.0
    'reg_lambda': 0.1,           # L2 regularization term on weights (L2 penalty), default = 0.0
    'random_state': 67,          # fixed seed for reproducible fits
    'n_jobs': -1,                # use all available CPU cores
}
# * 'balanced' uses the values of y to automatically adjust weights inversely
#   proportional to class frequencies in the input data,
#   as n_samples / (n_classes * np.bincount(y))
# Hyper-parameter search starting point for a LightGBM binary classifier
# (LGBMClassifier kwargs for the tuning/search runs).
srch_params = {
    'boosting_type': 'gbdt',  # Gradient Boosting Decision Tree
    'num_leaves': 100,        # Maximum tree leaves for base learners
    'max_depth': 13,          # Maximum tree depth for base learners
    'learning_rate': 0.3,     # Boosting learning rate, default = 0.1
    'n_estimators': 10000,    # Number of boosted trees to fit, default = 100
    'objective': 'binary',    # the learning task and the corresponding learning objective, default = None
    'is_unbalance': True,     # Weights associated with classes, same as ['class_weight': 'balanced']
    'subsample': .8,          # Subsample ratio of the training instance
    'subsample_freq': 10,     # frequency for bagging, bagging at every k-iteration.
    'reg_alpha': 0.1,         # L1 regularization term on weights (L1 penalty), default = 0.0
    'reg_lambda': 0.1,        # L2 regularization term on weights (L2 penalty), default = 0.0
    'random_state': 67,       # fixed seed for reproducible fits
    'n_jobs': -1,             # use all available CPU cores
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment