Last active
July 11, 2020 06:22
-
-
Save beannguyen/640ef1588313b429a75ee5bd7df8c47c to your computer and use it in GitHub Desktop.
Custom ml_cross_val_score function, adapted from mlfinlab for use with a Keras model
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import numpy as np
import pandas as pd

from sklearn.metrics import log_loss, f1_score, accuracy_score
def ml_cross_val_score(
        baseline_fn,
        X: pd.DataFrame,
        y: pd.Series,
        cv_gen,
        sample_weight_train: np.ndarray = None,
        sample_weight_score: np.ndarray = None,
        scoring=log_loss):
    """Cross-validate a Keras classifier built by ``baseline_fn`` over ``cv_gen`` folds.

    Adaptation of mlfinlab's ``ml_cross_val_score`` for Keras models: a fresh
    model is constructed for every fold, fitted on that fold's training indices
    and scored on its test indices.

    :param baseline_fn: factory invoked as ``baseline_fn(input_dim=...)``; must return
        a compiled Keras classifier exposing ``fit``, ``predict_proba`` and
        ``predict_classes`` (and ``classes_`` when scoring with log-loss —
        NOTE(review): plain Keras models do not define ``classes_``; confirm the
        factory's wrapper provides it).
    :param X: feature matrix.
    :param y: labels aligned with ``X``.
    :param cv_gen: splitter with a ``split(X=..., y=...)`` method (e.g. mlfinlab's
        PurgedKFold).
    :param sample_weight_train: per-sample fit weights; defaults to all ones.
    :param sample_weight_score: per-sample scoring weights; defaults to all ones.
    :param scoring: sklearn metric callable. ``log_loss`` is scored on predicted
        probabilities and negated (so greater is better); any other metric is
        scored on predicted class labels.
    :return: ``np.ndarray`` with one score per fold.
    """
    # pylint: disable=invalid-name
    # If no sample_weight then broadcast a value of 1 to all samples (full weight).
    if sample_weight_train is None:
        sample_weight_train = np.ones((X.shape[0],))
    if sample_weight_score is None:
        sample_weight_score = np.ones((X.shape[0],))

    # Score model on KFolds
    ret_scores = []
    for train, test in cv_gen.split(X=X, y=y):
        print(f'Fold {len(ret_scores) + 1}')
        X_train, y_train = X.iloc[train, :], y.iloc[train]
        # NOTE(review): compute_class_weight and callbacks are free names that must be
        # defined at module level by the surrounding script — confirm they exist.
        # (sklearn's compute_class_weight takes (class_weight, classes=, y=), so this
        # call presumably targets a custom helper.)
        class_weights = compute_class_weight(y_train)
        classifier = baseline_fn(input_dim=X_train.shape[1])
        classifier.fit(X_train, y_train,
                       validation_split=0.2,
                       sample_weight=sample_weight_train[train],
                       class_weight=class_weights,
                       epochs=100, verbose=2, callbacks=callbacks)
        # Fix: compare callables by identity ('is'), not '=='. The original used '=='
        # and silenced pylint's comparison-with-callable warning instead of fixing it.
        if scoring is log_loss:
            prob = classifier.predict_proba(X.iloc[test, :])
            # Negate log-loss so that, like sklearn scorers, greater is better.
            score = -1 * scoring(y.iloc[test], prob,
                                 sample_weight=sample_weight_score[test],
                                 labels=classifier.classes_)
        else:
            pred = classifier.predict_classes(X.iloc[test, :])
            score = scoring(y.iloc[test], pred,
                            sample_weight=sample_weight_score[test])
        print('Score: ', score)
        ret_scores.append(score)
    return np.array(ret_scores)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment