Skip to content

Instantly share code, notes, and snippets.

🎯
Focusing

Davis David Davisy

🎯
Focusing
View GitHub Profile
View ml_project_with_config_file.py
# Import important packages
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
import joblib
import os
import yaml
View open_yaml_file_in_python.py
# Read a YAML configuration file and print its parsed contents.
import yaml

# Bug fix: the body of the `with` block was not indented in the original
# snippet, which is a SyntaxError in Python.
# safe_load parses plain YAML without instantiating arbitrary Python
# objects — always prefer it over yaml.load for external files.
with open('my_config.yaml') as file:
    yaml_data = yaml.safe_load(file)

print(yaml_data)
View my_config.yaml
# INITIAL SETTINGS for the breast-cancer KNN training script.
data_directory: ../data/  # folder containing the raw dataset
data_name: breast-cancer-wisconsin.data  # dataset file inside data_directory
drop_columns: ["id"]  # columns removed before training
target_name: class  # label column name
test_size: 0.2  # presumably the held-out fraction passed to train_test_split — confirm in the training script
model_directory: ../models/  # where the trained model is written
model_name: KNN_classifier.pkl  # saved model file name
View ml_project-without_config_file.py
# Import important packages
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
import joblib
# Path to the dataset, relative to the script's working directory.
filename = "../data/breast-cancer-wisconsin.data"
View my_configuration.yaml
# Pipeline configuration: each section names the script implementing that
# stage; `train` also records where run artifacts are written.
# Bug fix: the nested keys had lost their indentation, which flattened the
# file into duplicate top-level `script_path` keys (invalid YAML mapping);
# the section nesting is restored below.
dataset:
  script_path: ../datasets/cifar10_keras.py
model:
  script_path: ../models/optimized.py
optimizer:
  script_path: ../optimizers/adam_keras.py
  initial_lr: 0.0001  # starting learning rate for the optimizer
train:
  script_path: ../train/train_keras.py
  artifacts_path: ../artifacts/cifar10_opt/
View explained_variance_ratio.py
# Fit PCA on the training features and plot each component's explained
# variance ratio.
# NOTE(review): `skplt` (presumably scikit-plot) and `X_train` are defined
# elsewhere in the original script — confirm the imports before reusing
# this fragment standalone.
from sklearn.decomposition import PCA
# No n_components given, so all components are kept; fixed seed for
# reproducible runs.
pca = PCA(random_state=42)
# train
pca.fit(X_train)
# Plots PCA components' explained variance ratios.
skplt.decomposition.plot_pca_component_variance(pca, figsize=(10, 8))
View precision_recall_curve.py
# Plot the precision–recall curve from validation labels and predicted
# class probabilities.
# Bug fix: the plot title read "XBG Classifier" — corrected to "XGB"
# (the model is an XGBoost classifier).
skplt.metrics.plot_precision_recall(
    y_val,
    y_probas,
    title="Precision Recall Curve for XGB Classifier",
    figsize=(10, 8),
)
View roc_curve.py
# Compute class probabilities on the validation set, then plot per-class
# ROC curves.
# Bug fix: the plot title read "XBG Classifier" — corrected to "XGB"
# (the model is an XGBoost classifier).
y_probas = xgb_classifier.predict_proba(X_val)
skplt.metrics.plot_roc(
    y_val,
    y_probas,
    title="Roc Curve for XGB Classifier",
    figsize=(10, 8),
)
View generate_comfusion_matrix.py
# Predict validation-set labels with the trained XGB model, then render
# the row-normalized confusion matrix.
y_pred = xgb_classifier.predict(X_val)
skplt.metrics.plot_confusion_matrix(
    y_val, y_pred,
    normalize=True,  # show proportions per true class rather than raw counts
    title="Confusion Matrix for XGB Classifier",
    figsize=(10, 8))
View feature_importances.py
# Train an XGBoost classifier and plot its feature importances.
xgb_classifier = XGBClassifier(n_jobs=-1)  # n_jobs=-1: use all CPU cores
# train
xgb_classifier.fit(X_train, y_train)
# Generates a plot of the classifier's feature importances.
# Bug fix: the call below was missing its closing parenthesis in the
# original snippet (SyntaxError as pasted); restored here.
skplt.estimators.plot_feature_importances(
    xgb_classifier,
    feature_names=feature_columns,
    figsize=(10, 8),
)
You can’t perform that action at this time.