# Imports needed for the stacking example
import numpy as np
from sklearn.base import clone
from sklearn.datasets import load_iris
from sklearn.ensemble import RandomForestClassifier, GradientBoostingClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split, KFold

# Load the dataset
data = load_iris()
X, y = data.data, data.target
# Split the data into training and testing sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
# Define base classifiers
base_classifiers = [
    RandomForestClassifier(n_estimators=100, random_state=42),
    GradientBoostingClassifier(n_estimators=100, random_state=42),
]
# Define a meta-classifier
meta_classifier = LogisticRegression()
# Create an array to hold the predictions from base classifiers
base_classifier_predictions = np.zeros((len(X_train), len(base_classifiers)))
# Perform stacking using K-fold cross-validation
kf = KFold(n_splits=5, shuffle=True, random_state=42)
for train_index, val_index in kf.split(X_train):
    train_fold, val_fold = X_train[train_index], X_train[val_index]
    train_target, val_target = y_train[train_index], y_train[val_index]
    for i, clf in enumerate(base_classifiers):
        cloned_clf = clone(clf)
        cloned_clf.fit(train_fold, train_target)
        base_classifier_predictions[val_index, i] = cloned_clf.predict(val_fold)
# Train the meta-classifier on base classifier predictions
meta_classifier.fit(base_classifier_predictions, y_train)
# Make predictions using the stacked ensemble
# Fit each base classifier on the full training set first (only clones were
# fitted inside the cross-validation loop above)
for clf in base_classifiers:
    clf.fit(X_train, y_train)
stacked_predictions = np.zeros((len(X_test), len(base_classifiers)))
for i, clf in enumerate(base_classifiers):
    stacked_predictions[:, i] = clf.predict(X_test)
# Make final predictions using the meta-classifier
final_predictions = meta_classifier.predict(stacked_predictions)
# Evaluate the stacked ensemble's performance
accuracy = accuracy_score(y_test, final_predictions)
print(f"Stacked Ensemble Accuracy: {accuracy:.2f}")