@gabriel19913
Created May 21, 2019 01:17
from sklearn.datasets import load_wine
from sklearn.utils import shuffle
import numpy as np
from sklearn.model_selection import KFold
from sklearn.preprocessing import scale
from sklearn import tree
from sklearn.metrics import accuracy_score
from sklearn.model_selection import cross_val_score
random_state = 0
np.random.seed(random_state)
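# Load the wine data, shuffle it, and standardize the features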
features, target = load_wine(return_X_y=True)
features, target = shuffle(features, target)
features = scale(features)
scores = []
modelo = tree.DecisionTreeClassifier(random_state=random_state)
kf = KFold(n_splits=10, shuffle=True, random_state=random_state)  # shuffle=True is required when passing random_state
for train_index, test_index in kf.split(features):
    X_train, X_test = features[train_index], features[test_index]
    y_train, y_test = target[train_index], target[test_index]
    modelo.fit(X_train, y_train)        # fit on the training fold only, not the full dataset
    y_pred = modelo.predict(X_test)     # predict on the held-out fold
    scores.append(accuracy_score(y_test, y_pred))
print(scores)
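
# cross_val_score is imported above but never used; as a minimal sketch (not part of
# the original gist), the same 10-fold accuracy estimate can be obtained in one call,
# reusing the modelo, kf, features and target objects defined above:
cv_scores = cross_val_score(modelo, features, target, cv=kf, scoring='accuracy')
print(cv_scores, cv_scores.mean())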