# confusion matrix in sklearn
from sklearn.metrics import confusion_matrix, classification_report
# actual values
actual = [1, 0, 0, 1, 0, 0, 1, 0, 0, 1]
# predicted values
predicted = [1, 0, 0, 1, 0, 0, 0, 1, 0, 0]
# confusion matrix; labels=[1, 0] puts the positive class (1) first
# instead of sklearn's default [0, 1] ordering
matrix = confusion_matrix(actual, predicted, labels=[1, 0])
print('Confusion matrix :\n', matrix)
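# expected output for this data (rows/columns ordered [1, 0]):
# [[2 2]
#  [1 5]]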
# outcome values order in sklearn: with labels=[1, 0], flattening the
# matrix above yields tp, fn, fp, tn (no need to recompute it)
tp, fn, fp, tn = matrix.ravel()
print('Outcome values :\n', tp, fn, fp, tn)
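# sanity check (not part of the original snippet): the per-class metrics
# reported below can be derived by hand from tp, fn, fp, tn
accuracy = (tp + tn) / (tp + fn + fp + tn)          # 7/10 = 0.70
precision = tp / (tp + fp)                          # 2/3 ~= 0.67 for class 1
recall = tp / (tp + fn)                             # 2/4 = 0.50 for class 1
f1 = 2 * precision * recall / (precision + recall)  # ~= 0.57 for class 1
print('Manual metrics :', accuracy, precision, recall, f1)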
# classification report for precision, recall, f1-score and accuracy
# (stored in its own variable so it does not shadow the matrix above)
report = classification_report(actual, predicted, labels=[1, 0])
print('Classification report :\n', report)
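# optional: a minimal plotting sketch, not in the original snippet; assumes
# scikit-learn >= 0.22 (for ConfusionMatrixDisplay) and matplotlib installed
from sklearn.metrics import ConfusionMatrixDisplay
import matplotlib.pyplot as plt

disp = ConfusionMatrixDisplay(confusion_matrix=matrix, display_labels=[1, 0])
disp.plot()
plt.show()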