'''
Logistic Regression
'''
# Assumes X_train, X_test, y_train, y_test come from an earlier train/test split
# (179 samples held out for testing, per the reports below).
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score, confusion_matrix, f1_score, roc_curve, precision_score, classification_report

# A very large C effectively disables L2 regularization.
lgmodel = LogisticRegression(max_iter=100, C=1e5)
lgmodel.fit(X_train, y_train)
ypred = lgmodel.predict(X_test)
print(lgmodel.score(X_train, y_train))   # training accuracy
print(confusion_matrix(y_test, ypred))
print(classification_report(y_test, ypred))
'''
0.8132022471910112
[[94 12]
 [31 42]]
             precision    recall  f1-score   support

          0       0.75      0.89      0.81       106
          1       0.78      0.58      0.66        73

avg / total       0.76      0.76      0.75       179
'''
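# accuracy_score, precision_score, f1_score and roc_curve are imported above but never
# called; a minimal sketch of how they could be applied to the same logistic regression
# test predictions (uses only lgmodel, X_test, y_test and ypred as defined above).
print(accuracy_score(y_test, ypred))
print(precision_score(y_test, ypred))
print(f1_score(y_test, ypred))
fpr, tpr, thresholds = roc_curve(y_test, lgmodel.predict_proba(X_test)[:, 1])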
########################################################
'''
SVM
'''
from sklearn.svm import SVC

# probability=True lets the SVC supply class probabilities to the soft-voting ensemble below.
modelsvc = SVC(probability=True, gamma='auto')
modelsvc.fit(X_train, y_train)
ypred = modelsvc.predict(X_test)
print(modelsvc.score(X_train, y_train))   # training accuracy
print(confusion_matrix(y_test, ypred))
print(classification_report(y_test, ypred))
'''
0.7893258426966292
[[90 16]
 [24 49]]
             precision    recall  f1-score   support

          0       0.79      0.85      0.82       106
          1       0.75      0.67      0.71        73

avg / total       0.77      0.78      0.77       179
'''
########################################################
'''
Decision tree
'''
from sklearn.tree import DecisionTreeClassifier

# Unconstrained tree: depth and splitting criterion left at their defaults.
dtmodel = DecisionTreeClassifier()
dtmodel.fit(X_train, y_train)
ypred = dtmodel.predict(X_test)
print(dtmodel.score(X_train, y_train))   # training accuracy
print(confusion_matrix(y_test, ypred))
print(classification_report(y_test, ypred))
'''
0.8637640449438202
[[95 11]
 [35 38]]
             precision    recall  f1-score   support

          0       0.73      0.90      0.81       106
          1       0.78      0.52      0.62        73

avg / total       0.75      0.74      0.73       179
'''
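# The tree scores noticeably better on the training set (0.86) than on the held-out test
# set (about 0.74 from the matrix above); a hedged sketch of limiting tree depth to reduce
# variance (max_depth=4 and random_state=0 are illustrative assumptions, not part of the
# original run).
dtmodel_pruned = DecisionTreeClassifier(max_depth=4, random_state=0)
dtmodel_pruned.fit(X_train, y_train)
print(dtmodel_pruned.score(X_train, y_train))
print(classification_report(y_test, dtmodel_pruned.predict(X_test)))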
########################################################
'''
Random Forest
'''
from sklearn.ensemble import RandomForestClassifier

# 60 trees; other hyperparameters left at their defaults.
rfmodel = RandomForestClassifier(n_estimators=60)
rfmodel.fit(X_train, y_train)
ypred = rfmodel.predict(X_test)
print(rfmodel.score(X_train, y_train))   # training accuracy
print(confusion_matrix(y_test, ypred))
print(classification_report(y_test, ypred))
'''
0.8637640449438202
[[95 11]
 [33 40]]
             precision    recall  f1-score   support

          0       0.74      0.90      0.81       106
          1       0.78      0.55      0.65        73

avg / total       0.76      0.75      0.74       179
'''
########################################################
'''
Gradient Boosting
'''
from sklearn.ensemble import GradientBoostingClassifier

# 100 boosting stages (the scikit-learn default).
gbm = GradientBoostingClassifier(n_estimators=100)
gbm.fit(X_train, y_train)
ypred = gbm.predict(X_test)
print(gbm.score(X_train, y_train))   # training accuracy
print(confusion_matrix(y_test, ypred))
print(classification_report(y_test, ypred))
'''
0.8455056179775281
[[96 10]
 [32 41]]
             precision    recall  f1-score   support

          0       0.75      0.91      0.82       106
          1       0.80      0.56      0.66        73

avg / total       0.77      0.77      0.76       179
'''
########################################################
'''
Ensemble Voting Classifier
'''
from sklearn.ensemble import VotingClassifier
# Soft voting averages the predicted class probabilities of the five fitted models above.
votingmodel = VotingClassifier(estimators=[('Logis', lgmodel), ('svc', modelsvc), ('dt', dtmodel),
                                           ('rf', rfmodel), ('gbm', gbm)], voting='soft', n_jobs=5)
voting = votingmodel.fit(X_train, y_train)
ypred = voting.predict(X_test)
print(confusion_matrix(y_test, ypred))
print(classification_report(y_test, ypred))
# NOTE: the recorded output below covers 418 samples, so it was evidently produced on a
# larger evaluation split than the 179-sample test set used for the individual models above.
'''
[[256  10]
 [ 30 122]]
             precision    recall  f1-score   support

          0       0.90      0.96      0.93       266
          1       0.92      0.80      0.86       152

avg / total       0.91      0.90      0.90       418
'''
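# A minimal sketch, assuming the same split as above, that compares every fitted model on
# the held-out test set in one place (the individual sections only print training accuracy).
for name, model in [('logistic', lgmodel), ('svc', modelsvc), ('tree', dtmodel),
                    ('forest', rfmodel), ('gbm', gbm), ('voting', voting)]:
    print(name, model.score(X_test, y_test))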