Last active
August 13, 2018 04:09
-
-
Save seaslee/6436522 to your computer and use it in GitHub Desktop.
Logistic regression examples using scikit-learn: compares plain, L1-penalized, and L2-penalized models on the digits dataset across a sweep of regularization strengths.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# -*- coding:utf-8 -*-
"""Compare logistic regression variants on the scikit-learn digits dataset.

Fits a default-parameter baseline, an L1-penalized model, and an
L2-penalized model, sweeping the inverse regularization strength C over
powers of ten, and reports the macro-averaged F1 score of each on a
held-out validation split.
"""
from sklearn import datasets
from sklearn import linear_model
from sklearn.metrics import f1_score

# #### load data and split into train and validation sets ####
data_digits = datasets.load_digits()
data = data_digits.data
target = data_digits.target
train_ratio = 0.8
data_num = data.shape[0]
train_num = int(data_num * train_ratio)
train_data = data[:train_num, :]
val_data = data[train_num:, :]
train_target = target[:train_num]
val_target = target[train_num:]

# #### train models and evaluate ####
# res holds the current sweep iteration's F1 scores for [plain, l1, l2];
# best_res tracks the best score seen so far for each variant.
best_res = [0, 0, 0]
res = [0, 0, 0]

# The baseline model takes no C, so it does not change across the sweep:
# fit and score it once instead of refitting it every iteration.
# max_iter raised because the default is known not to converge on digits.
lr_model = linear_model.LogisticRegression(max_iter=1000)
lr_model.fit(train_data, train_target)
val_predict = lr_model.predict(val_data)
# digits is a 10-class problem, so a multiclass average is required.
res[0] = f1_score(val_target, val_predict, average='macro')

for exp in range(1, 8):
    C = 10 ** exp  # inverse regularization strength: 10 .. 10**7
    # The L1 penalty needs a solver that supports it (liblinear or saga);
    # the default lbfgs solver rejects penalty='l1'.
    lr_l1_model = linear_model.LogisticRegression(
        C=C, penalty='l1', solver='liblinear')
    lr_l2_model = linear_model.LogisticRegression(
        C=C, penalty='l2', max_iter=1000)

    # #### fit models ####
    lr_l1_model.fit(train_data, train_target)
    lr_l2_model.fit(train_data, train_target)

    # #### predict on the validation split ####
    val_l1_predict = lr_l1_model.predict(val_data)
    val_l2_predict = lr_l2_model.predict(val_data)

    # #### score ####
    res[1] = f1_score(val_target, val_l1_predict, average='macro')
    res[2] = f1_score(val_target, val_l2_predict, average='macro')

    # Distinct index name so the sweep variable above is not shadowed.
    for j in range(len(best_res)):
        if res[j] > best_res[j]:
            best_res[j] = res[j]

    print('====================')
    print('logistic regression, f_score : %f' % res[0])
    print('logistic regression with l1 penalty, f_score : %f' % res[1])
    print('logistic regression with l2 penalty, f_score: %f' % res[2])

print('====================')
print('logistic regression, best f_score : %f' % best_res[0])
print('logistic regression with l1 penalty, best f_score : %f' % best_res[1])
print('logistic regression with l2 penalty, best f_score: %f' % best_res[2])
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment