Last active
January 30, 2022 11:43
-
-
Save sunilkumardash9/21f95931882e16c268048ab4f72ef23c to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import numpy as np
from numpy import log, dot, exp, shape
import matplotlib.pyplot as plt
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split

# Generate a synthetic binary-classification dataset.
# Fix: the keyword was misspelled as "n_featues", which raises TypeError.
X, y = make_classification(n_features=4)

# Hold out 10% of the samples for evaluation.
# Fix: the original call was missing its closing parenthesis (SyntaxError).
X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.1)
def standardize(X_tr):
    """Standardize every column of *X_tr* in place to zero mean, unit variance.

    Mutates the array and returns None (in-place convention).
    """
    # Column-wise statistics computed once, then written back into the
    # existing array so the caller's reference is updated in place.
    col_means = np.mean(X_tr, axis=0)
    col_stds = np.std(X_tr, axis=0)
    X_tr[:, :] = (X_tr - col_means) / col_stds
def F1_score(y, y_hat):
    """Return the F1 score of binary predictions *y_hat* against labels *y*.

    Labels are expected to be 0/1. Returns 0.0 when there are no true
    positives (precision/recall undefined) — the original code raised
    ZeroDivisionError in that case.
    """
    tp, tn, fp, fn = 0, 0, 0, 0
    for actual, predicted in zip(y, y_hat):
        if actual == 1 and predicted == 1:
            tp += 1
        elif actual == 1 and predicted == 0:
            fn += 1
        elif actual == 0 and predicted == 1:
            fp += 1
        elif actual == 0 and predicted == 0:
            tn += 1
    if tp == 0:
        # No true positives: precision and recall are both 0 (or undefined),
        # so F1 is 0 by convention. Guards all three divisions below.
        return 0.0
    precision = tp / (tp + fp)
    recall = tp / (tp + fn)
    return 2 * precision * recall / (precision + recall)
class LogidticRegression:
    """Binary logistic regression trained with batch gradient descent.

    NOTE(review): the class name misspells "Logistic"; kept as-is for
    backward compatibility with existing callers.
    """

    def sigmoid(self, z):
        """Logistic function 1 / (1 + e^-z).

        z is clipped to [-500, 500] so exp() cannot overflow; within that
        range the result is unchanged to double precision.
        """
        z = np.clip(z, -500, 500)
        return 1 / (1 + exp(-z))

    def initialize(self, X):
        """Return (zero weight column incl. bias, X with a leading 1s column)."""
        weights = np.zeros((shape(X)[1] + 1, 1))
        X = np.c_[np.ones((shape(X)[0], 1)), X]
        return weights, X

    def fit(self, X, y, alpha=0.001, iter=400):
        """Fit weights by gradient descent and return the per-iteration costs.

        X: (m, n) feature matrix; y: length-m 0/1 labels.
        alpha: learning rate; iter: number of gradient steps.
        Stores the learned (n+1, 1) weight column on self.weights.
        """
        weights, X = self.initialize(X)
        # Reshape once; the original reshaped y on every iteration.
        y_col = np.reshape(y, (len(y), 1))

        def cost(theta):
            # Negative log-likelihood averaged over samples. Probabilities
            # are clipped away from 0/1 so log() never returns -inf/nan.
            eps = 1e-15
            p = np.clip(self.sigmoid(dot(X, theta)), eps, 1 - eps)
            cost0 = y_col.T.dot(log(p))
            cost1 = (1 - y_col).T.dot(log(1 - p))
            # .item(): the dot products yield a (1, 1) array; return a scalar
            # so assignment into cost_list is well-defined on all numpy versions.
            return (-(cost0 + cost1) / len(y)).item()

        cost_list = np.zeros(iter)
        for i in range(iter):
            # Gradient of the (unaveraged) log-loss: X^T (sigmoid(Xw) - y).
            weights = weights - alpha * dot(X.T, self.sigmoid(dot(X, weights)) - y_col)
            cost_list[i] = cost(weights)
        self.weights = weights
        return cost_list

    def predict(self, X):
        """Return a list of 0/1 labels: 1 where sigmoid(x·w) > 0.5."""
        z = dot(self.initialize(X)[1], self.weights)
        return [1 if p > 0.5 else 0 for p in self.sigmoid(z)]
# Standardize both splits in place, then train and evaluate the model.
standardize(X_tr)
standardize(X_te)

classifier = LogidticRegression()
training_costs = classifier.fit(X_tr, y_tr)

train_predictions = classifier.predict(X_tr)
test_predictions = classifier.predict(X_te)

# F1 score on the training data, then on the held-out test data.
print(F1_score(y_tr, train_predictions))
print(F1_score(y_te, test_predictions))
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment