Skip to content

Instantly share code, notes, and snippets.

@devil-cyber
Last active November 11, 2020 11:34
Show Gist options
  • Save devil-cyber/05a5db982b391628b462074be373816f to your computer and use it in GitHub Desktop.
Save devil-cyber/05a5db982b391628b462074be373816f to your computer and use it in GitHub Desktop.
Logistic Regression using PyTorch
import torch
import numpy as np
import torch.nn as nn
from sklearn import datasets
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split
# Load the breast-cancer dataset (numeric features, binary labels).
bc = datasets.load_breast_cancer()
x, y = bc.data, bc.target
n_samples, n_features = x.shape
print(n_samples, n_features)

# Hold out 20% of the samples for evaluation.
X_train, X_test, y_train, y_test = train_test_split(
    x, y, test_size=.2, random_state=42
)

# Standardize features: fit the scaler on the training split only,
# then apply the same transform to the test split (no leakage).
scaler = StandardScaler()
X_train = scaler.fit_transform(X_train)
X_test = scaler.transform(X_test)

# Convert everything to float32 tensors; labels become column
# vectors (N, 1) so they line up with the model's output shape.
X_train = torch.from_numpy(X_train.astype(np.float32))
X_test = torch.from_numpy(X_test.astype(np.float32))
y_train = torch.from_numpy(y_train.astype(np.float32)).view(-1, 1)
y_test = torch.from_numpy(y_test.astype(np.float32)).view(-1, 1)
class LogisticsRegression(nn.Module):
    """Binary classifier: one linear layer followed by a sigmoid.

    Produces a probability in (0, 1) for each input row, suitable
    for use with ``nn.BCELoss``.
    """

    def __init__(self, input_dim):
        super().__init__()
        # Single output unit: the score for the positive class.
        self.linear = nn.Linear(input_dim, 1)

    def forward(self, x):
        # Squash the linear score into a probability.
        return torch.sigmoid(self.linear(x))
# Build the model plus its loss and optimizer.
model = LogisticsRegression(n_features)
learning_rate = 0.01
# BCELoss expects probabilities in (0, 1) and float targets of shape (N, 1).
criterion = nn.BCELoss()
# NOTE(review): "optmizer" is a typo for "optimizer"; the name is kept
# because the training loop below refers to it.
optmizer = torch.optim.Adam(model.parameters(), lr=learning_rate)
n_iter = 100
# Train with full-batch gradient descent for n_iter epochs.
for epoch in range(n_iter):
    # Forward pass: predicted probabilities for the whole training set.
    y_pred = model(X_train)
    loss = criterion(y_pred, y_train)

    # Clear stale gradients BEFORE accumulating new ones (conventional
    # order; equivalent here since gradients were zeroed after step).
    optmizer.zero_grad()
    loss.backward()
    optmizer.step()

    if (epoch + 1) % 10 == 0:
        # .item() extracts the Python float from the 0-dim loss tensor,
        # which formats reliably across PyTorch versions.
        print(f"epoch: {epoch + 1}, loss={loss.item():.4f}")
# Evaluate on the held-out test set; gradients are not needed for inference.
with torch.no_grad():
    y_predicted = model(X_test)
    # Threshold probabilities at 0.5 to get hard 0/1 class predictions.
    y_predicted_class = y_predicted.round()
    # Fraction of correct predictions as a percentage. .item() turns the
    # tensor count into a Python number so acc is a plain float, and the
    # denominator is tied to y_test so it always matches the label count.
    acc = (y_predicted_class.eq(y_test).sum().item() / y_test.shape[0]) * 100
    print(f"Accuracy: {acc:.4f}")
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment