Skip to content

Instantly share code, notes, and snippets.

@FreeFly19
Last active July 21, 2023 16:17
Show Gist options
  • Save FreeFly19/86c8c858daa6085b26e0c62dde90abdd to your computer and use it in GitHub Desktop.
Logistic Regression from scratch with PyTorch
import torch
# Training data: 1-D inputs with binary class labels
# (small x -> class 0, large x -> class 1).
x = torch.tensor([1.5, 2.8, 15.0, 19.5])
y = torch.tensor([0., 0., 1., 1.])

# Learnable parameters of the logistic model, tracked by autograd.
w = torch.tensor([-0.3], requires_grad=True)
b = torch.tensor([0.1232154], requires_grad=True)


def model(x):
    """Logistic regression: sigmoid of the affine map x*w + b."""
    return torch.sigmoid(x * w + b)


lr = 0.1  # learning rate for plain gradient descent

for i in range(100):
    y_pred = model(x)
    print(y_pred)

    # Binary cross-entropy, element-wise, then averaged over the batch.
    errors = -(y * torch.log(y_pred) + (1 - y) * torch.log(1 - y_pred))
    loss = errors.mean()
    loss.backward()

    # SGD step. The original rebuilt each parameter with
    # torch.tensor([w - lr * w.grad], requires_grad=True), which
    # copy-constructs from an existing tensor (warned against in modern
    # PyTorch) and only works because the parameters are 1-element tensors.
    # The idiomatic, numerically identical form is an in-place update under
    # no_grad, followed by zeroing the accumulated gradients.
    with torch.no_grad():
        w -= lr * w.grad
        b -= lr * b.grad
    w.grad.zero_()
    b.grad.zero_()

    print(f'loss: {loss.item():.4f}')
    print(f'w: {w.item():.4f}')
    print(f'b: {b.item():.4f}')
    print()

# Final report: predicted probabilities, thresholded classes, ground truth.
print(model(x))
print(model(x) > 0.5)
print(y > 0.5)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment