@TheBojda · Created May 21, 2022
Linear regression with PyTorch autograd
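The script below fits the line y = W*x + b to noisy synthetic data: autograd computes the gradients of a mean-squared-error loss, and a hand-written training step applies plain gradient descent to W and b.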
import torch
import matplotlib.pyplot as plt

# Linear model y = W*x + b; W and b are trainable scalar parameters
class Model:
    def __init__(self):
        self.W = torch.tensor(16., requires_grad=True)
        self.b = torch.tensor(10., requires_grad=True)

    def __call__(self, x):
        return self.W * x + self.b
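# Because W and b are leaf tensors with requires_grad=True, autograd records
# every operation that uses them, and loss.backward() below fills in W.grad
# and b.grad. Quick sanity check with the initial values above:
#   Model()(torch.tensor(2.))  # tensor(42., grad_fn=<AddBackward0>), i.e. 16*2 + 10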
TRUE_W = 3.0 # slope
TRUE_b = 0.5 # intercept
NUM_EXAMPLES = 1000
X = torch.normal(0.0, 1.0, size=(NUM_EXAMPLES,))
noise = torch.normal(0.0, 1.0, size=(NUM_EXAMPLES,))
y = X * TRUE_W + TRUE_b + noise
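# X and y now hold 1000 noisy samples of the line y = 3x + 0.5, so training
# should recover W close to TRUE_W and b close to TRUE_b.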
model = Model()
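# Plot the untrained model: with W=16 and b=10 the predictions sit far from the data.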
plt.figure()
plt.scatter(X, y, label="true")
plt.scatter(X, model(X).detach().numpy(), label="predicted")
plt.legend()
plt.show()
# mean squared error between targets and predictions
def loss(y, y_pred):
    return torch.square(y_pred - y).mean()
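# Note: this hand-written loss matches PyTorch's built-in
# torch.nn.functional.mse_loss(y_pred, y), whose default reduction is 'mean'.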
# one full-batch gradient descent step on W and b
def train(model, X, y, lr=0.01):
    current_loss = loss(y, model(X))
    current_loss.backward()  # populates model.W.grad and model.b.grad
    with torch.no_grad():  # the update itself must not be tracked by autograd
        model.W -= model.W.grad * lr
        model.b -= model.b.grad * lr
        model.W.grad.zero_()  # backward() accumulates, so clear the gradients
        model.b.grad.zero_()
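# The manual update above is plain gradient descent. A sketch of the same step
# with the built-in optimizer (not used by this script):
#   optimizer = torch.optim.SGD([model.W, model.b], lr=lr)
#   optimizer.step()       # applies p -= lr * p.grad to each parameter
#   optimizer.zero_grad()  # clears the accumulated gradients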
Ws, bs = [], []
epochs = 20
for epoch in range(epochs):
    with torch.no_grad():  # record the current parameters and loss
        Ws.append(model.W.item())  # .item() works on tensors that require grad
        bs.append(model.b.item())
        current_loss = loss(y, model(X))
    train(model, X, y, lr=0.1)
    print(f"Epoch {epoch}: Loss: {current_loss.item()}")
plt.figure()
plt.plot(range(epochs), Ws, 'r', range(epochs), bs, 'b')
plt.plot([TRUE_W] * epochs, 'r--', [TRUE_b] * epochs, 'b--')
plt.legend(['W', 'b', 'true W', 'true b'])
plt.show()
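# Final fit: after training, the predicted points should lie on top of the
# data, up to the unit-variance noise.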
plt.figure()
plt.scatter(X, y, label="true")
plt.scatter(X, model(X).detach().numpy(), label="predicted")
plt.legend()
plt.show()