@TheBojda
Created August 23, 2020 13:07
Simple linear regression with PyTorch autograd
import torch
import matplotlib.pyplot as plt
# ground-truth parameters of the line we want to recover
TRUE_W = 3.0
TRUE_b = 0.5
NUM_EXAMPLES = 1000

# synthetic data: y = TRUE_W * x + TRUE_b plus Gaussian noise
x = torch.empty(NUM_EXAMPLES).normal_(mean=0, std=1.0)
noise = torch.empty(NUM_EXAMPLES).normal_(mean=0, std=1.0)
y = x * TRUE_W + TRUE_b + noise

# trainable parameters, deliberately initialized far from the true values
W = torch.tensor([16.0], requires_grad=True)
b = torch.tensor([10.0], requires_grad=True)
# linear model: a single weight and bias
def model(x):
    return x * W + b

# plot the data against the initial (untrained) predictions
plt.figure()
plt.scatter(x, y, label="true")
plt.scatter(x, model(x).detach().numpy(), label="predicted")
plt.legend()
plt.show()
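
# Mean-squared-error loss: L = mean((y - (W*x + b))^2).
# Its analytic gradients, which loss.backward() computes for us below:
#   dL/dW = -2 * mean((y - y_pred) * x)
#   dL/db = -2 * mean(y - y_pred)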
lr = 0.1
epochs = 20
Ws, bs = [], []

# plain gradient descent on the mean squared error, using autograd
for epoch in range(epochs):
    y_pred = model(x)
    loss = (y - y_pred).pow(2).mean()
    loss.backward()  # populates W.grad and b.grad
    print(epoch, loss.item(), W.item(), b.item(), W.grad.item(), b.grad.item())
    Ws.append(W.item())
    bs.append(b.item())
    # update the parameters without recording the ops in the autograd graph
    with torch.no_grad():
        W -= lr * W.grad
        b -= lr * b.grad
    # reset the gradients, otherwise backward() would accumulate into them
    W.grad.zero_()
    b.grad.zero_()
# parameter trajectories converging toward the true values
plt.figure()
plt.plot(range(epochs), Ws, 'r', range(epochs), bs, 'b')
plt.plot([TRUE_W] * epochs, 'r--', [TRUE_b] * epochs, 'b--')
plt.legend(['W', 'b', 'true W', 'true b'])
plt.show()

# plot the data against the trained predictions
plt.figure()
plt.scatter(x, y, label="true")
plt.scatter(x, model(x).detach().numpy(), label="predicted")
plt.legend()
plt.show()
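
# For comparison: the same gradient-descent loop expressed with torch.optim.SGD
# instead of the manual no_grad() update. A minimal sketch; it reuses the W, b,
# model, x and y defined above and mirrors the lr and epoch count used there.
optimizer = torch.optim.SGD([W, b], lr=0.1)
for epoch in range(epochs):
    optimizer.zero_grad()                # reset accumulated gradients
    loss = (y - model(x)).pow(2).mean()
    loss.backward()                      # autograd fills W.grad and b.grad
    optimizer.step()                     # W -= lr * W.grad; b -= lr * b.grad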