Skip to content

Instantly share code, notes, and snippets.

@theSage21
Created December 17, 2018 14:02
Show Gist options
  • Save theSage21/4032498ab90baa9ab5d6a3972f4bd840 to your computer and use it in GitHub Desktop.
from torch import nn
from torch import optim
from tqdm import tqdm
from tensorboardX import SummaryWriter
import torch
# Training data: the 4-row XOR truth table, tiled n times so each
# epoch sees a 4*n-sample batch.
n = 100
_xor_inputs = [[0, 0], [0, 1], [1, 0], [1, 1]]
_xor_targets = [0, 1, 1, 0]
inp = torch.tensor(_xor_inputs * n, dtype=torch.float32)
out = torch.tensor(_xor_targets * n, dtype=torch.float32)
class Net(nn.Module):
    """A single bias-free linear unit squashed through tanh.

    Maps (N, 2) inputs to (N, 1) outputs. NOTE(review): one
    Linear(2, 1) layer is not expressive enough to solve XOR —
    presumably this gist exists to demonstrate that failure mode.
    """

    def __init__(self):
        super().__init__()
        # Exposed as `self.net` so callers can inspect .net.weight.grad.
        self.net = nn.Linear(2, 1, bias=False)
        self.act = nn.Tanh()

    def forward(self, input):
        # Pre-activation first, then the tanh squashing, as two steps.
        pre = self.net(input)
        return self.act(pre)
# Two separate writers so the loss curve and the gradient-norm curve
# land in distinct TensorBoard runs ('logs/loss' vs 'logs/grad').
loss_w = SummaryWriter(log_dir='logs/loss')
grad_w = SummaryWriter(log_dir='logs/grad')

# Model, its mean-squared-error objective, and an Adam optimizer
# over the model's parameters.
net = Net()
metric = nn.MSELoss()
opt = optim.Adam(net.parameters())
# Train for 10k epochs, reporting loss on the progress bar and logging
# loss / weight-gradient norm to the two TensorBoard writers.
with tqdm() as pbar:
    for epoch in range(10_000):
        opt.zero_grad()
        # BUG FIX: net(inp) is shape (N, 1) while out is shape (N,).
        # MSELoss broadcasts that pair to (N, N), computing a wrong
        # loss (PyTorch warns about exactly this). squeeze(1) aligns
        # the predictions with the targets for an element-wise MSE.
        loss = metric(net(inp).squeeze(1), out)
        loss.backward()
        opt.step()
        pbar.set_description(f'Loss: {loss}')
        pbar.update(1)
        loss_w.add_scalar('line', loss, epoch)
        # Gradient norm of the single Linear layer's weight matrix.
        grad_w.add_scalar('line', net.net.weight.grad.norm(), epoch)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment