@iacolippo
Created July 10, 2018 08:15
from time import time
import torch
import torch.nn as nn
import torch.nn.functional as F
class ConvNetV0(nn.Module):
    def __init__(self):
        super(ConvNetV0, self).__init__()
        self.conv1 = nn.Conv2d(3, 30, 4, padding=2)
        self.conv2 = nn.Conv2d(30, 50, 16, padding=7, bias=True)
        self.conv3 = nn.Conv2d(50, 20, 2, stride=2)
        self.conv4 = nn.Conv2d(20, 2, 2, stride=2)

    def forward(self, x):
        x = F.relu(self.conv1(x))
        x = F.relu(self.conv2(x))
        x = F.relu(self.conv3(x))
        y = self.conv4(x)  # (N, 2, 64, 64) for 256x256 inputs
        return y
class ConvNetV1(nn.Module):
    def __init__(self):
        super(ConvNetV1, self).__init__()
        self.conv0 = nn.Conv2d(3, 50, 4, padding=1, stride=2)
        self.conv_r1 = nn.Conv2d(50, 40, 15, padding=7, bias=True)
        self.conv_r2 = nn.Conv2d(40, 25, 3, padding=1)
        self.conv_r3 = nn.Conv2d(25, 25, 2, stride=2)
        # self.conv_r3 = nn.MaxPool2d(2, stride=2)
        self.conv_b1 = nn.Conv2d(50, 15, 4, padding=1, stride=2)
        self.conv1 = nn.Conv2d(40, 2, 1)

    def forward(self, x):
        # Two parallel branches on conv0's output, concatenated along the channel dim.
        x = F.relu(self.conv0(x))
        x1 = F.relu(self.conv_r1(x))
        x1 = F.relu(self.conv_r2(x1))
        x1 = self.conv_r3(x1)
        x2 = self.conv_b1(x)
        y = torch.cat([x1, x2], dim=1)  # 25 + 15 = 40 channels
        y = self.conv1(y)               # (N, 2, 64, 64) for 256x256 inputs
        return y
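# Added sanity check (not part of the original gist): both nets map a (N, 3, 256, 256)
# input to (N, 2, 64, 64), i.e. 2 * 64 * 64 = 8192 values per sample, which is why the
# dummy Labels tensor below has 8192 columns. Runs on CPU with a single probe sample.
with torch.no_grad():
    _probe = torch.randn(1, 3, 256, 256)
    assert ConvNetV0()(_probe).shape == (1, 2, 64, 64)
    assert ConvNetV1()(_probe).shape == (1, 2, 64, 64)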
torch.backends.cudnn.benchmark = True

# Dummy data: Labels is left uninitialized because only the timing matters here.
Data = torch.randn(64, 3, 256, 256)
Labels = torch.Tensor(64, 8192)
Net = ConvNetV0()
Labels = Labels.to("cuda:0")
Data = Data.to("cuda:0")
Net = Net.to("cuda:0")

# Time 8 (overlapping) mini-batches of 8 samples each through ConvNetV0.
for i in range(8):
    start = time()
    out = Net(Data[i:i+8]).view(8, -1)
    labels = Labels[i:i+8]
    s = time()
    loss = F.mse_loss(out, labels)
    loss_f = time() - s
    s = time()
    loss.backward()
    loss_b = time() - s
    print('V0 Total: {:.4f} Loss forward: {:.4f} Loss backward: {:.4f}'.format(time() - start, loss_f, loss_b))
del Data, Labels, Net
torch.cuda.empty_cache()
# Same timing loop, now for ConvNetV1.
Data = torch.randn(64, 3, 256, 256)
Labels = torch.Tensor(64, 8192)
Net = ConvNetV1()
Labels = Labels.to("cuda:0")
Data = Data.to("cuda:0")
Net = Net.to("cuda:0")

for i in range(8):
    start = time()
    out = Net(Data[i:i+8]).view(8, -1)
    labels = Labels[i:i+8]
    s = time()
    loss = F.mse_loss(out, labels)
    loss_f = time() - s
    s = time()
    loss.backward()
    loss_b = time() - s
    print('V1 Total: {:.4f} Loss forward: {:.4f} Loss backward: {:.4f}'.format(time() - start, loss_f, loss_b))
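# Added note (not part of the original gist): CUDA kernels launch asynchronously, so the
# host-side time() deltas above may not reflect the true GPU cost of each step. A rough
# sketch of a synchronized measurement, using a hypothetical `timed` helper and reusing
# the ConvNetV1 setup that is still in scope:
def timed(fn):
    """Run fn() with CUDA synchronization before and after; return (result, seconds)."""
    torch.cuda.synchronize()
    s = time()
    result = fn()
    torch.cuda.synchronize()
    return result, time() - s

out = Net(Data[:8]).view(8, -1)
labels = Labels[:8]
loss, sync_loss_f = timed(lambda: F.mse_loss(out, labels))
_, sync_loss_b = timed(lambda: loss.backward())
print('V1 (synchronized) Loss forward: {:.4f} Loss backward: {:.4f}'.format(sync_loss_f, sync_loss_b))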