@erykml
Created January 25, 2019 21:50
import torch
import torch.nn as nn
import torch.nn.functional as F

# create the class containing the architecture of the network (inherits from nn.Module)
class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        # define the layers
        # cheatsheet:
        # nn.Conv2d(in_channels, out_channels, kernel_size, stride=1,
        #           padding=0, dilation=1, groups=1, bias=True)
        # conv layer (input: 128x128x3 image tensor)
        self.conv1 = nn.Conv2d(3, 16, 3, padding=1)
        # conv layer (input: 64x64x16 tensor)
        self.conv2 = nn.Conv2d(16, 32, 3, padding=1)
        # conv layer (input: 32x32x32 tensor)
        self.conv3 = nn.Conv2d(32, 64, 3, padding=1)
        # max pooling layer (halves the spatial dimensions)
        self.pool = nn.MaxPool2d(2, 2)
        # linear layer (64 * 16 * 16 -> 128)
        self.fc1 = nn.Linear(64 * 16 * 16, 128)
        # linear layer (128 -> 1)
        self.fc2 = nn.Linear(128, 1)
        # dropout layer (p=0.25)
        self.dropout = nn.Dropout(0.25)

    def forward(self, x):
        # add sequence of convolutional and max pooling layers
        x = self.pool(F.relu(self.conv1(x)))
        x = self.pool(F.relu(self.conv2(x)))
        x = self.pool(F.relu(self.conv3(x)))
        # flatten image input
        x = x.view(-1, 64 * 16 * 16)
        # add dropout layer
        x = self.dropout(x)
        # add 1st hidden layer, with relu activation function
        x = F.relu(self.fc1(x))
        # add dropout layer
        x = self.dropout(x)
        # output layer (single logit, no activation applied here)
        x = self.fc2(x)
        return x
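
Below is a minimal usage sketch that is not part of the original gist: it instantiates Net, pushes a dummy batch of 128x128 RGB images through it, and pairs the single output logit with nn.BCEWithLogitsLoss, assuming a binary classification setup.

# usage sketch (assumed setup, not from the original gist)
model = Net()
dummy_batch = torch.randn(4, 3, 128, 128)   # batch of 4 RGB 128x128 images
logits = model(dummy_batch)                  # shape: (4, 1)
criterion = nn.BCEWithLogitsLoss()           # applies sigmoid internally
targets = torch.randint(0, 2, (4, 1)).float()
loss = criterion(logits, targets)
print(logits.shape, loss.item())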