FMNIST-network
import torch.nn as nn
import torch.nn.functional as F

# Build the neural network by subclassing nn.Module
class Network(nn.Module):
    def __init__(self):
        super().__init__()
        # define layers
        self.conv1 = nn.Conv2d(in_channels=1, out_channels=6, kernel_size=5)
        self.conv2 = nn.Conv2d(in_channels=6, out_channels=12, kernel_size=5)
        self.fc1 = nn.Linear(in_features=12*4*4, out_features=120)
        self.fc2 = nn.Linear(in_features=120, out_features=60)
        self.out = nn.Linear(in_features=60, out_features=10)

    # define the forward pass
    def forward(self, t):
        # conv 1: 28x28 -> 24x24 -> 12x12 after pooling
        t = self.conv1(t)
        t = F.relu(t)
        t = F.max_pool2d(t, kernel_size=2, stride=2)
        # conv 2: 12x12 -> 8x8 -> 4x4 after pooling
        t = self.conv2(t)
        t = F.relu(t)
        t = F.max_pool2d(t, kernel_size=2, stride=2)
        # fc1: flatten to 12*4*4 = 192 features
        t = t.reshape(-1, 12*4*4)
        t = self.fc1(t)
        t = F.relu(t)
        # fc2
        t = self.fc2(t)
        t = F.relu(t)
        # output: raw logits for the 10 Fashion-MNIST classes
        t = self.out(t)
        # no softmax here: the cross-entropy loss applies log-softmax internally
        return t
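
As a quick sanity check, here is a minimal sketch (not part of the original gist) that runs a dummy Fashion-MNIST-shaped batch through the network and computes the cross-entropy loss; the batch size, random inputs, and random labels are illustrative assumptions.

# A minimal usage sketch (assumed, not from the gist): forward a dummy
# batch of 28x28 grayscale images and compute the loss.
import torch

network = Network()
images = torch.randn(4, 1, 28, 28)       # hypothetical batch of 4 images
labels = torch.randint(0, 10, (4,))      # hypothetical random class labels
logits = network(images)                 # shape: (4, 10)
loss = F.cross_entropy(logits, labels)   # log-softmax + NLL in one call
print(logits.shape, loss.item())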