Skip to content

Instantly share code, notes, and snippets.

@viniciusmss
Last active April 22, 2020 14:58
Show Gist options
  • Save viniciusmss/39b0cd413b1c9c509984fc0ec5503c23 to your computer and use it in GitHub Desktop.
@variational_estimator
class BNN(nn.Module):
    """Bayesian CNN for 32x32x3 (CIFAR-style) images producing 10 class logits.

    Wrapped with blitz's ``@variational_estimator`` so the model gains
    ELBO-sampling utilities; all conv/linear layers are Bayesian variants.
    """

    def __init__(self):
        super().__init__()
        # Three Bayesian 3x3 conv layers (padding=1 keeps spatial size);
        # each is followed by 2x2 max-pooling in forward(), so the feature
        # map shrinks 32 -> 16 -> 8 -> 4 while channels go 3 -> 8 -> 16 -> 16.
        self.conv1 = BayesianConv2d(3, 8, (3, 3), padding=1)
        self.conv2 = BayesianConv2d(8, 16, (3, 3), padding=1)
        self.conv3 = BayesianConv2d(16, 16, (3, 3), padding=1)
        # Single pooling module reused after every conv stage.
        self.pool = nn.MaxPool2d(2, 2)
        # Classifier head: flattened 16*4*4 = 256 features -> 100 -> 10.
        self.fc1 = BayesianLinear(16 * 4 * 4, 100)
        self.fc2 = BayesianLinear(100, 10)

    def forward(self, x):
        """Apply the conv/pool stack, flatten, then the fully connected head."""
        h = self.pool(F.relu(self.conv1(x)))
        h = self.pool(F.relu(self.conv2(h)))
        h = self.pool(F.relu(self.conv3(h)))
        # Collapse (batch, 16, 4, 4) feature maps to (batch, 256).
        h = h.view(-1, 16 * 4 * 4)
        h = F.relu(self.fc1(h))
        # Raw logits; any softmax/ELBO handling is done by the caller.
        return self.fc2(h)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment