@srossross
Created March 10, 2023 19:20
import torch
import torch.nn as nn


class NeuralNetwork(nn.Module):
    def __init__(self, numChannels, classes):
        # call the parent constructor
        super().__init__()
        # initialize first set of CONV => RELU => POOL layers
        self.conv1 = nn.Conv2d(
            in_channels=numChannels, out_channels=20, kernel_size=(5, 5)
        )
        self.relu1 = nn.ReLU()
        self.maxpool1 = nn.MaxPool2d(kernel_size=(2, 2), stride=(2, 2))
        # initialize second set of CONV => RELU => POOL layers
        self.conv2 = nn.Conv2d(in_channels=20, out_channels=50, kernel_size=(5, 5))
        self.relu2 = nn.ReLU()
        self.maxpool2 = nn.MaxPool2d(kernel_size=(2, 2), stride=(2, 2))
        # initialize first (and only) set of FC => RELU layers; a 3x50x50
        # input reaches this layer as 50 channels of 9x9 feature maps, so
        # in_features must be 50 * 9 * 9 = 4050 (the original value of 81
        # only "worked" because the input lacked a batch dimension and the
        # channel axis was silently treated as the batch)
        self.fc1 = nn.Linear(in_features=50 * 9 * 9, out_features=500)
        self.relu3 = nn.ReLU()
        # initialize our softmax classifier
        self.fc2 = nn.Linear(in_features=500, out_features=classes)
        self.logSoftmax = nn.LogSoftmax(dim=1)
    def forward(self, x):
        # pass the input through our first set of CONV => RELU => POOL layers
        x = self.conv1(x)
        x = self.relu1(x)
        x = self.maxpool1(x)
        # pass the output from the previous layer through the second
        # set of CONV => RELU => POOL layers
        x = self.conv2(x)
        x = self.relu2(x)
        x = self.maxpool2(x)
        # flatten everything except the batch dimension and pass it
        # through our only set of FC => RELU layers
        x = torch.flatten(x, 1)
        x = self.fc1(x)
        x = self.relu3(x)
        # pass the output to our softmax classifier to get our output
        # predictions
        x = self.fc2(x)
        output = self.logSoftmax(x)
        # return the output predictions
        return output
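
# Optional sketch (my addition, not part of the original gist): rather than
# hard-coding 50 * 9 * 9 above, the flattened size can be derived with a
# throwaway forward pass through the conv/pool layers. The helper name
# infer_flatten_dim is hypothetical.
def infer_flatten_dim(conv_stack, channels, height, width):
    # run one dummy image through the stack and count the surviving features
    with torch.no_grad():
        out = conv_stack(torch.zeros(1, channels, height, width))
    return int(torch.flatten(out, 1).shape[1])

# e.g. the two CONV => RELU => POOL blocks above, for a 3x50x50 input:
# infer_flatten_dim(nn.Sequential(
#     nn.Conv2d(3, 20, 5), nn.ReLU(), nn.MaxPool2d(2, 2),
#     nn.Conv2d(20, 50, 5), nn.ReLU(), nn.MaxPool2d(2, 2),
# ), 3, 50, 50) == 4050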
model = NeuralNetwork(3, 3)
print(model)
# Conv2d expects (N, C, H, W) input, so give the random tensor a batch dimension
model(torch.randn(1, 3, 50, 50))
cmod = torch.compile(model)
cmod(torch.randn(1, 3, 50, 50, requires_grad=True))
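
A note on usage, standard PyTorch rather than anything specific to this gist: because the model ends in LogSoftmax, its output pairs with nn.NLLLoss during training (equivalently, drop the LogSoftmax and feed the raw fc2 logits to nn.CrossEntropyLoss), and torch.compile returns an optimized wrapper that shares its parameters with model. A minimal training-step sketch under those assumptions, using a hypothetical batch of 8 random images:

criterion = nn.NLLLoss()  # matches the LogSoftmax output above
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)

inputs = torch.randn(8, 3, 50, 50)   # hypothetical batch of images
targets = torch.randint(0, 3, (8,))  # hypothetical labels in [0, 3)

optimizer.zero_grad()
loss = criterion(cmod(inputs), targets)  # forward through the compiled module
loss.backward()                          # gradients flow to model's parameters
optimizer.step()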