@miki998
Created April 13, 2020 10:13
# Create CNN Model
import torch
import torch.nn as nn


class CNNModel(nn.Module):
    def __init__(self):
        super(CNNModel, self).__init__()
        # Convolution 1: 1 input channel -> 16 feature maps, 3x3 kernel, no padding
        self.cnn1 = nn.Conv2d(in_channels=1, out_channels=16, kernel_size=3, stride=1, padding=0)
        self.relu1 = nn.ReLU()
        # Max pool 1: halves the spatial dimensions
        self.maxpool1 = nn.MaxPool2d(kernel_size=2)
        # Convolution 2: 16 -> 32 feature maps, 3x3 kernel, no padding
        self.cnn2 = nn.Conv2d(in_channels=16, out_channels=32, kernel_size=3, stride=1, padding=0)
        self.relu2 = nn.ReLU()
        # Max pool 2
        self.maxpool2 = nn.MaxPool2d(kernel_size=2)
        # Fully connected 1: 32 * 5 * 5 assumes 1x28x28 inputs (e.g. MNIST):
        # 28 -> 26 -> 13 after set 1, then 13 -> 11 -> 5 after set 2
        self.fc1 = nn.Linear(32 * 5 * 5, 10)

    def forward(self, x):
        # Set 1
        out = self.cnn1(x)
        out = self.relu1(out)
        out = self.maxpool1(out)
        # Set 2
        out = self.cnn2(out)
        out = self.relu2(out)
        out = self.maxpool2(out)
        # Flatten
        out = out.view(out.size(0), -1)
        # Dense
        out = self.fc1(out)
        return out
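
# Quick shape check (a sketch; the dummy 1x28x28 batch is an assumption that
# matches the 32 * 5 * 5 input size of fc1 above, not data from this gist):
sample_out = CNNModel()(torch.randn(4, 1, 28, 28))
print(sample_out.shape)  # torch.Size([4, 10])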

# Definition of hyperparameters
# Note: train_x (the training images) and batch_size are assumed to be defined
# earlier; they are not created in this snippet.
n_iters = 2500
num_epochs = n_iters / (len(train_x) / batch_size)
num_epochs = int(num_epochs)

# Instantiate the model
model = CNNModel()

# Cross Entropy Loss
error = nn.CrossEntropyLoss()

# SGD Optimizer
learning_rate = 0.001
optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate)
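
# Minimal training-loop sketch showing how the pieces above fit together.
# Assumption: train_loader is a torch.utils.data.DataLoader yielding
# (images, labels) batches of size batch_size; it is not defined in this gist.
for epoch in range(num_epochs):
    for images, labels in train_loader:
        optimizer.zero_grad()          # clear accumulated gradients
        outputs = model(images)        # forward pass: (batch, 10) logits
        loss = error(outputs, labels)  # cross-entropy against class indices
        loss.backward()                # backpropagate
        optimizer.step()               # SGD parameter update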