import torch
from torch import nn
import torch.nn.functional as F


class Perceptron(nn.Module):
    """Multilayer perceptron: flattened 28x28 input -> 128 hidden units -> 10 classes."""

    def __init__(self):
        super().__init__()
        self.dropout2 = nn.Dropout(0.5)      # regularization on the hidden layer
        self.fc1 = nn.Linear(28 * 28, 128)   # input layer: 784 flattened pixels
        self.fc2 = nn.Linear(128, 10)        # output layer: one score per class

    def forward(self, x):
        x = torch.flatten(x, 1)              # (batch, 1, 28, 28) -> (batch, 784)
        x = self.fc1(x)
        x = F.relu(x)
        x = self.dropout2(x)
        x = self.fc2(x)
        output = F.log_softmax(x, dim=1)     # log-probabilities over the 10 classes
        return output
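
A minimal forward-pass sketch, assuming MNIST-shaped inputs of 1x28x28 (the random batch and batch size below are illustrative, not part of the original gist):

# Illustrative usage: run a dummy batch through the model.
model = Perceptron()
model.eval()                              # disable dropout for inference
dummy_batch = torch.randn(4, 1, 28, 28)   # 4 fake grayscale 28x28 images
with torch.no_grad():
    log_probs = model(dummy_batch)
print(log_probs.shape)                    # torch.Size([4, 10])
predictions = log_probs.argmax(dim=1)     # predicted class per image

Since forward returns log-probabilities, training would pair with nn.NLLLoss (equivalent to nn.CrossEntropyLoss applied to raw logits).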