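The MLP below relies on a custom MyDropout module that is not included in this snippet. The sketch that follows is an assumption of what such a module could look like (standard inverted dropout: zero each unit with probability p at training time and scale the survivors by 1/(1-p)); it is not the gist author's exact implementation.

import torch
import torch.nn as nn

class MyDropout(nn.Module):
    # Hypothetical stand-in for the custom dropout layer used by the MLP below.
    def __init__(self, p=0.5):
        super(MyDropout, self).__init__()
        self.p = p

    def forward(self, x):
        # During training, drop each activation with probability p and rescale
        # the survivors so the expected activation magnitude is unchanged.
        if self.training and self.p > 0:
            mask = (torch.rand_like(x) > self.p).float() / (1.0 - self.p)
            return x * mask
        # At evaluation time (or with p = 0), pass activations through untouched.
        return x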
class MLP(nn.Module):
    """Feed-forward network for 28x28 (MNIST) inputs with dropout on inputs and hidden units."""
    def __init__(self, hidden_layers=[800, 800], droprates=[0, 0]):
        super(MLP, self).__init__()
        self.model = nn.Sequential()
        # Dropout on the raw inputs, then the first fully connected layer.
        self.model.add_module("dropout0", MyDropout(p=droprates[0]))
        self.model.add_module("input", nn.Linear(28*28, hidden_layers[0]))
        self.model.add_module("tanh", nn.Tanh())

        # Add the remaining hidden layers, each preceded by hidden-unit dropout.
        for i in range(len(hidden_layers) - 1):
            self.model.add_module("dropout_hidden"+str(i+1), MyDropout(p=droprates[1]))
            self.model.add_module("hidden"+str(i+1), nn.Linear(hidden_layers[i], hidden_layers[i+1]))
            self.model.add_module("tanh_hidden"+str(i+1), nn.Tanh())
        # Output layer producing logits for the 10 digit classes.
        self.model.add_module("final", nn.Linear(hidden_layers[-1], 10))

    def forward(self, x):
        # Flatten each 28x28 image into a 784-dimensional vector.
        x = x.view(x.shape[0], 28*28)
        x = self.model(x)
        return x
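A quick sanity check on the class: the hidden sizes below match the constructor defaults, while the 0.5 dropout rates and the batch size are illustrative values, not settings taken from the gist.

# Illustrative usage: two 800-unit hidden layers, 50% dropout (assumed values).
mlp = MLP(hidden_layers=[800, 800], droprates=[0.5, 0.5])
mlp.train()                            # training mode enables MyDropout
dummy = torch.randn(32, 1, 28, 28)     # a fake batch of 32 MNIST-sized images
logits = mlp(dummy)                    # forward() flattens each image to 784 features
print(logits.shape)                    # expected: torch.Size([32, 10])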