@Lexie88rus
Created June 27, 2019 09:06
BReLU demo
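The classifier below calls brelu.apply, where brelu is a custom torch.autograd.Function implementing the BReLU activation; its definition is not included in this gist. As an illustration only, a minimal sketch of one possible implementation is given here, assuming BReLU means a bipolar ReLU (standard ReLU on even-indexed features, mirrored ReLU on odd-indexed features); the actual definition accompanying this demo may differ.

import torch
import torch.nn.functional as F
from torch.autograd import Function


class brelu(Function):
    '''
    Hypothetical sketch of a bipolar ReLU (BReLU) as a custom autograd Function:
    even-indexed features pass through ReLU, odd-indexed features pass through a
    mirrored ReLU (-relu(-x)). Expects input of shape (batch, features).
    '''
    @staticmethod
    def forward(ctx, input):
        ctx.save_for_backward(input)
        output = input.clone()
        # even feature indices: standard ReLU
        output[:, ::2] = F.relu(output[:, ::2])
        # odd feature indices: mirrored ReLU (keeps negatives, zeroes positives)
        output[:, 1::2] = -F.relu(-output[:, 1::2])
        return output

    @staticmethod
    def backward(ctx, grad_output):
        input, = ctx.saved_tensors
        # gradient flows where the corresponding half-rectifier is active
        mask = torch.zeros_like(input)
        mask[:, ::2] = (input[:, ::2] > 0).float()
        mask[:, 1::2] = (input[:, 1::2] < 0).float()
        return grad_output * mask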
import torch.nn as nn
import torch.nn.functional as F


class ClassifierBReLU(nn.Module):
    '''
    Simple fully-connected classifier model to demonstrate the BReLU activation.
    '''
    def __init__(self):
        super(ClassifierBReLU, self).__init__()

        # initialize fully-connected layers
        self.fc1 = nn.Linear(784, 256)
        self.fc2 = nn.Linear(256, 128)
        self.fc3 = nn.Linear(128, 64)
        self.fc4 = nn.Linear(64, 10)

        # create shortcuts for the BReLU custom autograd Function
        self.a1 = brelu.apply
        self.a2 = brelu.apply
        self.a3 = brelu.apply

    def forward(self, x):
        # make sure the input tensor is flattened
        x = x.view(x.shape[0], -1)

        # apply BReLU after each hidden layer
        x = self.a1(self.fc1(x))
        x = self.a2(self.fc2(x))
        x = self.a3(self.fc3(x))

        # log-probabilities over the 10 classes
        x = F.log_softmax(self.fc4(x), dim=1)
        return x


model = ClassifierBReLU()
train_model(model)
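The train_model helper called above is not part of this gist. For illustration only, a minimal stand-in might look like the sketch below; the dataset (MNIST), optimizer, and hyperparameters are assumptions, not the author's actual setup. NLLLoss matches the log_softmax output of the model.

from torch import nn, optim
import torch
from torchvision import datasets, transforms


def train_model(model, epochs=3, lr=1e-3):
    # hypothetical training loop on flattened MNIST images
    transform = transforms.Compose([transforms.ToTensor(),
                                    transforms.Normalize((0.5,), (0.5,))])
    trainset = datasets.MNIST('~/.pytorch/MNIST_data/', download=True,
                              train=True, transform=transform)
    trainloader = torch.utils.data.DataLoader(trainset, batch_size=64, shuffle=True)

    criterion = nn.NLLLoss()
    optimizer = optim.Adam(model.parameters(), lr=lr)

    for epoch in range(epochs):
        running_loss = 0
        for images, labels in trainloader:
            optimizer.zero_grad()
            output = model(images)
            loss = criterion(output, labels)
            loss.backward()
            optimizer.step()
            running_loss += loss.item()
        print(f'Epoch {epoch + 1}, loss: {running_loss / len(trainloader):.3f}')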