@Lexie88rus
Created June 27, 2019 08:54
Soft Exponential demo
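The demo below calls a soft_exponential module that is defined elsewhere (presumably in a companion file of this gist). For completeness, here is a minimal sketch of such a module, assuming the Soft Exponential formulation of Godfrey & Gashler (2015) with a single trainable alpha per activation; the in_features argument is accepted only to match the constructor calls in the demo.

import torch
import torch.nn as nn

class soft_exponential(nn.Module):
    '''
    Soft Exponential activation with a trainable alpha parameter.
    For alpha < 0 it is logarithmic, for alpha == 0 it is the identity,
    and for alpha > 0 it is exponential.
    '''
    def __init__(self, in_features, alpha=None):
        super().__init__()
        # scalar trainable alpha, initialized to 0.0 (identity) unless given
        self.alpha = nn.Parameter(torch.tensor(alpha if alpha is not None else 0.0))

    def forward(self, x):
        if self.alpha == 0.0:
            return x
        if self.alpha < 0.0:
            return -torch.log(1 - self.alpha * (x + self.alpha)) / self.alpha
        return (torch.exp(self.alpha * x) - 1) / self.alpha + self.alpha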
import torch
from torch import nn
import torch.nn.functional as F

# create class for a basic fully-connected deep neural network
class ClassifierSExp(nn.Module):
    '''
    Basic fully-connected network to test the Soft Exponential activation.
    '''
    def __init__(self):
        super().__init__()
        # initialize fully-connected layers
        self.fc1 = nn.Linear(784, 256)
        self.fc2 = nn.Linear(256, 128)
        self.fc3 = nn.Linear(128, 64)
        self.fc4 = nn.Linear(64, 10)
        # initialize Soft Exponential activations (one per hidden layer)
        self.a1 = soft_exponential(256)
        self.a2 = soft_exponential(128)
        self.a3 = soft_exponential(64)

    def forward(self, x):
        # make sure the input tensor is flattened
        x = x.view(x.shape[0], -1)
        # apply the Soft Exponential unit after each hidden layer
        x = self.a1(self.fc1(x))
        x = self.a2(self.fc2(x))
        x = self.a3(self.fc3(x))
        # log-probabilities over the 10 classes
        x = F.log_softmax(self.fc4(x), dim=1)
        return x

# instantiate and train the model
model = ClassifierSExp()
train_model(model)
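train_model is also not shown in this snippet. A minimal sketch of what such a helper could look like, assuming MNIST (784-dimensional inputs, 10 classes) and NLLLoss to pair with the model's log_softmax output; the dataset path, batch size, epochs, and learning rate are illustrative assumptions.

import torch
from torch import nn, optim
from torchvision import datasets, transforms

def train_model(model, epochs=5, lr=0.003):
    # standard MNIST loader; images are flattened inside the model's forward()
    transform = transforms.Compose([transforms.ToTensor(),
                                    transforms.Normalize((0.5,), (0.5,))])
    trainset = datasets.MNIST('~/.pytorch/MNIST_data/', download=True,
                              train=True, transform=transform)
    trainloader = torch.utils.data.DataLoader(trainset, batch_size=64, shuffle=True)

    # NLLLoss expects log-probabilities, which the model already produces
    criterion = nn.NLLLoss()
    optimizer = optim.Adam(model.parameters(), lr=lr)

    for epoch in range(epochs):
        running_loss = 0
        for images, labels in trainloader:
            optimizer.zero_grad()
            loss = criterion(model(images), labels)
            loss.backward()
            optimizer.step()
            running_loss += loss.item()
        print(f"Epoch {epoch + 1}: training loss {running_loss / len(trainloader):.4f}")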