Aleksandra Deis (Lexie88rus)

🏡
WFH
View GitHub Profile
Lexie88rus / imports.py
Created June 27, 2019 08:12
Imports for custom activations
# Import basic libraries
import numpy as np # linear algebra
import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)
from collections import OrderedDict
# Import PyTorch
import torch # import main library
from torch.autograd import Variable # note: Variable is deprecated in modern PyTorch; plain tensors track gradients directly
import torch.nn as nn # import modules
from torch.autograd import Function # import Function to create custom activations
Lexie88rus / prepare_dataset.py
Created June 27, 2019 08:16
Preparing the dataset
from torchvision import datasets, transforms # torchvision provides the Fashion MNIST dataset and transforms used below
# Define a transform
transform = transforms.Compose([transforms.ToTensor()])
# Download and load the training data for Fashion MNIST
trainset = datasets.FashionMNIST('~/.pytorch/F_MNIST_data/', download=True, train=True, transform=transform)
trainloader = torch.utils.data.DataLoader(trainset, batch_size=64, shuffle=True)
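As a quick sanity check (not part of the original gist), one batch can be pulled from the loader to confirm the expected tensor shapes:

# pull a single batch to verify shapes (illustrative check, not from the original gist)
images, labels = next(iter(trainloader))
print(images.shape) # torch.Size([64, 1, 28, 28])
print(labels.shape) # torch.Size([64])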
Lexie88rus / train_model.py
Created June 27, 2019 08:20
Sample training function
# helper function to train a model
def train_model(model, trainloader):
'''
Function trains the model and prints out the training log.
INPUT:
model - initialized PyTorch model ready for training.
trainloader - PyTorch dataloader for training data.
'''
# set up training
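The body of the training function is cut off in this preview. Below is a minimal sketch of how such a loop could look for this setup; the loss function (negative log-likelihood over log-probabilities), the Adam optimizer, and the epoch count are assumptions, not taken from the gist.

# hedged sketch of a complete training loop (loss, optimizer and epoch count are assumed)
def train_model_sketch(model, trainloader, epochs=5):
    criterion = nn.NLLLoss() # assumes the model outputs log-probabilities (e.g. ends with LogSoftmax)
    optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
    for epoch in range(epochs):
        running_loss = 0.0
        for images, labels in trainloader:
            images = images.view(images.shape[0], -1) # flatten 28x28 images into 784-long vectors
            optimizer.zero_grad()
            loss = criterion(model(images), labels)
            loss.backward()
            optimizer.step()
            running_loss += loss.item()
        print(f"Epoch {epoch + 1}/{epochs} - training loss: {running_loss / len(trainloader):.3f}")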
Lexie88rus / SiLU.py
Created June 27, 2019 08:38
SiLU implementation
# simply define a silu function
def silu(input):
'''
Applies the Sigmoid Linear Unit (SiLU) function element-wise:
SiLU(x) = x * sigmoid(x)
'''
return input * torch.sigmoid(input) # use torch.sigmoid so the implementation builds on PyTorch's efficient built-in functions
# create a class wrapper around PyTorch nn.Module so that
# SiLU can be used in models built with nn.Sequential
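# The nn.Module wrapper itself is cut off in this gist preview; the class below is a
# minimal hedged reconstruction built around the silu function above (it may differ
# from the author's exact code), included so the usage further down still runs.
class SiLU(nn.Module):
    '''
    Applies SiLU element-wise; thin nn.Module wrapper around silu().
    '''
    def __init__(self):
        super().__init__()
    def forward(self, input):
        return silu(input) # reuse the function defined above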
# initialize activation function
activation_function = SiLU()
# Initialize the model using nn.Sequential
model = nn.Sequential(OrderedDict([
    ('fc1', nn.Linear(784, 256)),
    ('activation1', activation_function), # use SiLU
    ('fc2', nn.Linear(256, 128)),
    # the gist preview is truncated here; a plausible (unconfirmed) continuation:
    ('activation2', activation_function),
    ('fc3', nn.Linear(128, 10)),
    ('output', nn.LogSoftmax(dim=1))
]))
Lexie88rus / SiLU_demo_class.py
Created June 27, 2019 08:44
SiLU demo (in class)
# create class for basic fully-connected deep neural network
class ClassifierSiLU(nn.Module):
'''
Demo classifier model class to demonstrate SiLU
'''
def __init__(self):
super().__init__()
# initialize layers
self.fc1 = nn.Linear(784, 256)
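The rest of the class is cut off in this preview. A hedged sketch of a possible completion is shown below; the remaining layer sizes and the log-softmax output are assumptions, and the forward pass simply applies silu after each hidden layer.

# hedged sketch of a possible completion (remaining layer sizes are assumptions)
class ClassifierSiLUSketch(nn.Module):
    def __init__(self):
        super().__init__()
        self.fc1 = nn.Linear(784, 256)
        self.fc2 = nn.Linear(256, 128)
        self.fc3 = nn.Linear(128, 10)
    def forward(self, x):
        x = x.view(x.shape[0], -1) # flatten the input images
        x = silu(self.fc1(x)) # apply SiLU after each hidden layer
        x = silu(self.fc2(x))
        return torch.log_softmax(self.fc3(x), dim=1)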
Lexie88rus / soft_exponential.py
Created June 27, 2019 08:53
Soft Exponential activation
class soft_exponential(nn.Module):
'''
Implementation of soft exponential activation.
Shape:
- Input: (N, *) where * means any number of additional dimensions
- Output: (N, *), same shape as the input
Parameters:
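The gist preview stops mid-docstring, before the parameter description and the forward pass. A minimal sketch of a trainable-alpha implementation, following the published Soft Exponential definition (f(alpha, x) = -log(1 - alpha*(x + alpha))/alpha for alpha < 0, x for alpha = 0, and (exp(alpha*x) - 1)/alpha + alpha for alpha > 0), is given below; the default alpha and other details are assumptions rather than the author's exact code.

# hedged sketch of a trainable-alpha Soft Exponential module (details assumed)
class SoftExponentialSketch(nn.Module):
    def __init__(self, alpha=0.0):
        super().__init__()
        self.alpha = nn.Parameter(torch.tensor(float(alpha))) # trainable, shared across the layer
    def forward(self, x):
        if self.alpha == 0.0:
            return x
        if self.alpha < 0.0:
            return -torch.log(1 - self.alpha * (x + self.alpha)) / self.alpha
        return (torch.exp(self.alpha * x) - 1) / self.alpha + self.alpha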
Lexie88rus / soft_exponential_demo.py
Created June 27, 2019 08:54
Soft Exponential demo
# create class for basic fully-connected deep neural network
class ClassifierSExp(nn.Module):
'''
Basic fully-connected network to test Soft Exponential activation.
'''
def __init__(self):
super().__init__()
# initialize layers
self.fc1 = nn.Linear(784, 256)
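This class is also truncated after the first layer. Because alpha is a learned parameter, each hidden layer typically gets its own activation instance; a hedged sketch using the SoftExponentialSketch module above (remaining layer sizes assumed) is:

# hedged sketch of a possible completion; one activation instance per hidden layer
class ClassifierSExpSketch(nn.Module):
    def __init__(self):
        super().__init__()
        self.fc1 = nn.Linear(784, 256)
        self.fc2 = nn.Linear(256, 128)
        self.fc3 = nn.Linear(128, 10)
        self.a1 = SoftExponentialSketch() # separate instances so each layer learns its own alpha
        self.a2 = SoftExponentialSketch()
    def forward(self, x):
        x = x.view(x.shape[0], -1)
        x = self.a1(self.fc1(x))
        x = self.a2(self.fc2(x))
        return torch.log_softmax(self.fc3(x), dim=1)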
Lexie88rus / BReLU.py
Created June 27, 2019 09:04
BReLU implementation
class brelu(Function):
'''
Implementation of BReLU activation function.
Shape:
- Input: (N, *) where * means any number of additional dimensions
- Output: (N, *), same shape as the input
References:
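# The forward and backward passes of the custom Function are cut off in this preview.
# Below is a hedged reconstruction of a bipolar ReLU: ReLU on even-indexed features and
# a mirrored ReLU (-ReLU(-x)) on odd-indexed ones. It assumes a 2-D (batch, features)
# input and may differ from the author's exact indexing scheme.
class BReLUSketch(Function):
    @staticmethod
    def forward(ctx, input):
        ctx.save_for_backward(input)
        output = input.clone()
        output[:, ::2] = torch.clamp(output[:, ::2], min=0) # ReLU on even-indexed features
        output[:, 1::2] = torch.clamp(output[:, 1::2], max=0) # -ReLU(-x) on odd-indexed features
        return output
    @staticmethod
    def backward(ctx, grad_output):
        input, = ctx.saved_tensors
        grad_input = grad_output.clone()
        grad_input[:, ::2] *= (input[:, ::2] > 0).type_as(grad_input) # gradient passes where x > 0
        grad_input[:, 1::2] *= (input[:, 1::2] < 0).type_as(grad_input) # and where x < 0 for odd units
        return grad_input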
class ClassifierBReLU(nn.Module):
'''
Simple fully-connected classifier model to demonstrate BReLU activation.
'''
def __init__(self):
super(ClassifierBReLU, self).__init__()
# initialize layers
self.fc1 = nn.Linear(784, 256)
self.fc2 = nn.Linear(256, 128)
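The constructor and forward pass are cut off here as well. A hedged sketch of a completion (remaining layer sizes assumed) shows the one detail specific to this variant: a custom autograd Function is invoked through its .apply method rather than called directly.

# hedged sketch of a possible completion; custom Functions are applied via .apply
class ClassifierBReLUSketch(nn.Module):
    def __init__(self):
        super().__init__()
        self.fc1 = nn.Linear(784, 256)
        self.fc2 = nn.Linear(256, 128)
        self.fc3 = nn.Linear(128, 10)
    def forward(self, x):
        x = x.view(x.shape[0], -1)
        x = BReLUSketch.apply(self.fc1(x)) # note .apply, not a direct call
        x = BReLUSketch.apply(self.fc2(x))
        return torch.log_softmax(self.fc3(x), dim=1)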