Junki Ohmura (jojonki)
jojonki / memnn.py
Created November 30, 2017 15:49
End-to-End Memory Networks
import torch
import torch.nn as nn

class MemNN(nn.Module):
    def __init__(self, vocab_size, embd_size, ans_size, hops=3):
        super(MemNN, self).__init__()
        self.hops = hops
        self.embedding = nn.Embedding(vocab_size, embd_size)
        self.fc = nn.Linear(embd_size, ans_size)

    def forward(self, x, q):
        # x: (bs, story_len, s_sent_len)
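        # (The gist preview cuts off inside forward(). Below is an illustrative
        # sketch of the memory hops from the End-to-End Memory Networks paper,
        # assuming a single shared embedding; it is not the author's code.)
        bs = x.size(0)
        u = self.embedding(q).sum(dim=1)  # query state: (bs, embd_size)
        m = self.embedding(x.view(bs, -1))
        m = m.view(bs, x.size(1), -1, m.size(-1)).sum(dim=2)  # memories: (bs, story_len, embd_size)
        for _ in range(self.hops):
            # Attend over memories, add the weighted sum back to the query state.
            p = torch.softmax(torch.bmm(m, u.unsqueeze(2)).squeeze(2), dim=1)
            o = torch.bmm(p.unsqueeze(1), m).squeeze(1)
            u = u + o
        return self.fc(u)  # answer logits: (bs, ans_size)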
jojonki / to_var.py
Created November 30, 2017 16:16
Convert to Variable in PyTorch
import torch
from torch.autograd import Variable

def to_var(x):
    # Move to the GPU when one is available, then wrap in a Variable.
    if torch.cuda.is_available():
        x = x.cuda()
    return Variable(x)
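A quick usage check; note that since PyTorch 0.4 Variable is merged into Tensor, so this wrapper is only needed on older versions:

x = to_var(torch.randn(4, 10))  # lands on the GPU when one is available
print(type(x), x.size())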
jojonki / init_weights.py
Created December 1, 2017 22:37
Custom initialization of weights in PyTorch
# https://github.com/pytorch/examples/blob/master/dcgan/main.py#L95-L102
def weights_init(m):
    # DCGAN-style init: N(0, 0.02) for conv weights,
    # N(1, 0.02) for BatchNorm scale and zeros for BatchNorm bias.
    classname = m.__class__.__name__
    if classname.find('Conv') != -1:
        m.weight.data.normal_(0.0, 0.02)
    elif classname.find('BatchNorm') != -1:
        m.weight.data.normal_(1.0, 0.02)
        m.bias.data.fill_(0)
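nn.Module.apply walks every submodule recursively, so the whole model can be initialized in one call, as in the referenced DCGAN example (model stands for any nn.Module):

model.apply(weights_init)  # visits each submodule, including model itself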
jojonki / print_training_params.py
Last active December 7, 2017 15:06
Print training parameters in PyTorch
for name, param in model.named_parameters():
    if param.requires_grad:  # list only parameters the optimizer will update
        print(name, param.data)
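A common companion snippet counts the trainable parameters instead of dumping their values; p.numel() gives the element count of each tensor:

n_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
print('trainable parameters:', n_params)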
jojonki / ema.py
Last active March 13, 2020 05:18
Apply exponential moving average decay for variables in PyTorch
# How to apply exponential moving average decay for variables?
# https://discuss.pytorch.org/t/how-to-apply-exponential-moving-average-decay-for-variables/10856/2
import torch.nn as nn

class EMA(nn.Module):
    def __init__(self, mu):
        super(EMA, self).__init__()
        self.mu = mu  # weight on the newest observation

    def forward(self, x, last_average):
        # Standard exponential moving average update.
        new_average = self.mu * x + (1 - self.mu) * last_average
        return new_average
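A minimal usage sketch, smoothing a stream of scalar losses; mu and the values below are arbitrary:

import torch

ema = EMA(mu=0.1)
running = torch.zeros(1)
for v in (1.0, 0.8, 0.9, 0.6):
    running = ema(torch.full((1,), v), running)
print(running)  # smoothed loss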
jojonki / pure_sgd.py
Created December 7, 2017 15:17
Pure SGD in PyTorch
# https://github.com/jcjohnson/pytorch-examples
for t in range(500):
    # Forward pass: compute predicted y by passing x to the model. Module objects
    # override the __call__ operator so you can call them like functions. When
    # doing so you pass a Variable of input data to the Module and it produces
    # a Variable of output data.
    y_pred = model(x)

    # Compute and print loss. We pass Variables containing the predicted and true
    # values of y, and the loss function returns a Variable containing the loss.
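    # (Preview ends here. The referenced jcjohnson example continues by
    # computing the loss, backpropagating, and updating parameters by hand
    # instead of via torch.optim, which is what makes this SGD "pure".
    # loss_fn, y, and learning_rate are assumed to be defined earlier.)
    loss = loss_fn(y_pred, y)
    print(t, loss.data[0])

    # Zero the gradients before running the backward pass.
    model.zero_grad()
    loss.backward()

    # Plain gradient descent step on .data, outside autograd's tracking.
    for param in model.parameters():
        param.data -= learning_rate * param.grad.data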
jojonki / load_wowrd2vec_binray_gensim.py
Last active December 18, 2017 21:29
Load a word2vec binary with gensim
import numpy as np
from gensim.models.keyedvectors import KeyedVectors

model_path = './data/GoogleNews-vectors-negative300.bin'
model = KeyedVectors.load_word2vec_format(model_path, binary=True)
model.wv['computer']  # array([ 1.07421875e-01, -2.01171875e-01, 1.23046875e-01, ...

# You can use these weights like this:
def load_embd_weights(word2vec, vocab_size, embd_size, w2i):
    embedding_matrix = np.zeros((vocab_size, embd_size))
    print('embed_matrix.shape', embedding_matrix.shape)
    found_ct = 0
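    # (The preview cuts the body off. A plausible completion, assuming w2i
    # maps each word to its row index and copying pretrained vectors where the
    # vocabularies overlap; this loop is illustrative, not the author's code.)
    for word, i in w2i.items():
        if word in word2vec.wv.vocab:  # gensim 3.x vocabulary lookup
            embedding_matrix[i] = word2vec.wv[word]
            found_ct += 1
    print(found_ct, 'words found in word2vec')
    return embedding_matrix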
jojonki / to_categorical.py
Last active January 1, 2018 21:49
One-hot encode labels in PyTorch
import numpy as np
import torch

def to_var(x):
    if torch.cuda.is_available():
        x = x.cuda()
    return torch.autograd.Variable(x)

def to_categorical(ys, n_classes):
    # One-hot encode labels by indexing rows of an identity matrix.
    if len(ys) == 0:
        return ys
    d = [to_var(torch.from_numpy(np.eye(n_classes, dtype='uint8')[y])) for y in ys]
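    # (Preview ends here; presumably the encoded list is returned, or
    # stacked into a single tensor with torch.stack.)
    return d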
jojonki / benchmark.py
Last active January 10, 2018 15:26
Performance on Titan V
# see original code
# https://discuss.pytorch.org/t/solved-titan-v-on-pytorch-0-3-0-cuda-9-0-cudnn-7-0-is-much-slower-than-1080-ti/11320/10?u=jef
import torch
from torchvision.models import vgg16, densenet121, resnet152
from time import time
import torch.nn as nn
import torch.backends.cudnn as cudnn
import torch.optim
from torch.autograd import Variable
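The preview shows only the imports. A minimal sketch of the kind of timing loop such a benchmark runs, assuming a CUDA device and using vgg16 as the example; the batch size and iteration count here are arbitrary:

cudnn.benchmark = True  # let cuDNN pick the fastest convolution algorithms
model = vgg16().cuda()
x = Variable(torch.randn(16, 3, 224, 224).cuda())
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)

torch.cuda.synchronize()
start = time()
for _ in range(50):
    optimizer.zero_grad()
    y = model(x)
    y.sum().backward()
    optimizer.step()
torch.cuda.synchronize()  # wait for queued kernels so the timing is honest
print('vgg16: %.1f ms/iter' % ((time() - start) / 50 * 1000))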
jojonki / python_logging.py
Created January 29, 2018 16:27
Python logging to console and a file
import logging

# Root logger writes everything to output.log with timestamps.
logging.basicConfig(level=logging.DEBUG,
                    filename='output.log',
                    format='%(asctime)s [%(levelname)-7s] %(module)s | %(message)s',
                    datefmt='%Y%m%d-%H%M%S')

# A second handler echoes records to the console with a shorter format.
console = logging.StreamHandler()
console.setLevel(logging.DEBUG)
formatter = logging.Formatter('[%(levelname)-7s] %(message)s')
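# (Preview stops before the console handler is attached; the standard
# completion from the Python logging cookbook:)
console.setFormatter(formatter)
logging.getLogger('').addHandler(console)  # root logger now logs to both targets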