This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Collect reference strings from standard input, one per line,
# until EOF terminates the stream.
refs = []
while True:
    try:
        refs.append(input())
    except EOFError:
        # input() signals end-of-stream by raising, not by a sentinel value.
        break
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
ああああああああああああああああ | |
ああああああああああああああああ | |
ああああああああああああああああ | |
ああああああああああああああああ | |
ああああああああああああああああ | |
ああああああああああああああああ | |
ああああああああああああああああ | |
ああああああああああああああああ | |
ああああああああああああああああ | |
ああああああああああああああああ |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Recursively re-encode every regular file under the current directory
# in place: -w writes UTF-8, -Lu normalizes line endings to Unix LF.
# -print0 / -0 keep filenames with spaces or newlines intact.
find . -type f -print0 | xargs -0 nkf --overwrite -w -Lu
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import logging

# File logging: DEBUG and above go to output.log with full timestamps.
logging.basicConfig(level=logging.DEBUG,
                    filename='output.log',
                    format='%(asctime)s [%(levelname)-7s] %(module)s | %(message)s',
                    datefmt='%Y%m%d-%H%M%S')
# Console logging: mirror records to stderr with a shorter format.
console = logging.StreamHandler()
console.setLevel(logging.DEBUG)
formatter = logging.Formatter('[%(levelname)-7s] %(message)s')
# Fix: the handler and formatter were created but never wired up, so the
# console output never appeared — attach the formatter and register the
# handler on the root logger.
console.setFormatter(formatter)
logging.getLogger('').addHandler(console)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# see original code | |
# https://discuss.pytorch.org/t/solved-titan-v-on-pytorch-0-3-0-cuda-9-0-cudnn-7-0-is-much-slower-than-1080-ti/11320/10?u=jef | |
import torch | |
from torchvision.models import vgg16,densenet121,resnet152 | |
from time import time | |
import torch.nn as nn | |
import torch.backends.cudnn as cudnn | |
import torch.optim | |
from torch.autograd import Variable |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
def to_var(x):
    """Move *x* onto the GPU when one is available and wrap it in a Variable."""
    # NOTE: torch.autograd.Variable is a legacy wrapper; on modern PyTorch it
    # simply returns the tensor itself.
    device_x = x.cuda() if torch.cuda.is_available() else x
    return torch.autograd.Variable(device_x)
def to_categorical(ys, n_classes):
    """1-hot encode each integer label (or label array) in *ys* into a
    length-``n_classes`` uint8 vector wrapped via ``to_var``."""
    # 1-hot encodes tensors
    if len(ys) == 0:
        return ys
    # Row y of np.eye(n_classes) is the one-hot vector for label y.
    # NOTE(review): assumes `np` (numpy) is imported elsewhere in the file — confirm.
    d = [to_var(torch.from_numpy(np.eye(n_classes, dtype='uint8')[y])) for y in ys]
    # NOTE(review): the visible snippet ends here without returning `d`;
    # presumably a `return d` (or a stacked tensor) follows in the original.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from gensim.models.keyedvectors import KeyedVectors

# Pre-trained GoogleNews word2vec vectors (300-d, binary word2vec format).
model_path = './data/GoogleNews-vectors-negative300.bin'
# Fix: reuse model_path instead of duplicating the literal path.
model = KeyedVectors.load_word2vec_format(model_path, binary=True)
# Fix: a KeyedVectors instance is indexed directly; `.wv` belongs to the full
# Word2Vec model and is gone from KeyedVectors in gensim 4, so `model.wv[...]`
# raises AttributeError there.
model['computer']  # array([ 1.07421875e-01, -2.01171875e-01, 1.23046875e-01,
# You can use these weights like this.
def load_embd_weights(word2vec, vocab_size, embd_size, w2i):
    """Build a (vocab_size, embd_size) embedding matrix from pre-trained
    word vectors.

    NOTE(review): assumes `np` (numpy) is imported elsewhere in the file,
    and that `w2i` maps words to row indices — confirm against the caller.
    """
    embedding_matrix = np.zeros((vocab_size, embd_size))
    print('embed_matrix.shape', embedding_matrix.shape)
    # Counter for vocabulary words found in word2vec — presumably incremented
    # by the loop that follows.
    found_ct = 0
    # NOTE(review): the visible snippet is truncated here; the loop that
    # populates embedding_matrix from word2vec/w2i is not shown.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# https://github.com/jcjohnson/pytorch-examples
# NOTE(review): training-loop skeleton — `model` and `x` are defined elsewhere
# in the original example, and the loop body is truncated after the forward
# pass (the loss computation and backward step are not visible here).
for t in range(500):
    # Forward pass: compute predicted y by passing x to the model. Module objects
    # override the __call__ operator so you can call them like functions. When
    # doing so you pass a Variable of input data to the Module and it produces
    # a Variable of output data.
    y_pred = model(x)
    # Compute and print loss. We pass Variables containing the predicted and true
    # values of y, and the loss function returns a Variable containing the loss.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# How to apply exponential moving average decay for variables?
# https://discuss.pytorch.org/t/how-to-apply-exponential-moving-average-decay-for-variables/10856/2
class EMA(nn.Module):
    """Exponential moving average as a stateless module.

    Blends a new observation with the previous running average:
    ``mu * x + (1 - mu) * last_average``.
    """

    def __init__(self, mu):
        super(EMA, self).__init__()
        # Smoothing factor: the weight given to the newest observation.
        self.mu = mu

    def forward(self, x, last_average):
        """Return the updated moving average of *x* over *last_average*."""
        carry_over = 1 - self.mu
        return self.mu * x + carry_over * last_average
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Print the name and current values of every trainable parameter of `model`
# (defined elsewhere); frozen parameters are skipped.
for param_name, tensor in model.named_parameters():
    if not tensor.requires_grad:
        continue
    print(param_name, tensor.data)