Illarion Khlestov (@ikhlestov)
@ikhlestov
ikhlestov / probability_task.py
Created September 20, 2017 00:16
probability_theory
from itertools import combinations, permutations
import numpy as np

def check_line(line):
    start = False
    for i in line:
        if i != 0 and not start:
            start = True
            counter = 0
        elif i == 0 and start:
            # the gist preview is truncated here; the branch body is not shown
            pass
@ikhlestov
ikhlestov / architecture_pattern.py
Created September 13, 2017 20:10
pytorch: architecture pattern
import torch
import torch.nn as nn
from torch.optim import lr_scheduler

# skeleton classes: a real Net must define layers so that
# model.parameters() is non-empty before building the optimizer
class ImagesDataset(torch.utils.data.Dataset):
    pass

class Net(nn.Module):
    pass

model = Net()
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
scheduler = lr_scheduler.StepLR(optimizer, step_size=30, gamma=0.1)
criterion = torch.nn.MSELoss()
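A minimal sketch of how these pieces are usually wired together, assuming ImagesDataset and Net were actually filled in; the loop below, batch size, and epoch count are illustrative, not part of the gist:

dataset = ImagesDataset()
loader = torch.utils.data.DataLoader(dataset, batch_size=32, shuffle=True)
for epoch in range(100):
    scheduler.step()  # epoch-start placement matches the 2017-era API
    for inputs, targets in loader:
        optimizer.zero_grad()
        loss = criterion(model(inputs), targets)
        loss.backward()
        optimizer.step()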
@ikhlestov
ikhlestov / custom_data_loader.py
Created September 13, 2017 19:41
pytorch: custom data loader
import torch
import torchvision as tv

class ImagesDataset(torch.utils.data.Dataset):
    def __init__(self, df, transform=None,
                 loader=tv.datasets.folder.default_loader):
        self.df = df
        self.transform = transform
        self.loader = loader
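Assuming the full gist goes on to define __getitem__ and __len__ (the preview is cut off) and that df is a pandas DataFrame of image paths, typical usage would wrap the dataset in a DataLoader:

dataset = ImagesDataset(df, transform=tv.transforms.ToTensor())
loader = torch.utils.data.DataLoader(dataset, batch_size=16,
                                     shuffle=True, num_workers=4)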
@ikhlestov
ikhlestov / model_printing_and_saving.py
Created September 13, 2017 19:15
pytorch: model printing and saving
from collections import OrderedDict
import torch.nn as nn

model = nn.Sequential(OrderedDict([
    ('conv1', nn.Conv2d(1, 20, 5)),
    ('relu1', nn.ReLU()),
    ('conv2', nn.Conv2d(20, 64, 5)),
    ('relu2', nn.ReLU())
]))
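The printing and saving mentioned in the title come down to the standard idioms below (the file name is illustrative):

import torch

print(model)  # prints the layer structure with the names from the OrderedDict
torch.save(model.state_dict(), 'model.pth')  # save parameters only
model.load_state_dict(torch.load('model.pth'))  # restore them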
@ikhlestov
ikhlestov / bells_and_whistles.py
Created September 13, 2017 19:08
pytorch: learning rate scheduler, train flag, random seed
# scheduler example
import torch
from torch.optim import lr_scheduler

optimizer = torch.optim.SGD(model.parameters(), lr=0.01)  # `model` is defined elsewhere in the gist
scheduler = lr_scheduler.StepLR(optimizer, step_size=30, gamma=0.1)
for epoch in range(100):
    scheduler.step()
    train()     # training pass, defined elsewhere
    validate()  # validation pass, defined elsewhere
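Note that this epoch-start placement of scheduler.step() reflects the PyTorch API of 2017; since PyTorch 1.1 the scheduler is expected to be stepped after optimizer.step(), i.e. at the end of the epoch.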
@ikhlestov
ikhlestov / excluding_subrgaphs_from_backward.py
Created September 13, 2017 19:02
pytorch: excluding subgraphs from backward
import torch
from torch.autograd import Variable

# requires_grad:
# if there is a single input to an operation that requires gradient,
# its output will also require gradient
x = Variable(torch.randn(5, 5))
y = Variable(torch.randn(5, 5))
z = Variable(torch.randn(5, 5), requires_grad=True)
a = x + y
# a.requires_grad is False since neither input requires gradient;
# the preview is truncated here, presumably before the contrasting
# case such as b = a + z, where b.requires_grad would be True
@ikhlestov
ikhlestov / weights_initialization.py
Created September 12, 2017 17:18
pytorch: weights initialization
import torch
from torch.autograd import Variable

# new way, with the `init` module
w = torch.Tensor(3, 5)
torch.nn.init.normal(w)
# works for Variables also
w2 = Variable(w)
torch.nn.init.normal(w2)
# old-style direct access to the tensor's `data` attribute
w2.data.normal_()  # preview truncated; this is the usual continuation
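For initializing a whole network rather than a single tensor, the usual pattern is model.apply with a per-layer function; a sketch, assuming model is some nn.Module instance:

import torch.nn as nn

def init_weights(m):
    # initialize every linear layer in place
    if isinstance(m, nn.Linear):
        torch.nn.init.normal(m.weight)    # renamed `normal_` in PyTorch >= 0.4
        torch.nn.init.constant(m.bias, 0)

model.apply(init_weights)  # `model` is an assumed nn.Module instance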
@ikhlestov
ikhlestov / cuda_wrapper.py
Created September 12, 2017 17:08
pytorch: CUDA wrapper
class Trainer:
    def __init__(self, model, use_cuda=False, gpu_idx=0):
        self.use_cuda = use_cuda
        self.gpu_idx = gpu_idx
        self.model = self.to_gpu(model)

    def to_gpu(self, tensor):
        if self.use_cuda:
            return tensor.cuda(self.gpu_idx)
        else:
            return tensor  # preview truncated; returning the input unchanged is the natural fallback
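Usage then reduces to a single call; Net is an assumed model class, and the CUDA check is not part of the preview:

trainer = Trainer(model=Net(), use_cuda=torch.cuda.is_available())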
@ikhlestov
ikhlestov / cuda_device_allocation.py
Created September 12, 2017 17:06
pytorch: cuda device allocation
import torch

# check whether CUDA is available
torch.cuda.is_available()
# set the required device
torch.cuda.set_device(0)
# work with some required cuda device
with torch.cuda.device(1):
    # preview truncated; allocations inside this block land on GPU 1,
    # e.g. a = torch.cuda.FloatTensor(1)
    pass
@ikhlestov
ikhlestov / train_model_on_cuda.py
Created September 12, 2017 17:05
pytorch: train model on cuda
import torch

### tensor example
x_cpu = torch.randn(10, 20)
w_cpu = torch.randn(20, 10)
# direct transfer to the GPU
x_gpu = x_cpu.cuda()
w_gpu = w_cpu.cuda()
result_gpu = x_gpu @ w_gpu
# get back from the GPU to the CPU
result_cpu = result_gpu.cpu()  # preview truncated; .cpu() is the standard transfer back
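As a side note not taken from the gist: in PyTorch 0.4 and later the same transfers are normally written against an explicit device object:

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
x_gpu = x_cpu.to(device)
result_cpu = result_gpu.to('cpu')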