
Ayan Das (dasayan05)

💭 Reject negativity. Stay positive. Use ReLU.
dasayan05 / mog.py
Last active May 6, 2020 16:54
Example usage of Pyro for a Mixture of Gaussians (MoG)
import pyro, torch, numpy as np
import pyro.distributions as dist
import pyro.optim as optim
import pyro.infer as infer
import matplotlib.pyplot as plt
plt.style.use('ggplot')
from scipy.stats import norm
plt.ioff()
def getdata(N, mean1=2.0, mean2=-1.0, std1=0.5, std2=0.5):
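Below is a minimal end-to-end sketch of fitting a two-component MoG with Pyro's SVI, assuming the truncated getdata above returns a flat array of samples from the two components. The component count K, the enumeration-based ELBO, and the Delta (point-estimate) guide are illustrative choices for this sketch, not the gist's actual code.

from torch.distributions import constraints

K = 2  # number of mixture components (an assumption for this sketch)

@infer.config_enumerate
def model(data):
    # global latents: mixture weights and per-component means (scales fixed at 0.5)
    weights = pyro.sample('weights', dist.Dirichlet(torch.ones(K)))
    with pyro.plate('components', K):
        locs = pyro.sample('locs', dist.Normal(0., 5.))
    with pyro.plate('data', len(data)):
        # discrete assignments are summed out by TraceEnum_ELBO
        assignment = pyro.sample('assignment', dist.Categorical(weights))
        pyro.sample('obs', dist.Normal(locs[assignment], 0.5), obs=data)

def guide(data):
    # point-estimate (Delta) guide over the global latents only
    w_post = pyro.param('w_post', torch.ones(K) / K, constraint=constraints.simplex)
    locs_post = pyro.param('locs_post', torch.randn(K))
    pyro.sample('weights', dist.Delta(w_post).to_event(1))
    with pyro.plate('components', K):
        pyro.sample('locs', dist.Delta(locs_post))

data = torch.tensor(getdata(100), dtype=torch.float)
svi = infer.SVI(model, guide, optim.Adam({'lr': 0.05}),
                loss=infer.TraceEnum_ELBO(max_plate_nesting=1))
for step in range(500):
    svi.step(data)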
dasayan05 / sync_gradients.py
Created February 27, 2019 10:20
Synchronize gradients
def sync_gradients(model, rank, world_size):
    for param in model.parameters():
        dist.all_reduce(param.grad.data, op=dist.reduce_op.SUM)
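This sums the gradients across ranks in place; a common variant, sketched below (the name sync_gradients_avg is just for illustration, not the gist's code), also divides by world_size so each replica steps with the average gradient rather than the sum.

def sync_gradients_avg(model, rank, world_size):
    for param in model.parameters():
        # sum each gradient tensor across all ranks in place ...
        dist.all_reduce(param.grad.data, op=dist.reduce_op.SUM)
        # ... then average it, so the update does not scale with world_size
        param.grad.data /= world_size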
dasayan05 / sync_init_weights.py
Created February 27, 2019 10:19
Synchronize initial weights
def sync_initial_weights(model, rank, world_size):
    for param in model.parameters():
        if rank == 0:
            # Rank 0 sends its own weights
            # to all its siblings (ranks 1 to world_size - 1)
            for sibling in range(1, world_size):
                dist.send(param.data, dst=sibling)
        else:
            # Siblings must receive the parameters
            dist.recv(param.data, src=0)
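An equivalent fan-out can be written with the broadcast collective, which copies rank 0's tensor to every other rank in a single call per parameter. A sketch, not the gist's code (the name sync_initial_weights_bcast is illustrative):

def sync_initial_weights_bcast(model, rank, world_size):
    for param in model.parameters():
        # every rank calls broadcast; rank 0's data overwrites the others' in place
        dist.broadcast(param.data, src=0)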
dasayan05 / train.py
Created February 27, 2019 10:16
Distributed training of a DL model
model = LeNet()
# first synchronization of initial weights
sync_initial_weights(model, rank, world_size)
optimizer = optim.SGD(model.parameters(), lr=1e-3, momentum=0.85)
model.train()
for epoch in range(1, epochs + 1):
    for data, target in train_loader:
        optimizer.zero_grad()
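One plausible continuation of the inner loop is sketched below; the cross-entropy loss and the exact placement of sync_gradients are assumptions, not the gist's code.

        output = model(data)
        loss = torch.nn.functional.cross_entropy(output, target)
        loss.backward()
        # make all replicas agree on the gradients before stepping
        sync_gradients(model, rank, world_size)
        optimizer.step()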
dasayan05 / allreduce.py
Created February 27, 2019 09:33
Basic usage of All-reduce
def main(rank, world):
    if rank == 0:
        x = torch.tensor([1.])
    elif rank == 1:
        x = torch.tensor([2.])
    elif rank == 2:
        x = torch.tensor([-3.])
    dist.all_reduce(x, op=dist.reduce_op.SUM)
    print('Rank {} has {}'.format(rank, x))
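Running main on several processes requires each rank to join a process group first. A minimal launch sketch, assuming the gloo backend and a local master address/port (both placeholders):

import os
import torch.distributed as dist
import torch.multiprocessing as mp

def init_process(rank, world, fn, backend='gloo'):
    # rendezvous settings; address and port are placeholders
    os.environ['MASTER_ADDR'] = '127.0.0.1'
    os.environ['MASTER_PORT'] = '29500'
    dist.init_process_group(backend, rank=rank, world_size=world)
    fn(rank, world)

if __name__ == '__main__':
    world = 3
    # mp.spawn passes the process index as the first argument of init_process
    mp.spawn(init_process, args=(world, main), nprocs=world)

With world = 3 the reduced value is 1 + 2 - 3 = 0, so every rank should print tensor([0.]).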
dasayan05 / ptdist.py
Last active December 4, 2019 17:45
Peer-to-peer communication
# filename 'ptdist.py'
import torch
import torch.distributed as dist
def main(rank, world):
    if rank == 0:
        x = torch.tensor([1., -1.]) # Tensor of interest
        dist.send(x, dst=1)
        print('Rank-0 has sent the following tensor to Rank-1')
        print(x)
    else:
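        # (the preview ends here; a plausible continuation, assuming rank 1
        # receives into a placeholder of the same shape, would be)
        z = torch.tensor([0., 0.])
        dist.recv(z, src=0)  # blocks until the tensor from rank 0 arrives
        print('Rank-1 has received the following tensor from Rank-0')
        print(z)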