Skip to content

Instantly share code, notes, and snippets.

@vedantroy
Created August 22, 2022 07:04
Show Gist options
  • Save vedantroy/91e08805f63b4697b2a7b3fd48742a2d to your computer and use it in GitHub Desktop.
"""run.py:"""
#!/usr/bin/env python
import os
import torch
import torch.distributed as dist
import torch.multiprocessing as mp
def run(rank, size):
    """Worker entry point for each distributed process.

    Placeholder to be filled in with the actual distributed workload.

    Args:
        rank: Index of this process within the process group.
        size: Total number of processes in the group (world size).
    """
    pass
def init_process(rank, size, fn, backend='gloo'):
    """Join the distributed process group, then hand control to `fn`.

    Args:
        rank: Rank of this process within the group.
        size: World size (total number of participating processes).
        fn: Callable invoked as ``fn(rank, size)`` once the group is ready.
        backend: torch.distributed backend name (default ``'gloo'``).
    """
    # Every rank must agree on the master's address/port to rendezvous.
    rendezvous = {'MASTER_ADDR': '127.0.0.1', 'MASTER_PORT': '29500'}
    os.environ.update(rendezvous)
    # Blocks until all `size` ranks have joined the group.
    dist.init_process_group(backend, rank=rank, world_size=size)
    fn(rank, size)
if __name__ == "__main__":
    # Launch one worker process per rank and wait for all of them.
    size = 2
    # "spawn" start method is required for CUDA-safe multiprocessing
    # and must be set before any Process is created.
    mp.set_start_method("spawn")

    workers = []
    for rank in range(size):
        proc = mp.Process(target=init_process, args=(rank, size, run))
        proc.start()
        workers.append(proc)

    for proc in workers:
        proc.join()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment