import torch
import torch.distributed as dist
import torch.multiprocessing as mp
import torch.nn as nn
from torch.nn.parallel import DistributedDataParallel as DDP
def profile_hook(state_dict, bucket):
    # Times the allreduce of one gradient bucket with a pair of CUDA events.
    # Note: this uses the early GradBucket API; on newer PyTorch releases,
    # bucket.get_tensors()[0] is bucket.buffer() and bucket.get_index()
    # is bucket.index().
    rank = dist.get_rank()
    tensor = bucket.get_tensors()[0]
    metrics = {}
    state_dict[bucket.get_index()] = metrics

    # record event before comm
    e_bfr = torch.cuda.Event(enable_timing=True)
    metrics["e_bfr"] = e_bfr
    with torch.cuda.device(rank):
        e_bfr.record()

    # launch async comm; this sums gradients without dividing by world size,
    # which is fine here since the hook only profiles communication time
    fut = dist.all_reduce(tensor, async_op=True).get_future()

    def cb(fut):
        # record event after comm
        e_aft = torch.cuda.Event(enable_timing=True)
        metrics["e_aft"] = e_aft
        with torch.cuda.device(rank):
            e_aft.record()

    fut.then(cb)
    return fut
def example(rank, world_size):
    dist.init_process_group(
        "nccl",
        rank=rank,
        world_size=world_size,
        init_method="tcp://localhost:23456",
    )
    model = nn.Sequential(*[nn.Linear(500, 500) for _ in range(20)]).to(rank)
    # small bucket_cap_mb yields many buckets, hence many measurements
    ddp_model = DDP(model, device_ids=[rank], bucket_cap_mb=1)

    # only rank 0 installs the hook; its allreduce still lines up,
    # bucket for bucket, with the built-in allreduce on the other ranks
    if rank == 0:
        state_dict = {}
        ddp_model.register_comm_hook(state_dict, profile_hook)

    # warmup
    for _ in range(3):
        ddp_model(torch.randn(20, 500).to(rank)).sum().backward()

    # measured iteration; synchronize before reading the events
    ddp_model(torch.randn(20, 500).to(rank)).sum().backward()
    torch.cuda.synchronize(rank)

    if rank == 0:
        for bucket_index in range(len(state_dict)):
            e_bfr = state_dict[bucket_index]["e_bfr"]
            e_aft = state_dict[bucket_index]["e_aft"]
            # elapsed_time returns milliseconds
            print(f"bucket {bucket_index} comm time: {e_bfr.elapsed_time(e_aft)} ms")
def main():
    world_size = 2
    mp.spawn(example, args=(world_size,), nprocs=world_size, join=True)


if __name__ == "__main__":
    main()
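A few notes on running this: world_size = 2 with the NCCL backend needs one CUDA device per rank, so the script assumes at least two GPUs on localhost. Event.elapsed_time reports milliseconds and is only valid because torch.cuda.synchronize(rank) runs before the events are read. Treat the numbers as a rough per-bucket allreduce latency rather than an exact NCCL kernel time: the events are recorded on the current stream around the asynchronous collective, and the "after" event is recorded from the future's callback thread once the communication has finished.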