This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import os | |
import torch | |
import torch.nn.parallel.distributed as DDP | |
import torch.multiprocessing as mp | |
import torch.distributed as dist | |
class MyIterableDataset(torch.utils.data.IterableDataset): | |
def __init__(self, rank): | |
super().__init__() |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
- Make sure the conda env is activated.
- Install pip + git for this conda env with: `conda install git pip`
- Then you can install what is needed with pip, such as:
  `pip install -e git+https://github.com/pytorch/pytorch_sphinx_theme.git#egg=pytorch_sphinx_theme`
  `pip install -r requirements.txt`
- Then `make html` should not return any errors. | |
To resolve katex issues: | |
npm install -g katex |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import torch | |
import torch.distributed as dist | |
import os | |
import torch.multiprocessing as mp | |
import torch.nn as nn | |
import contextlib | |
def worker(rank): | |
dist.init_process_group("nccl", rank=rank, world_size=2) | |
torch.cuda.set_device(rank) | |
batch_factor = 3 |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from threading import Thread | |
import torch | |
def foo():
    """Print the CUDA device index visible to the calling thread.

    Used to demonstrate that `torch.cuda.set_device` is per-thread state:
    a device set on one thread is not seen by another.
    """
    device_index = torch.cuda.current_device()
    print(f"thread device {device_index}")
def set():
    """Bind the calling thread to CUDA device 1, then print the now-current device.

    NOTE(review): the name shadows the builtin ``set``; kept unchanged for
    interface compatibility with existing callers.
    """
    target_device = 1
    torch.cuda.set_device(target_device)
    print(torch.cuda.current_device())
thr = Thread(target=foo, args=()) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import os | |
import tempfile | |
import torch | |
import torch.distributed as dist | |
import torch.nn as nn | |
import torch.optim as optim | |
import torch.multiprocessing as mp | |
from torch.nn.parallel import DistributedDataParallel as DDP |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#include <iostream> | |
#include <stdint.h> | |
int main() { | |
int foo; | |
// Convert address of foo int to uint | |
uintptr_t addr = reinterpret_cast<uintptr_t>(&foo); | |
std::cout << addr; | |
// Convert back to int ptr and use | |
auto ptr = reinterpret_cast<int*>(addr); | |
*ptr = 3; |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#include <iostream> | |
#include <functional> | |
// Decorate a nullary callable: the returned wrapper emits a log line and
// then delegates to `func`, passing its return value through unchanged.
// Works for T = void as well (returning a void expression is legal C++).
template <typename T>
std::function<T(void)> withLogging(std::function<T(void)> func) {
  auto wrapped = [func]() -> T {
    std::cout << "log\n";
    return func();
  };
  return wrapped;
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
def timed_log(text):
    """Print *text* prefixed with the current wall-clock time (HH:MM:SS)."""
    timestamp = datetime.now().strftime("%H:%M:%S")
    print(f"{timestamp} {text}")
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import os | |
import tempfile | |
import torch | |
import torch.distributed as dist | |
import torch.nn as nn | |
import torch.optim as optim | |
import torch.multiprocessing as mp | |
import torchvision | |
from torch.nn.parallel import DistributedDataParallel as DDP |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import os | |
import psutil | |
import torch.multiprocessing as mp | |
import torch.distributed as dist | |
import gc | |
def get_mb():
    """Return this process's resident set size (RSS) in megabytes.

    Uses decimal megabytes (bytes * 1e-6), matching the original scaling.
    """
    rss_bytes = psutil.Process(os.getpid()).memory_info().rss
    return rss_bytes * 1e-6