
@pzelasko
Created March 1, 2021 16:59
Disable shared memory in PyTorch dataloader
import sys

import torch
from torch.utils.data import dataloader
from torch.multiprocessing import reductions  # ensures PyTorch's shared-memory reducers are registered before we unregister them
from multiprocessing.reduction import ForkingPickler

# Keep a reference to the original collate function, then wrap it so that
# collation never moves batch tensors into shared memory.
default_collate_func = dataloader.default_collate


def default_collate_override(batch):
    dataloader._use_shared_memory = False
    return default_collate_func(batch)


setattr(dataloader, 'default_collate', default_collate_override)

# Unregister the custom reducers for tensor storage classes so that tensors
# exchanged between worker and main processes are pickled by value instead of
# being passed through shared-memory handles.
for t in torch._storage_classes:
    if sys.version_info[0] == 2:
        if t in ForkingPickler.dispatch:
            del ForkingPickler.dispatch[t]
    else:
        if t in ForkingPickler._extra_reducers:
            del ForkingPickler._extra_reducers[t]
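
A minimal usage sketch follows, assuming the snippet above is saved as disable_shm.py and imported purely for its side effect; MyDataset, the batch size, and the worker count are illustrative placeholders, not part of the original gist.

import torch
from torch.utils.data import DataLoader, Dataset

import disable_shm  # noqa: F401 -- importing applies the monkey-patch before workers are spawned


class MyDataset(Dataset):
    """Tiny placeholder dataset returning fixed-size random tensors."""

    def __len__(self):
        return 8

    def __getitem__(self, idx):
        return torch.randn(3)


loader = DataLoader(MyDataset(), batch_size=4, num_workers=2)
for batch in loader:
    print(batch.shape)  # torch.Size([4, 3]); batches come back without using shared memory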