@anijain2305
Created February 23, 2024 21:39
import torch

# Guard on the attributes of nn.Module instances instead of assuming they are
# static; this makes Dynamo install the per-module guards logged below.
torch._dynamo.config.guard_nn_modules = True


class SubMod(torch.nn.Module):
    def __init__(self):
        super().__init__()
        self.linear = torch.nn.Linear(10, 10)

    def forward(self, x):
        return self.linear(x)


class TopModule(torch.nn.Module):
    def __init__(self):
        super().__init__()
        # Two separate SubMod instances, each wrapped in its own torch.compile.
        self.submods = [SubMod(), SubMod()]
        self.submods = [torch.compile(mod) for mod in self.submods]

    def forward(self, x):
        for submod in self.submods:
            x = submod(x)
        return x


mod = TopModule()
x = torch.randn(10, 10)
mod(x)
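
The guard dumps in the comment below come from Dynamo's guard logging. A minimal way to reproduce them, assuming the script above is saved as examples/submod.py (the path the inline guard comments point at), is to run it with the TORCH_LOGS environment variable set, or to enable the same logging in-process; the set_logs call here is a sketch of that equivalent switch:

# Reproduce the guard output below (run from a shell):
#   TORCH_LOGS="guards" python examples/submod.py
# or, equivalently, enable the logging artifacts before the first mod(x) call:
import torch
torch._logging.set_logs(guards=True, recompiles=True)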
anijain2305 commented Feb 23, 2024

V0223 13:39:51.605000 140679072908416 torch/_dynamo/guards.py:1067] [0/0] GUARDS:
V0223 13:39:51.605000 140679072908416 torch/_dynamo/guards.py:1075] [0/0] hasattr(L['x'], '_dynamo_dynamic_indices') == False           # return self.linear(x)  # examples/submod.py:10 in forward
V0223 13:39:51.606000 140679072908416 torch/_dynamo/guards.py:1075] [0/0] ___check_obj_id(L['self'], 140676230050336)                   # return self.linear(x)  # examples/submod.py:10 in forward
V0223 13:39:51.606000 140679072908416 torch/_dynamo/guards.py:1075] [0/0] L['self'].training == True                                    # return self.linear(x)  # examples/submod.py:10 in forward
V0223 13:39:51.606000 140679072908416 torch/_dynamo/guards.py:1075] [0/0] utils_device.CURRENT_DEVICE == None                           # _dynamo/output_graph.py:395 in init_ambient_guards
V0223 13:39:51.606000 140679072908416 torch/_dynamo/guards.py:1075] [0/0] ___check_current_backend(140676194199360)                     # _dynamo/output_graph.py:401 in init_ambient_guards
V0223 13:39:51.606000 140679072908416 torch/_dynamo/guards.py:1075] [0/0] ___check_obj_id(L['self'].linear, 140676230050096)            # return self.linear(x)  # examples/submod.py:10 in forward
V0223 13:39:51.606000 140679072908416 torch/_dynamo/guards.py:1075] [0/0] L['self'].linear.training == True                             # return self.linear(x)  # examples/submod.py:10 in forward
V0223 13:39:51.606000 140679072908416 torch/_dynamo/guards.py:1075] [0/0] check_tensor(L['x'], Tensor, DispatchKeySet(CPU, BackendSelect, ADInplaceOrView, AutogradCPU), torch.float32, device=None, requires_grad=False, size=[10, 10], stride=[10, 1])  # return self.linear(x)  # examples/submod.py:10 in forward

V0223 13:39:51.705000 140679072908416 torch/_dynamo/guards.py:1067] [0/1] GUARDS:
V0223 13:39:51.705000 140679072908416 torch/_dynamo/guards.py:1075] [0/1] hasattr(L['x'], '_dynamo_dynamic_indices') == False           # return self.linear(x)  # examples/submod.py:10 in forward
V0223 13:39:51.705000 140679072908416 torch/_dynamo/guards.py:1075] [0/1] ___check_obj_id(L['self'], 140676230050288)                   # return self.linear(x)  # examples/submod.py:10 in forward
V0223 13:39:51.705000 140679072908416 torch/_dynamo/guards.py:1075] [0/1] L['self'].training == True                                    # return self.linear(x)  # examples/submod.py:10 in forward
V0223 13:39:51.706000 140679072908416 torch/_dynamo/guards.py:1075] [0/1] utils_device.CURRENT_DEVICE == None                           # _dynamo/output_graph.py:395 in init_ambient_guards
V0223 13:39:51.706000 140679072908416 torch/_dynamo/guards.py:1075] [0/1] ___check_current_backend(140677220758096)                     # _dynamo/output_graph.py:401 in init_ambient_guards
V0223 13:39:51.706000 140679072908416 torch/_dynamo/guards.py:1075] [0/1] ___check_obj_id(L['self'].linear, 140676230050384)            # return self.linear(x)  # examples/submod.py:10 in forward
V0223 13:39:51.706000 140679072908416 torch/_dynamo/guards.py:1075] [0/1] L['self'].linear.training == True                             # return self.linear(x)  # examples/submod.py:10 in forward
V0223 13:39:51.706000 140679072908416 torch/_dynamo/guards.py:1075] [0/1] check_tensor(L['x'], Tensor, DispatchKeySet(CPU, BackendSelect, ADInplaceOrView, AutogradCPU), torch.float32, device=None, requires_grad=True, size=[10, 10], stride=[10, 1])  # return self.linear(x)  # examples/submod.py:10 in forward
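
Note that the shared SubMod.forward is compiled twice, once per instance: frames [0/0] and [0/1]. With guard_nn_modules = True the first compilation installs an identity guard, ___check_obj_id(L['self'], 140676230050336), that the second SubMod instance (id 140676230050288) cannot satisfy, so Dynamo recompiles and emits the second guard set. The check_tensor guards also differ: the second submodule's input is the output of the first linear layer, so it arrives with requires_grad=True. A hypothetical check of this reading, relying on the private _orig_mod attribute of the compiled wrappers:

# Each torch.compile wrapper holds a distinct SubMod instance, so the
# id-based L['self'] guard from [0/0] cannot match when the second
# submodule runs, which is what triggers the [0/1] recompilation.
assert mod.submods[0]._orig_mod is not mod.submods[1]._orig_mod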
