Make TORCH_COMPILE_DEBUG=1 work again
ipiszy committed Nov 3, 2023
1 parent 132cb57 commit 2016fc9
Showing 2 changed files with 3 additions and 2 deletions.
1 change: 1 addition & 0 deletions test/inductor/test_torchinductor.py
@@ -3111,6 +3111,7 @@ def forward(self, l_input_: torch.Tensor):
         o2 = torch.compile(mod)(inp)
         self.assertEqual(o1, o2)

+    @patch.object(config.trace, "enabled", True)
     def test_layer_norm(self):
         m = torch.nn.Sequential(
             torch.nn.LayerNorm(32),
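
For context, the patched decorator turns on Inductor's trace debugging for this test, which is the same path that TORCH_COMPILE_DEBUG=1 enables from the environment. Below is a minimal sketch, not taken from this commit: the module, shapes, and artifact names are illustrative of the usual debug workflow.

# A minimal sketch (not part of this commit) of exercising the Inductor trace
# debug path that the patched test now covers.
import os
os.environ["TORCH_COMPILE_DEBUG"] = "1"  # set before importing torch, to be safe

import torch

# Roughly what @patch.object(config.trace, "enabled", True) does inside the test:
#   import torch._inductor.config as inductor_config
#   inductor_config.trace.enabled = True

mod = torch.nn.Sequential(torch.nn.LayerNorm(32), torch.nn.ReLU()).eval()
with torch.no_grad():
    out = torch.compile(mod)(torch.randn(2, 32))

# With tracing enabled, Inductor typically dumps debug artifacts (e.g. readable
# FX graphs, the generated output_code.py, IR snapshots) under a
# torch_compile_debug/ directory in the current working directory.
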
4 changes: 2 additions & 2 deletions torch/_inductor/scheduler.py
@@ -779,7 +779,7 @@ def __init__(self, scheduler: "Scheduler", snodes: List[SchedulerNode]):
         # NB: No need to call super().__init__() because we don't need to re-use any of its logic.
         self.snodes = snodes
         self.scheduler = scheduler
-        self.node: ir.Buffer
+        self.node: Optional[ir.Buffer] = None
         self.users: List[NodeUser] = []
         self.inverse_users = []
         self.node_users = []

Check failure on line 782 in torch/_inductor/scheduler.py (GitHub Actions / lintrunner / linux-job): MYPYNOFOLLOW [assignment]: Incompatible types in assignment (expression has type "Optional[Buffer]", base class "BaseSchedulerNode" defined the type as "Buffer")
@@ -1017,7 +1017,7 @@ def __init__(
         else:
             self.scheduler = scheduler
             self.snodes = nodes
-            self.node: ir.Buffer
+            self.node: Optional[ir.Buffer] = None
             self.users: List[NodeUser] = []

             self.set_read_writes(
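
The mypy failure reported above is standard typing behaviour rather than anything Inductor-specific: a subclass cannot re-declare an attribute as Optional when the base class annotates it as non-Optional. A self-contained sketch with stand-in types (Buffer here is a placeholder for ir.Buffer):

# Stand-alone sketch (hypothetical stand-in types) of the pattern flagged by
# the lint job: the base class promises a non-Optional attribute, so
# re-declaring it as Optional[...] = None in a subclass is an incompatible
# override in mypy's eyes, even though it runs fine at runtime.
from typing import List, Optional


class Buffer:  # placeholder for torch._inductor.ir.Buffer
    pass


class BaseSchedulerNode:
    node: Buffer  # annotation only; the base class promises a Buffer


class FusedSchedulerNode(BaseSchedulerNode):
    def __init__(self, snodes: List[BaseSchedulerNode]) -> None:
        self.snodes = snodes
        # Works at runtime, but mypy reports:
        #   Incompatible types in assignment (expression has type
        #   "Optional[Buffer]", base class "BaseSchedulerNode" defined the
        #   type as "Buffer")
        self.node: Optional[Buffer] = None

To satisfy the checker, a follow-up typically either guards accesses to self.node against None or loosens the base-class annotation to Optional.
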
