From e967f2f58352b4efacc8cc01e1883e0a895e9fcf Mon Sep 17 00:00:00 2001 From: Mergen Nachin Date: Fri, 29 Aug 2025 07:03:58 -0700 Subject: [PATCH] Remove test_aot_buffer_mutation Summary: The test is failing with this error in fbsource. This looks like it is exercising all codepaths, and we can delete it? AssertionError: RuntimeError not raised File "/usr/local/fbcode/platform010/lib/python3.12/unittest/case.py", line 58, in testPartExecutor yield File "/usr/local/fbcode/platform010/lib/python3.12/unittest/case.py", line 634, in run self._callTestMethod(testMethod) File "/usr/local/fbcode/platform010/lib/python3.12/unittest/case.py", line 589, in _callTestMethod if method() is not None: ^^^^^^^^ File "/data/users/mnachin/fbsource/buck-out/v2/gen/fbcode/d2b9e0cfbb98b699/executorch/exir/tests/__tracer__/tracer#link-tree/executorch/exir/tests/test_tracer.py", line 339, in test_aot_buffer_mutation with self.assertRaisesRegex( ^^^^^^^^^^^^^^^^^^^^^^^ ``` test_aot_buffer_mutation (executorch.exir.tests.test_tracer.TestTorchDispatchFXTracer) ... 
FAIL ====================================================================== FAIL: test_aot_buffer_mutation (executorch.exir.tests.test_tracer.TestTorchDispatchFXTracer) ---------------------------------------------------------------------- Traceback (most recent call last): File "/data/users/mnachin/fbsource/buck-out/v2/gen/fbcode/d2b9e0cfbb98b699/executorch/exir/tests/__tracer__/tracer#link-tree/executorch/exir/tests/test_tracer.py", line 339, in test_aot_buffer_mutation with self.assertRaisesRegex( ^^^^^^^^^^^^^^^^^^^^^^^ AssertionError: RuntimeError not raised ``` Reviewed By: tugsbayasgalan Differential Revision: D81320975 --- exir/tests/test_tracer.py | 70 --------------------------------------- 1 file changed, 70 deletions(-) diff --git a/exir/tests/test_tracer.py b/exir/tests/test_tracer.py index 594e760ab32..22e01f33332 100644 --- a/exir/tests/test_tracer.py +++ b/exir/tests/test_tracer.py @@ -307,76 +307,6 @@ def f(x: torch.Tensor, y: List[torch.Tensor]) -> Dict[str, torch.Tensor]: self.assertEqual(prog(*inp), f(*inp)) - def test_aot_buffer_mutation(self) -> None: - class Module(torch.nn.Module): - def __init__(self): - super().__init__() - self.register_buffer( - "_bin_num_examples", - torch.empty([42]).fill_( - 0.0, - ), - ) - - def forward(self, x, y, z): - self._bin_num_examples.index_copy_( - dim=0, - index=y, - source=z, - ) - self._bin_num_examples.index_add_( - dim=0, index=torch.arange(4), source=x - ) - return self._bin_num_examples - 1, x * z - - model = Module() - example_inputs = ( - torch.randn(4, requires_grad=True), - torch.tensor(0), - torch.tensor(3.14), - ) - - with self.assertRaisesRegex( - RuntimeError, - "Found a graph input that requires gradients, and received a mutation.", - ): - _ = exir.capture( - model, - example_inputs, - exir.CaptureConfig( - enable_aot=True, - ), - ) - - # Note that model._bin_num_examples is mutated during exir.capture - # We need to create a new_model - new_model = Module() - example_inputs = ( - 
torch.randn(4), - torch.tensor(0), - torch.tensor(3.14), - ) - - ep = exir.capture( - new_model, - example_inputs, - exir.CaptureConfig( - enable_aot=True, - ), - ) - - test_inputs = ( - torch.randn(4), - torch.tensor(0), - torch.tensor(2.1), - ) - graph_outputs = ep(*test_inputs) - eager_outputs = Module()(*test_inputs) - self.assertEqual(len(graph_outputs), 2) - self.assertEqual(len(eager_outputs), 2) - self.assertTrue(torch.allclose(graph_outputs[0], eager_outputs[0])) - self.assertTrue(torch.allclose(graph_outputs[1], eager_outputs[1])) - def test_assume_constant_by_default_prop(self) -> None: def foo(x: torch.Tensor, y: torch.Tensor) -> torch.Tensor: if x.shape[0] > 3: