Revert "Skip test_memory_format_nn_BatchNorm2d in inductor (#125970)"
This reverts commit 0a9c6e9.

ghstack-source-id: 5190acf0d730adb6db15e891a0fa7a776cd9dfc0
Pull Request resolved: #126594
shunting314 committed May 18, 2024
1 parent ced75a4 commit 7df8eef
Showing 1 changed file with 3 additions and 8 deletions.
torch/testing/_internal/common_modules.py: 11 changes (3 additions & 8 deletions)
@@ -25,7 +25,7 @@
     nllloss_reference, nlllossNd_reference, smoothl1loss_reference, softmarginloss_reference, get_reduction)
 from torch.testing._internal.common_utils import (
     freeze_rng_state, set_single_threaded_if_parallel_tbb, skipIfMps, GRADCHECK_NONDET_TOL, TEST_WITH_ROCM, IS_WINDOWS,
-    skipIfTorchDynamo, TEST_WITH_TORCHINDUCTOR)
+    skipIfTorchDynamo)
 from types import ModuleType
 from typing import List, Tuple, Type, Set, Dict
 import operator
@@ -127,7 +127,7 @@ def _parametrize_test(self, test, generic_cls, device_cls):
                     def test_wrapper(*args, **kwargs):
                         return test(*args, **kwargs)
 
-                    if self.skip_if_dynamo and not TEST_WITH_TORCHINDUCTOR:
+                    if self.skip_if_dynamo and not torch.testing._internal.common_utils.TEST_WITH_TORCHINDUCTOR:
                         test_wrapper = skipIfTorchDynamo("Policy: we don't run ModuleInfo tests w/ Dynamo")(test_wrapper)
 
                     decorator_fn = partial(module_info.get_decorators, generic_cls.__name__,
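
A note on the line restored in the hunk above: looking the flag up as torch.testing._internal.common_utils.TEST_WITH_TORCHINDUCTOR (attribute access at call time) behaves differently from a direct "from ... import TEST_WITH_TORCHINDUCTOR", which copies the value once at import time. Whether that is the motivation for this revert is an assumption, but the Python behavior itself is standard; here is a minimal, self-contained sketch using a stand-in module rather than torch:

import types

# Stand-in module so the sketch does not depend on torch being installed.
common_utils = types.ModuleType("common_utils")
common_utils.TEST_WITH_TORCHINDUCTOR = False

# Equivalent to "from common_utils import TEST_WITH_TORCHINDUCTOR":
# the current value is copied into a new name.
TEST_WITH_TORCHINDUCTOR = common_utils.TEST_WITH_TORCHINDUCTOR

# The flag is flipped later, e.g. by test infrastructure after reading an env var.
common_utils.TEST_WITH_TORCHINDUCTOR = True

print(TEST_WITH_TORCHINDUCTOR)               # False: the copied name is stale
print(common_utils.TEST_WITH_TORCHINDUCTOR)  # True: attribute access sees the update
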
@@ -3469,12 +3469,7 @@ def module_error_inputs_torch_nn_Pad3d(module_info, device, dtype, requires_grad
                        unittest.expectedFailure, 'TestEagerFusionModuleInfo',
                        'test_aot_autograd_module_exhaustive',
                        active_if=operator.itemgetter('training')
-                   ),
-                   # test fails if run alone in inductor https://github.com/pytorch/pytorch/issues/125967
-                   DecorateInfo(
-                       unittest.skip("Skipped https://github.com/pytorch/pytorch/issues/125967"),
-                       'TestModule', 'test_memory_format', device_type='cuda',
-                       active_if=(TEST_WITH_TORCHINDUCTOR)),)
+                   ),)
                ),
     ModuleInfo(torch.nn.BatchNorm3d,
                train_and_eval_differ=True,
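
For context on the DecorateInfo block removed above: its active_if argument gates whether the decorator (here unittest.skip) is applied to the generated test at all, so the skip only took effect when TEST_WITH_TORCHINDUCTOR was true. The following is a simplified stand-in for that machinery, not PyTorch's actual DecorateInfo implementation, just to illustrate the gating:

import unittest

TEST_WITH_TORCHINDUCTOR = False  # stand-in for the flag in common_utils


class ConditionalSkip:
    # Simplified analogue of DecorateInfo: a decorator plus an active_if gate.
    def __init__(self, decorator, active_if=True):
        self.decorator = decorator
        self.active_if = active_if

    def apply(self, test_fn):
        # Wrap the test only when the gate is active; otherwise leave it untouched.
        return self.decorator(test_fn) if self.active_if else test_fn


skip_under_inductor = ConditionalSkip(
    unittest.skip("Skipped https://github.com/pytorch/pytorch/issues/125967"),
    active_if=TEST_WITH_TORCHINDUCTOR,
)


class TestMemoryFormat(unittest.TestCase):
    def test_memory_format(self):
        self.assertTrue(True)


# With the flag False the test runs normally; with it True it would be skipped.
TestMemoryFormat.test_memory_format = skip_under_inductor.apply(
    TestMemoryFormat.test_memory_format
)

if __name__ == "__main__":
    unittest.main()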
