Skip to content

Commit

Permalink
Skip test_memory_format_nn_BatchNorm2d in inductor (#125970)
Browse files Browse the repository at this point in the history
Skipping the test in the context of #125967 until the root cause of the issue is identified and fixed properly.

Pull Request resolved: #125970
Approved by: https://github.com/clee2000
  • Loading branch information
huydhn authored and pytorchmergebot committed May 11, 2024
1 parent da7ced6 commit 0a9c6e9
Showing 1 changed file with 8 additions and 3 deletions.
11 changes: 8 additions & 3 deletions torch/testing/_internal/common_modules.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
nllloss_reference, nlllossNd_reference, smoothl1loss_reference, softmarginloss_reference, get_reduction)
from torch.testing._internal.common_utils import (
freeze_rng_state, set_single_threaded_if_parallel_tbb, skipIfMps, GRADCHECK_NONDET_TOL, TEST_WITH_ROCM, IS_WINDOWS,
skipIfTorchDynamo)
skipIfTorchDynamo, TEST_WITH_TORCHINDUCTOR)
from types import ModuleType
from typing import List, Tuple, Type, Set, Dict
import operator
Expand Down Expand Up @@ -127,7 +127,7 @@ def _parametrize_test(self, test, generic_cls, device_cls):
def test_wrapper(*args, **kwargs):
return test(*args, **kwargs)

if self.skip_if_dynamo and not torch.testing._internal.common_utils.TEST_WITH_TORCHINDUCTOR:
if self.skip_if_dynamo and not TEST_WITH_TORCHINDUCTOR:
test_wrapper = skipIfTorchDynamo("Policy: we don't run ModuleInfo tests w/ Dynamo")(test_wrapper)

decorator_fn = partial(module_info.get_decorators, generic_cls.__name__,
Expand Down Expand Up @@ -3469,7 +3469,12 @@ def module_error_inputs_torch_nn_Pad3d(module_info, device, dtype, requires_grad
unittest.expectedFailure, 'TestEagerFusionModuleInfo',
'test_aot_autograd_module_exhaustive',
active_if=operator.itemgetter('training')
),)
),
# test fails if run alone in inductor https://github.com/pytorch/pytorch/issues/125967
DecorateInfo(
unittest.skip("Skipped https://github.com/pytorch/pytorch/issues/125967"),
'TestModule', 'test_memory_format', device_type='cuda',
active_if=(TEST_WITH_TORCHINDUCTOR)),)
),
ModuleInfo(torch.nn.BatchNorm3d,
train_and_eval_differ=True,
Expand Down

0 comments on commit 0a9c6e9

Please sign in to comment.