From 17f47ed701a32fc9f500b6c66fbe945020318965 Mon Sep 17 00:00:00 2001
From: rraminen
Date: Fri, 18 Jul 2025 06:43:59 +0000
Subject: [PATCH] Fix warnings

---
 test/test_torch.py | 9 +++------
 1 file changed, 3 insertions(+), 6 deletions(-)

diff --git a/test/test_torch.py b/test/test_torch.py
index 5094d96d3cd33..b4e179fc1af06 100644
--- a/test/test_torch.py
+++ b/test/test_torch.py
@@ -43,7 +43,7 @@
     skipIfRocm, skipIfNoSciPy, TemporaryFileName, TemporaryDirectoryName,
     wrapDeterministicFlagAPITest, DeterministicGuard, CudaSyncGuard, bytes_to_scalar,
     parametrize, skipIfMPS, noncontiguous_like,
-    AlwaysWarnTypedStorageRemoval, TEST_WITH_TORCHDYNAMO, xfailIfTorchDynamo)
+    AlwaysWarnTypedStorageRemoval, TEST_WITH_TORCHDYNAMO, xfailIfTorchDynamo, set_warn_always_context)
 from multiprocessing.reduction import ForkingPickler
 from torch.testing._internal.common_device_type import (
     expectedFailureMeta,
@@ -10830,8 +10830,8 @@ def test_bf16_supported_on_cpu(self):
         self.assertFalse(torch.cuda.is_bf16_supported())
 
     def test_tensor_with_grad_to_scalar_warning(self) -> None:
-
-        with warnings.catch_warnings(record=True) as w:
+        with (warnings.catch_warnings(record=True) as w,
+              set_warn_always_context(True)):
             warnings.simplefilter("always")
 
             x = torch.tensor(2.0, requires_grad=True)
@@ -10844,9 +10844,6 @@ def test_tensor_with_grad_to_scalar_warning(self) -> None:
                 str(w[0].message)
             )
 
-            _ = math.pow(x, 3)  # calling it again does not result in a second warning
-            self.assertEqual(len(w), 1)
-
 # The following block extends TestTorch with negative dim wrapping tests
 # FIXME: replace these with OpInfo sample inputs or systemic OpInfo tests
 # Functions to test negative dimension wrapping