test/test_torch.py (9 changes: 3 additions, 6 deletions)
@@ -43,7 +43,7 @@
     skipIfRocm, skipIfNoSciPy, TemporaryFileName, TemporaryDirectoryName,
     wrapDeterministicFlagAPITest, DeterministicGuard, CudaSyncGuard,
     bytes_to_scalar, parametrize, skipIfMPS, noncontiguous_like,
-    AlwaysWarnTypedStorageRemoval, TEST_WITH_TORCHDYNAMO, xfailIfTorchDynamo)
+    AlwaysWarnTypedStorageRemoval, TEST_WITH_TORCHDYNAMO, xfailIfTorchDynamo, set_warn_always_context)
 from multiprocessing.reduction import ForkingPickler
 from torch.testing._internal.common_device_type import (
     expectedFailureMeta,
@@ -10830,8 +10830,8 @@ def test_bf16_supported_on_cpu(self):
         self.assertFalse(torch.cuda.is_bf16_supported())
 
     def test_tensor_with_grad_to_scalar_warning(self) -> None:
-
-        with warnings.catch_warnings(record=True) as w:
+        with (warnings.catch_warnings(record=True) as w,
+              set_warn_always_context(True)):
             warnings.simplefilter("always")
 
             x = torch.tensor(2.0, requires_grad=True)
@@ -10844,9 +10844,6 @@ def test_tensor_with_grad_to_scalar_warning(self) -> None:
                 str(w[0].message)
             )
 
-            _ = math.pow(x, 3) # calling it again does not result in a second warning
-            self.assertEqual(len(w), 1)
-
 # The following block extends TestTorch with negative dim wrapping tests
 # FIXME: replace these with OpInfo sample inputs or systemic OpInfo tests
 # Functions to test negative dimension wrapping
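Context for the change: PyTorch's tensor-to-scalar conversion warning goes through the warn-once machinery, so by default it fires only once per process and the test could pass or fail depending on whether an earlier test had already triggered it. Wrapping the test in set_warn_always_context(True) forces such warnings to fire on every call, which makes the test order-independent; it also means a repeated math.pow(x, 3) would warn a second time, which is presumably why the "no second warning" assertion above was removed. Below is a minimal sketch of what a helper like set_warn_always_context might look like, assuming it simply toggles the global warn-always flag and restores the previous state on exit (the real helper is imported from torch.testing._internal.common_utils; torch.set_warn_always and torch.is_warn_always_enabled are public torch APIs):

import contextlib

import torch

@contextlib.contextmanager
def set_warn_always_context(new_val: bool):
    # Save the current warn-always state, flip it, and restore it on exit,
    # even if the body raises.
    old_val = torch.is_warn_always_enabled()
    torch.set_warn_always(new_val)
    try:
        yield
    finally:
        torch.set_warn_always(old_val)

One further note: the parenthesized multi-manager with statement used in the updated test was formalized in Python 3.10, so the change assumes a correspondingly recent interpreter.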