Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

move TypedStorage handling to assertEqual #89557

Closed
wants to merge 17 commits into the base branch from the author's branch
Closed
2 changes: 2 additions & 0 deletions test/test_serialization.py
Original file line number Diff line number Diff line change
Expand Up @@ -675,6 +675,8 @@ def test_load_error_msg(self):
with self.assertRaisesRegex(AttributeError, expected_err_msg):
torch.load(resource)

# See https://github.com/pytorch/pytorch/pull/89557. Will be fixed by @kurtamohler
@unittest.expectedFailure
def test_save_different_dtype_unallocated(self):
devices = ['cpu']
if torch.cuda.is_available():
Expand Down
28 changes: 1 addition & 27 deletions torch/testing/_comparison.py
Original file line number Diff line number Diff line change
Expand Up @@ -1076,35 +1076,9 @@ def originate_pairs(
Returns:
(List[Pair]): Originated pairs.
"""
if isinstance(actual, torch.TypedStorage) and isinstance(
expected, torch.TypedStorage
):
actual_len = actual._size()
expected_len = expected._size()
if actual_len != expected_len:
raise ErrorMeta(
AssertionError,
f"The length of the sequences mismatch: {actual_len} != {expected_len}",
id=id,
)

pairs = []
for idx in range(actual_len):
pairs.extend(
originate_pairs(
actual._getitem(idx),
expected._getitem(idx),
pair_types=pair_types,
sequence_types=sequence_types,
mapping_types=mapping_types,
id=(*id, idx),
**options,
)
)
return pairs
# We explicitly exclude str's here since they are self-referential and would cause an infinite recursion loop:
# "a" == "a"[0][0]...
elif (
if (
isinstance(actual, sequence_types)
and not isinstance(actual, str)
and isinstance(expected, sequence_types)
Expand Down
28 changes: 27 additions & 1 deletion torch/testing/_internal/common_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -1879,6 +1879,32 @@ def _process_inputs(self, actual, expected, *, id, allow_subclasses):
return actual, expected


class TypedStoragePair(TensorLikePair):
"""Pair for :class:`torch.storage.TypedStorage` inputs.

After their deprecation, they no longer act as a regular sequence, since :meth:`torch.storage.TypedStorage.__len__`
and :meth:`torch.storage.TypedStorage.__getitem__` are disabled. Thus, we need to convert them to tensors manually.
"""
def __init__(self, actual, expected, *, rtol_override=0.0, atol_override=0.0, **other_parameters):
self._check_inputs_isinstance(actual, expected, cls=torch.storage.TypedStorage)
super().__init__(actual, expected, **other_parameters)
self.rtol = max(self.rtol, rtol_override)
self.atol = max(self.atol, atol_override)

def _to_tensor(self, typed_storage):
return torch.tensor(
typed_storage._untyped_storage,
dtype={
torch.quint8: torch.uint8,
torch.quint4x2: torch.uint8,
torch.quint2x4: torch.uint8,
torch.qint32: torch.int32,
torch.qint8: torch.int8
}.get(typed_storage.dtype, typed_storage.dtype),
device=typed_storage.device,
)


class UnittestPair(Pair):
"""Fallback ABC pair that handles non-numeric inputs.

Expand Down Expand Up @@ -2864,14 +2890,14 @@ def to_list(input):
RelaxedBooleanPair,
RelaxedNumberPair,
TensorOrArrayPair,
TypedStoragePair,
StringPair,
SetPair,
TypePair,
ObjectPair,
),
sequence_types=(
Sequence,
torch.storage.TypedStorage,
pmeier marked this conversation as resolved.
Show resolved Hide resolved
Sequential,
ModuleList,
ParameterList,
Expand Down