Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

move TypedStorage handling to assertEqual #89557

Closed
wants to merge 17 commits into from
Closed
27 changes: 1 addition & 26 deletions torch/testing/_comparison.py
Original file line number Diff line number Diff line change
Expand Up @@ -927,34 +927,9 @@ def originate_pairs(
Returns:
(List[Pair]): Originated pairs.
"""
if (
isinstance(actual, torch.TypedStorage)
and isinstance(expected, torch.TypedStorage)
):
actual_len = actual._size()
expected_len = expected._size()
if actual_len != expected_len:
raise ErrorMeta(
AssertionError, f"The length of the sequences mismatch: {actual_len} != {expected_len}", id=id
)

pairs = []
for idx in range(actual_len):
pairs.extend(
originate_pairs(
actual._getitem(idx),
expected._getitem(idx),
pair_types=pair_types,
sequence_types=sequence_types,
mapping_types=mapping_types,
id=(*id, idx),
**options,
)
)
return pairs
# We explicitly exclude str's here since they are self-referential and would cause an infinite recursion loop:
# "a" == "a"[0][0]...
elif (
if (
isinstance(actual, sequence_types)
and not isinstance(actual, str)
and isinstance(expected, sequence_types)
Expand Down
5 changes: 4 additions & 1 deletion torch/testing/_internal/common_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -2567,6 +2567,10 @@ def to_list(input):
if isinstance(y, torch.Tensor) and y.is_nested:
y = y.unbind()

# TODO: explain why this is needed
if isinstance(x, torch.TypedStorage) and isinstance(y, torch.TypedStorage):
x, y = [[storage._getitem(idx) for idx in storage._size()] for storage in [x, y]]
(Review note: pmeier marked this conversation as resolved.)

assert_equal(
x,
y,
Expand All @@ -2582,7 +2586,6 @@ def to_list(input):
),
sequence_types=(
Sequence,
torch.storage.TypedStorage,
(Review note: pmeier marked this conversation as resolved.)
Sequential,
ModuleList,
ParameterList,
Expand Down