From c4b6ed460990f43fe4bbf8cedb067b9f42199e9d Mon Sep 17 00:00:00 2001
From: "Edward Z. Yang"
Date: Thu, 25 Apr 2024 12:12:53 -0700
Subject: [PATCH] guard_size_oblivious in unbind (#124959)

Signed-off-by: Edward Z. Yang
Pull Request resolved: https://github.com/pytorch/pytorch/pull/124959
Approved by: https://github.com/albanD
---
 torch/_refs/__init__.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/torch/_refs/__init__.py b/torch/_refs/__init__.py
index 144419eb5bc5..a0b00e2c9edf 100644
--- a/torch/_refs/__init__.py
+++ b/torch/_refs/__init__.py
@@ -3890,12 +3890,14 @@ def unflatten(a: TensorLikeType, dim: int, sizes: ShapeType) -> TensorLikeType:
 
 @register_decomposition(aten.unbind)
 def unbind(t: TensorLikeType, dim: int = 0) -> TensorSequenceType:
+    from torch.fx.experimental.symbolic_shapes import guard_size_oblivious
+
     dim = utils.canonicalize_dim(t.ndim, dim)
     torch._check_index(
         len(t.shape) > 0,
         lambda: "Dimension specified as 0 but tensor has no dimensions",
     )
-    if t.shape[dim] == 0:
+    if guard_size_oblivious(t.shape[dim] == 0):
         return tuple()
     else:
         return tuple(
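
Note (not part of the patch): a minimal sketch of what the changed branch does, under the assumption of eager mode, where t.shape[dim] == 0 is a plain Python bool and guard_size_oblivious simply returns it unchanged; under symbolic shapes (e.g. torch.compile with unbacked sizes), the same call evaluates the comparison size-obliviously instead of installing a guard on the size being zero. The tensor and dim values below are hypothetical, chosen only for illustration.

    # Illustration only, not part of the patch.
    # With a concrete tensor, guard_size_oblivious receives a plain bool
    # and returns it as-is; with symbolic sizes it avoids guarding on
    # the size-0 check.
    import torch
    from torch.fx.experimental.symbolic_shapes import guard_size_oblivious

    t = torch.randn(3, 4)  # hypothetical input
    dim = 0

    if guard_size_oblivious(t.shape[dim] == 0):
        pieces = tuple()
    else:
        pieces = torch.unbind(t, dim)

    print(len(pieces))  # 3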