From d36a2802a77b34345a3b5223d56b716c30867be6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Xavier=20Dupr=C3=A9?=
Date: Mon, 10 Nov 2025 18:37:43 +0100
Subject: [PATCH] add sym_storage_offset

---
 onnxscript/function_libs/torch_lib/ops/core.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/onnxscript/function_libs/torch_lib/ops/core.py b/onnxscript/function_libs/torch_lib/ops/core.py
index a25015b232..d33f96d285 100644
--- a/onnxscript/function_libs/torch_lib/ops/core.py
+++ b/onnxscript/function_libs/torch_lib/ops/core.py
@@ -8750,6 +8750,14 @@ def aten_sym_size(self: TensorType, dim: int = 0) -> INT64:
     return op.Squeeze(op.Shape(self, end=dim + 1, start=dim))
 
 
+@torch_op("aten::sym_storage_offset", trace_only=True)
+def aten_sym_storage_offset(self: TensorType) -> INT64:
+    """sym_storage_offset(Tensor self) -> SymInt"""
+    # ONNX has no notion of a storage offset, so the exporter models it as a
+    # constant 0; the output of this function is not consumed downstream.
+    return op.Constant(value_int=0)
+
+
 def aten_symeig(
     self: TensorType, eigenvectors: bool = False, upper: bool = True
 ) -> tuple[TensorType, TensorType]: