From 6d0540de86c756fa4281a7346eddf4cde3433437 Mon Sep 17 00:00:00 2001
From: Dheeraj Peri
Date: Wed, 24 Sep 2025 16:55:16 -0700
Subject: [PATCH 1/3] fix: Fix a bug with dynamic shape validation in MTMM

Signed-off-by: Dheeraj Peri
---
 .../dynamo/runtime/_MutableTorchTensorRTModule.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py b/py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py
index 1cffec77c2..e824073a79 100644
--- a/py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py
+++ b/py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py
@@ -611,7 +611,8 @@ def _check_inputs_shape(
         elif isinstance(input1, dict):
             if input1.keys() != input2.keys():
                 return False
-            for (ka, va), vb in zip(input1.items(), input2.values()):
+            for ka, va in input1.items():
+                vb = input2[ka]
                 if type(va) != type(vb):
                     return False
                 if isinstance(va, bool) and va != vb:
@@ -650,7 +651,7 @@ def _check_tensor_shapes_with_dynamic_shapes(
                 dyn = dynamic_shape[i]
                 if axis_1 > dyn.max or axis_1 < dyn.min:
                     raise DynamicShapeOutOfRangeException(
-                        f"The input size ({axis_1}) of dimension ({i}) is not in dynamic shape range [{dyn.max}, {dyn.max}]!"
+                        f"The input size ({axis_1}) of dimension ({i}) of tensor2 with shape {t2.shape} is not in dynamic shape range [{dyn.min}, {dyn.max}]!"
                     )
         return True


From 5bdcb5fafaf0d154634b3f054197bc2ed65c7f14 Mon Sep 17 00:00:00 2001
From: Dheeraj Peri
Date: Wed, 24 Sep 2025 19:26:05 -0700
Subject: [PATCH 2/3] chore: change name

Signed-off-by: Dheeraj Peri
---
 py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py b/py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py
index e824073a79..7e4b375410 100644
--- a/py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py
+++ b/py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py
@@ -651,7 +651,7 @@ def _check_tensor_shapes_with_dynamic_shapes(
                 dyn = dynamic_shape[i]
                 if axis_1 > dyn.max or axis_1 < dyn.min:
                     raise DynamicShapeOutOfRangeException(
-                        f"The input size ({axis_1}) of dimension ({i}) of tensor2 with shape {t2.shape} is not in dynamic shape range [{dyn.min}, {dyn.max}]!"
+                        f"The input size ({axis_1}) of dimension ({i}) of t2 with shape {t2.shape} is not in dynamic shape range [{dyn.min}, {dyn.max}]!"
                     )
         return True


From 892ab79c530274352d8a75399fe81c3ac194fc18 Mon Sep 17 00:00:00 2001
From: Dheeraj Peri
Date: Wed, 24 Sep 2025 20:55:28 -0700
Subject: [PATCH 3/3] chore: address review comment

Signed-off-by: Dheeraj Peri
---
 .../dynamo/runtime/_MutableTorchTensorRTModule.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py b/py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py
index 7e4b375410..45ca10f9cf 100644
--- a/py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py
+++ b/py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py
@@ -639,9 +639,9 @@ def _check_inputs_shape(

     @staticmethod
     def _check_tensor_shapes_with_dynamic_shapes(
-        t1: torch.tensor, t2: torch.tensor, dynamic_shape: dict[int, Any]
+        input_1: torch.tensor, input_2: torch.tensor, dynamic_shape: dict[int, Any]
     ) -> bool:
-        for (i, axis_0), axis_1 in zip(enumerate(t1.shape), t2.shape):
+        for (i, axis_0), axis_1 in zip(enumerate(input_1.shape), input_2.shape):
             if axis_0 != axis_1:
                 if i not in dynamic_shape:
                     logger.warning(
@@ -651,7 +651,7 @@ def _check_tensor_shapes_with_dynamic_shapes(
                 dyn = dynamic_shape[i]
                 if axis_1 > dyn.max or axis_1 < dyn.min:
                     raise DynamicShapeOutOfRangeException(
-                        f"The input size ({axis_1}) of dimension ({i}) of t2 with shape {t2.shape} is not in dynamic shape range [{dyn.min}, {dyn.max}]!"
+                        f"Dimension ({i}) of new input tensor is not the range of supported shapes (saw: ({axis_1}), expected: [{dyn.min}, {dyn.max}])"
                     )
         return True

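
Note (reviewer sketch, not part of the patch series): PATCH 1/3 makes two changes. It
corrects the exception message, which previously printed dyn.max for both bounds instead
of [dyn.min, dyn.max], and it fixes the dict comparison in _check_inputs_shape. The dict
fix matters because dict.keys() equality ignores ordering, so two kwarg dicts with the
same keys in different insertion order pass the key check, while
zip(input1.items(), input2.values()) then pairs values positionally and compares shapes
of unrelated entries. Looking the value up by key, as the patch does, makes the check
order-independent. A minimal standalone illustration with toy dicts (the names below are
placeholders, not the module's real inputs):

    # Toy stand-ins for the cached kwargs vs. the newly supplied kwargs.
    cached = {"a": (1, 3, 224, 224), "b": (1, 10)}
    new = {"b": (1, 10), "a": (1, 3, 224, 224)}  # same keys, different insertion order

    assert cached.keys() == new.keys()  # the existing key-equality guard does not catch this

    # Old behaviour: positional pairing compares "a" against the value stored under "b".
    mispaired = [(ka, va, vb) for (ka, va), vb in zip(cached.items(), new.values())]
    # [('a', (1, 3, 224, 224), (1, 10)), ('b', (1, 10), (1, 3, 224, 224))]

    # Patched behaviour: keyed lookup pairs each cached value with its counterpart.
    paired = [(ka, va, new[ka]) for ka, va in cached.items()]
    # [('a', (1, 3, 224, 224), (1, 3, 224, 224)), ('b', (1, 10), (1, 10))]

PATCH 2/3 and PATCH 3/3 only rename identifiers and reword the
DynamicShapeOutOfRangeException message; they do not change behaviour.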