From e2850537da0f10f4ff93302f1461a4f495464ce9 Mon Sep 17 00:00:00 2001
From: Digant Desai
Date: Thu, 23 Jan 2025 14:23:45 -0800
Subject: [PATCH] Dynamic shape testing for softmax

Summary: Export the softmax test module with every input dimension marked
dynamic via torch.export.Dim, and compare outputs across multiple runs so
the lowered graph is exercised with varying input shapes.

Differential Revision: D68586985
---
 backends/xnnpack/test/ops/test_softmax.py | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/backends/xnnpack/test/ops/test_softmax.py b/backends/xnnpack/test/ops/test_softmax.py
index cc544a28a28..f909e8ce5f2 100644
--- a/backends/xnnpack/test/ops/test_softmax.py
+++ b/backends/xnnpack/test/ops/test_softmax.py
@@ -24,9 +24,14 @@ def _test_softmax(self, inputs):
         # as xnnpack only supports softmax on the last dimension.
         valid_dims = [len(inputs[0]) - 1, -1]
 
+        dynamic_shape = {}
+        for i in range(len(inputs[0].shape)):
+            dynamic_shape[i] = torch.export.Dim(f"dynamic_dim{i}", min=1, max=100)
+        dynamic_shape = (dynamic_shape,)
+
         for dim in valid_dims:
             (
-                Tester(self.Softmax(dim), inputs)
+                Tester(self.Softmax(dim), inputs, dynamic_shapes=dynamic_shape)
                 .export()
                 .check_count({"torch.ops.aten.softmax": 1})
                 .to_edge_transform_and_lower()
@@ -34,7 +39,7 @@ def _test_softmax(self, inputs):
                 .check_not(["executorch_exir_dialects_edge__ops_aten__softmax_default"])
                 .to_executorch()
                 .serialize()
-                .run_method_and_compare_outputs()
+                .run_method_and_compare_outputs(num_runs=5)
             )
 
     def test_fp16_softmax(self):
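
Note: the export pattern this patch relies on can be sketched outside the
Tester harness. Below is a minimal illustration, assuming only the public
torch.export API; the Softmax wrapper, the input sizes, and the variable
names are illustrative, while the per-dimension Dim objects and the
[1, 100] bounds mirror the patch.

    import torch

    class Softmax(torch.nn.Module):
        def __init__(self, dim):
            super().__init__()
            self.dim = dim

        def forward(self, x):
            return torch.nn.functional.softmax(x, dim=self.dim)

    # A single example input; its rank determines how many dims are dynamic.
    inputs = (torch.rand(4, 8, 16),)

    # One torch.export.Dim per input dimension, as in the patch: each
    # dimension of the single input tensor may vary in [1, 100] at runtime.
    dynamic_shape = (
        {
            i: torch.export.Dim(f"dynamic_dim{i}", min=1, max=100)
            for i in range(inputs[0].dim())
        },
    )

    ep = torch.export.export(Softmax(dim=-1), inputs, dynamic_shapes=dynamic_shape)

    # The exported program now accepts other shapes within the declared
    # bounds; running it several times with re-sampled shapes is what
    # run_method_and_compare_outputs(num_runs=5) does against the delegate.
    out = ep.module()(torch.rand(2, 3, 5))
    print(out.shape)  # torch.Size([2, 3, 5])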