diff --git a/onnx/defs/nn/defs.cc b/onnx/defs/nn/defs.cc
index 6928d96773e..158231d0ac3 100644
--- a/onnx/defs/nn/defs.cc
+++ b/onnx/defs/nn/defs.cc
@@ -1191,6 +1191,10 @@ void convTransposeShapeInference(InferenceContext& ctx) {
     if (pads.size() != n_input_dims * 2) {
       fail_shape_inference("Attribute pads has incorrect size");
     }
+    const auto* auto_pad_attr = ctx.getAttribute("auto_pad");
+    if (nullptr != auto_pad_attr && auto_pad_attr->s() != "NOTSET") {
+      fail_shape_inference("The pads attribute cannot be used simultaneously with the auto_pad attribute");
+    }
   } else {
     pads.assign(n_input_dims * 2, 0);
     const auto* auto_pad_attr = ctx.getAttribute("auto_pad");
diff --git a/onnx/test/shape_inference_test.py b/onnx/test/shape_inference_test.py
index 863de06f4df..606f1973c63 100644
--- a/onnx/test/shape_inference_test.py
+++ b/onnx/test/shape_inference_test.py
@@ -1537,6 +1537,16 @@ def test_conv_transpose_with_group_and_output_shape(self):  # type: () -> None
             [])
         self._assert_inferred(graph, [make_tensor_value_info('Y', TensorProto.FLOAT, (25, 64, 36, 36))])
 
+    def test_conv_transpose_with_pads_and_auto_pads(self):  # type: () -> None
+        # Shape inference should fail because pads cannot be used simultaneously with auto_pad
+        graph = self._make_graph(
+            [('X', TensorProto.FLOAT, (1, 1, 2, 2)),
+             ('W', TensorProto.FLOAT, (1, 1, 3, 3)),
+             ('B', TensorProto.FLOAT, (1, ))],
+            [make_node('ConvTranspose', ['X', 'W', 'B'], 'Y', auto_pad="SAME_UPPER", strides=[1, 1], pads=[0, 1, 1, 0])],
+            [])
+        self.assertRaises(RuntimeError, onnx.shape_inference.infer_shapes, helper.make_model(graph))
+
     def test_mvn_function_output_shape(self):  # type: () -> None
         graph = self._make_graph(
             [('X', TensorProto.FLOAT, (25, 48, 16, 16))],
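
For reviewers who want to reproduce the new failure outside the test harness, below is a minimal standalone sketch, not part of the diff. The tensor shapes, the graph name, and the omission of the optional bias input are illustrative assumptions; only the conflicting `pads`/`auto_pad` attributes matter. As the added test asserts, the C++ `fail_shape_inference` call surfaces in Python as a `RuntimeError`.

```python
# Illustrative repro sketch (assumed names/shapes): build a ConvTranspose node
# that sets both explicit pads and a non-NOTSET auto_pad, then run shape
# inference, which the new check is expected to reject.
from onnx import TensorProto, helper, shape_inference

X = helper.make_tensor_value_info('X', TensorProto.FLOAT, (1, 1, 2, 2))
W = helper.make_tensor_value_info('W', TensorProto.FLOAT, (1, 1, 3, 3))
Y = helper.make_tensor_value_info('Y', TensorProto.FLOAT, None)  # shape to be inferred

node = helper.make_node(
    'ConvTranspose', ['X', 'W'], ['Y'],
    auto_pad='SAME_UPPER', strides=[1, 1], pads=[0, 1, 1, 0])  # conflicting attributes

graph = helper.make_graph([node], 'conv_transpose_pads_conflict', [X, W], [Y])
model = helper.make_model(graph)

try:
    shape_inference.infer_shapes(model)
except RuntimeError as e:
    # With the new check, inference fails instead of silently preferring one attribute.
    print('Shape inference rejected the model:', e)
```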