Skip to content
This repository was archived by the owner on Jan 13, 2024. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
62 changes: 62 additions & 0 deletions _unittests/ut_onnxrt/test_shape_inference_xop.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
"""
@brief test log(time=3s)
"""
import unittest
import numpy
from onnx.shape_inference import infer_shapes
from pyquickhelper.pycode import ExtTestCase
from mlprodict.onnxrt import OnnxShapeInference
from mlprodict.onnxrt.ops_shape.shape_result import ShapeResult
from mlprodict.plotting.text_plot import onnx_simple_text_plot
from mlprodict.tools import get_opset_number_from_onnx
from mlprodict.npy.xop import loadop
from mlprodict.npy.xop_variable import Variable


class TestOnnxShapeInferenceXop(ExtTestCase):
    """Compares shapes inferred by :epkg:`onnx` with mlprodict's own results."""

    # every opset to validate, from 10 up to the library's current one
    opsets = list(range(10, get_opset_number_from_onnx() + 1))

    def check_infer_shapes(self, onx, out, rt):
        """Verifies onnx shape inference against expected results.

        :param onx: ONNX model to analyse
        :param out: expected shape results indexed by result name
        :param rt: unused, kept for signature compatibility
        """
        inferred = infer_shapes(onx).graph.value_info
        for value in inferred:
            if value.name not in out:
                raise AssertionError("Name %r not found." % value.name)
            shape, dtype, sparse = OnnxShapeInference._get_shape(
                value)  # pylint: disable=W0212
            # normalize symbolic dimension names: 'unk_<d>' becomes '<d>'
            for pos, dim in enumerate(shape):
                if isinstance(dim, str) and dim.startswith('unk_'):
                    shape[pos] = dim[4:]
            res = ShapeResult(value.name, shape, dtype, sparse)
            if res != out[value.name]:
                raise AssertionError(
                    "Unexpected differences for name %r:\nexp: %r\ngot: %r"
                    "\n-----\n%s" % (
                        value.name, res, out[value.name],
                        onnx_simple_text_plot(onx)))

    def test_onnx_shape_inference(self):
        """Builds a two-Add graph with unknown input shape and
        checks the inferred output shape for every supported opset."""
        OnnxAdd = loadop('OnnxAdd')
        for opset in TestOnnxShapeInferenceXop.opsets:
            with self.subTest(opset=opset):
                first = OnnxAdd(
                    'X', numpy.array([[1]], dtype=numpy.float32),
                    op_version=opset)
                last = OnnxAdd(
                    first, numpy.array([[2]], dtype=numpy.float32),
                    output_names=['Y'])
                var_x = Variable('X', numpy.float32, [None, None])
                model_def = last.to_onnx([var_x], run_shape=False)
                rt = OnnxShapeInference(model_def)
                res = rt.run()
                self.assertIn('X', res)
                self.assertIn('Y', res)
                shape_y = res['Y']
                self.assertEqual(numpy.float32, shape_y.dtype)
                self.assertEqual(['_0', '_1'], shape_y.shape)


# Run the test suite when the file is executed directly.
if __name__ == "__main__":
    unittest.main(verbosity=2)
12 changes: 8 additions & 4 deletions mlprodict/npy/xop.py
Original file line number Diff line number Diff line change
Expand Up @@ -963,7 +963,7 @@ def _get_type(node, name=None, outputs=None):

def to_onnx(self, inputs=None, outputs=None,
other_outputs=None, target_opset=None,
verbose=0):
verbose=0, run_shape=True):
"""
Converts this operator into an ONNX graph.

Expand All @@ -976,6 +976,10 @@ def to_onnx(self, inputs=None, outputs=None,
node
:param target_opset: dictionary with target opset per domain,
None for the default one
:param run_shape: in case output shapes are not specified,
the function runs function :epkg:`infer_shapes`
to guess them; False disables that
default behaviour
:param verbose: prints information
"""
# opsets
Expand All @@ -1002,7 +1006,7 @@ def to_onnx(self, inputs=None, outputs=None,
target_opset, self.op_version, self.__class__.__name__))

# get the graph
nodes, graph_inputs, graph_outputs, run_shape = self._node_to_graph(
nodes, graph_inputs, graph_outputs, run_shape2 = self._node_to_graph(
other_outputs, inputs, outputs)
if len(nodes) == 0:
raise RuntimeError( # pragma: no cover
Expand All @@ -1018,8 +1022,8 @@ def to_onnx(self, inputs=None, outputs=None,

return builder.to_onnx(
inputs=graph_inputs, outputs=graph_outputs,
target_opset=target_opset, run_shape=run_shape,
verbose=verbose)
target_opset=target_opset, verbose=verbose,
run_shape=run_shape and run_shape2)

@staticmethod
def _merge_op_version(n1, n2):
Expand Down
7 changes: 6 additions & 1 deletion mlprodict/onnxrt/onnx_shape_inference.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,8 @@ def __repr__(self):

@staticmethod
def _get_shape(obj, known_shapes=None, result_name=None):
dtype = TENSOR_TYPE_TO_NP_TYPE[obj.type.tensor_type.elem_type]
dtype = TENSOR_TYPE_TO_NP_TYPE.get(
obj.type.tensor_type.elem_type, None)
shape = []
for dimi, d in enumerate(obj.type.tensor_type.shape.dim):
v = d.dim_value if d.dim_value > 0 else d.dim_param
Expand Down Expand Up @@ -75,6 +76,10 @@ def _run_empty(self):
"" % obj.name)
shape, dtype, sparse = self._get_shape(
obj, known_shapes, result_name=obj.name)
if dtype is None:
# The onnx graph was created with named outputs
# but with no type or shape.
continue
known_shapes.update(obj.name, ShapeResult(
obj.name, shape, dtype, sparse=sparse))

Expand Down
5 changes: 3 additions & 2 deletions mlprodict/onnxrt/ops_shape/shape_container.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
@file
@brief Class ShapeContainer
"""
import pprint
from .shape_result import ShapeResult


Expand Down Expand Up @@ -249,7 +250,7 @@ def vars_in_values(values):
results[k] = v.resolve(variables)
except RuntimeError as e:
raise RuntimeError(
"Unable to resolve shapes and constraints:\n%r"
"" % self.shapes) from e
"Unable to resolve shapes and constraints:\n%s"
"" % pprint.pformat(self.shapes)) from e
self.resolved_ = results
return self.resolved_
5 changes: 2 additions & 3 deletions mlprodict/onnxrt/ops_shape/shape_result.py
Original file line number Diff line number Diff line change
Expand Up @@ -244,9 +244,8 @@ def resolve(self, variables):
if v in variables:
vals = variables[v]
if vals is None:
raise RuntimeError( # pragma: no cover
"Inconclusive shape (None) for v=%r (in %r)."
"" % (v, self))
# size unknown
continue
if len(vals) == 1:
res.shape[i] = list(vals)[0]
else:
Expand Down