Skip to content

Commit

Permalink
Merge pull request #25524 from alexlyulkov:al/openvino-layers
Browse files Browse the repository at this point in the history
Added more OpenVINO layers to dnn
  • Loading branch information
asmorkalov committed May 3, 2024
2 parents 6e5a53c + f3f29fa commit ac9a858
Show file tree
Hide file tree
Showing 7 changed files with 158 additions and 7 deletions.
38 changes: 38 additions & 0 deletions modules/dnn/src/layers/cumsum_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
// of this distribution and at http://opencv.org/license.html.

#include "../precomp.hpp"
#include "../op_inf_engine.hpp"
#include "../ie_ngraph.hpp"
#include "layers_common.hpp"

#include <opencv2/dnn/shape_utils.hpp>
Expand Down Expand Up @@ -32,6 +34,12 @@ class CumSumLayerImpl CV_FINAL : public CumSumLayer
return exclusive_raw == 0;
}

// CumSum runs on the generic OpenCV backend and, when OpenCV is built with
// OpenVINO, on the Inference Engine nGraph backend as well.
virtual bool supportBackend(int backendId) CV_OVERRIDE
{
    switch (backendId)
    {
        case DNN_BACKEND_OPENCV:
        case DNN_BACKEND_INFERENCE_ENGINE_NGRAPH:
            return true;
        default:
            return false;
    }
}

void forward(InputArrayOfArrays inputs_arr, OutputArrayOfArrays outputs_arr, OutputArrayOfArrays internals_arr) CV_OVERRIDE
{
CV_TRACE_FUNCTION();
Expand Down Expand Up @@ -120,6 +128,36 @@ class CumSumLayerImpl CV_FINAL : public CumSumLayer
}
}

#ifdef HAVE_DNN_NGRAPH
// Builds the OpenVINO node implementing CumSum.
// Two wiring modes: when the axis arrives as a second runtime input
// (nodes.size() == 2) it is reshaped to a scalar and converted to i32 before
// being fed to ov CumSum; otherwise the compile-time attribute axis_raw is
// wrapped in a scalar i32 constant.
virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inputs,
const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
{
std::shared_ptr<ov::op::v0::CumSum> cumsum;
if (nodes.size() == 2)
{
// Target pattern for Reshape: a constant holding the single value 1.
// NOTE(review): the pattern constant is created with an empty shape
// (ov::Shape{}, i.e. rank 0) — presumably OpenVINO accepts a scalar
// reshape pattern here; confirm against ov::op::v1::Reshape's
// requirements for its second input.
int32_t axis_shape = 1;
auto axis_scalar = std::make_shared<ov::op::v1::Reshape>(
nodes[1].dynamicCast<InfEngineNgraphNode>()->node,
std::make_shared<ov::op::v0::Constant>(ov::element::i32, ov::Shape{}, &axis_shape),
false);
// CumSum requires an integer scalar axis; Convert guarantees i32.
cumsum = std::make_shared<ov::op::v0::CumSum>(
nodes[0].dynamicCast<InfEngineNgraphNode>()->node,
std::make_shared<ov::op::v0::Convert>(axis_scalar, ov::element::i32),
exclusive_raw,
reverse_raw);
}
else
{
// Axis is a static layer attribute: embed it directly as a constant.
cumsum = std::make_shared<ov::op::v0::CumSum>(
nodes[0].dynamicCast<InfEngineNgraphNode>()->node,
std::make_shared<ov::op::v0::Constant>(ov::element::i32, ov::Shape{}, &axis_raw),
exclusive_raw,
reverse_raw);
}
return Ptr<BackendNode>(new InfEngineNgraphNode(cumsum));
}
#endif // HAVE_DNN_NGRAPH

int axis_raw;
int exclusive_raw;
int reverse_raw;
Expand Down
27 changes: 25 additions & 2 deletions modules/dnn/src/layers/expand_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
// of this distribution and at http://opencv.org/license.html.

#include "../precomp.hpp"
#include "../op_inf_engine.hpp"
#include "../ie_ngraph.hpp"
#include <opencv2/dnn/shape_utils.hpp>

namespace cv { namespace dnn {
Expand All @@ -27,8 +29,10 @@ class ExpandLayerImpl CV_FINAL : public ExpandLayer
const_input_1d = params.get("const_input_1d", false);
}

// Expand is supported on the plain OpenCV backend and, when OpenVINO is
// available, on the Inference Engine nGraph backend.
// NOTE: the span previously contained leftover pre-change diff lines (a
// second method header and an unreachable first `return`), which is not
// valid C++; only the post-change implementation is kept.
virtual bool supportBackend(int backendId) CV_OVERRIDE
{
    return backendId == DNN_BACKEND_OPENCV ||
           backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;
}

virtual bool getMemoryShapes(const std::vector<MatShape> &inputs,
Expand Down Expand Up @@ -137,6 +141,25 @@ class ExpandLayerImpl CV_FINAL : public ExpandLayer
}
}

#ifdef HAVE_DNN_NGRAPH
// Builds an OpenVINO Broadcast node implementing Expand semantics.
// The input shape is right-aligned against target_shape and each dimension
// takes the per-axis maximum (numpy-style broadcasting), so the data input
// may have fewer dimensions than the target.
virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inputs,
                                    const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
{
    auto input_shape = nodes[0].dynamicCast<InfEngineNgraphNode>()->node.get_shape();
    CV_CheckGE(target_shape.size(), input_shape.size(), "");

    std::vector<int32_t> output_shape(target_shape.begin(), target_shape.end());
    // size_t index avoids the signed/unsigned comparison of the previous
    // `int i = 1; i < input_shape.size() + 1` form.
    for (size_t i = 1; i <= input_shape.size(); ++i)
        output_shape[output_shape.size() - i] = std::max(
            (int32_t)input_shape[input_shape.size() - i],
            output_shape[output_shape.size() - i]);

    // The merged shape becomes a 1-D i32 constant fed to Broadcast.
    auto shape_node = std::make_shared<ov::op::v0::Constant>(ov::element::i32, ov::Shape{output_shape.size()}, output_shape.data());
    auto expand = std::make_shared<ov::op::v3::Broadcast>(nodes[0].dynamicCast<InfEngineNgraphNode>()->node, shape_node);
    return Ptr<BackendNode>(new InfEngineNgraphNode(expand));
}
#endif // HAVE_DNN_NGRAPH

private:
MatShape target_shape;
bool const_input_1d;
Expand Down
24 changes: 23 additions & 1 deletion modules/dnn/src/layers/gather_elements_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
// of this distribution and at http://opencv.org/license.html.

#include "../precomp.hpp"
#include "../op_inf_engine.hpp"
#include "../ie_ngraph.hpp"
#include <opencv2/dnn/shape_utils.hpp>

namespace cv { namespace dnn {
Expand Down Expand Up @@ -30,7 +32,8 @@ class GatherElementsLayerImpl CV_FINAL : public GatherElementsLayer

// GatherElements runs on the OpenCV backend and, with OpenVINO builds, on
// the Inference Engine nGraph backend.
// NOTE: removed a leftover pre-change line (`return backendId ==
// DNN_BACKEND_OPENCV;`) that shadowed the real return and made the rest of
// the method unreachable.
virtual bool supportBackend(int backendId) CV_OVERRIDE
{
    return backendId == DNN_BACKEND_OPENCV ||
           backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;
}

virtual bool getMemoryShapes(const std::vector<MatShape> &inputs,
Expand Down Expand Up @@ -148,6 +151,25 @@ class GatherElementsLayerImpl CV_FINAL : public GatherElementsLayer
};
}

#ifdef HAVE_DNN_NGRAPH
// Lowers GatherElements to OpenVINO. Negative indices along `axis` are
// remapped to their non-negative equivalents via ((i + dim) mod dim)
// before being handed to ov GatherElements.
virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inputs,
                                    const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
{
    auto& data = nodes[0].dynamicCast<InfEngineNgraphNode>()->node;
    auto& rawIndices = nodes[1].dynamicCast<InfEngineNgraphNode>()->node;

    // Extent of the gathered axis, used to wrap negative indices.
    int32_t dimSize = data.get_shape()[axis];
    auto dimNode = std::make_shared<ov::op::v0::Constant>(ov::element::i32, ov::Shape{}, &dimSize);

    auto idx32 = std::make_shared<ov::op::v0::Convert>(rawIndices, ov::element::i32);
    auto shifted = std::make_shared<ov::op::v1::Add>(idx32, dimNode);
    auto wrapped = std::make_shared<ov::op::v1::Mod>(shifted, dimNode);

    auto gather = std::make_shared<ov::op::v6::GatherElements>(data, wrapped, axis);
    return Ptr<BackendNode>(new InfEngineNgraphNode(gather));
}
#endif // HAVE_DNN_NGRAPH

private:
int axis;
};
Expand Down
18 changes: 17 additions & 1 deletion modules/dnn/src/layers/gather_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
// of this distribution and at http://opencv.org/license.html.

#include "../precomp.hpp"
#include "../op_inf_engine.hpp"
#include "../ie_ngraph.hpp"
#include "layers_common.hpp"


Expand All @@ -20,7 +22,8 @@ class GatherLayerImpl CV_FINAL : public GatherLayer

// Gather is supported on the OpenCV backend and, with OpenVINO builds, on
// the Inference Engine nGraph backend.
// NOTE: removed a leftover pre-change line (`return backendId ==
// DNN_BACKEND_OPENCV;`) — diff residue that made the extended check dead code.
virtual bool supportBackend(int backendId) CV_OVERRIDE
{
    return backendId == DNN_BACKEND_OPENCV ||
           backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;
}

virtual bool getMemoryShapes(const std::vector<MatShape> &inputs,
Expand Down Expand Up @@ -113,6 +116,19 @@ class GatherLayerImpl CV_FINAL : public GatherLayer
}
}

#ifdef HAVE_DNN_NGRAPH
// Lowers Gather to OpenVINO: data gathered along the static attribute
// m_axis, with indices cast to i32 as the op requires.
virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inputs,
                                    const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
{
    // The axis attribute becomes a scalar i32 constant input.
    auto axisConst = std::make_shared<ov::op::v0::Constant>(ov::element::i32, ov::Shape{}, &m_axis);
    // ov Gather expects integer indices; convert whatever type arrives.
    auto idx = std::make_shared<ov::op::v0::Convert>(
        nodes[1].dynamicCast<InfEngineNgraphNode>()->node, ov::element::i32);
    auto gatherOp = std::make_shared<ov::op::v8::Gather>(
        nodes[0].dynamicCast<InfEngineNgraphNode>()->node, idx, axisConst);
    return Ptr<BackendNode>(new InfEngineNgraphNode(gatherOp));
}
#endif // HAVE_DNN_NGRAPH

private:
// The axis to gather along
int m_axis;
Expand Down
17 changes: 16 additions & 1 deletion modules/dnn/src/layers/scatterND_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
// of this distribution and at http://opencv.org/license.html.

#include "../precomp.hpp"
#include "../op_inf_engine.hpp"
#include "../ie_ngraph.hpp"
#include "layers_common.hpp"

#include <algorithm> // for std::max & std::min
Expand Down Expand Up @@ -42,7 +44,8 @@ class ScatterNDLayerImpl CV_FINAL : public ScatterNDLayer

// ScatterND runs on the OpenCV backend; the nGraph backend is only used
// when no reduction is requested (ov ScatterNDUpdate has no reduction).
// NOTE: removed a leftover pre-change line (`return backendId ==
// DNN_BACKEND_OPENCV;`) — diff residue that made the nGraph check unreachable.
virtual bool supportBackend(int backendId) CV_OVERRIDE
{
    return backendId == DNN_BACKEND_OPENCV ||
           (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && reduction == REDUCTION::NONE);
}

virtual bool getMemoryShapes(const std::vector<MatShape> &inputs,
Expand Down Expand Up @@ -207,6 +210,18 @@ class ScatterNDLayerImpl CV_FINAL : public ScatterNDLayer
CV_Error(Error::StsBadArg, "Unsupported reduction.");
};
}

#ifdef HAVE_DNN_NGRAPH
// Lowers ScatterND to OpenVINO's ScatterNDUpdate (plain update only — the
// backend is rejected in supportBackend when a reduction is set).
virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inputs,
                                    const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
{
    auto& data = nodes[0].dynamicCast<InfEngineNgraphNode>()->node;
    auto& updates = nodes[2].dynamicCast<InfEngineNgraphNode>()->node;
    // Indices must be an integer type for ScatterNDUpdate.
    auto idx = std::make_shared<ov::op::v0::Convert>(
        nodes[1].dynamicCast<InfEngineNgraphNode>()->node, ov::element::i32);
    auto scatter = std::make_shared<ov::op::v3::ScatterNDUpdate>(data, idx, updates);
    return Ptr<BackendNode>(new InfEngineNgraphNode(scatter));
}
#endif // HAVE_DNN_NGRAPH
};

Ptr<ScatterNDLayer> ScatterNDLayer::create(const LayerParams& params)
Expand Down
26 changes: 25 additions & 1 deletion modules/dnn/src/layers/scatter_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
// of this distribution and at http://opencv.org/license.html.

#include "../precomp.hpp"
#include "../op_inf_engine.hpp"
#include "../ie_ngraph.hpp"
#include "layers_common.hpp"

#include <algorithm> // for std::max & std::min
Expand Down Expand Up @@ -43,7 +45,8 @@ class ScatterLayerImpl CV_FINAL : public ScatterLayer

// Scatter runs on the OpenCV backend; the nGraph backend is only used when
// no reduction is requested.
// NOTE: removed a leftover pre-change line (`return backendId ==
// DNN_BACKEND_OPENCV;`) — diff residue that made the nGraph check unreachable.
virtual bool supportBackend(int backendId) CV_OVERRIDE
{
    return backendId == DNN_BACKEND_OPENCV ||
           (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && reduction == REDUCTION::NONE);
}

virtual bool getMemoryShapes(const std::vector<MatShape> &inputs,
Expand Down Expand Up @@ -203,6 +206,27 @@ class ScatterLayerImpl CV_FINAL : public ScatterLayer
};
}

#ifdef HAVE_DNN_NGRAPH
// Lowers Scatter to OpenVINO ScatterElementsUpdate. Indices may be
// negative; they are wrapped into [0, dim) via ((i + dim) mod dim) first.
virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inputs,
                                    const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
{
    auto& data = nodes[0].dynamicCast<InfEngineNgraphNode>()->node;
    auto& updates = nodes[2].dynamicCast<InfEngineNgraphNode>()->node;

    // Extent of the scattered axis, used to wrap negative indices.
    int32_t dimSize = data.get_shape()[axis];
    auto dimNode = std::make_shared<ov::op::v0::Constant>(ov::element::i32, ov::Shape{}, &dimSize);
    auto idx32 = std::make_shared<ov::op::v0::Convert>(
        nodes[1].dynamicCast<InfEngineNgraphNode>()->node, ov::element::i32);
    auto wrappedIdx = std::make_shared<ov::op::v1::Mod>(
        std::make_shared<ov::op::v1::Add>(idx32, dimNode), dimNode);

    auto axisNode = std::make_shared<ov::op::v0::Constant>(ov::element::i32, ov::Shape{}, &axis);
    auto scatter = std::make_shared<ov::op::v3::ScatterElementsUpdate>(data, wrappedIdx, updates, axisNode);
    return Ptr<BackendNode>(new InfEngineNgraphNode(scatter));
}
#endif // HAVE_DNN_NGRAPH

private:
// Attributes
int axis;
Expand Down
15 changes: 14 additions & 1 deletion modules/dnn/src/layers/tile_layer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@

#include "../precomp.hpp"
#include "layers_common.hpp"
#include "../op_inf_engine.hpp"
#include "../ie_ngraph.hpp"

#include <opencv2/dnn/shape_utils.hpp>

Expand Down Expand Up @@ -31,7 +33,8 @@ class TileLayerImpl CV_FINAL : public TileLayer

// Tile is supported on the OpenCV backend and, with OpenVINO builds, on
// the Inference Engine nGraph backend.
// NOTE: removed a leftover pre-change line (`return backendId ==
// DNN_BACKEND_OPENCV;`) — diff residue producing an unreachable extended check.
virtual bool supportBackend(int backendId) CV_OVERRIDE
{
    return backendId == DNN_BACKEND_OPENCV ||
           backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;
}

virtual bool getMemoryShapes(const std::vector<MatShape> &inputs,
Expand Down Expand Up @@ -85,6 +88,16 @@ class TileLayerImpl CV_FINAL : public TileLayer
tmp.copyTo(out);
}

#ifdef HAVE_DNN_NGRAPH
// Lowers Tile to OpenVINO: the per-axis repeat counts become a 1-D i32
// constant fed as the second input of ov Tile.
virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inputs,
                                    const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
{
    const auto repeatsConst = std::make_shared<ov::op::v0::Constant>(
        ov::element::i32, ov::Shape{repeats.size()}, repeats.data());
    const auto tileOp = std::make_shared<ov::op::v0::Tile>(
        nodes[0].dynamicCast<InfEngineNgraphNode>()->node, repeatsConst);
    return Ptr<BackendNode>(new InfEngineNgraphNode(tileOp));
}
#endif // HAVE_DNN_NGRAPH

private:
std::vector<int> repeats;
};
Expand Down

0 comments on commit ac9a858

Please sign in to comment.