Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[ONNX] Extend ONNX Frontend with com.microsoft.Pad #22000

Merged
merged 20 commits into from
Apr 5, 2024
Merged
Show file tree
Hide file tree
Changes from 17 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
87 changes: 87 additions & 0 deletions src/frontends/onnx/frontend/src/op/com.microsoft/pad.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,87 @@
// Copyright (C) 2018-2024 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include "op/com.microsoft/pad.hpp"

#include "core/null_node.hpp"
#include "exceptions.hpp"
#include "openvino/op/constant.hpp"
#include "openvino/op/pad.hpp"
#include "openvino/op/squeeze.hpp"
#include "openvino/op/util/op_types.hpp"
#include "utils/reshape.hpp"
#include "utils/split.hpp"

namespace {
// Maps the ONNX "mode" attribute of com.microsoft.Pad onto the corresponding
// OpenVINO PadMode. Supported values: "constant", "reflect", "edge".
// \throws ov::Exception for any other mode string.
ov::op::PadMode get_pad_mode(const std::string& mode) {
    if (mode == "constant") {
        return ov::op::PadMode::CONSTANT;
    }
    if (mode == "reflect") {
        return ov::op::PadMode::REFLECT;
    }
    if (mode == "edge") {
        return ov::op::PadMode::EDGE;
    }
    OPENVINO_THROW("Unsupported padding mode: [" + mode + "]");
}
}  // namespace

using namespace ov::op;

namespace ov {
namespace frontend {
namespace onnx {
namespace op {
namespace custom {
namespace set_1 {
// Converts com.microsoft.Pad to an OpenVINO opset12 Pad subgraph.
// Inputs: data, pads (flat [begins..., ends...] layout, optionally shaped
// [1, 2*rank]), and an optional constant_value scalar (defaults to 0 of the
// data element type).
ov::OutputVector pad(const ov::frontend::onnx::Node& node) {
    const auto inputs = node.get_ov_inputs();
    const auto& data = inputs[0];
    const auto& pads_input = inputs[1];
    auto pads = pads_input;
    // The contrib op may supply pads as a 2D tensor of shape [1, 2*rank];
    // squeeze it to 1D. Query the (possibly dynamic) rank rather than calling
    // get_shape(), which throws when the pads shape is not fully static.
    const auto& pads_rank = pads.get_partial_shape().rank();
    if (pads_rank.is_static() && pads_rank.get_length() == 2) {
        pads = std::make_shared<v0::Squeeze>(pads);
    }
    ov::Output<ov::Node> values;
    ov::Output<ov::Node> padding_begin;
    ov::Output<ov::Node> padding_end;

    // Optional third input is the fill value; default to a scalar 0 matching
    // the data element type.
    if (inputs.size() == 3 && !ov::op::util::is_null(inputs[2])) {
        values = reshape::interpret_as_scalar(inputs[2]);
    } else {
        values = v0::Constant::create(data.get_element_type(), ov::Shape{}, {0});
    }

    if (ov::op::util::is_constant(pads.get_node())) {
        // Constant pads: split [begin_0..begin_{r-1}, end_0..end_{r-1}] at
        // conversion time into two i64 constants.
        std::vector<std::int64_t> pads_vector =
            ov::as_type_ptr<v0::Constant>(pads.get_node_shared_ptr())->get_vector<std::int64_t>();

        std::size_t const half_size = pads_vector.size() / 2;
        std::vector<std::int64_t> padding_begin_values(pads_vector.begin(), pads_vector.begin() + half_size);
        std::vector<std::int64_t> padding_end_values(pads_vector.begin() + half_size, pads_vector.end());

        padding_begin = v0::Constant::create(ov::element::i64, ov::Shape{half_size}, padding_begin_values);
        padding_end = v0::Constant::create(ov::element::i64, ov::Shape{half_size}, padding_end_values);
    } else {
        // Non-constant pads: split into begin/end halves at runtime.
        ov::OutputVector padding = ov::op::util::make_split(pads, 2, 0);

        padding_begin = padding.at(0);
        padding_end = padding.at(1);
    }

    const std::string mode = node.get_attribute_value<std::string>("mode", "constant");
    ov::op::PadMode pad_mode = get_pad_mode(mode);

    return {std::make_shared<v12::Pad>(data, padding_begin, padding_end, values, pad_mode)};
}
} // namespace set_1
} // namespace custom
} // namespace op
} // namespace onnx
} // namespace frontend
} // namespace ov
21 changes: 21 additions & 0 deletions src/frontends/onnx/frontend/src/op/com.microsoft/pad.hpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
// Copyright (C) 2018-2024 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#pragma once

#include "core/node.hpp"

namespace ov {
namespace frontend {
namespace onnx {
namespace op {
namespace custom {
namespace set_1 {
// Converts the com.microsoft.Pad contrib operator to an OpenVINO subgraph.
ov::OutputVector pad(const ov::frontend::onnx::Node& node);
} // namespace set_1
} // namespace custom
} // namespace op
} // namespace onnx
} // namespace frontend
} // namespace ov
5 changes: 5 additions & 0 deletions src/frontends/onnx/frontend/src/ops_bridge.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@
#include "op/com.microsoft/embed_layer_normalization.hpp"
#include "op/com.microsoft/fused_conv.hpp"
#include "op/com.microsoft/fusedgemm.hpp"
#include "op/com.microsoft/pad.hpp"
#include "op/com.microsoft/skip_layer_normalization.hpp"
#include "op/compress.hpp"
#include "op/concat.hpp"
Expand Down Expand Up @@ -608,6 +609,10 @@ OperatorsBridge::OperatorsBridge() {
op::set_13::dequantize_linear,
"com.microsoft");
register_operator_in_custom_domain("Gelu", VersionRange::since(1), op::set_1::gelu, "com.microsoft");
register_operator_in_custom_domain("Pad",
VersionRange::single_version_for_all_opsets(),
op::custom::set_1::pad,
"com.microsoft");
register_operator_in_custom_domain("QuantizeLinear",
VersionRange::since(1),
op::set_13::quantize_linear,
Expand Down
66 changes: 66 additions & 0 deletions src/frontends/onnx/tests/models/com.microsoft/pad_1d.prototxt
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
ir_version: 6
producer_name: "OV ONNX Frontend"
graph {
node {
input: "x"
input: "pads"
output: "y"
op_type: "Pad"
attribute {
name: "mode"
s: "constant"
type: STRING
}
domain: "com.microsoft"
}
name: "test_pad_1d_microsoft"
input {
name: "x"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 3
}
dim {
dim_value: 2
}
}
}
}
}
input {
name: "pads"
type {
tensor_type {
elem_type: 7
shape {
dim {
dim_value: 4
}
}
}
}
}
output {
name: "y"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 3
}
dim {
dim_value: 4
}
}
}
}
}
}
opset_import {
domain: "com.microsoft"
version: 1
}
69 changes: 69 additions & 0 deletions src/frontends/onnx/tests/models/com.microsoft/pad_2d.prototxt
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
ir_version: 6
producer_name: "OV ONNX Frontend"
graph {
node {
input: "x"
input: "pads"
output: "y"
op_type: "Pad"
attribute {
name: "mode"
s: "constant"
type: STRING
}
domain: "com.microsoft"
}
name: "test_pad_2d_microsoft"
input {
name: "x"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 3
}
dim {
dim_value: 2
}
}
}
}
}
input {
name: "pads"
type {
tensor_type {
elem_type: 7
shape {
dim {
dim_value: 1
}
dim {
dim_value: 4
}
}
}
}
}
output {
name: "y"
type {
tensor_type {
elem_type: 1
shape {
dim {
dim_value: 3
}
dim {
dim_value: 4
}
}
}
}
}
}
opset_import {
domain: "com.microsoft"
version: 1
}
24 changes: 24 additions & 0 deletions src/frontends/onnx/tests/onnx_import_com_microsoft.in.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -1276,3 +1276,27 @@ OPENVINO_TEST(${BACKEND_NAME}, onnx_com_microsoft_gather_nd) {
test_case.add_expected_output<int>(Shape{2}, output);
test_case.run_with_tolerance_as_fp();
}

// com.microsoft.Pad with a pads input of shape [1, 4]: exercises the
// converter's Squeeze path for 2D pads. Pads [0, 2, 0, 0] prepend two zero
// columns to the 3x2 input, producing a 3x4 output.
OPENVINO_TEST(${BACKEND_NAME}, onnx_com_microsoft_pad_2d) {
const auto model = convert_model("com.microsoft/pad_2d.onnx");
auto test_case = ov::test::TestCase(model, s_device);

test_case.add_input<float>({1.f, 1.2f, 2.3f, 3.4f, 4.5f, 5.7f});
test_case.add_input<int64_t>({0, 2, 0, 0});
test_case.add_expected_output<float>(Shape{3, 4},
{0.f, 0.f, 1.f, 1.2f, 0.f, 0.f, 2.3f, 3.4f, 0.f, 0.f, 4.5f, 5.7f});

test_case.run();
}

// com.microsoft.Pad with a flat pads input of shape [4]: exercises the
// direct (no-Squeeze) path. Same values as the 2D-pads test: two zero
// columns are prepended to the 3x2 input, producing a 3x4 output.
OPENVINO_TEST(${BACKEND_NAME}, onnx_com_microsoft_pad_1d) {
const auto model = convert_model("com.microsoft/pad_1d.onnx");
auto test_case = ov::test::TestCase(model, s_device);

test_case.add_input<float>({1.f, 1.2f, 2.3f, 3.4f, 4.5f, 5.7f});
test_case.add_input<int64_t>({0, 2, 0, 0});
test_case.add_expected_output<float>(Shape{3, 4},
{0.f, 0.f, 1.f, 1.2f, 0.f, 0.f, 2.3f, 3.4f, 0.f, 0.f, 4.5f, 5.7f});

test_case.run();
}
Loading