Merge pull request #25390 from Abdurrahheem:ash/0d-padding-layer
0D/1D tests for padding layer #25390

This PR introduces 0D/1D tests for the `padding` layer.

### Pull Request Readiness Checklist

See details at https://github.com/opencv/opencv/wiki/How_to_contribute#making-a-good-pull-request

- [x] I agree to contribute to the project under Apache 2 License.
- [x] To the best of my knowledge, the proposed patch is not based on code under GPL or another license that is incompatible with OpenCV
- [x] The PR is proposed to the proper branch
- [x] There is a reference to the original bug report and related work
- [x] There is accuracy test, performance test and test data in opencv_extra repository, if applicable
      Patch to opencv_extra has the same branch name.
- [x] The feature is well documented and sample code can be built with the project CMake
Abdurrahheem committed May 15, 2024
1 parent 48c31bd · commit 5260b48
Showing 2 changed files with 67 additions and 0 deletions.
6 changes: 6 additions & 0 deletions modules/dnn/src/layers/padding_layer.cpp
@@ -58,7 +58,13 @@ class PaddingLayerImpl CV_FINAL : public PaddingLayer
{
    CV_Assert(inputs.size() == 1);
    const MatShape& inpShape = inputs[0];
    if (inpShape.empty()){
        CV_Assert(paddings.size() == 1);
        outputs.resize(1, MatShape(1, paddings[0].first + paddings[0].second + 1));
        return false;
    }
    CV_Assert(inpShape.size() >= paddings.size());

    CV_Assert(inputDims == -1 || inpShape.size() == inputDims || inpShape.size() > paddings.size());

    outputs.resize(1, inpShape);
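For context, the new branch handles an empty input shape (a 0D, i.e. scalar, tensor): the output becomes a 1D blob whose length is the single element plus the padding on each side. A minimal standalone sketch of that shape arithmetic (the `paddedLength0D` helper is illustrative only, not OpenCV API):

```cpp
#include <cassert>
#include <utility>

// Hypothetical helper mirroring the new 0D branch: a scalar input padded
// with (before, after) yields a 1D output of length before + 1 + after.
static int paddedLength0D(const std::pair<int, int>& padding)
{
    return padding.first + padding.second + 1;
}

int main()
{
    assert(paddedLength0D({5, 3}) == 9); // the {5, 3} paddings used in the test below
    return 0;
}
```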
61 changes: 61 additions & 0 deletions modules/dnn/test/test_layers_1d.cpp
@@ -567,6 +567,67 @@ INSTANTIATE_TEST_CASE_P(/*nothing*/, Layer_Slice_Test,
            std::vector<int>({1, 4})
));

typedef testing::TestWithParam<tuple<std::vector<int>>> Layer_Padding_Test;
TEST_P(Layer_Padding_Test, Accuracy_01D){
    std::vector<int> input_shape = get<0>(GetParam());
    float pad_value = 10;

    LayerParams lp;
    lp.type = "Padding";
    lp.name = "PaddingLayer";
    std::vector<int> paddings = {5, 3}; // Pad before and pad after for one dimension
    lp.set("paddings", DictValue::arrayInt(paddings.data(), paddings.size()));
    lp.set("value", pad_value);
    lp.set("input_dims", (input_shape.size() == 1) ? -1 : 0);
    Ptr<PaddingLayer> layer = PaddingLayer::create(lp);

    cv::Mat input(input_shape.size(), input_shape.data(), CV_32F);
    cv::randn(input, 0.0, 1.0);

    // Fill in the padding values manually
    // Create output ref shape depending on the input shape and input_dims
    std::vector<int> output_shape;
    if (input_shape.size() == 0){
        output_shape = {1 + paddings[0] + paddings[1]};
    } else if (input_shape.size() == 1){
        output_shape = {input_shape[0] + paddings[0] + paddings[1]};
    } else {
        output_shape = {input_shape[0], input_shape[1] + paddings[0] + paddings[1]};
    }

    cv::Mat output_ref(output_shape.size(), output_shape.data(), CV_32F, pad_value);

    if (input_shape.size() == 0){
        output_ref.at<float>(paddings[0]) = input.at<float>(0);
    } else if (input_shape.size() == 1){
        for (int i = 0; i < input_shape[0]; ++i){
            output_ref.at<float>(i + paddings[0]) = input.at<float>(i);
        }
    } else {
        for (int i = 0; i < input_shape[0]; ++i){
            for (int j = 0; j < input_shape[1]; ++j){
                output_ref.at<float>(i, j + paddings[0]) = input.at<float>(i, j);
            }
        }
    }

    std::vector<Mat> inputs{input};
    std::vector<Mat> outputs;
    runLayer(layer, inputs, outputs);
    ASSERT_EQ(1, outputs.size());
    ASSERT_EQ(shape(output_ref), shape(outputs[0]));
    normAssert(output_ref, outputs[0]);
}
INSTANTIATE_TEST_CASE_P(/*nothing*/, Layer_Padding_Test,
/*input blob shape*/ testing::Values(
            std::vector<int>{},
            std::vector<int>{1},
            std::vector<int>{1, 4},
            std::vector<int>{4, 1}
));
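The reference shapes the test builds for each parameterized input can be summarized in a small standalone sketch (assuming, as in the test, that paddings {5, 3} are applied to the last dimension; `refShape` is illustrative, not part of the test suite):

```cpp
#include <cassert>
#include <vector>

// Illustrative reference-shape rule used by the test above: pad the last
// dimension; an empty (0D) shape becomes a 1D shape of length before+1+after.
static std::vector<int> refShape(const std::vector<int>& in, int before, int after)
{
    if (in.empty())
        return {before + after + 1};            // 0D -> 1D
    if (in.size() == 1)
        return {in[0] + before + after};        // 1D
    return {in[0], in[1] + before + after};     // 2D: pad last dim
}

int main()
{
    assert(refShape({}, 5, 3)     == std::vector<int>({9}));
    assert(refShape({1}, 5, 3)    == std::vector<int>({9}));
    assert(refShape({1, 4}, 5, 3) == std::vector<int>({1, 12}));
    assert(refShape({4, 1}, 5, 3) == std::vector<int>({4, 9}));
    return 0;
}
```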

typedef testing::TestWithParam<tuple<std::vector<int>>> Layer_FullyConnected_Test;
TEST_P(Layer_FullyConnected_Test, Accuracy_01D)
{
