Skip to content

Commit

Permalink
Assert argument position switch
Browse files Browse the repository at this point in the history
  • Loading branch information
Abdurrahheem committed May 6, 2024
1 parent 069d3e5 commit 7c59b39
Showing 1 changed file with 27 additions and 27 deletions.
54 changes: 27 additions & 27 deletions modules/dnn/test/test_layers_1d.cpp
Expand Up @@ -42,8 +42,8 @@ TEST_P(Layer_1d_Test, Scale)

cv::Mat output_ref = input.mul(weight);
runLayer(layer, inputs, outputs);
ASSERT_EQ(outputs.size(), 1);
ASSERT_EQ(shape(outputs[0]), shape(output_ref));
ASSERT_EQ(1, outputs.size());
ASSERT_EQ(shape(output_ref), shape(outputs[0]));
normAssert(output_ref, outputs[0]);
}

Expand Down Expand Up @@ -83,8 +83,8 @@ TEST_P(Layer_Gather_1d_Test, Accuracy) {
std::vector<Mat> outputs;

runLayer(layer, inputs, outputs);
ASSERT_EQ(outputs.size(), 1);
ASSERT_EQ(shape(outputs[0]), shape(output_ref));
ASSERT_EQ(1, outputs.size());
ASSERT_EQ(shape(output_ref), shape(outputs[0]));
normAssert(output_ref, outputs[0]);
}
INSTANTIATE_TEST_CASE_P(/*nothing*/, Layer_Gather_1d_Test, Combine(
Expand Down Expand Up @@ -154,7 +154,7 @@ TEST_P(Layer_Arg_1d_Test, Accuracy_01D) {

runLayer(layer, inputs, outputs);
ASSERT_EQ(1, outputs.size());
ASSERT_EQ(shape(outputs[0]), shape(output_ref));
ASSERT_EQ(shape(output_ref), shape(outputs[0]));
normAssert(output_ref, outputs[0]);
}

Expand Down Expand Up @@ -202,8 +202,8 @@ TEST_P(Layer_NaryElemwise_1d_Test, Accuracy) {

runLayer(layer, inputs, outputs);
if (!output_ref.empty()) {
ASSERT_EQ(outputs.size(), 1);
ASSERT_EQ(shape(outputs[0]), shape(output_ref));
ASSERT_EQ(1, outputs.size());
ASSERT_EQ(shape(output_ref), shape(outputs[0]));
normAssert(output_ref, outputs[0]);
} else {
CV_Error(Error::StsAssert, "Provided operation: " + operation + " is not supported. Please check the test instantiation.");
Expand Down Expand Up @@ -256,8 +256,8 @@ TEST_P(Layer_Elemwise_1d_Test, Accuracy_01D) {

runLayer(layer, inputs, outputs);
if (!output_ref.empty()) {
ASSERT_EQ(outputs.size(), 1);
ASSERT_EQ(shape(outputs[0]), shape(output_ref));
ASSERT_EQ(1, outputs.size());
ASSERT_EQ(shape(output_ref), shape(outputs[0]));
normAssert(output_ref, outputs[0]);
} else {
CV_Error(Error::StsAssert, "Provided operation: " + operation + " is not supported. Please check the test instantiation.");
Expand Down Expand Up @@ -295,8 +295,8 @@ TEST(Layer_Reshape_Test, Accuracy_1D)
std::vector<Mat> outputs;

runLayer(layer, inputs, outputs);
ASSERT_EQ(outputs.size(), 1);
ASSERT_EQ(shape(outputs[0]), shape(output_ref));
ASSERT_EQ(1, outputs.size());
ASSERT_EQ(shape(output_ref), shape(outputs[0]));
normAssert(output_ref, outputs[0]);
}

Expand Down Expand Up @@ -360,8 +360,8 @@ TEST_P(Layer_Expand_Test, Accuracy_ND) {
std::vector<Mat> outputs;

runLayer(layer, inputs, outputs);
ASSERT_EQ(outputs.size(), 1);
ASSERT_EQ(shape(outputs[0]), shape(output_ref));
ASSERT_EQ(1, outputs.size());
ASSERT_EQ(shape(output_ref), shape(outputs[0]));
normAssert(output_ref, outputs[0]);
}
INSTANTIATE_TEST_CASE_P(/*nothing*/, Layer_Expand_Test, Combine(
Expand Down Expand Up @@ -403,8 +403,8 @@ TEST_P(Layer_Concat_Test, Accuracy_01D)
std::vector<Mat> outputs;

runLayer(layer, inputs, outputs);
ASSERT_EQ(outputs.size(), 1);
ASSERT_EQ(shape(outputs[0]), shape(output_ref));
ASSERT_EQ(1, outputs.size());
ASSERT_EQ(shape(output_ref), shape(outputs[0]));
normAssert(output_ref, outputs[0]);
}
INSTANTIATE_TEST_CASE_P(/*nothing*/, Layer_Concat_Test,
Expand Down Expand Up @@ -443,8 +443,8 @@ TEST_P(Layer_Softmax_Test, Accuracy_01D) {
std::vector<Mat> inputs{input};
std::vector<Mat> outputs;
runLayer(layer, inputs, outputs);
ASSERT_EQ(outputs.size(), 1);
ASSERT_EQ(shape(outputs[0]), shape(output_ref));
ASSERT_EQ(1, outputs.size());
ASSERT_EQ(shape(output_ref), shape(outputs[0]));
normAssert(output_ref, outputs[0]);
}

Expand Down Expand Up @@ -501,8 +501,8 @@ TEST_P(Layer_Scatter_Test, Accuracy1D) {
std::vector<Mat> inputs{output, indices_mat, input};
std::vector<Mat> outputs;
runLayer(layer, inputs, outputs);
ASSERT_EQ(outputs.size(), 1);
ASSERT_EQ(shape(outputs[0]), shape(output_ref));
ASSERT_EQ(1, outputs.size());
ASSERT_EQ(shape(output_ref), shape(outputs[0]));
normAssert(output_ref, outputs[0]);
}
INSTANTIATE_TEST_CASE_P(/*nothing*/, Layer_Scatter_Test, Combine(
Expand Down Expand Up @@ -534,8 +534,8 @@ TEST_P(Layer_Permute_Test, Accuracy_01D)
std::vector<Mat> outputs;

runLayer(layer, inputs, outputs);
ASSERT_EQ(outputs.size(), 1);
ASSERT_EQ(shape(outputs[0]), shape(output_ref));
ASSERT_EQ(1, outputs.size());
ASSERT_EQ(shape(output_ref), shape(outputs[0]));
normAssert(output_ref, outputs[0]);
}
INSTANTIATE_TEST_CASE_P(/*nothing*/, Layer_Permute_Test,
Expand Down Expand Up @@ -627,8 +627,8 @@ TEST_P(Layer_FullyConnected_Test, Accuracy_01D)
std::vector<Mat> inputs{input};
std::vector<Mat> outputs;
runLayer(layer, inputs, outputs);
ASSERT_EQ(outputs.size(), 1);
ASSERT_EQ(shape(outputs[0]), shape(output_ref));
ASSERT_EQ(1, outputs.size());
ASSERT_EQ(shape(output_ref), shape(outputs[0]));
normAssert(output_ref, outputs[0]);
}
INSTANTIATE_TEST_CASE_P(/*nothing*/, Layer_FullyConnected_Test,
Expand Down Expand Up @@ -673,8 +673,8 @@ TEST_P(Layer_BatchNorm_Test, Accuracy_01D)
cv::sqrt(varMat + 1e-5, varMat);
output_ref = (output_ref - meanMat) / varMat;

ASSERT_EQ(outputs.size(), 1);
ASSERT_EQ(shape(outputs[0]), shape(output_ref));
ASSERT_EQ(1, outputs.size());
ASSERT_EQ(shape(output_ref), shape(outputs[0]));
normAssert(output_ref, outputs[0]);

}
Expand Down Expand Up @@ -706,8 +706,8 @@ TEST_P(Layer_Const_Test, Accuracy_01D)
std::vector<Mat> inputs; // No inputs are needed for a ConstLayer
std::vector<Mat> outputs;
runLayer(layer, inputs, outputs);
ASSERT_EQ(outputs.size(), 1);
ASSERT_EQ(shape(outputs[0]), shape(output_ref));
ASSERT_EQ(1, outputs.size());
ASSERT_EQ(shape(output_ref), shape(outputs[0]));
normAssert(output_ref, outputs[0]);
}
INSTANTIATE_TEST_CASE_P(/*nothing*/, Layer_Const_Test, testing::Values(
Expand Down

0 comments on commit 7c59b39

Please sign in to comment.