ENH: Increase itk::RegularStepGradientDescentOptimizerv4 coverage
Increase `itk::RegularStepGradientDescentOptimizerv4` coverage:
- Test the Set/Get methods using the `ITK_TEST_SET_GET_VALUE` macro.
- Test exceptions using the `ITK_TRY_EXPECT_EXCEPTION` macro.

Take advantage of the commit to make the style of the messages printed
to the standard output consistent.
jhlegarreta authored and hjmjohnson committed Sep 12, 2022
1 parent f0d3d80 commit 59e7d01
Showing 1 changed file with 47 additions and 12 deletions.
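For readers unfamiliar with the ITK testing macros referenced in the commit message, the pattern this commit exercises looks roughly like the sketch below. This is a minimal illustration, not part of the actual test file: the function name is hypothetical, and the assumption that `StartOptimization()` throws when the optimizer is left unconfigured (no metric set) is made only for illustration; the macro names and headers are the real ones from ITK's `itkTestingMacros.h`.

// Minimal sketch of the coverage pattern described in the commit message
// (hypothetical test function, not the actual ITK test file).
#include "itkRegularStepGradientDescentOptimizerv4.h"
#include "itkTestingMacros.h"

#include <cstdlib>

int
SketchOfOptimizerCoverage()
{
  using OptimizerType = itk::RegularStepGradientDescentOptimizerv4<double>;
  auto optimizer = OptimizerType::New();

  // Set/Get coverage: ITK_TEST_SET_GET_VALUE makes the test fail if the
  // Get method does not return the value that was just set.
  constexpr double relaxationFactor = 0.5;
  optimizer->SetRelaxationFactor(relaxationFactor);
  ITK_TEST_SET_GET_VALUE(relaxationFactor, optimizer->GetRelaxationFactor());

  // Exception coverage: ITK_TRY_EXPECT_EXCEPTION makes the test fail if the
  // wrapped call does NOT throw. Here the call is assumed to throw because
  // no metric has been assigned to the optimizer (illustrative assumption).
  ITK_TRY_EXPECT_EXCEPTION(optimizer->StartOptimization());

  return EXIT_SUCCESS;
}

In the actual diff below, the same macros are applied to the optimizer's RelaxationFactor, CurrentLearningRateRelaxation, and scales settings.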
@@ -183,12 +183,6 @@ RegularStepGradientDescentOptimizerv4TestHelper(
initialPosition[1] = -100;
metric->SetParameters(initialPosition);

ScalesType parametersScale(spaceDimension);
parametersScale[0] = 1.0;
parametersScale[1] = 1.0;

optimizer->SetScales(parametersScale);

typename OptimizerType::InternalComputationValueType learningRate = 100;
optimizer->SetLearningRate(learningRate);

@@ -209,13 +203,29 @@ RegularStepGradientDescentOptimizerv4TestHelper(
optimizer->SetCurrentLearningRateRelaxation(currentLearningRateRelaxation);
ITK_TEST_SET_GET_VALUE(currentLearningRateRelaxation, optimizer->GetCurrentLearningRateRelaxation());

std::cout << "currentPosition before optimization: " << optimizer->GetMetric()->GetParameters() << std::endl;
// Test exceptions
ScalesType parametersScaleExcp(spaceDimension - 1);
parametersScaleExcp.Fill(1.0);
optimizer->SetScales(parametersScaleExcp);

ITK_TRY_EXPECT_EXCEPTION(optimizer->StartOptimization());

ScalesType parametersScale(spaceDimension);
parametersScale.Fill(1.0);
optimizer->SetScales(parametersScale);

optimizer->SetRelaxationFactor(relaxationFactor);
ITK_TEST_SET_GET_VALUE(relaxationFactor, optimizer->GetRelaxationFactor());

std::cout << "CurrentPosition before optimization: " << optimizer->GetMetric()->GetParameters() << std::endl;

ITK_TRY_EXPECT_NO_EXCEPTION(optimizer->StartOptimization());


std::cout << "currentPosition after optimization: " << optimizer->GetMetric()->GetParameters() << std::endl;
std::cout << " Stop Condition = " << optimizer->GetStopConditionDescription() << std::endl;
std::cout << "CurrentPosition after optimization: " << optimizer->GetMetric()->GetParameters() << std::endl;
std::cout << "Stop Condition: " << optimizer->GetStopConditionDescription() << std::endl;


if (optimizer->GetCurrentIteration() > 0)
{
std::cerr << "The optimizer is running iterations despite of ";
@@ -391,12 +401,12 @@ itkRegularStepGradientDescentOptimizerv4Test(int, char *[])
}

//
// Test the Exception if the RelaxationFactor is set to a value more than one.
// Test the Exception if the RelaxationFactor is set to a negative value.
//
std::cout << "\nTest the Exception if the RelaxationFactor is set to a value larger than one:" << std::endl;
std::cout << "\nTest the Exception if the RelaxationFactor is set to a negative value:" << std::endl;
{
itk::SizeValueType numberOfIterations3 = 100;
OptimizerType::InternalComputationValueType relaxationFactor3 = 1.1;
OptimizerType::InternalComputationValueType relaxationFactor3 = -1.0;
OptimizerType::InternalComputationValueType gradientMagnitudeTolerance3 = 0.01;
bool expectedExceptionReceived =
RegularStepGradientDescentOptimizerv4TestHelper<OptimizerType>(numberOfIterations3,
@@ -406,6 +416,31 @@ itkRegularStepGradientDescentOptimizerv4Test(int, char *[])
minimumStepLength,
gradientMagnitudeTolerance3,
currentLearningRateRelaxation);
if (!expectedExceptionReceived)
{
std::cerr << "Failure to produce an exception when";
std::cerr << " the RelaxationFactor is negative " << std::endl;
std::cerr << "TEST FAILED !" << std::endl;
testStatus = EXIT_FAILURE;
}
}

//
// Test the Exception if the RelaxationFactor is set to a value larger than one.
//
std::cout << "\nTest the Exception if the RelaxationFactor is set to a value larger than one:" << std::endl;
{
itk::SizeValueType numberOfIterations4 = 100;
OptimizerType::InternalComputationValueType relaxationFactor4 = 1.1;
OptimizerType::InternalComputationValueType gradientMagnitudeTolerance4 = 0.01;
bool expectedExceptionReceived =
RegularStepGradientDescentOptimizerv4TestHelper<OptimizerType>(numberOfIterations4,
doEstimateLearningRateAtEachIteration,
doEstimateLearningRateOnce,
relaxationFactor4,
minimumStepLength,
gradientMagnitudeTolerance4,
currentLearningRateRelaxation);

if (!expectedExceptionReceived)
{
