diff --git a/Modules/Numerics/Optimizersv4/include/itkGradientDescentOptimizerv4.h b/Modules/Numerics/Optimizersv4/include/itkGradientDescentOptimizerv4.h index bfd72fd3900..619997321c6 100644 --- a/Modules/Numerics/Optimizersv4/include/itkGradientDescentOptimizerv4.h +++ b/Modules/Numerics/Optimizersv4/include/itkGradientDescentOptimizerv4.h @@ -202,6 +202,10 @@ class ITK_TEMPLATE_EXPORT GradientDescentOptimizerv4Template EstimateLearningRate(); protected: + /** Compute value and derivative, handle exceptions, store best value, and update iteration */ + void + GetMetricValueAndDerivative(); + /** Advance one step following the gradient direction. * Includes transform update. */ virtual void diff --git a/Modules/Numerics/Optimizersv4/include/itkGradientDescentOptimizerv4.hxx b/Modules/Numerics/Optimizersv4/include/itkGradientDescentOptimizerv4.hxx index 0bd0a1962ad..074d957cfaa 100644 --- a/Modules/Numerics/Optimizersv4/include/itkGradientDescentOptimizerv4.hxx +++ b/Modules/Numerics/Optimizersv4/include/itkGradientDescentOptimizerv4.hxx @@ -41,15 +41,20 @@ GradientDescentOptimizerv4Template::StartOptimiza // Must call the superclass version for basic validation and setup. Superclass::StartOptimization(doOnlyInitialization); + // Initialize variables + this->m_CurrentIteration = 0; + this->m_ConvergenceValue = NumericTraits::max(); + this->m_CurrentMetricValue = NumericTraits::max(); + this->m_Gradient.Fill(NumericTraits::ZeroValue()); + this->m_PreviousGradient.Fill(NumericTraits::ZeroValue()); + + // Update best parameters. 
if (this->m_ReturnBestParametersAndValue) { this->m_BestParameters = this->GetCurrentPosition(); - this->m_CurrentBestValue = NumericTraits::max(); + this->m_CurrentBestValue = this->m_CurrentMetricValue; } - this->m_CurrentIteration = 0; - this->m_ConvergenceValue = NumericTraits::max(); - if (!doOnlyInitialization) { this->ResumeOptimization(); @@ -76,37 +81,40 @@ GradientDescentOptimizerv4Template::ResumeOptimiz this->m_StopConditionDescription << this->GetNameOfClass() << ": "; this->InvokeEvent(StartEvent()); - this->m_Stop = false; - while (!this->m_Stop) + // If it is the first iteration, the value and derivative are computed for the first time if (this->m_CurrentIteration == 0) { - // Do not run the loop if the maximum number of iterations is reached or its value is zero. + // Unless no iterations were requested, in which case the value and derivative + // are never computed if (this->m_CurrentIteration >= this->m_NumberOfIterations) { this->m_StopConditionDescription << "Maximum number of iterations (" << this->m_NumberOfIterations << ") exceeded."; this->m_StopCondition = StopConditionObjectToObjectOptimizerEnum::MAXIMUM_NUMBER_OF_ITERATIONS; this->StopOptimization(); - break; + return; } - // Save previous value with shallow swap that will be used by child optimizer. - swap(this->m_PreviousGradient, this->m_Gradient); - // Compute metric value/derivative. - try - { - // m_Gradient will be sized as needed by metric. If it's already - // proper size, no new allocation is done. - this->m_Metric->GetValueAndDerivative(this->m_CurrentMetricValue, this->m_Gradient); - } - catch (ExceptionObject & err) + this->GetMetricValueAndDerivative(); + + // Some optimizers (RegularStep, QuasiNewton) rely on both m_Gradient and m_PreviousGradient to be valid. + // Therefore set m_PreviousGradient to be equal to m_Gradient. 
+ this->m_PreviousGradient = this->m_Gradient; + } + + // Loop until stop condition encountered + this->m_Stop = false; + while (!this->m_Stop) + { + // Check for exit on iterations + if (this->m_CurrentIteration >= this->m_NumberOfIterations) { - this->m_StopCondition = StopConditionObjectToObjectOptimizerEnum::COSTFUNCTION_ERROR; - this->m_StopConditionDescription << "Metric error during optimization"; + this->m_StopConditionDescription << "Maximum number of iterations (" << this->m_NumberOfIterations + << ") exceeded."; + this->m_StopCondition = StopConditionObjectToObjectOptimizerEnum::MAXIMUM_NUMBER_OF_ITERATIONS; this->StopOptimization(); - - // Pass exception to caller - throw err; + break; } // Check if optimization has been stopped externally. @@ -143,19 +151,45 @@ GradientDescentOptimizerv4Template::ResumeOptimiz // This will modify the gradient and update the transform. this->AdvanceOneStep(); - // Store best value and position - if (this->m_ReturnBestParametersAndValue && this->m_CurrentMetricValue < this->m_CurrentBestValue) - { - this->m_CurrentBestValue = this->m_CurrentMetricValue; - this->m_BestParameters = this->GetCurrentPosition(); - } - - // Update and check iteration count - this->m_CurrentIteration++; + // Save previous value with shallow swap that will be used by child optimizer. + swap(this->m_PreviousGradient, this->m_Gradient); + // Compute metric value/derivative. + this->GetMetricValueAndDerivative(); } // while (!m_Stop) } +template +void +GradientDescentOptimizerv4Template::GetMetricValueAndDerivative() +{ + // Compute metric value/derivative. + try + { + // m_Gradient will be sized as needed by metric. If it's already + // proper size, no new allocation is done. 
+ this->m_Metric->GetValueAndDerivative(this->m_CurrentMetricValue, this->m_Gradient); + } + catch (ExceptionObject & err) + { + this->m_StopCondition = StopConditionObjectToObjectOptimizerEnum::COSTFUNCTION_ERROR; + this->m_StopConditionDescription << "Metric error during optimization"; + this->StopOptimization(); + // Pass exception to caller + throw err; + } + + // Store best value and position + if (this->m_ReturnBestParametersAndValue && this->m_CurrentMetricValue < this->m_CurrentBestValue) + { + this->m_CurrentBestValue = this->m_CurrentMetricValue; + this->m_BestParameters = this->GetCurrentPosition(); + } + + // Update and check iteration count + this->m_CurrentIteration++; +} + template void GradientDescentOptimizerv4Template::AdvanceOneStep() diff --git a/Modules/Numerics/Optimizersv4/include/itkQuasiNewtonOptimizerv4.hxx b/Modules/Numerics/Optimizersv4/include/itkQuasiNewtonOptimizerv4.hxx index 4b6f3f18f50..18cb0b26f59 100644 --- a/Modules/Numerics/Optimizersv4/include/itkQuasiNewtonOptimizerv4.hxx +++ b/Modules/Numerics/Optimizersv4/include/itkQuasiNewtonOptimizerv4.hxx @@ -98,7 +98,7 @@ QuasiNewtonOptimizerv4Template::AdvanceOneStep() const SizeValueType numPara = this->m_Metric->GetNumberOfParameters(); this->m_CurrentPosition = this->m_Metric->GetParameters(); - if (this->GetCurrentIteration() == 0) + if (this->GetCurrentIteration() == 1) { // initialize some information this->m_PreviousValue = this->GetCurrentMetricValue(); @@ -136,7 +136,7 @@ QuasiNewtonOptimizerv4Template::AdvanceOneStep() return; } - if (this->GetCurrentIteration() > 0) + if (this->GetCurrentIteration() > 1) { ParametersType lastStep(numPara); lastStep = this->m_CurrentPosition - this->m_PreviousPosition; @@ -324,7 +324,7 @@ QuasiNewtonOptimizerv4Template::EstimateNewtonSte for (IndexValueType loc = low; loc <= high; ++loc) { - if (this->GetCurrentIteration() == 0) + if (this->GetCurrentIteration() == 1) { this->m_NewtonStepValidFlags[loc] = false; } @@ -345,7 +345,7 @@ 
template bool QuasiNewtonOptimizerv4Template::ComputeHessianAndStepWithBFGS(IndexValueType loc) { - if (this->GetCurrentIteration() == 0) + if (this->GetCurrentIteration() == 1) { return false; } diff --git a/Modules/Numerics/Optimizersv4/include/itkRegularStepGradientDescentOptimizerv4.hxx b/Modules/Numerics/Optimizersv4/include/itkRegularStepGradientDescentOptimizerv4.hxx index 46d3685785c..ce2b9b9e261 100644 --- a/Modules/Numerics/Optimizersv4/include/itkRegularStepGradientDescentOptimizerv4.hxx +++ b/Modules/Numerics/Optimizersv4/include/itkRegularStepGradientDescentOptimizerv4.hxx @@ -41,25 +41,8 @@ RegularStepGradientDescentOptimizerv4::StartOptim { this->m_UseConvergenceMonitoring = false; - // Call the grandparent version for basic validation and setup - GradientDescentOptimizerBasev4Template::StartOptimization(doOnlyInitialization); - - if (this->m_ReturnBestParametersAndValue) - { - this->m_BestParameters = this->GetCurrentPosition(); - this->m_CurrentBestValue = NumericTraits::max(); - } - - const SizeValueType spaceDimension = this->m_Metric->GetNumberOfParameters(); - - this->m_Gradient = DerivativeType(spaceDimension); - this->m_PreviousGradient = DerivativeType(spaceDimension); - this->m_Gradient.Fill(0.0f); - this->m_PreviousGradient.Fill(0.0f); - - // Reset the iterations and learning rate scale when the optimizer is started again. + // Reset the learning rate scale when the optimizer is started again. this->m_CurrentLearningRateRelaxation = 1.0; - this->m_CurrentIteration = 0; // validity check for the value of GradientMagnitudeTolerance if (m_GradientMagnitudeTolerance < 0.0) @@ -69,10 +52,9 @@ RegularStepGradientDescentOptimizerv4::StartOptim << m_GradientMagnitudeTolerance); } - if (!doOnlyInitialization) - { - this->ResumeOptimization(); - } + /* Must call the superclass version for basic validation, setup, + * and to start the optimization loop. 
*/ + Superclass::StartOptimization(doOnlyInitialization); } template diff --git a/Modules/Numerics/Optimizersv4/test/itkGradientDescentOptimizerv4Test.cxx b/Modules/Numerics/Optimizersv4/test/itkGradientDescentOptimizerv4Test.cxx index 6e41f0a3e6d..530eef3439d 100644 --- a/Modules/Numerics/Optimizersv4/test/itkGradientDescentOptimizerv4Test.cxx +++ b/Modules/Numerics/Optimizersv4/test/itkGradientDescentOptimizerv4Test.cxx @@ -332,6 +332,32 @@ itkGradientDescentOptimizerv4Test(int, char *[]) } } + // Verify that the optimizer stays at the initial position if + // number of iterations is set to one. + std::cout << "\nCheck the optimizer position (parameters) are equal to the initial " + "value when number of iterations is set to one:" + << std::endl; + { + scales.Fill(1.0); + weights.Fill(1.0); + itkOptimizer->SetScales(scales); + itkOptimizer->SetWeights(weights); + itkOptimizer->SetNumberOfIterations(1); + metric->SetParameters(initialPosition); + trueParameters[0] = 100; + trueParameters[1] = -100; + if (GradientDescentOptimizerv4RunTest(itkOptimizer, trueParameters) == EXIT_FAILURE) + { + result = EXIT_FAILURE; + } + // Also the score should be equal to the initial score + double value = itkOptimizer->GetValue(); + if (value != 24000) + { + result = EXIT_FAILURE; + } + } + std::cout << "\nTest the Exception if the optimizer is not set properly:" << std::endl; OptimizerType::Pointer badOptimizer = OptimizerType::New(); bool caught = false; diff --git a/Modules/Numerics/Optimizersv4/test/itkGradientDescentOptimizerv4Test2.cxx b/Modules/Numerics/Optimizersv4/test/itkGradientDescentOptimizerv4Test2.cxx index c5cbc6fad82..7d0b1fd1388 100644 --- a/Modules/Numerics/Optimizersv4/test/itkGradientDescentOptimizerv4Test2.cxx +++ b/Modules/Numerics/Optimizersv4/test/itkGradientDescentOptimizerv4Test2.cxx @@ -160,7 +160,7 @@ itkGradientDescentOptimizerv4Test2(int, char *[]) metric->SetParameters(initialPosition); itkOptimizer->SetLearningRate(1.0); - 
itkOptimizer->SetNumberOfIterations(1); + itkOptimizer->SetNumberOfIterations(2); ScalesType scales(metric->GetNumberOfLocalParameters()); for (NumberOfParametersType i = 0; i < metric->GetNumberOfLocalParameters(); ++i) diff --git a/Modules/Numerics/Optimizersv4/test/itkRegularStepGradientDescentOptimizerv4Test.cxx b/Modules/Numerics/Optimizersv4/test/itkRegularStepGradientDescentOptimizerv4Test.cxx index bd68dd03db3..0a7ed3a4e91 100644 --- a/Modules/Numerics/Optimizersv4/test/itkRegularStepGradientDescentOptimizerv4Test.cxx +++ b/Modules/Numerics/Optimizersv4/test/itkRegularStepGradientDescentOptimizerv4Test.cxx @@ -384,6 +384,50 @@ itkRegularStepGradientDescentOptimizerv4Test(int, char *[]) currentLearningRateRelaxation); } + // Verify that the optimizer stays at the initial position if + // number of iterations is set to one. + std::cout << "\nCheck the optimizer position (parameters) are equal to the initial " + "value when number of iterations is set to one:" + << std::endl; + { + RSGv4TestMetric::Pointer metric = RSGv4TestMetric::New(); + itkOptimizer->SetMetric(metric); + using ParametersType = RSGv4TestMetric::ParametersType; + const unsigned int spaceDimension = metric->GetNumberOfParameters(); + ParametersType initialPosition(spaceDimension); + initialPosition[0] = 100; + initialPosition[1] = -100; + metric->SetParameters(initialPosition); + + itkOptimizer->SetNumberOfIterations(1); + try + { + itkOptimizer->StartOptimization(); + } + catch (const itk::ExceptionObject & e) + { + return EXIT_FAILURE; + } + + // Check results to see if it is within range + ParametersType finalPosition = itkOptimizer->GetMetric()->GetParameters(); + double trueParameters[2] = { 100, -100 }; + for (unsigned int j = 0; j < 2; ++j) + { + if (!itk::Math::FloatAlmostEqual(finalPosition[j], trueParameters[j])) + { + testStatus = EXIT_FAILURE; + } + } + + // Also the score should be equal to the initial score + double value = itkOptimizer->GetValue(); + if (value != 24000) + { + 
testStatus = EXIT_FAILURE; + } + } + // // Test the Exception if the GradientMagnitudeTolerance is set to a negative value //