Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -202,6 +202,10 @@ class ITK_TEMPLATE_EXPORT GradientDescentOptimizerv4Template
EstimateLearningRate();

protected:
/** Compute value and derivative, handle exceptions, store best value, and update iteration */
void
GetMetricValueAndDerivative();

/** Advance one step following the gradient direction.
* Includes transform update. */
virtual void
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,15 +41,20 @@ GradientDescentOptimizerv4Template<TInternalComputationValueType>::StartOptimiza
// Must call the superclass version for basic validation and setup.
Superclass::StartOptimization(doOnlyInitialization);

// Initialize variables
this->m_CurrentIteration = 0;
this->m_ConvergenceValue = NumericTraits<TInternalComputationValueType>::max();
this->m_CurrentMetricValue = NumericTraits<MeasureType>::max();
this->m_Gradient.Fill(NumericTraits<TInternalComputationValueType>::ZeroValue());
this->m_PreviousGradient.Fill(NumericTraits<TInternalComputationValueType>::ZeroValue());

// Update best parameters.
if (this->m_ReturnBestParametersAndValue)
{
this->m_BestParameters = this->GetCurrentPosition();
this->m_CurrentBestValue = NumericTraits<MeasureType>::max();
this->m_CurrentBestValue = this->m_CurrentMetricValue;
}

this->m_CurrentIteration = 0;
this->m_ConvergenceValue = NumericTraits<TInternalComputationValueType>::max();

if (!doOnlyInitialization)
{
this->ResumeOptimization();
Expand All @@ -76,37 +81,40 @@ GradientDescentOptimizerv4Template<TInternalComputationValueType>::ResumeOptimiz
this->m_StopConditionDescription << this->GetNameOfClass() << ": ";
this->InvokeEvent(StartEvent());

this->m_Stop = false;
while (!this->m_Stop)
// If it is the first iteration, the value and derivative are computed for the first time
if (this->m_CurrentIteration == 0)
{
// Do not run the loop if the maximum number of iterations is reached or its value is zero.
// Unless no iterations were requested, in which case the value and derivative
// are never computed
if (this->m_CurrentIteration >= this->m_NumberOfIterations)
{
this->m_StopConditionDescription << "Maximum number of iterations (" << this->m_NumberOfIterations
<< ") exceeded.";
this->m_StopCondition = StopConditionObjectToObjectOptimizerEnum::MAXIMUM_NUMBER_OF_ITERATIONS;
this->StopOptimization();
break;
return;
}

// Save previous value with shallow swap that will be used by child optimizer.
swap(this->m_PreviousGradient, this->m_Gradient);

// Compute metric value/derivative.
try
{
// m_Gradient will be sized as needed by metric. If it's already
// proper size, no new allocation is done.
this->m_Metric->GetValueAndDerivative(this->m_CurrentMetricValue, this->m_Gradient);
}
catch (ExceptionObject & err)
this->GetMetricValueAndDerivative();

// Some metrics (RegularStep, QuasiNewton) rely on both m_Gradient and m_PreviousGradient to be valid.
// Therefore set m_PreviousGradient to be equal to m_Gradient.
this->m_PreviousGradient = this->m_Gradient;
}

// Loop until stop condition encountered
this->m_Stop = false;
while (!this->m_Stop)
{
// Check for exit on iterations
if (this->m_CurrentIteration >= this->m_NumberOfIterations)
{
this->m_StopCondition = StopConditionObjectToObjectOptimizerEnum::COSTFUNCTION_ERROR;
this->m_StopConditionDescription << "Metric error during optimization";
this->m_StopConditionDescription << "Maximum number of iterations (" << this->m_NumberOfIterations
<< ") exceeded.";
this->m_StopCondition = StopConditionObjectToObjectOptimizerEnum::MAXIMUM_NUMBER_OF_ITERATIONS;
this->StopOptimization();

// Pass exception to caller
throw err;
break;
}

// Check if optimization has been stopped externally.
Expand Down Expand Up @@ -143,19 +151,45 @@ GradientDescentOptimizerv4Template<TInternalComputationValueType>::ResumeOptimiz
// This will modify the gradient and update the transform.
this->AdvanceOneStep();

// Store best value and position
if (this->m_ReturnBestParametersAndValue && this->m_CurrentMetricValue < this->m_CurrentBestValue)
{
this->m_CurrentBestValue = this->m_CurrentMetricValue;
this->m_BestParameters = this->GetCurrentPosition();
}

// Update and check iteration count
this->m_CurrentIteration++;
// Save previous value with shallow swap that will be used by child optimizer.
swap(this->m_PreviousGradient, this->m_Gradient);

// Compute metric value/derivative.
this->GetMetricValueAndDerivative();
} // while (!m_Stop)
}

template <typename TInternalComputationValueType>
void
GradientDescentOptimizerv4Template<TInternalComputationValueType>::GetMetricValueAndDerivative()
{
  // Evaluate the metric once: fills m_CurrentMetricValue and m_Gradient,
  // records the best-so-far parameters when requested, and advances the
  // iteration counter. On metric failure, records the stop condition and
  // rethrows so the caller sees the original exception.
  try
  {
    // m_Gradient will be sized as needed by metric. If it's already
    // proper size, no new allocation is done.
    this->m_Metric->GetValueAndDerivative(this->m_CurrentMetricValue, this->m_Gradient);
  }
  catch (const ExceptionObject &)
  {
    this->m_StopCondition = StopConditionObjectToObjectOptimizerEnum::COSTFUNCTION_ERROR;
    this->m_StopConditionDescription << "Metric error during optimization";
    this->StopOptimization();
    // Rethrow the original exception: 'throw;' preserves its dynamic type,
    // whereas 'throw err;' would copy and could slice a derived exception.
    throw;
  }

  // Store best value and position seen so far (only when the caller asked
  // to have the best — rather than last — parameters returned).
  if (this->m_ReturnBestParametersAndValue && this->m_CurrentMetricValue < this->m_CurrentBestValue)
  {
    this->m_CurrentBestValue = this->m_CurrentMetricValue;
    this->m_BestParameters = this->GetCurrentPosition();
  }

  // Each successful evaluation counts as one completed iteration.
  this->m_CurrentIteration++;
}

template <typename TInternalComputationValueType>
void
GradientDescentOptimizerv4Template<TInternalComputationValueType>::AdvanceOneStep()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ QuasiNewtonOptimizerv4Template<TInternalComputationValueType>::AdvanceOneStep()
const SizeValueType numPara = this->m_Metric->GetNumberOfParameters();
this->m_CurrentPosition = this->m_Metric->GetParameters();

if (this->GetCurrentIteration() == 0)
if (this->GetCurrentIteration() == 1)
{
// initialize some information
this->m_PreviousValue = this->GetCurrentMetricValue();
Expand Down Expand Up @@ -136,7 +136,7 @@ QuasiNewtonOptimizerv4Template<TInternalComputationValueType>::AdvanceOneStep()
return;
}

if (this->GetCurrentIteration() > 0)
if (this->GetCurrentIteration() > 1)
{
ParametersType lastStep(numPara);
lastStep = this->m_CurrentPosition - this->m_PreviousPosition;
Expand Down Expand Up @@ -324,7 +324,7 @@ QuasiNewtonOptimizerv4Template<TInternalComputationValueType>::EstimateNewtonSte

for (IndexValueType loc = low; loc <= high; ++loc)
{
if (this->GetCurrentIteration() == 0)
if (this->GetCurrentIteration() == 1)
{
this->m_NewtonStepValidFlags[loc] = false;
}
Expand All @@ -345,7 +345,7 @@ template <typename TInternalComputationValueType>
bool
QuasiNewtonOptimizerv4Template<TInternalComputationValueType>::ComputeHessianAndStepWithBFGS(IndexValueType loc)
{
if (this->GetCurrentIteration() == 0)
if (this->GetCurrentIteration() == 1)
{
return false;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,25 +41,8 @@ RegularStepGradientDescentOptimizerv4<TInternalComputationValueType>::StartOptim
{
this->m_UseConvergenceMonitoring = false;

// Call the grandparent version for basic validation and setup
GradientDescentOptimizerBasev4Template<TInternalComputationValueType>::StartOptimization(doOnlyInitialization);

if (this->m_ReturnBestParametersAndValue)
{
this->m_BestParameters = this->GetCurrentPosition();
this->m_CurrentBestValue = NumericTraits<MeasureType>::max();
}

const SizeValueType spaceDimension = this->m_Metric->GetNumberOfParameters();

this->m_Gradient = DerivativeType(spaceDimension);
this->m_PreviousGradient = DerivativeType(spaceDimension);
this->m_Gradient.Fill(0.0f);
this->m_PreviousGradient.Fill(0.0f);

// Reset the iterations and learning rate scale when the optimizer is started again.
// Reset the learning rate scale when the optimizer is started again.
this->m_CurrentLearningRateRelaxation = 1.0;
this->m_CurrentIteration = 0;

// validity check for the value of GradientMagnitudeTolerance
if (m_GradientMagnitudeTolerance < 0.0)
Expand All @@ -69,10 +52,9 @@ RegularStepGradientDescentOptimizerv4<TInternalComputationValueType>::StartOptim
<< m_GradientMagnitudeTolerance);
}

if (!doOnlyInitialization)
{
this->ResumeOptimization();
}
/* Must call the superclass version for basic validation, setup,
* and to start the optimization loop. */
Superclass::StartOptimization(doOnlyInitialization);
}

template <typename TInternalComputationValueType>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -332,6 +332,32 @@ itkGradientDescentOptimizerv4Test(int, char *[])
}
}

// Verify that the optimizer stays at the initial position if
// number of iterations is set to one.
std::cout << "\nCheck the optimizer position (parameters) are equal to the initial "
"value when number of iterations is set to one:"
<< std::endl;
{
scales.Fill(1.0);
weights.Fill(1.0);
itkOptimizer->SetScales(scales);
itkOptimizer->SetWeights(weights);
itkOptimizer->SetNumberOfIterations(1);
metric->SetParameters(initialPosition);
trueParameters[0] = 100;
trueParameters[1] = -100;
if (GradientDescentOptimizerv4RunTest(itkOptimizer, trueParameters) == EXIT_FAILURE)
{
result = EXIT_FAILURE;
}
// Also the score should be equal to the initial score
double value = itkOptimizer->GetValue();
if (value != 24000)
{
result = EXIT_FAILURE;
}
}

std::cout << "\nTest the Exception if the optimizer is not set properly:" << std::endl;
OptimizerType::Pointer badOptimizer = OptimizerType::New();
bool caught = false;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,7 @@ itkGradientDescentOptimizerv4Test2(int, char *[])
metric->SetParameters(initialPosition);

itkOptimizer->SetLearningRate(1.0);
itkOptimizer->SetNumberOfIterations(1);
itkOptimizer->SetNumberOfIterations(2);

ScalesType scales(metric->GetNumberOfLocalParameters());
for (NumberOfParametersType i = 0; i < metric->GetNumberOfLocalParameters(); ++i)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -384,6 +384,50 @@ itkRegularStepGradientDescentOptimizerv4Test(int, char *[])
currentLearningRateRelaxation);
}

// Verify that the optimizer stays at the initial position if
// number of iterations is set to one.
std::cout << "\nCheck the optimizer position (parameters) are equal to the initial "
"value when number of iterations is set to one:"
<< std::endl;
{
RSGv4TestMetric::Pointer metric = RSGv4TestMetric::New();
itkOptimizer->SetMetric(metric);
using ParametersType = RSGv4TestMetric::ParametersType;
const unsigned int spaceDimension = metric->GetNumberOfParameters();
ParametersType initialPosition(spaceDimension);
initialPosition[0] = 100;
initialPosition[1] = -100;
metric->SetParameters(initialPosition);

itkOptimizer->SetNumberOfIterations(1);
try
{
itkOptimizer->StartOptimization();
}
catch (const itk::ExceptionObject & e)
{
return EXIT_FAILURE;
}

// Check results to see if it is within range
ParametersType finalPosition = itkOptimizer->GetMetric()->GetParameters();
double trueParameters[2] = { 100, -100 };
for (unsigned int j = 0; j < 2; ++j)
{
if (!itk::Math::FloatAlmostEqual(finalPosition[j], trueParameters[j]))
{
testStatus = EXIT_FAILURE;
}
}

// Also the score should be equal to the initial score
double value = itkOptimizer->GetValue();
if (value != 24000)
{
testStatus = EXIT_FAILURE;
}
}

//
// Test the Exception if the GradientMagnitudeTolerance is set to a negative value
//
Expand Down