#include <itkGradientDescentOptimizer.h>
Implement a gradient descent optimizer.
GradientDescentOptimizer implements a simple gradient descent optimizer. At each iteration the current position is updated according to

\[ p_{n+1} = p_n + \mathrm{learningRate} \; \frac{\partial f(p_n)}{\partial p_n} \]

The learning rate is a fixed scalar defined via SetLearningRate(). The optimizer steps through a user-defined number of iterations; no convergence checking is done. When maximization is off (the default), the step is taken along the negative gradient, so the cost function is minimized.
Additionally, the user can scale each component of the derivative, \( \partial f / \partial p \), by setting a scaling vector with SetScales(). A usage sketch is given below.
- See also
- RegularStepGradientDescentOptimizer
- Examples
- Examples/RegistrationITKv4/ImageRegistration2.cxx, SphinxExamples/src/Core/Transform/MutualInformationAffine/Code.cxx, SphinxExamples/src/Registration/Common/MutualInformation/Code.cxx, and SphinxExamples/src/Registration/Common/PerformMultiModalityRegistrationWithMutualInformation/Code.cxx.
Definition at line 72 of file itkGradientDescentOptimizer.h.
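A minimal usage sketch follows; it is not taken from the ITK example files listed above. The ParaboloidCostFunction class is a hypothetical helper written only for this illustration, minimizing \( f(p) = (p_0 - 3)^2 + (p_1 + 4)^2 \):

```cpp
#include "itkGradientDescentOptimizer.h"
#include "itkSingleValuedCostFunction.h"
#include <iostream>

// Hypothetical cost function written for this sketch (not part of ITK):
// a paraboloid with its minimum at (3, -4).
class ParaboloidCostFunction : public itk::SingleValuedCostFunction
{
public:
  using Self = ParaboloidCostFunction;
  using Superclass = itk::SingleValuedCostFunction;
  using Pointer = itk::SmartPointer<Self>;
  itkNewMacro(Self);

  unsigned int GetNumberOfParameters() const override { return 2; }

  MeasureType GetValue(const ParametersType & p) const override
  {
    return (p[0] - 3.0) * (p[0] - 3.0) + (p[1] + 4.0) * (p[1] + 4.0);
  }

  void GetDerivative(const ParametersType & p, DerivativeType & d) const override
  {
    d.SetSize(2);
    d[0] = 2.0 * (p[0] - 3.0);
    d[1] = 2.0 * (p[1] + 4.0);
  }
};

int main()
{
  auto cost = ParaboloidCostFunction::New();
  auto optimizer = itk::GradientDescentOptimizer::New();

  optimizer->SetCostFunction(cost);
  optimizer->MinimizeOn();                // step against the gradient
  optimizer->SetLearningRate(0.1);        // fixed scalar step size
  optimizer->SetNumberOfIterations(200);  // always runs this many steps

  itk::GradientDescentOptimizer::ParametersType initial(2);
  initial.Fill(0.0);
  optimizer->SetInitialPosition(initial);
  // Per-component scaling of the derivative could be applied here
  // via optimizer->SetScales(...).

  optimizer->StartOptimization();

  std::cout << "Position: " << optimizer->GetCurrentPosition() << std::endl;
  std::cout << "Stop:     " << optimizer->GetStopConditionDescription() << std::endl;
  return 0;
}
```

With a learning rate of 0.1 the update contracts the distance to the minimum by a factor of 0.8 per iteration, so 200 iterations land essentially at (3, -4).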
◆ ConstPointer
using ConstPointer = SmartPointer<const Self>
◆ Pointer
using Pointer = SmartPointer<Self>
◆ Self
using Self = GradientDescentOptimizer
◆ StopConditionGradientDescentOptimizerEnum
using StopConditionGradientDescentOptimizerEnum = GradientDescentOptimizerEnums::StopConditionGradientDescentOptimizer
◆ Superclass
using Superclass = SingleValuedNonLinearOptimizer
◆ GradientDescentOptimizer()
itk::GradientDescentOptimizer::GradientDescentOptimizer ( )  [protected]
Protected default constructor; create instances through New().
◆ ~GradientDescentOptimizer()
itk::GradientDescentOptimizer::~GradientDescentOptimizer ( )  [override, protected, default]
Defaulted destructor.
◆ AdvanceOneStep()
virtual void itk::GradientDescentOptimizer::AdvanceOneStep ( )  [virtual]
Advance one step following the gradient direction.
◆ CreateAnother()
virtual ::itk::LightObject::Pointer itk::GradientDescentOptimizer::CreateAnother ( ) const  [virtual]
Create an object from an instance, potentially deferring to a factory. This method allows you to create an instance of an object that is exactly the same type as the referring object. This is useful in cases where an object has been cast back to a base class.
Reimplemented from itk::Object.
Reimplemented in itk::QuaternionRigidTransformGradientDescentOptimizer.
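A short sketch of the behavior described above, assuming only the ITK object model: an instance held through a base-class pointer still produces a new object of its concrete type.

```cpp
#include "itkGradientDescentOptimizer.h"
#include <iostream>

int main()
{
  itk::Optimizer::Pointer base = itk::GradientDescentOptimizer::New().GetPointer();

  // CreateAnother() consults the object's actual type, not the static type
  // of the pointer, so 'another' is a new GradientDescentOptimizer.
  itk::LightObject::Pointer another = base->CreateAnother();
  std::cout << another->GetNameOfClass() << std::endl;  // GradientDescentOptimizer
  return 0;
}
```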
◆ GetCurrentIteration()
virtual SizeValueType itk::GradientDescentOptimizer::GetCurrentIteration ( ) const  [virtual]
Get the current iteration number.
◆ GetGradient()
virtual const DerivativeType & itk::GradientDescentOptimizer::GetGradient ( ) const  [virtual]
Get the gradient computed at the current position.
◆ GetLearningRate()
virtual const double & itk::GradientDescentOptimizer::GetLearningRate ( ) const  [virtual]
Get the learning rate.
◆ GetMaximize()
virtual const bool & itk::GradientDescentOptimizer::GetMaximize ( ) const  [virtual]
Query whether the optimizer maximizes (true) or minimizes (false) the cost function.
◆ GetMinimize()
bool itk::GradientDescentOptimizer::GetMinimize ( ) const  [inline]
Returns the complement of GetMaximize(): true when the optimizer minimizes the cost function.
◆ GetNameOfClass()
virtual const char * itk::GradientDescentOptimizer::GetNameOfClass ( ) const  [virtual]
Run-time type information.
◆ GetNumberOfIterations()
virtual const SizeValueType & itk::GradientDescentOptimizer::GetNumberOfIterations ( ) const  [virtual]
Get the number of iterations.
◆ GetStopCondition()
virtual const StopConditionGradientDescentOptimizerEnum & itk::GradientDescentOptimizer::GetStopCondition ( ) const  [virtual]
Get the reason the optimization stopped.
◆ GetStopConditionDescription()
const std::string itk::GradientDescentOptimizer::GetStopConditionDescription ( ) const  [override, virtual]
Get a human-readable description of the stop condition.
Reimplemented from itk::Optimizer.
◆ GetValue()
virtual const double & itk::GradientDescentOptimizer::GetValue ( ) const  [virtual]
Get the current cost function value.
◆ MaximizeOff()
virtual void itk::GradientDescentOptimizer::MaximizeOff ( )  [virtual]
Turn maximization off, i.e. minimize the cost function.
◆ MaximizeOn()
virtual void itk::GradientDescentOptimizer::MaximizeOn ( )  [virtual]
Turn maximization on, i.e. maximize instead of minimizing the cost function.
◆ MinimizeOff()
void itk::GradientDescentOptimizer::MinimizeOff ( )  [inline]
Convenience method equivalent to MaximizeOn().
◆ MinimizeOn()
void itk::GradientDescentOptimizer::MinimizeOn ( )  [inline]
Convenience method equivalent to MaximizeOff().
◆ New()
static Pointer itk::GradientDescentOptimizer::New ( )  [static]
Method for creation through the object factory.
◆ PrintSelf()
void itk::GradientDescentOptimizer::PrintSelf ( std::ostream & os, Indent indent ) const  [override, protected, virtual]
Print the internal state of the optimizer; invoked by Print().
Reimplemented from itk::Object.
◆ ResumeOptimization()
void itk::GradientDescentOptimizer::ResumeOptimization ( )
Resume previously stopped optimization with the current parameters.
- See also
- StopOptimization.
◆ SetLearningRate()
virtual void itk::GradientDescentOptimizer::SetLearningRate ( double _arg )  [virtual]
Set the learning rate, the fixed scalar that multiplies the gradient at each step.
◆ SetMaximize()
virtual void itk::GradientDescentOptimizer::SetMaximize ( bool _arg )  [virtual]
Set whether the optimizer maximizes (true) or minimizes (false) the cost function.
◆ SetMinimize()
void itk::GradientDescentOptimizer::SetMinimize ( bool v )  [inline]
Convenience method equivalent to SetMaximize(!v).
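The Minimize methods are thin inline wrappers that negate the Maximize flag. A minimal sketch of how the toggles interact:

```cpp
#include "itkGradientDescentOptimizer.h"
#include <iostream>

int main()
{
  auto optimizer = itk::GradientDescentOptimizer::New();

  optimizer->SetMinimize(true);  // forwards to SetMaximize(!true)
  std::cout << std::boolalpha
            << optimizer->GetMaximize() << '\n';  // false
  optimizer->MaximizeOn();       // flips the same underlying m_Maximize flag
  std::cout << optimizer->GetMinimize() << '\n';  // false
  return 0;
}
```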
◆ SetNumberOfIterations()
virtual void itk::GradientDescentOptimizer::SetNumberOfIterations ( SizeValueType _arg )  [virtual]
Set the number of iterations.
◆ StartOptimization()
void itk::GradientDescentOptimizer::StartOptimization ( )  [override, virtual]
Start optimization from the initial position.
◆ StopOptimization()
void itk::GradientDescentOptimizer::StopOptimization ( )
Stop the optimization; it can be continued with ResumeOptimization().
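StopOptimization() is typically invoked from an observer while the optimization loop is running. The sketch below is illustrative, not ITK code: the hypothetical EarlyStopObserver halts the loop once at iteration 10, after which the caller resumes from the current parameters. It assumes an optimizer already wired to a cost function (for instance the ParaboloidCostFunction sketch above) and that AdvanceOneStep() fires an itk::IterationEvent on every step.

```cpp
#include "itkGradientDescentOptimizer.h"
#include "itkCommand.h"
#include <iostream>

// Hypothetical observer written for this sketch: stops the optimizer the
// first time it reaches iteration 10.
class EarlyStopObserver : public itk::Command
{
public:
  using Self = EarlyStopObserver;
  using Pointer = itk::SmartPointer<Self>;
  itkNewMacro(Self);

  void Execute(itk::Object * caller, const itk::EventObject & event) override
  {
    auto * optimizer = dynamic_cast<itk::GradientDescentOptimizer *>(caller);
    if (optimizer != nullptr && itk::IterationEvent().CheckEvent(&event) &&
        optimizer->GetCurrentIteration() == 10)
    {
      optimizer->StopOptimization();
    }
  }

  void Execute(const itk::Object *, const itk::EventObject &) override {}
};

// Usage, given an 'optimizer' already configured with a cost function:
//
//   optimizer->AddObserver(itk::IterationEvent(), EarlyStopObserver::New());
//   optimizer->StartOptimization();   // halts at iteration 10
//   optimizer->ResumeOptimization();  // continues from the current position
```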
◆ m_CurrentIteration
SizeValueType itk::GradientDescentOptimizer::m_CurrentIteration { 0 }  [private]
◆ m_Gradient
DerivativeType itk::GradientDescentOptimizer::m_Gradient  [protected]
◆ m_LearningRate
double itk::GradientDescentOptimizer::m_LearningRate { 1.0 }  [protected]
◆ m_Maximize
bool itk::GradientDescentOptimizer::m_Maximize { false }  [protected]
◆ m_NumberOfIterations
SizeValueType itk::GradientDescentOptimizer::m_NumberOfIterations { 100 }  [private]
◆ m_Stop
bool itk::GradientDescentOptimizer::m_Stop { false }  [private]
◆ m_StopCondition
StopConditionGradientDescentOptimizerEnum itk::GradientDescentOptimizer::m_StopCondition  [private]
◆ m_StopConditionDescription
std::ostringstream itk::GradientDescentOptimizer::m_StopConditionDescription  [private]
◆ m_Value
double itk::GradientDescentOptimizer::m_Value { 0.0 }  [private]
The documentation for this class was generated from the following file: itkGradientDescentOptimizer.h