ITK 5.2.0 (Insight Toolkit)
itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType > Class Template Reference
#include <itkRegularStepGradientDescentOptimizerv4.h>
Public Member Functions | |
virtual ::itk::LightObject::Pointer | CreateAnother () const |
virtual const char * | GetNameOfClass () const |
virtual void | SetMinimumStepLength (TInternalComputationValueType _arg) |
virtual const TInternalComputationValueType & | GetMinimumStepLength () const |
virtual void | SetRelaxationFactor (TInternalComputationValueType _arg) |
virtual const TInternalComputationValueType & | GetRelaxationFactor () const |
virtual void | SetGradientMagnitudeTolerance (TInternalComputationValueType _arg) |
virtual const TInternalComputationValueType & | GetGradientMagnitudeTolerance () const |
virtual void | SetCurrentLearningRateRelaxation (MeasureType _arg) |
virtual const MeasureType & | GetCurrentLearningRateRelaxation () const |
void | StartOptimization (bool doOnlyInitialization=false) override |
void | EstimateLearningRate () override |
double | GetCurrentStepLength () const |
Public Member Functions inherited from itk::GradientDescentOptimizerv4Template< TInternalComputationValueType > | |
virtual void | SetLearningRate (TInternalComputationValueType _arg) |
virtual const TInternalComputationValueType & | GetLearningRate () const |
virtual void | SetMaximumStepSizeInPhysicalUnits (TInternalComputationValueType _arg) |
virtual const TInternalComputationValueType & | GetMaximumStepSizeInPhysicalUnits () const |
virtual void | SetDoEstimateLearningRateAtEachIteration (bool _arg) |
virtual const bool & | GetDoEstimateLearningRateAtEachIteration () const |
virtual void | DoEstimateLearningRateAtEachIterationOn () |
virtual void | DoEstimateLearningRateAtEachIterationOff () |
virtual void | SetDoEstimateLearningRateOnce (bool _arg) |
virtual const bool & | GetDoEstimateLearningRateOnce () const |
virtual void | DoEstimateLearningRateOnceOn () |
virtual void | DoEstimateLearningRateOnceOff () |
virtual void | SetMinimumConvergenceValue (TInternalComputationValueType _arg) |
virtual void | SetConvergenceWindowSize (SizeValueType _arg) |
virtual const TInternalComputationValueType & | GetConvergenceValue () const |
virtual void | SetReturnBestParametersAndValue (bool _arg) |
virtual const bool & | GetReturnBestParametersAndValue () const |
virtual void | ReturnBestParametersAndValueOn () |
virtual void | ReturnBestParametersAndValueOff () |
void | StopOptimization () override |
void | ResumeOptimization () override |
Public Member Functions inherited from itk::GradientDescentOptimizerBasev4Template< TInternalComputationValueType > | |
virtual const DerivativeType & | GetGradient () const |
virtual const StopConditionObjectToObjectOptimizerEnum & | GetStopCondition () const |
void | SetNumberOfIterations (const SizeValueType numberOfIterations) override |
SizeValueType | GetNumberOfIterations () const override |
SizeValueType | GetCurrentIteration () const override |
const StopConditionReturnStringType | GetStopConditionDescription () const override |
virtual void | ModifyGradientByScales () |
virtual void | ModifyGradientByLearningRate () |
Public Member Functions inherited from itk::ObjectToObjectOptimizerBaseTemplate< TInternalComputationValueType > | |
virtual void | SetMetric (MetricType *_arg) |
virtual MetricType * | GetModifiableMetric () |
virtual const MetricType * | GetMetric () const |
virtual const MeasureType & | GetCurrentMetricValue () const |
virtual const MeasureType & | GetValue () const |
virtual void | SetScales (const ScalesType &scales) |
virtual const ScalesType & | GetScales () const |
virtual const bool & | GetScalesAreIdentity () const |
virtual void | SetWeights (ScalesType _arg) |
virtual const ScalesType & | GetWeights () const |
virtual const bool & | GetWeightsAreIdentity () const |
bool | GetScalesInitialized () const |
virtual void | SetScalesEstimator (ScalesEstimatorType *_arg) |
virtual void | SetDoEstimateScales (bool _arg) |
virtual const bool & | GetDoEstimateScales () const |
virtual void | DoEstimateScalesOn () |
virtual void | DoEstimateScalesOff () |
virtual void | SetNumberOfWorkUnits (ThreadIdType number) |
virtual const ThreadIdType & | GetNumberOfWorkUnits () const |
virtual const ParametersType & | GetCurrentPosition () const |
Public Member Functions inherited from itk::Object | |
unsigned long | AddObserver (const EventObject &event, Command *) |
unsigned long | AddObserver (const EventObject &event, Command *) const |
unsigned long | AddObserver (const EventObject &event, std::function< void(const EventObject &)> function) const |
virtual void | DebugOff () const |
virtual void | DebugOn () const |
Command * | GetCommand (unsigned long tag) |
bool | GetDebug () const |
MetaDataDictionary & | GetMetaDataDictionary () |
const MetaDataDictionary & | GetMetaDataDictionary () const |
virtual ModifiedTimeType | GetMTime () const |
virtual const TimeStamp & | GetTimeStamp () const |
bool | HasObserver (const EventObject &event) const |
void | InvokeEvent (const EventObject &) |
void | InvokeEvent (const EventObject &) const |
virtual void | Modified () const |
void | Register () const override |
void | RemoveAllObservers () |
void | RemoveObserver (unsigned long tag) |
void | SetDebug (bool debugFlag) const |
void | SetReferenceCount (int) override |
void | UnRegister () const noexcept override |
void | SetMetaDataDictionary (const MetaDataDictionary &rhs) |
void | SetMetaDataDictionary (MetaDataDictionary &&rrhs) |
virtual void | SetObjectName (std::string _arg) |
virtual const std::string & | GetObjectName () const |
Public Member Functions inherited from itk::LightObject | |
Pointer | Clone () const |
virtual void | Delete () |
virtual int | GetReferenceCount () const |
void | Print (std::ostream &os, Indent indent=0) const |
Static Public Member Functions | |
static Pointer | New () |
Static Public Member Functions inherited from itk::GradientDescentOptimizerv4Template< TInternalComputationValueType > | |
static Pointer | New () |
Static Public Member Functions inherited from itk::Object | |
static bool | GetGlobalWarningDisplay () |
static void | GlobalWarningDisplayOff () |
static void | GlobalWarningDisplayOn () |
static Pointer | New () |
static void | SetGlobalWarningDisplay (bool val) |
Static Public Member Functions inherited from itk::LightObject | |
static void | BreakOnError () |
static Pointer | New () |
Protected Member Functions | |
void | AdvanceOneStep () override |
void | ModifyGradientByScalesOverSubRange (const IndexRangeType &subrange) override |
void | ModifyGradientByLearningRateOverSubRange (const IndexRangeType &subrange) override |
RegularStepGradientDescentOptimizerv4 () | |
~RegularStepGradientDescentOptimizerv4 () override=default | |
void | PrintSelf (std::ostream &os, Indent indent) const override |
Private Attributes | |
TInternalComputationValueType | m_RelaxationFactor |
TInternalComputationValueType | m_MinimumStepLength |
TInternalComputationValueType | m_GradientMagnitudeTolerance |
MeasureType | m_CurrentLearningRateRelaxation |
Detailed Description
Regular step gradient descent optimizer.
This optimizer is a variant of gradient descent that attempts to prevent steps that are too large. At each iteration, it takes a step along the direction of the metric derivative. Whenever the direction of the derivative changes abruptly, the optimizer assumes that a local extremum has been passed and reduces the step length by a relaxation factor, which is 0.5 by default.
The default initial step length is 1, and it can only be changed manually via SetLearningRate(), since this optimizer does not use a ScalesEstimator to estimate the learning rate automatically. Also note that, unlike the earlier RegularStepGradientDescentOptimizer, the ITKv4 version has no "maximize/minimize" option to modify the effect of the metric derivative: the assigned metric is assumed to return a parameter derivative that "improves" the optimization.
Definition at line 47 of file itkRegularStepGradientDescentOptimizerv4.h.
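A minimal configuration sketch, assuming ITK 5.x is installed. The parameter values are illustrative only, and the metric/registration pipeline the optimizer would normally be attached to (for example via itk::ImageRegistrationMethodv4) is omitted; every call used here appears in the member tables above.

#include "itkRegularStepGradientDescentOptimizerv4.h"
#include <iostream>

int main()
{
  using OptimizerType = itk::RegularStepGradientDescentOptimizerv4<double>;
  auto optimizer = OptimizerType::New();

  // Step-size behavior: start at the default step length of 1, halve the step
  // whenever the derivative direction reverses, and stop once the step falls
  // below the minimum step length or the gradient magnitude becomes negligible.
  optimizer->SetLearningRate(1.0);
  optimizer->SetRelaxationFactor(0.5);
  optimizer->SetMinimumStepLength(1e-4);
  optimizer->SetGradientMagnitudeTolerance(1e-6);
  optimizer->SetNumberOfIterations(200);

  optimizer->Print(std::cout); // dump the configured state
  return 0;
}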
Member Typedef Documentation
using itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::CompensatedSummationType = CompensatedSummation<InternalComputationValueType> |
Compensated summation type.
Definition at line 79 of file itkRegularStepGradientDescentOptimizerv4.h.
using itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::ConstPointer = SmartPointer<const Self> |
Definition at line 57 of file itkRegularStepGradientDescentOptimizerv4.h.
using itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::DerivativeType = typename Superclass::DerivativeType |
Derivative type.
Definition at line 70 of file itkRegularStepGradientDescentOptimizerv4.h.
using itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::IndexRangeType = typename Superclass::IndexRangeType |
Definition at line 74 of file itkRegularStepGradientDescentOptimizerv4.h.
using itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::InternalComputationValueType = TInternalComputationValueType |
It should be possible to derive the internal computation type from the class object.
Definition at line 67 of file itkRegularStepGradientDescentOptimizerv4.h.
using itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::MeasureType = typename Superclass::MeasureType |
Metric type over which this class is templated.
Definition at line 73 of file itkRegularStepGradientDescentOptimizerv4.h.
using itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::ParametersType = typename Superclass::ParametersType |
Definition at line 76 of file itkRegularStepGradientDescentOptimizerv4.h.
using itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::Pointer = SmartPointer<Self> |
Definition at line 56 of file itkRegularStepGradientDescentOptimizerv4.h.
using itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::ScalesType = typename Superclass::ScalesType |
Definition at line 75 of file itkRegularStepGradientDescentOptimizerv4.h.
using itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::Self = RegularStepGradientDescentOptimizerv4 |
Standard class type aliases.
Definition at line 54 of file itkRegularStepGradientDescentOptimizerv4.h.
using itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::Superclass = GradientDescentOptimizerv4Template<TInternalComputationValueType> |
Definition at line 55 of file itkRegularStepGradientDescentOptimizerv4.h.
Constructor & Destructor Documentation
itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::RegularStepGradientDescentOptimizerv4 | ( | ) | |
|
protected
Default constructor.
itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::~RegularStepGradientDescentOptimizerv4 | ( | ) | |
|
overrideprotecteddefault
Destructor.
Member Function Documentation
void itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::AdvanceOneStep | ( | ) | |
|
overrideprotectedvirtual
Advance one step following the gradient direction. Includes the transform update.
Reimplemented from itk::GradientDescentOptimizerv4Template< TInternalComputationValueType >.
::itk::LightObject::Pointer itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::CreateAnother | ( | ) | const |
|
virtual
Create an object from an instance, potentially deferring to a factory. This method allows you to create an instance of an object that is exactly the same type as the referring object. This is useful in cases where an object has been cast back to a base class.
Reimplemented from itk::GradientDescentOptimizerv4Template< TInternalComputationValueType >.
void itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::EstimateLearningRate | ( | ) | |
|
overridevirtual
Estimate the learning rate based on the current gradient.
Reimplemented from itk::GradientDescentOptimizerv4Template< TInternalComputationValueType >.
const MeasureType & itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::GetCurrentLearningRateRelaxation | ( | ) | const |
|
virtual
Minimum step length (learning rate) value for convergence checking. When a local minimum is passed by taking a large step, the step length is adjusted (decreased) by the relaxation factor so that smaller steps are taken towards the minimum point (convergence). When the step length reaches a sufficiently small value, the optimization is treated as converged.
The default value is set to 1e-4 so that all tests pass.
double itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::GetCurrentStepLength | ( | ) | const |
Get current gradient step value.
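GetCurrentStepLength() is typically read from an iteration observer while the optimization runs. A hedged sketch, assuming ITK 5.x: the lambda overload of AddObserver() and the accessors used below are all listed in the member tables above, but the metric/registration setup that would normally drive the iterations is omitted, so IterationEvent is fired by hand once just to exercise the callback.

#include "itkRegularStepGradientDescentOptimizerv4.h"
#include "itkEventObject.h"
#include <iostream>

int main()
{
  using OptimizerType = itk::RegularStepGradientDescentOptimizerv4<double>;
  auto optimizer = OptimizerType::New();

  // Capture a raw pointer so the stored observer does not hold the optimizer
  // through its own smart pointer (avoids a reference cycle).
  OptimizerType * rawOptimizer = optimizer.GetPointer();
  optimizer->AddObserver(itk::IterationEvent(), [rawOptimizer](const itk::EventObject &) {
    std::cout << "iteration " << rawOptimizer->GetCurrentIteration()
              << ", step length " << rawOptimizer->GetCurrentStepLength() << '\n';
  });

  // In a real run, StartOptimization() (or the registration method's Update())
  // emits IterationEvent once per iteration; here the event is fired manually.
  optimizer->InvokeEvent(itk::IterationEvent());
  return 0;
}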
const TInternalComputationValueType & itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::GetGradientMagnitudeTolerance | ( | ) | const |
|
virtual
Minimum step length (learning rate) value for convergence checking. When a local minimum is passed by taking a large step, the step length is adjusted (decreased) by the relaxation factor so that smaller steps are taken towards the minimum point (convergence). When the step length reaches a sufficiently small value, the optimization is treated as converged.
The default value is set to 1e-4 so that all tests pass.
const TInternalComputationValueType & itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::GetMinimumStepLength | ( | ) | const |
|
virtual
Minimum step length (learning rate) value for convergence checking. When a local minimum is passed by taking a large step, the step length is adjusted (decreased) by the relaxation factor so that smaller steps are taken towards the minimum point (convergence). When the step length reaches a sufficiently small value, the optimization is treated as converged.
The default value is set to 1e-4 so that all tests pass.
const char * itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::GetNameOfClass | ( | ) | const |
|
virtual
Run-time type information (and related methods).
Reimplemented from itk::GradientDescentOptimizerv4Template< TInternalComputationValueType >.
const TInternalComputationValueType & itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::GetRelaxationFactor | ( | ) | const |
|
virtual
Minimum step length (learning rate) value for convergence checking. When a local minimum is passed by taking a large step, the step length is adjusted (decreased) by the relaxation factor so that smaller steps are taken towards the minimum point (convergence). When the step length reaches a sufficiently small value, the optimization is treated as converged.
The default value is set to 1e-4 so that all tests pass.
void itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::ModifyGradientByLearningRateOverSubRange | ( | const IndexRangeType & | subrange | ) | |
|
overrideprotectedvirtual
Minimum step length (learning rate) value for convergence checking. When a local minimum is passed by taking a large step, the step length is adjusted (decreased) by the relaxation factor so that smaller steps are taken towards the minimum point (convergence). When the step length reaches a sufficiently small value, the optimization is treated as converged.
The default value is set to 1e-4 so that all tests pass.
Reimplemented from itk::GradientDescentOptimizerv4Template< TInternalComputationValueType >.
void itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::ModifyGradientByScalesOverSubRange | ( | const IndexRangeType & | subrange | ) | |
|
overrideprotectedvirtual
Modify the input gradient over a given index range.
Reimplemented from itk::GradientDescentOptimizerv4Template< TInternalComputationValueType >.
static Pointer itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::New | ( | ) | |
|
static
New macro for creation of the object through a SmartPointer.
void itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::PrintSelf | ( | std::ostream & | os, | Indent | indent | ) | const |
|
overrideprotectedvirtual
Minimum step length (learning rate) value for convergence checking. When a local minimum is passed by taking a large step, the step length is adjusted (decreased) by the relaxation factor so that smaller steps are taken towards the minimum point (convergence). When the step length reaches a sufficiently small value, the optimization is treated as converged.
The default value is set to 1e-4 so that all tests pass.
Reimplemented from itk::GradientDescentOptimizerv4Template< TInternalComputationValueType >.
void itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::SetCurrentLearningRateRelaxation | ( | MeasureType | _arg | ) | |
|
virtual
Set/Get current scale for learning rate.
void itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::SetGradientMagnitudeTolerance | ( | TInternalComputationValueType | _arg | ) | |
|
virtual
Set/Get gradient magnitude tolerance value for convergence checking.
void itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::SetMinimumStepLength | ( | TInternalComputationValueType | _arg | ) | |
|
virtual
Minimum step length (learning rate) value for convergence checking. When a local minimum is passed by taking a large step, the step length is adjusted (decreased) by the relaxation factor so that smaller steps are taken towards the minimum point (convergence). When the step length reaches a sufficiently small value, the optimization is treated as converged.
The default value is set to 1e-4 so that all tests pass.
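The paragraph above describes when the step length is relaxed and when the optimization is considered converged. The following standalone sketch (plain C++, not ITK code) simply plays out that rule with the documented default values, assuming the worst case where the derivative direction reverses at every iteration.

#include <iostream>

int main()
{
  // Illustrative values taken from the documentation above.
  double stepLength = 1.0;               // initial step length (learning rate)
  const double relaxationFactor = 0.5;   // default relaxation factor
  const double minimumStepLength = 1e-4; // default convergence threshold

  // Worst case: the derivative direction reverses at every iteration, so the
  // step length is relaxed each time until it drops below the threshold.
  int relaxations = 0;
  while (stepLength >= minimumStepLength)
  {
    stepLength *= relaxationFactor;
    ++relaxations;
  }
  std::cout << "Treated as converged after " << relaxations << " relaxations\n";
  return 0;
}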
void itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::SetRelaxationFactor | ( | TInternalComputationValueType | _arg | ) | |
|
virtual
Set/Get relaxation factor value.
void itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::StartOptimization | ( | bool | doOnlyInitialization = false | ) | |
|
overridevirtual
Start and run the optimization.
Reimplemented from itk::GradientDescentOptimizerv4Template< TInternalComputationValueType >.
Member Data Documentation
MeasureType itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::m_CurrentLearningRateRelaxation |
|
private
Minimum step length (learning rate) value for convergence checking. When a local minimum is passed by taking a large step, the step length is adjusted (decreased) by the relaxation factor so that smaller steps are taken towards the minimum point (convergence). When the step length reaches a sufficiently small value, the optimization is treated as converged.
The default value is set to 1e-4 so that all tests pass.
Definition at line 152 of file itkRegularStepGradientDescentOptimizerv4.h.
TInternalComputationValueType itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::m_GradientMagnitudeTolerance |
|
private
Minimum step length (learning rate) value for convergence checking. When a local minimum is passed by taking a large step, the step length is adjusted (decreased) by the relaxation factor so that smaller steps are taken towards the minimum point (convergence). When the step length reaches a sufficiently small value, the optimization is treated as converged.
The default value is set to 1e-4 so that all tests pass.
Definition at line 150 of file itkRegularStepGradientDescentOptimizerv4.h.
TInternalComputationValueType itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::m_MinimumStepLength |
|
private
Minimum step length (learning rate) value for convergence checking. When a local minimum is passed by taking a large step, the step length is adjusted (decreased) by the relaxation factor so that smaller steps are taken towards the minimum point (convergence). When the step length reaches a sufficiently small value, the optimization is treated as converged.
The default value is set to 1e-4 so that all tests pass.
Definition at line 148 of file itkRegularStepGradientDescentOptimizerv4.h.
TInternalComputationValueType itk::RegularStepGradientDescentOptimizerv4< TInternalComputationValueType >::m_RelaxationFactor |
|
private
Minimum step length (learning rate) value for convergence checking. When a local minimum is passed by taking a large step, the step length is adjusted (decreased) by the relaxation factor so that smaller steps are taken towards the minimum point (convergence). When the step length reaches a sufficiently small value, the optimization is treated as converged.
The default value is set to 1e-4 so that all tests pass.
Definition at line 146 of file itkRegularStepGradientDescentOptimizerv4.h.