itkAdaptiveStochasticLBFGSOptimizer.h
/*=========================================================================
 *
 * Copyright UMC Utrecht and contributors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0.txt
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *=========================================================================*/
#ifndef __itkAdaptiveStochasticLBFGSOptimizer_h
#define __itkAdaptiveStochasticLBFGSOptimizer_h

#include "../StandardStochasticGradientDescent/itkStandardStochasticGradientDescentOptimizer.h"

#include <string>

namespace itk
{
/** \class AdaptiveStochasticLBFGSOptimizer
 * \brief A stochastic gradient descent optimizer that can adapt its step size
 * using a sigmoid function (see the Sigmoid* parameters and UpdateCurrentTime()).
 */
class AdaptiveStochasticLBFGSOptimizer :
  public StandardStochasticGradientDescentOptimizer
{
public:

  /** Standard ITK typedefs. */
  typedef AdaptiveStochasticLBFGSOptimizer            Self;
  typedef StandardStochasticGradientDescentOptimizer  Superclass;
  typedef SmartPointer<Self>                          Pointer;
  typedef SmartPointer<const Self>                    ConstPointer;

  /** Method for creation through the object factory. */
  itkNewMacro( Self );

  /** Run-time type information (and related methods). */
  itkTypeMacro( AdaptiveStochasticLBFGSOptimizer,
    StandardStochasticGradientDescentOptimizer );

  /** Typedefs inherited from the superclass. */
  typedef Superclass::ScaledCostFunctionType    ScaledCostFunctionType;
  typedef Superclass::ScaledCostFunctionPointer ScaledCostFunctionPointer;
  /** Set/Get whether the adaptive step size mechanism is used. */
  itkSetMacro( UseAdaptiveStepSizes, bool );
  itkGetConstMacro( UseAdaptiveStepSizes, bool );

  /** Set/Get whether the search direction, rather than the gradient,
   * drives the adaptive step size. */
  itkSetMacro( UseSearchDirForAdaptiveStepSize, bool );
  itkGetConstMacro( UseSearchDirForAdaptiveStepSize, bool );

  /** Set/Get the maximum of the sigmoid. Should be > 0. */
  itkSetMacro( SigmoidMax, double );
  itkGetConstMacro( SigmoidMax, double );

  /** Set/Get the minimum of the sigmoid. Should be < 0. */
  itkSetMacro( SigmoidMin, double );
  itkGetConstMacro( SigmoidMin, double );

  /** Set/Get the scale of the sigmoid; larger values give a wider sigmoid.
   * Should be > 0. */
  itkSetMacro( SigmoidScale, double );
  itkGetConstMacro( SigmoidScale, double );
protected:

  AdaptiveStochasticLBFGSOptimizer();
  ~AdaptiveStochasticLBFGSOptimizer() override {}

  /** Update the current time, on which the gain (step size) depends.
   * When m_UseAdaptiveStepSizes is true, the time is adapted using a sigmoid
   * (configured by the Sigmoid* parameters) of information from the current
   * and previous iterations. */
  void UpdateCurrentTime( void ) override;

  // m_previousGradient and m_PrePreviousGradient are not used; where should they be declared?
  //DerivativeType m_previousGradient;
  //DerivativeType m_PrePreviousGradient;

  /** LBFGS update frequency (L) and step size strategy. */
  unsigned long m_UpdateFrequenceL;
  std::string   m_StepSizeStrategy;
private:

  AdaptiveStochasticLBFGSOptimizer( const Self& );  // purposely not implemented
  void operator=( const Self& );                    // purposely not implemented

  /** Settings for the adaptive step size mechanism. */
  bool   m_UseAdaptiveStepSizes;
  bool   m_UseSearchDirForAdaptiveStepSize;
  double m_SigmoidMax;
  double m_SigmoidMin;
  double m_SigmoidScale;
}; // end class AdaptiveStochasticLBFGSOptimizer

} // end namespace itk

#endif
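
The header above only declares the interface. As a rough illustration of how an optimizer exposing these ITK-style Set/Get macros is typically configured, here is a minimal sketch; the helper function name and the parameter values are illustrative assumptions, not defaults taken from this file.

#include "itkAdaptiveStochasticLBFGSOptimizer.h"

// Hypothetical helper: configure the adaptive step size mechanism of an
// already-created optimizer. All values shown are placeholders.
void ConfigureAdaptiveStepSize( itk::AdaptiveStochasticLBFGSOptimizer::Pointer optimizer )
{
  // Enable the sigmoid-based adaptive step size mechanism.
  optimizer->SetUseAdaptiveStepSizes( true );

  // Optionally drive the adaptation by the search direction instead of the gradient.
  optimizer->SetUseSearchDirForAdaptiveStepSize( false );

  // Shape of the sigmoid that modulates the time/step size update.
  optimizer->SetSigmoidMax( 1.0 );
  optimizer->SetSigmoidMin( -0.8 );
  optimizer->SetSigmoidScale( 1e-8 );
}

// Creation goes through the object factory, as declared by itkNewMacro( Self ):
//   auto optimizer = itk::AdaptiveStochasticLBFGSOptimizer::New();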