// Ceres Solver - A fast non-linear least squares minimizer
// Copyright 2012 Google Inc. All rights reserved.
// http://code.google.com/p/ceres-solver/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
//   this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors may be
//   used to endorse or promote products derived from this software without
//   specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Author: sameeragarwal@google.com (Sameer Agarwal)
//
// Interface for and implementation of various line search algorithms.

#ifndef CERES_INTERNAL_LINE_SEARCH_H_
#define CERES_INTERNAL_LINE_SEARCH_H_

#include <string>
#include <vector>
#include "ceres/internal/eigen.h"
#include "ceres/internal/port.h"
#include "ceres/types.h"

namespace ceres {
namespace internal {

class Evaluator;
struct FunctionSample;

// Line search is another name for a one dimensional optimization
// algorithm. The name "line search" comes from the fact that such one
// dimensional optimization problems arise as subproblems of general
// multidimensional optimization problems.
//
// While finding the exact minimum of a one dimensional function is
// hard, instances of LineSearch find a point that satisfies a
// sufficient decrease condition. Depending on the particular
// condition used, we get a variety of different line search
// algorithms, e.g., Armijo, Wolfe, etc.
class LineSearch {
 public:
  class Function;

  struct Options {
    Options()
        : interpolation_type(CUBIC),
          sufficient_decrease(1e-4),
          max_step_contraction(1e-3),
          min_step_contraction(0.9),
          min_step_size(1e-9),
          max_num_iterations(20),
          sufficient_curvature_decrease(0.9),
          max_step_expansion(10.0),
          is_silent(false),
          function(NULL) {}

    // Degree of the polynomial used to approximate the objective
    // function.
    LineSearchInterpolationType interpolation_type;

    // Armijo and Wolfe line search parameters.

    // Solving the line search problem exactly is computationally
    // prohibitive. Fortunately, line search based optimization
    // algorithms can still guarantee convergence if instead of an
    // exact solution, the line search algorithm returns a solution
    // which decreases the value of the objective function
    // sufficiently. More precisely, we are looking for a step_size
    // s.t.
    //
    //  f(step_size) <= f(0) + sufficient_decrease * f'(0) * step_size
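    //
    // As a concrete (purely illustrative) example: with the default
    // sufficient_decrease = 1e-4, f(0) = 10.0 and f'(0) = -2.0, a
    // step_size of 1.0 is acceptable only if
    //
    //  f(1.0) <= 10.0 + 1e-4 * (-2.0) * 1.0 = 9.9998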
    double sufficient_decrease;

    // In each iteration of the Armijo / Wolfe line search,
    //
    // new_step_size >= max_step_contraction * step_size
    //
    // Note that by definition, for contraction:
    //
    //  0 < max_step_contraction < min_step_contraction < 1
    //
    double max_step_contraction;

    // In each iteration of the Armijo / Wolfe line search,
    //
    // new_step_size <= min_step_contraction * step_size
    //
    // Note that by definition, for contraction:
    //
    //  0 < max_step_contraction < min_step_contraction < 1
    //
    double min_step_contraction;

    // If during the line search, the step_size falls below this
    // value, it is truncated to zero.
    double min_step_size;

    // Maximum number of trial step size iterations during each line search.
    // If a step size satisfying the search conditions cannot be found within
    // this number of trials, the line search will terminate.
    int max_num_iterations;

    // Wolfe-specific line search parameters.

    // The strong Wolfe conditions consist of the Armijo sufficient
    // decrease condition, and an additional requirement that the
    // step-size be chosen s.t. the _magnitude_ ('strong' Wolfe
    // conditions) of the gradient along the search direction
    // decreases sufficiently. Precisely, this second condition
    // is that we seek a step_size s.t.
    //
    //   |f'(step_size)| <= sufficient_curvature_decrease * |f'(0)|
    //
    // Where f() is the line search objective and f'() is the derivative
    // of f w.r.t step_size (d f / d step_size).
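    //
    // As a concrete (purely illustrative) example: with the default
    // sufficient_curvature_decrease = 0.9 and f'(0) = -2.0, a step_size
    // satisfies this condition only if |f'(step_size)| <= 0.9 * 2.0 = 1.8.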
    double sufficient_curvature_decrease;

    // During the bracketing phase of the Wolfe search, the step size is
    // increased until either a point satisfying the Wolfe conditions is
    // found, or an upper bound for a bracket containing a point satisfying
    // the conditions is found.  Precisely, at each iteration of the
    // expansion:
    //
    //   new_step_size <= max_step_expansion * step_size.
    //
    // By definition for expansion, max_step_expansion > 1.0.
    double max_step_expansion;

    // If true, logging output from the line search is suppressed.
    bool is_silent;

    // The one dimensional function that the line search algorithm
    // minimizes.
    Function* function;
  };

  // An object used by the line search to access the function values
  // and gradient of the one dimensional function being optimized.
  //
  // In practice, this object will provide access to the objective
  // function value and the directional derivative of the underlying
  // optimization problem along a specific search direction.
  //
  // See LineSearchFunction for an example implementation.
  class Function {
   public:
    virtual ~Function() {}
    // Evaluate the line search objective
    //
    //   f(x) = p(position + x * direction)
    //
    // Where, p is the objective function of the general optimization
    // problem.
    //
    // g is the gradient f'(x) at x.
    //
    // f must not be null. The gradient is computed only if g is not null.
    virtual bool Evaluate(double x, double* f, double* g) = 0;
  };
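
  // A minimal illustrative implementation of Function (a sketch for
  // exposition only, not part of Ceres), describing the one dimensional
  // quadratic f(x) = (x - 2)^2:
  //
  //   class QuadraticFunction : public LineSearch::Function {
  //    public:
  //     virtual bool Evaluate(double x, double* f, double* g) {
  //       *f = (x - 2.0) * (x - 2.0);
  //       if (g != NULL) {
  //         *g = 2.0 * (x - 2.0);
  //       }
  //       return true;
  //     }
  //   };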

  // Result of the line search.
  struct Summary {
    Summary()
        : success(false),
          optimal_step_size(0.0),
          num_function_evaluations(0),
          num_gradient_evaluations(0),
          num_iterations(0) {}

    bool success;
    double optimal_step_size;
    int num_function_evaluations;
    int num_gradient_evaluations;
    int num_iterations;
    string error;
  };

  explicit LineSearch(const LineSearch::Options& options);
  virtual ~LineSearch() {}

  static LineSearch* Create(const LineSearchType line_search_type,
                            const LineSearch::Options& options,
                            string* error);

  // Perform the line search.
  //
  // step_size_estimate must be a positive number.
  //
  // initial_cost and initial_gradient are the value and gradient of
  // the function at zero.
  //
  // summary must not be null and will contain the result of the line
  // search.
  //
  // Summary::success is true if a non-zero step size is found.
  virtual void Search(double step_size_estimate,
                      double initial_cost,
                      double initial_gradient,
                      Summary* summary) = 0;

  // Returns the step size in [min_step_size, max_step_size] which minimizes
  // the polynomial of the given interpolation type fit to the provided
  // function samples.
  double InterpolatingPolynomialMinimizingStepSize(
      const LineSearchInterpolationType& interpolation_type,
      const FunctionSample& lowerbound_sample,
      const FunctionSample& previous_sample,
      const FunctionSample& current_sample,
      const double min_step_size,
      const double max_step_size) const;

 protected:
  const LineSearch::Options& options() const { return options_; }

 private:
  LineSearch::Options options_;
};

class LineSearchFunction : public LineSearch::Function {
 public:
  explicit LineSearchFunction(Evaluator* evaluator);
  virtual ~LineSearchFunction() {}
  void Init(const Vector& position, const Vector& direction);
  virtual bool Evaluate(double x, double* f, double* g);
  double DirectionInfinityNorm() const;

 private:
  Evaluator* evaluator_;
  Vector position_;
  Vector direction_;

  // evaluation_point = Evaluator::Plus(position_,  x * direction_);
  Vector evaluation_point_;

  // scaled_direction = x * direction_;
  Vector scaled_direction_;
  Vector gradient_;
};
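
// A minimal sketch of how these classes fit together.  The names evaluator,
// position, direction, initial_cost and initial_gradient are assumed to be
// supplied by the caller (e.g. by a minimizer) and are illustrative only:
//
//   LineSearchFunction line_search_function(evaluator);
//   line_search_function.Init(position, direction);
//
//   LineSearch::Options options;
//   options.function = &line_search_function;
//
//   string error;
//   LineSearch* line_search = LineSearch::Create(WOLFE, options, &error);
//   CHECK(line_search != NULL) << error;
//
//   LineSearch::Summary summary;
//   line_search->Search(1.0, initial_cost, initial_gradient, &summary);
//   if (summary.success) {
//     // summary.optimal_step_size holds the accepted step size.
//   }
//   delete line_search;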

// Backtracking and interpolation based Armijo line search. This
// implementation is based on the Armijo line search that ships in the
// minFunc package by Mark Schmidt.
//
// For more details: http://www.di.ens.fr/~mschmidt/Software/minFunc.html
class ArmijoLineSearch : public LineSearch {
 public:
  explicit ArmijoLineSearch(const LineSearch::Options& options);
  virtual ~ArmijoLineSearch() {}
  virtual void Search(double step_size_estimate,
                      double initial_cost,
                      double initial_gradient,
                      Summary* summary);
};

// Bracketing / Zoom Strong Wolfe condition line search.  This implementation
// is based on the pseudo-code algorithm presented in Nocedal & Wright [1]
// (pp. 60-61) with inspiration from the WolfeLineSearch which ships with the
// minFunc package by Mark Schmidt [2].
//
// [1] Nocedal J., Wright S., Numerical Optimization, 2nd Ed., Springer, 2006.
// [2] http://www.di.ens.fr/~mschmidt/Software/minFunc.html.
class WolfeLineSearch : public LineSearch {
 public:
  explicit WolfeLineSearch(const LineSearch::Options& options);
  virtual ~WolfeLineSearch() {}
  virtual void Search(double step_size_estimate,
                      double initial_cost,
                      double initial_gradient,
                      Summary* summary);
  // Returns true iff either a valid point or a valid bracket is found.
  bool BracketingPhase(const FunctionSample& initial_position,
                       const double step_size_estimate,
                       FunctionSample* bracket_low,
                       FunctionSample* bracket_high,
                       bool* perform_zoom_search,
                       Summary* summary);
  // Returns true iff solution satisfies the strong Wolfe conditions.
  bool ZoomPhase(const FunctionSample& initial_position,
                 FunctionSample bracket_low,
                 FunctionSample bracket_high,
                 FunctionSample* solution,
                 Summary* summary);
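
  // A rough sketch of how Search() is expected to combine the two phases
  // (illustrative only; the actual control flow lives in line_search.cc and
  // handles additional corner cases):
  //
  //   FunctionSample bracket_low, bracket_high, solution;
  //   bool perform_zoom_search = false;
  //   if (!BracketingPhase(initial_position, step_size_estimate,
  //                        &bracket_low, &bracket_high,
  //                        &perform_zoom_search, summary)) {
  //     return;  // Neither a valid point nor a valid bracket was found.
  //   }
  //   if (perform_zoom_search &&
  //       !ZoomPhase(initial_position, bracket_low, bracket_high,
  //                  &solution, summary)) {
  //     return;  // No point satisfying the strong Wolfe conditions.
  //   }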
};

}  // namespace internal
}  // namespace ceres

#endif  // CERES_INTERNAL_LINE_SEARCH_H_