// Ceres Solver - A fast non-linear least squares minimizer
// Copyright 2010, 2011, 2012 Google Inc. All rights reserved.
// http://code.google.com/p/ceres-solver/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
//   this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors may be
//   used to endorse or promote products derived from this software without
//   specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Author: sameeragarwal@google.com (Sameer Agarwal)
//         keir@google.com (Keir Mierle)
//
// The Problem object is used to build and hold least squares problems.

#ifndef CERES_PUBLIC_PROBLEM_H_
#define CERES_PUBLIC_PROBLEM_H_

#include <cstddef>
#include <map>
#include <set>
#include <vector>

#include "ceres/internal/macros.h"
#include "ceres/internal/port.h"
#include "ceres/internal/scoped_ptr.h"
#include "ceres/types.h"
#include "glog/logging.h"

namespace ceres {

class CostFunction;
class LossFunction;
class LocalParameterization;
class Solver;
struct CRSMatrix;

namespace internal {
class Preprocessor;
class ProblemImpl;
class ParameterBlock;
class ResidualBlock;
}  // namespace internal

// A ResidualBlockId is an opaque handle clients can use to remove residual
// blocks from a Problem after adding them.
typedef internal::ResidualBlock* ResidualBlockId;

// A class to represent non-linear least squares problems. Such
// problems have a cost function that is a sum of error terms (known
// as "residuals"), where each residual is a function of some subset
// of the parameters. The cost function takes the form
//
//    N    1
//   SUM  --- loss( || r_i1, r_i2,..., r_ik ||^2  ),
//   i=1   2
//
// where
//
//   r_ij     is residual number i, component j; the residual is a
//            function of some subset of the parameters x1...xk. For
//            example, in a structure from motion problem a residual
//            might be the difference between a measured point in an
//            image and the reprojected position for the matching
//            camera, point pair. The residual would have two
//            components, error in x and error in y.
//
//   loss(y)  is the loss function; for example, squared error or
//            Huber L1 loss. If loss(y) = y, then the cost function is
//            non-robustified least squares.
//
// This class is specifically designed to address the important subset
// of "sparse" least squares problems, where each component of the
// residual depends only on a small number of parameters, even
// though the total number of residuals and parameters may be very
// large. This property affords tremendous gains in scale, allowing
// efficient solving of large problems that are otherwise
// inaccessible.
//
// The canonical example of a sparse least squares problem is
// "structure-from-motion" (SFM), where the parameters are points and
// cameras, and residuals are reprojection errors. Typically a single
// residual will depend only on 9 parameters (3 for the point, 6 for
// the camera).
//
// To create a least squares problem, use the AddResidualBlock() and
// AddParameterBlock() methods, documented below. Here is an example least
// squares problem containing 3 parameter blocks of sizes 3, 4 and 5
// respectively and two residual terms of size 2 and 6:
//
//   double x1[] = { 1.0, 2.0, 3.0 };
//   double x2[] = { 1.0, 2.0, 3.0, 5.0 };
//   double x3[] = { 1.0, 2.0, 3.0, 6.0, 7.0 };
//
//   Problem problem;
//
//   problem.AddResidualBlock(new MyUnaryCostFunction(...), NULL, x1);
//   problem.AddResidualBlock(new MyBinaryCostFunction(...), NULL, x2, x3);
//
// Please see cost_function.h for details of the CostFunction object.
class Problem {
 public:
  struct Options {
    Options()
        : cost_function_ownership(TAKE_OWNERSHIP),
          loss_function_ownership(TAKE_OWNERSHIP),
          local_parameterization_ownership(TAKE_OWNERSHIP),
          enable_fast_parameter_block_removal(false),
          disable_all_safety_checks(false) {}

    // These flags control whether the Problem object owns the cost
    // functions, loss functions, and parameterizations passed into
    // the Problem. If set to TAKE_OWNERSHIP, then the problem object
    // will delete the corresponding cost or loss functions on
    // destruction. The destructor is careful to delete the pointers
    // only once, since sharing cost/loss/parameterizations is
    // allowed.
    Ownership cost_function_ownership;
    Ownership loss_function_ownership;
    Ownership local_parameterization_ownership;

    // If true, trades memory for a faster RemoveParameterBlock() operation.
    //
    // RemoveParameterBlock() takes time proportional to the size of the entire
    // Problem. If you only remove parameter blocks from the Problem
    // occasionally, this may be acceptable. However, if you are modifying the
    // Problem frequently, and have memory to spare, then flip this switch to
    // make RemoveParameterBlock() take time proportional to the number of
    // residual blocks that depend on it.  The increase in memory usage is an
    // additional hash set per parameter block containing all the residuals
    // that depend on the parameter block.
    bool enable_fast_parameter_block_removal;

    // By default, Ceres performs a variety of safety checks when constructing
    // the problem. There is a small but measurable performance penalty to
    // these checks, typically around 5% of construction time. If you are sure
    // your problem construction is correct, and 5% of the problem construction
    // time is truly an overhead you want to avoid, then you can set
    // disable_all_safety_checks to true.
    //
    // WARNING: Do not set this to true, unless you are absolutely sure of what
    // you are doing.
    bool disable_all_safety_checks;
  };
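
  // For illustration only, a minimal sketch of configuring these options
  // before constructing a Problem. MyCostFunction is a hypothetical
  // CostFunction subclass owned by the caller; DO_NOT_TAKE_OWNERSHIP is the
  // Ownership value from types.h.
  //
  //   Problem::Options options;
  //   options.cost_function_ownership = DO_NOT_TAKE_OWNERSHIP;
  //   options.enable_fast_parameter_block_removal = true;
  //   Problem problem(options);
  //
  //   MyCostFunction my_cost_function;  // Caller retains ownership.
  //   double x[3] = {1.0, 2.0, 3.0};
  //   problem.AddResidualBlock(&my_cost_function, NULL, x);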

  // The default constructor is equivalent to the
  // invocation Problem(Problem::Options()).
  Problem();
  explicit Problem(const Options& options);

  ~Problem();

  // Add a residual block to the overall cost function. The cost
  // function carries with it information about the sizes of the
  // parameter blocks it expects. The function checks that these match
  // the sizes of the parameter blocks listed in parameter_blocks. The
  // program aborts if a mismatch is detected. loss_function can be
  // NULL, in which case the cost of the term is just the squared norm
  // of the residuals.
  //
  // The user has the option of explicitly adding the parameter blocks
  // using AddParameterBlock. This causes additional correctness
  // checking; however, AddResidualBlock implicitly adds the parameter
  // blocks if they are not present, so calling AddParameterBlock
  // explicitly is not required.
  //
  // The Problem object by default takes ownership of the
  // cost_function and loss_function pointers. These objects remain
  // live for the life of the Problem object. If the user wishes to
  // keep control over the destruction of these objects, then they can
  // do this by setting the corresponding enums in the Options struct.
  //
  // Note: Even though the Problem takes ownership of cost_function
  // and loss_function, it does not preclude the user from re-using
  // them in another residual block. The destructor takes care to call
  // delete on each cost_function or loss_function pointer only once,
  // regardless of how many residual blocks refer to them.
  //
  // Example usage:
  //
  //   double x1[] = {1.0, 2.0, 3.0};
  //   double x2[] = {1.0, 2.0, 5.0, 6.0};
  //   double x3[] = {3.0, 6.0, 2.0, 5.0, 1.0};
  //
  //   Problem problem;
  //
  //   problem.AddResidualBlock(new MyUnaryCostFunction(...), NULL, x1);
  //   problem.AddResidualBlock(new MyBinaryCostFunction(...), NULL, x2, x1);
  //
  ResidualBlockId AddResidualBlock(CostFunction* cost_function,
                                   LossFunction* loss_function,
                                   const vector<double*>& parameter_blocks);

  // Convenience methods for adding residuals with a small number of
  // parameters. This is the common case. Instead of specifying the
  // parameter block arguments as a vector, list them as pointers.
  ResidualBlockId AddResidualBlock(CostFunction* cost_function,
                                   LossFunction* loss_function,
                                   double* x0);
  ResidualBlockId AddResidualBlock(CostFunction* cost_function,
                                   LossFunction* loss_function,
                                   double* x0, double* x1);
  ResidualBlockId AddResidualBlock(CostFunction* cost_function,
                                   LossFunction* loss_function,
                                   double* x0, double* x1, double* x2);
  ResidualBlockId AddResidualBlock(CostFunction* cost_function,
                                   LossFunction* loss_function,
                                   double* x0, double* x1, double* x2,
                                   double* x3);
  ResidualBlockId AddResidualBlock(CostFunction* cost_function,
                                   LossFunction* loss_function,
                                   double* x0, double* x1, double* x2,
                                   double* x3, double* x4);
  ResidualBlockId AddResidualBlock(CostFunction* cost_function,
                                   LossFunction* loss_function,
                                   double* x0, double* x1, double* x2,
                                   double* x3, double* x4, double* x5);
  ResidualBlockId AddResidualBlock(CostFunction* cost_function,
                                   LossFunction* loss_function,
                                   double* x0, double* x1, double* x2,
                                   double* x3, double* x4, double* x5,
                                   double* x6);
  ResidualBlockId AddResidualBlock(CostFunction* cost_function,
                                   LossFunction* loss_function,
                                   double* x0, double* x1, double* x2,
                                   double* x3, double* x4, double* x5,
                                   double* x6, double* x7);
  ResidualBlockId AddResidualBlock(CostFunction* cost_function,
                                   LossFunction* loss_function,
                                   double* x0, double* x1, double* x2,
                                   double* x3, double* x4, double* x5,
                                   double* x6, double* x7, double* x8);
  ResidualBlockId AddResidualBlock(CostFunction* cost_function,
                                   LossFunction* loss_function,
                                   double* x0, double* x1, double* x2,
                                   double* x3, double* x4, double* x5,
                                   double* x6, double* x7, double* x8,
                                   double* x9);

  // Add a parameter block with appropriate size to the problem.
  // Repeated calls with the same arguments are ignored. Repeated
  // calls with the same double pointer but a different size result
  // in undefined behaviour.
  void AddParameterBlock(double* values, int size);

  // Add a parameter block with appropriate size and parameterization
  // to the problem. Repeated calls with the same arguments are
  // ignored. Repeated calls with the same double pointer but a
  // different size result in undefined behaviour.
  void AddParameterBlock(double* values,
                         int size,
                         LocalParameterization* local_parameterization);
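
  // For illustration only, a sketch of attaching a local parameterization
  // when adding a block. QuaternionParameterization is declared in
  // local_parameterization.h; the values of q are placeholders:
  //
  //   double q[4] = {1.0, 0.0, 0.0, 0.0};
  //   problem.AddParameterBlock(q, 4, new QuaternionParameterization);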

  // Remove a parameter block from the problem. The parameterization of the
  // parameter block, if it exists, will persist until the deletion of the
  // problem (similar to cost/loss functions in residual block removal). Any
  // residual blocks that depend on the parameter are also removed, as
  // described in RemoveResidualBlock() below.
  //
  // If Problem::Options::enable_fast_parameter_block_removal is true, then
  // the removal is fast (almost constant time). Otherwise, removing a
  // parameter block will incur a scan of the entire Problem object.
  //
  // WARNING: Removing a residual or parameter block will destroy the implicit
  // ordering, rendering the jacobian or residuals returned from the solver
  // uninterpretable. If you depend on the evaluated jacobian, do not use
  // remove! This may change in a future release.
  void RemoveParameterBlock(double* values);

  // Remove a residual block from the problem. Any parameters that the
  // residual block depends on are not removed. The cost and loss functions
  // for the residual block are not deleted immediately; they are deleted
  // when the problem itself is deleted.
  //
  // WARNING: Removing a residual or parameter block will destroy the implicit
  // ordering, rendering the jacobian or residuals returned from the solver
  // uninterpretable. If you depend on the evaluated jacobian, do not use
  // remove! This may change in a future release.
  void RemoveResidualBlock(ResidualBlockId residual_block);
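
  // For illustration only, a sketch of capturing the ResidualBlockId
  // returned by AddResidualBlock and removing that block later
  // (MyCostFunction is a hypothetical CostFunction subclass):
  //
  //   ResidualBlockId id =
  //       problem.AddResidualBlock(new MyCostFunction(...), NULL, x1);
  //   ...
  //   problem.RemoveResidualBlock(id);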

  // Hold the indicated parameter block constant during optimization.
  void SetParameterBlockConstant(double* values);

  // Allow the indicated parameter to vary during optimization.
  void SetParameterBlockVariable(double* values);
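
  // For example, to hold x2 fixed for one solve and then free it again
  // (a sketch; Solve(), options and summary are declared in solver.h and
  // assumed to be set up elsewhere):
  //
  //   problem.SetParameterBlockConstant(x2);
  //   Solve(options, &problem, &summary);
  //   problem.SetParameterBlockVariable(x2);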

  // Set the local parameterization for one of the parameter blocks.
  // The local_parameterization is owned by the Problem by default. It
  // is acceptable to set the same parameterization for multiple
  // parameters; the destructor is careful to delete local
  // parameterizations only once. The local parameterization can only
  // be set once per parameter, and cannot be changed once set.
  void SetParameterization(double* values,
                           LocalParameterization* local_parameterization);

  // Number of parameter blocks in the problem. Always equals
  // parameter_blocks().size() and parameter_block_sizes().size().
  int NumParameterBlocks() const;

  // The size of the parameter vector obtained by summing over the
  // sizes of all the parameter blocks.
  int NumParameters() const;

  // Number of residual blocks in the problem. Always equals
  // residual_blocks().size().
  int NumResidualBlocks() const;

  // The size of the residual vector obtained by summing over the
  // sizes of all of the residual blocks.
  int NumResiduals() const;

  // The size of the parameter block.
  int ParameterBlockSize(const double* values) const;

  // The size of local parameterization for the parameter block. If
  // there is no local parameterization associated with this parameter
  // block, then ParameterBlockLocalSize = ParameterBlockSize.
  int ParameterBlockLocalSize(const double* values) const;

  // Fills the passed parameter_blocks vector with pointers to the
  // parameter blocks currently in the problem. After this call,
  // parameter_blocks->size() == NumParameterBlocks().
  void GetParameterBlocks(vector<double*>* parameter_blocks) const;

  // Options struct to control Problem::Evaluate.
  struct EvaluateOptions {
    EvaluateOptions()
        : apply_loss_function(true),
          num_threads(1) {
    }

    // The set of parameter blocks for which evaluation should be
    // performed. This vector determines the order that parameter
    // blocks occur in the gradient vector and in the columns of the
    // jacobian matrix. If parameter_blocks is empty, then it is
    // assumed to be equal to a vector containing ALL the parameter
    // blocks.  Generally speaking the parameter blocks will occur in
    // the order in which they were added to the problem. But, this
    // may change if the user removes any parameter blocks from the
    // problem.
    //
    // NOTE: This vector should contain the same pointers as the ones
    // used to add parameter blocks to the Problem. These parameter
    // blocks should NOT point to new memory locations. Bad things
    // will happen otherwise.
    vector<double*> parameter_blocks;

    // The set of residual blocks to evaluate. This vector determines
    // the order in which the residuals occur, and how the rows of the
    // jacobian are ordered. If residual_blocks is empty, then it is
    // assumed to be equal to a vector containing ALL the residual
    // blocks. Generally speaking the residual blocks will occur in
    // the order in which they were added to the problem. But, this
    // may change if the user removes any residual blocks from the
    // problem.
    vector<ResidualBlockId> residual_blocks;

    // Even though the residual blocks in the problem may contain loss
    // functions, setting apply_loss_function to false will turn off
    // the application of the loss function to the output of the cost
    // function. This is of use for example if the user wishes to
    // analyse the solution quality by studying the distribution of
    // residuals before and after the solve.
    bool apply_loss_function;

    // Number of threads to use for evaluation.
    int num_threads;
  };

  // Evaluate Problem. Any of the output pointers can be NULL. Which
  // residual blocks and parameter blocks are used is controlled by
  // the EvaluateOptions struct above.
  //
  // Note 1: The evaluation will use the values stored in the memory
  // locations pointed to by the parameter block pointers used at the
  // time of the construction of the problem. i.e.,
  //
  //   Problem problem;
  //   double x = 1;
  //   problem.AddResidualBlock(new MyCostFunction, NULL, &x);
  //
  //   double cost = 0.0;
  //   problem.Evaluate(Problem::EvaluateOptions(), &cost, NULL, NULL, NULL);
  //
  // The cost is evaluated at x = 1. If you wish to evaluate the
  // problem at x = 2, then
  //
  //    x = 2;
  //    problem.Evaluate(Problem::EvaluateOptions(), &cost, NULL, NULL, NULL);
  //
  // is the way to do so.
  //
  // Note 2: If no local parameterizations are used, then the size of
  // the gradient vector (and the number of columns in the jacobian)
  // is the sum of the sizes of all the parameter blocks. If a
  // parameter block has a local parameterization, then it contributes
  // "LocalSize" entries to the gradient vector (and the number of
  // columns in the jacobian).
  bool Evaluate(const EvaluateOptions& options,
                double* cost,
                vector<double>* residuals,
                vector<double>* gradient,
                CRSMatrix* jacobian);
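
  // For illustration only, a sketch of evaluating the cost and residuals of
  // the problem from Note 1 above without applying the loss functions:
  //
  //   Problem::EvaluateOptions evaluate_options;
  //   evaluate_options.apply_loss_function = false;
  //   double cost = 0.0;
  //   vector<double> residuals;
  //   problem.Evaluate(evaluate_options, &cost, &residuals, NULL, NULL);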

 private:
  friend class Solver;
  friend class Covariance;
  internal::scoped_ptr<internal::ProblemImpl> problem_impl_;
  CERES_DISALLOW_COPY_AND_ASSIGN(Problem);
};

}  // namespace ceres

#endif  // CERES_PUBLIC_PROBLEM_H_