// Ceres Solver - A fast non-linear least squares minimizer
// Copyright 2014 Google Inc. All rights reserved.
// http://code.google.com/p/ceres-solver/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
//   this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
//   this list of conditions and the following disclaimer in the documentation
//   and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors may be
//   used to endorse or promote products derived from this software without
//   specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Author: keir@google.com (Keir Mierle)

31 #include "ceres/solver_impl.h"
32 
33 #include <cstdio>
34 #include <iostream>  // NOLINT
35 #include <numeric>
36 #include <string>
37 #include "ceres/array_utils.h"
38 #include "ceres/callbacks.h"
39 #include "ceres/coordinate_descent_minimizer.h"
40 #include "ceres/cxsparse.h"
41 #include "ceres/evaluator.h"
42 #include "ceres/gradient_checking_cost_function.h"
43 #include "ceres/iteration_callback.h"
44 #include "ceres/levenberg_marquardt_strategy.h"
45 #include "ceres/line_search_minimizer.h"
46 #include "ceres/linear_solver.h"
47 #include "ceres/map_util.h"
48 #include "ceres/minimizer.h"
49 #include "ceres/ordered_groups.h"
50 #include "ceres/parameter_block.h"
51 #include "ceres/parameter_block_ordering.h"
52 #include "ceres/preconditioner.h"
53 #include "ceres/problem.h"
54 #include "ceres/problem_impl.h"
55 #include "ceres/program.h"
56 #include "ceres/reorder_program.h"
57 #include "ceres/residual_block.h"
58 #include "ceres/stringprintf.h"
59 #include "ceres/suitesparse.h"
60 #include "ceres/summary_utils.h"
61 #include "ceres/trust_region_minimizer.h"
62 #include "ceres/wall_time.h"
63 
64 namespace ceres {
65 namespace internal {
66 
void SolverImpl::TrustRegionMinimize(
    const Solver::Options& options,
    Program* program,
    CoordinateDescentMinimizer* inner_iteration_minimizer,
    Evaluator* evaluator,
    LinearSolver* linear_solver,
    Solver::Summary* summary) {
  Minimizer::Options minimizer_options(options);
  minimizer_options.is_constrained = program->IsBoundsConstrained();

  // The optimizer works on contiguous parameter vectors; allocate
  // some.
  Vector parameters(program->NumParameters());

  // Collect the discontiguous parameters into a contiguous state
  // vector.
  program->ParameterBlocksToStateVector(parameters.data());

  LoggingCallback logging_callback(TRUST_REGION,
                                   options.minimizer_progress_to_stdout);
  if (options.logging_type != SILENT) {
    minimizer_options.callbacks.insert(minimizer_options.callbacks.begin(),
                                       &logging_callback);
  }

  StateUpdatingCallback updating_callback(program, parameters.data());
  if (options.update_state_every_iteration) {
    // This must get pushed to the front of the callbacks so that it is run
    // before any of the user callbacks.
    minimizer_options.callbacks.insert(minimizer_options.callbacks.begin(),
                                       &updating_callback);
  }

  minimizer_options.evaluator = evaluator;
  scoped_ptr<SparseMatrix> jacobian(evaluator->CreateJacobian());

  minimizer_options.jacobian = jacobian.get();
  minimizer_options.inner_iteration_minimizer = inner_iteration_minimizer;

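  // Configure the trust region strategy (Levenberg-Marquardt or dogleg)
  // from the user supplied options before handing it to the minimizer.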
  TrustRegionStrategy::Options trust_region_strategy_options;
  trust_region_strategy_options.linear_solver = linear_solver;
  trust_region_strategy_options.initial_radius =
      options.initial_trust_region_radius;
  trust_region_strategy_options.max_radius = options.max_trust_region_radius;
  trust_region_strategy_options.min_lm_diagonal = options.min_lm_diagonal;
  trust_region_strategy_options.max_lm_diagonal = options.max_lm_diagonal;
  trust_region_strategy_options.trust_region_strategy_type =
      options.trust_region_strategy_type;
  trust_region_strategy_options.dogleg_type = options.dogleg_type;
  scoped_ptr<TrustRegionStrategy> strategy(
      TrustRegionStrategy::Create(trust_region_strategy_options));
  minimizer_options.trust_region_strategy = strategy.get();

  TrustRegionMinimizer minimizer;
  double minimizer_start_time = WallTimeInSeconds();
  minimizer.Minimize(minimizer_options, parameters.data(), summary);

  // If the user aborted mid-optimization or the optimization
  // terminated because of a numerical failure, then do not update
  // user state.
  if (summary->termination_type != USER_FAILURE &&
      summary->termination_type != FAILURE) {
    program->StateVectorToParameterBlocks(parameters.data());
    program->CopyParameterBlockStateToUserState();
  }

  summary->minimizer_time_in_seconds =
      WallTimeInSeconds() - minimizer_start_time;
}

void SolverImpl::LineSearchMinimize(
    const Solver::Options& options,
    Program* program,
    Evaluator* evaluator,
    Solver::Summary* summary) {
  Minimizer::Options minimizer_options(options);

  // The optimizer works on contiguous parameter vectors; allocate some.
  Vector parameters(program->NumParameters());

  // Collect the discontiguous parameters into a contiguous state vector.
  program->ParameterBlocksToStateVector(parameters.data());

  LoggingCallback logging_callback(LINE_SEARCH,
                                   options.minimizer_progress_to_stdout);
  if (options.logging_type != SILENT) {
    minimizer_options.callbacks.insert(minimizer_options.callbacks.begin(),
                                       &logging_callback);
  }

  StateUpdatingCallback updating_callback(program, parameters.data());
  if (options.update_state_every_iteration) {
    // This must get pushed to the front of the callbacks so that it is run
    // before any of the user callbacks.
    minimizer_options.callbacks.insert(minimizer_options.callbacks.begin(),
                                       &updating_callback);
  }

  minimizer_options.evaluator = evaluator;

  LineSearchMinimizer minimizer;
  double minimizer_start_time = WallTimeInSeconds();
  minimizer.Minimize(minimizer_options, parameters.data(), summary);

  // If the user aborted mid-optimization or the optimization
  // terminated because of a numerical failure, then do not update
  // user state.
  if (summary->termination_type != USER_FAILURE &&
      summary->termination_type != FAILURE) {
    program->StateVectorToParameterBlocks(parameters.data());
    program->CopyParameterBlockStateToUserState();
  }

  summary->minimizer_time_in_seconds =
      WallTimeInSeconds() - minimizer_start_time;
}

void SolverImpl::Solve(const Solver::Options& options,
                       ProblemImpl* problem_impl,
                       Solver::Summary* summary) {
  VLOG(2) << "Initial problem: "
          << problem_impl->NumParameterBlocks()
          << " parameter blocks, "
          << problem_impl->NumParameters()
          << " parameters, "
          << problem_impl->NumResidualBlocks()
          << " residual blocks, "
          << problem_impl->NumResiduals()
          << " residuals.";
  if (options.minimizer_type == TRUST_REGION) {
    TrustRegionSolve(options, problem_impl, summary);
  } else {
    LineSearchSolve(options, problem_impl, summary);
  }
}

void SolverImpl::TrustRegionSolve(const Solver::Options& original_options,
                                  ProblemImpl* original_problem_impl,
                                  Solver::Summary* summary) {
  EventLogger event_logger("TrustRegionSolve");
  double solver_start_time = WallTimeInSeconds();

  Program* original_program = original_problem_impl->mutable_program();
  ProblemImpl* problem_impl = original_problem_impl;

  summary->minimizer_type = TRUST_REGION;

  SummarizeGivenProgram(*original_program, summary);
  OrderingToGroupSizes(original_options.linear_solver_ordering.get(),
                       &(summary->linear_solver_ordering_given));
  OrderingToGroupSizes(original_options.inner_iteration_ordering.get(),
                       &(summary->inner_iteration_ordering_given));

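  // Make a local, modifiable copy of the user supplied options; the
  // preprocessing below may adjust it (threads, ordering, solver type).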
  Solver::Options options(original_options);

#ifndef CERES_USE_OPENMP
  if (options.num_threads > 1) {
    LOG(WARNING)
        << "OpenMP support is not compiled into this binary; "
        << "only options.num_threads=1 is supported. Switching "
        << "to single threaded mode.";
    options.num_threads = 1;
  }
  if (options.num_linear_solver_threads > 1) {
    LOG(WARNING)
        << "OpenMP support is not compiled into this binary; "
        << "only options.num_linear_solver_threads=1 is supported. Switching "
        << "to single threaded mode.";
    options.num_linear_solver_threads = 1;
  }
#endif

  summary->num_threads_given = original_options.num_threads;
  summary->num_threads_used = options.num_threads;

  if (options.trust_region_minimizer_iterations_to_dump.size() > 0 &&
      options.trust_region_problem_dump_format_type != CONSOLE &&
      options.trust_region_problem_dump_directory.empty()) {
    summary->message =
        "Solver::Options::trust_region_problem_dump_directory is empty.";
    LOG(ERROR) << summary->message;
    return;
  }

  if (!original_program->ParameterBlocksAreFinite(&summary->message)) {
    LOG(ERROR) << "Terminating: " << summary->message;
    return;
  }

  if (!original_program->IsFeasible(&summary->message)) {
    LOG(ERROR) << "Terminating: " << summary->message;
    return;
  }

  event_logger.AddEvent("Init");

  original_program->SetParameterBlockStatePtrsToUserStatePtrs();
  event_logger.AddEvent("SetParameterBlockPtrs");

  // If the user requests gradient checking, construct a new
  // ProblemImpl by wrapping the CostFunctions of problem_impl inside
  // GradientCheckingCostFunction and replacing problem_impl with
  // gradient_checking_problem_impl.
  scoped_ptr<ProblemImpl> gradient_checking_problem_impl;
  if (options.check_gradients) {
    VLOG(1) << "Checking Gradients";
    gradient_checking_problem_impl.reset(
        CreateGradientCheckingProblemImpl(
            problem_impl,
            options.numeric_derivative_relative_step_size,
            options.gradient_check_relative_precision));

    // From here on, problem_impl will point to the gradient checking
    // version.
    problem_impl = gradient_checking_problem_impl.get();
  }

  if (options.linear_solver_ordering.get() != NULL) {
    if (!IsOrderingValid(options, problem_impl, &summary->message)) {
      LOG(ERROR) << summary->message;
      return;
    }
    event_logger.AddEvent("CheckOrdering");
  } else {
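    // The user did not supply an ordering, so construct a trivial one that
    // places every parameter block in a single elimination group.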
    options.linear_solver_ordering.reset(new ParameterBlockOrdering);
    const ProblemImpl::ParameterMap& parameter_map =
        problem_impl->parameter_map();
    for (ProblemImpl::ParameterMap::const_iterator it = parameter_map.begin();
         it != parameter_map.end();
         ++it) {
      options.linear_solver_ordering->AddElementToGroup(it->first, 0);
    }
    event_logger.AddEvent("ConstructOrdering");
  }

  // Create the three objects needed to minimize: the transformed program, the
  // evaluator, and the linear solver.
  scoped_ptr<Program> reduced_program(CreateReducedProgram(&options,
                                                           problem_impl,
                                                           &summary->fixed_cost,
                                                           &summary->message));

  event_logger.AddEvent("CreateReducedProgram");
  if (reduced_program == NULL) {
    return;
  }

  OrderingToGroupSizes(options.linear_solver_ordering.get(),
                       &(summary->linear_solver_ordering_used));
  SummarizeReducedProgram(*reduced_program, summary);

  if (summary->num_parameter_blocks_reduced == 0) {
    summary->preprocessor_time_in_seconds =
        WallTimeInSeconds() - solver_start_time;

    double post_process_start_time = WallTimeInSeconds();

    summary->message =
        "Function tolerance reached. "
        "No non-constant parameter blocks found.";
    summary->termination_type = CONVERGENCE;
    VLOG_IF(1, options.logging_type != SILENT) << summary->message;

    summary->initial_cost = summary->fixed_cost;
    summary->final_cost = summary->fixed_cost;

    // Ensure the program state is set to the user parameters on the way out.
    original_program->SetParameterBlockStatePtrsToUserStatePtrs();
    original_program->SetParameterOffsetsAndIndex();

    summary->postprocessor_time_in_seconds =
        WallTimeInSeconds() - post_process_start_time;
    return;
  }

  scoped_ptr<LinearSolver>
      linear_solver(CreateLinearSolver(&options, &summary->message));
  event_logger.AddEvent("CreateLinearSolver");
  if (linear_solver == NULL) {
    return;
  }

  summary->linear_solver_type_given = original_options.linear_solver_type;
  summary->linear_solver_type_used = options.linear_solver_type;

  summary->preconditioner_type = options.preconditioner_type;
  summary->visibility_clustering_type = options.visibility_clustering_type;

  summary->num_linear_solver_threads_given =
      original_options.num_linear_solver_threads;
  summary->num_linear_solver_threads_used = options.num_linear_solver_threads;

  summary->dense_linear_algebra_library_type =
      options.dense_linear_algebra_library_type;
  summary->sparse_linear_algebra_library_type =
      options.sparse_linear_algebra_library_type;

  summary->trust_region_strategy_type = options.trust_region_strategy_type;
  summary->dogleg_type = options.dogleg_type;

  scoped_ptr<Evaluator> evaluator(CreateEvaluator(options,
                                                  problem_impl->parameter_map(),
                                                  reduced_program.get(),
                                                  &summary->message));

  event_logger.AddEvent("CreateEvaluator");

  if (evaluator == NULL) {
    return;
  }

  scoped_ptr<CoordinateDescentMinimizer> inner_iteration_minimizer;
  if (options.use_inner_iterations) {
    if (reduced_program->parameter_blocks().size() < 2) {
      LOG(WARNING) << "Reduced problem only contains one parameter block. "
                   << "Disabling inner iterations.";
    } else {
      inner_iteration_minimizer.reset(
          CreateInnerIterationMinimizer(options,
                                        *reduced_program,
                                        problem_impl->parameter_map(),
                                        summary));
      if (inner_iteration_minimizer == NULL) {
        LOG(ERROR) << summary->message;
        return;
      }
    }
  }
  event_logger.AddEvent("CreateInnerIterationMinimizer");

  double minimizer_start_time = WallTimeInSeconds();
  summary->preprocessor_time_in_seconds =
      minimizer_start_time - solver_start_time;

  // Run the optimization.
  TrustRegionMinimize(options,
                      reduced_program.get(),
                      inner_iteration_minimizer.get(),
                      evaluator.get(),
                      linear_solver.get(),
                      summary);
  event_logger.AddEvent("Minimize");

  double post_process_start_time = WallTimeInSeconds();

  SetSummaryFinalCost(summary);

  // Ensure the program state is set to the user parameters on the way
  // out.
  original_program->SetParameterBlockStatePtrsToUserStatePtrs();
  original_program->SetParameterOffsetsAndIndex();

  const map<string, double>& linear_solver_time_statistics =
      linear_solver->TimeStatistics();
  summary->linear_solver_time_in_seconds =
      FindWithDefault(linear_solver_time_statistics,
                      "LinearSolver::Solve",
                      0.0);

  const map<string, double>& evaluator_time_statistics =
      evaluator->TimeStatistics();

  summary->residual_evaluation_time_in_seconds =
      FindWithDefault(evaluator_time_statistics, "Evaluator::Residual", 0.0);
  summary->jacobian_evaluation_time_in_seconds =
      FindWithDefault(evaluator_time_statistics, "Evaluator::Jacobian", 0.0);

  // Stick a fork in it, we're done.
  summary->postprocessor_time_in_seconds =
      WallTimeInSeconds() - post_process_start_time;
  event_logger.AddEvent("PostProcess");
}

void SolverImpl::LineSearchSolve(const Solver::Options& original_options,
                                 ProblemImpl* original_problem_impl,
                                 Solver::Summary* summary) {
  double solver_start_time = WallTimeInSeconds();

  Program* original_program = original_problem_impl->mutable_program();
  ProblemImpl* problem_impl = original_problem_impl;

  SummarizeGivenProgram(*original_program, summary);
  summary->minimizer_type = LINE_SEARCH;
  summary->line_search_direction_type =
      original_options.line_search_direction_type;
  summary->max_lbfgs_rank = original_options.max_lbfgs_rank;
  summary->line_search_type = original_options.line_search_type;
  summary->line_search_interpolation_type =
      original_options.line_search_interpolation_type;
  summary->nonlinear_conjugate_gradient_type =
      original_options.nonlinear_conjugate_gradient_type;

  if (original_program->IsBoundsConstrained()) {
    summary->message = "LINE_SEARCH Minimizer does not support bounds.";
    LOG(ERROR) << "Terminating: " << summary->message;
    return;
  }

  Solver::Options options(original_options);

  // This ensures that we get a Block Jacobian Evaluator along with
  // none of the Schur nonsense. This file will have to be extensively
  // refactored to deal with the various bits of cleanups related to
  // line search.
  options.linear_solver_type = CGNR;

#ifndef CERES_USE_OPENMP
  if (options.num_threads > 1) {
    LOG(WARNING)
        << "OpenMP support is not compiled into this binary; "
        << "only options.num_threads=1 is supported. Switching "
        << "to single threaded mode.";
    options.num_threads = 1;
  }
#endif  // CERES_USE_OPENMP

  summary->num_threads_given = original_options.num_threads;
  summary->num_threads_used = options.num_threads;

  if (!original_program->ParameterBlocksAreFinite(&summary->message)) {
    LOG(ERROR) << "Terminating: " << summary->message;
    return;
  }

  if (options.linear_solver_ordering.get() != NULL) {
    if (!IsOrderingValid(options, problem_impl, &summary->message)) {
      LOG(ERROR) << summary->message;
      return;
    }
  } else {
    options.linear_solver_ordering.reset(new ParameterBlockOrdering);
    const ProblemImpl::ParameterMap& parameter_map =
        problem_impl->parameter_map();
    for (ProblemImpl::ParameterMap::const_iterator it = parameter_map.begin();
         it != parameter_map.end();
         ++it) {
      options.linear_solver_ordering->AddElementToGroup(it->first, 0);
    }
  }

  original_program->SetParameterBlockStatePtrsToUserStatePtrs();

  // If the user requests gradient checking, construct a new
  // ProblemImpl by wrapping the CostFunctions of problem_impl inside
  // GradientCheckingCostFunction and replacing problem_impl with
  // gradient_checking_problem_impl.
  scoped_ptr<ProblemImpl> gradient_checking_problem_impl;
  if (options.check_gradients) {
    VLOG(1) << "Checking Gradients";
    gradient_checking_problem_impl.reset(
        CreateGradientCheckingProblemImpl(
            problem_impl,
            options.numeric_derivative_relative_step_size,
            options.gradient_check_relative_precision));

    // From here on, problem_impl will point to the gradient checking
    // version.
    problem_impl = gradient_checking_problem_impl.get();
  }

  // Create the three objects needed to minimize: the transformed program, the
  // evaluator, and the linear solver.
  scoped_ptr<Program> reduced_program(CreateReducedProgram(&options,
                                                           problem_impl,
                                                           &summary->fixed_cost,
                                                           &summary->message));
  if (reduced_program == NULL) {
    return;
  }

  SummarizeReducedProgram(*reduced_program, summary);
  if (summary->num_parameter_blocks_reduced == 0) {
    summary->preprocessor_time_in_seconds =
        WallTimeInSeconds() - solver_start_time;

    summary->message =
        "Function tolerance reached. "
        "No non-constant parameter blocks found.";
    summary->termination_type = CONVERGENCE;
    VLOG_IF(1, options.logging_type != SILENT) << summary->message;
    summary->initial_cost = summary->fixed_cost;
    summary->final_cost = summary->fixed_cost;

    const double post_process_start_time = WallTimeInSeconds();
    SetSummaryFinalCost(summary);

    // Ensure the program state is set to the user parameters on the way out.
    original_program->SetParameterBlockStatePtrsToUserStatePtrs();
    original_program->SetParameterOffsetsAndIndex();

    summary->postprocessor_time_in_seconds =
        WallTimeInSeconds() - post_process_start_time;
    return;
  }

  scoped_ptr<Evaluator> evaluator(CreateEvaluator(options,
                                                  problem_impl->parameter_map(),
                                                  reduced_program.get(),
                                                  &summary->message));
  if (evaluator == NULL) {
    return;
  }

  const double minimizer_start_time = WallTimeInSeconds();
  summary->preprocessor_time_in_seconds =
      minimizer_start_time - solver_start_time;

  // Run the optimization.
  LineSearchMinimize(options, reduced_program.get(), evaluator.get(), summary);

  const double post_process_start_time = WallTimeInSeconds();

  SetSummaryFinalCost(summary);

  // Ensure the program state is set to the user parameters on the way out.
  original_program->SetParameterBlockStatePtrsToUserStatePtrs();
  original_program->SetParameterOffsetsAndIndex();

  const map<string, double>& evaluator_time_statistics =
      evaluator->TimeStatistics();

  summary->residual_evaluation_time_in_seconds =
      FindWithDefault(evaluator_time_statistics, "Evaluator::Residual", 0.0);
  summary->jacobian_evaluation_time_in_seconds =
      FindWithDefault(evaluator_time_statistics, "Evaluator::Jacobian", 0.0);

  // Stick a fork in it, we're done.
  summary->postprocessor_time_in_seconds =
      WallTimeInSeconds() - post_process_start_time;
}

bool SolverImpl::IsOrderingValid(const Solver::Options& options,
                                 const ProblemImpl* problem_impl,
                                 string* error) {
  if (options.linear_solver_ordering->NumElements() !=
      problem_impl->NumParameterBlocks()) {
    *error = "Number of parameter blocks in user supplied ordering "
        "does not match the number of parameter blocks in the problem";
    return false;
  }

  const Program& program = problem_impl->program();
  const vector<ParameterBlock*>& parameter_blocks = program.parameter_blocks();
  for (vector<ParameterBlock*>::const_iterator it = parameter_blocks.begin();
       it != parameter_blocks.end();
       ++it) {
    if (!options.linear_solver_ordering
        ->IsMember(const_cast<double*>((*it)->user_state()))) {
      *error = "Problem contains a parameter block that is not in "
          "the user specified ordering.";
      return false;
    }
  }

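  // For Schur type solvers, the first elimination group must form an
  // independent set in the Hessian, i.e. no residual block may depend on
  // more than one parameter block from that group.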
  if (IsSchurType(options.linear_solver_type) &&
      options.linear_solver_ordering->NumGroups() > 1) {
    const vector<ResidualBlock*>& residual_blocks = program.residual_blocks();
    const set<double*>& e_blocks =
        options.linear_solver_ordering->group_to_elements().begin()->second;
    if (!IsParameterBlockSetIndependent(e_blocks, residual_blocks)) {
      *error = "The user requested the use of a Schur type solver. "
          "But the first elimination group in the ordering is not an "
          "independent set.";
      return false;
    }
  }
  return true;
}

bool SolverImpl::IsParameterBlockSetIndependent(
    const set<double*>& parameter_block_ptrs,
    const vector<ResidualBlock*>& residual_blocks) {
  // Loop over each residual block and ensure that no two parameter
  // blocks in the same residual block are part of
  // parameter_block_ptrs as that would violate the assumption that it
  // is an independent set in the Hessian matrix.
  for (vector<ResidualBlock*>::const_iterator it = residual_blocks.begin();
       it != residual_blocks.end();
       ++it) {
    ParameterBlock* const* parameter_blocks = (*it)->parameter_blocks();
    const int num_parameter_blocks = (*it)->NumParameterBlocks();
    int count = 0;
    for (int i = 0; i < num_parameter_blocks; ++i) {
      count += parameter_block_ptrs.count(
          parameter_blocks[i]->mutable_user_state());
    }
    if (count > 1) {
      return false;
    }
  }
  return true;
}

Program* SolverImpl::CreateReducedProgram(Solver::Options* options,
                                          ProblemImpl* problem_impl,
                                          double* fixed_cost,
                                          string* error) {
  CHECK_NOTNULL(options->linear_solver_ordering.get());
  Program* original_program = problem_impl->mutable_program();

  vector<double*> removed_parameter_blocks;
  scoped_ptr<Program> reduced_program(
      original_program->CreateReducedProgram(&removed_parameter_blocks,
                                             fixed_cost,
                                             error));
  if (reduced_program.get() == NULL) {
    return NULL;
  }

  VLOG(2) << "Reduced problem: "
          << reduced_program->NumParameterBlocks()
          << " parameter blocks, "
          << reduced_program->NumParameters()
          << " parameters, "
          << reduced_program->NumResidualBlocks()
          << " residual blocks, "
          << reduced_program->NumResiduals()
          << " residuals.";

  if (reduced_program->NumParameterBlocks() == 0) {
    LOG(WARNING) << "No varying parameter blocks to optimize; "
                 << "bailing early.";
    return reduced_program.release();
  }

  ParameterBlockOrdering* linear_solver_ordering =
      options->linear_solver_ordering.get();
  const int min_group_id =
      linear_solver_ordering->MinNonZeroGroup();
  linear_solver_ordering->Remove(removed_parameter_blocks);

  ParameterBlockOrdering* inner_iteration_ordering =
      options->inner_iteration_ordering.get();
  if (inner_iteration_ordering != NULL) {
    inner_iteration_ordering->Remove(removed_parameter_blocks);
  }

  if (IsSchurType(options->linear_solver_type) &&
      linear_solver_ordering->GroupSize(min_group_id) == 0) {
    // If the user requested the use of a Schur type solver, and
    // supplied a non-NULL linear_solver_ordering object with more than
    // one elimination group, then it can happen that after all the
    // parameter blocks which are fixed or unused have been removed from
    // the program and the ordering, there are no more parameter blocks
    // in the first elimination group.
    //
    // In such a case, the use of a Schur type solver is not possible,
    // as they assume there is at least one e_block. Thus, we
    // automatically switch to the closest solver to the one indicated
    // by the user.
    if (options->linear_solver_type == ITERATIVE_SCHUR) {
      options->preconditioner_type =
          Preconditioner::PreconditionerForZeroEBlocks(
              options->preconditioner_type);
    }

    options->linear_solver_type =
        LinearSolver::LinearSolverForZeroEBlocks(
            options->linear_solver_type);
  }

  if (IsSchurType(options->linear_solver_type)) {
    if (!ReorderProgramForSchurTypeLinearSolver(
            options->linear_solver_type,
            options->sparse_linear_algebra_library_type,
            problem_impl->parameter_map(),
            linear_solver_ordering,
            reduced_program.get(),
            error)) {
      return NULL;
    }
    return reduced_program.release();
  }

  if (options->linear_solver_type == SPARSE_NORMAL_CHOLESKY &&
      !options->dynamic_sparsity) {
    if (!ReorderProgramForSparseNormalCholesky(
            options->sparse_linear_algebra_library_type,
            *linear_solver_ordering,
            reduced_program.get(),
            error)) {
      return NULL;
    }

    return reduced_program.release();
  }

  reduced_program->SetParameterOffsetsAndIndex();
  return reduced_program.release();
}

LinearSolver* SolverImpl::CreateLinearSolver(Solver::Options* options,
                                             string* error) {
  CHECK_NOTNULL(options);
  CHECK_NOTNULL(options->linear_solver_ordering.get());
  CHECK_NOTNULL(error);

  if (options->trust_region_strategy_type == DOGLEG) {
    if (options->linear_solver_type == ITERATIVE_SCHUR ||
        options->linear_solver_type == CGNR) {
      *error = "DOGLEG only supports exact factorization based linear "
               "solvers. If you want to use an iterative solver please "
               "use LEVENBERG_MARQUARDT as the trust_region_strategy_type";
      return NULL;
    }
  }

#ifdef CERES_NO_LAPACK
  if (options->linear_solver_type == DENSE_NORMAL_CHOLESKY &&
      options->dense_linear_algebra_library_type == LAPACK) {
    *error = "Can't use DENSE_NORMAL_CHOLESKY with LAPACK because "
        "LAPACK was not enabled when Ceres was built.";
    return NULL;
  }

  if (options->linear_solver_type == DENSE_QR &&
      options->dense_linear_algebra_library_type == LAPACK) {
    *error = "Can't use DENSE_QR with LAPACK because "
        "LAPACK was not enabled when Ceres was built.";
    return NULL;
  }

  if (options->linear_solver_type == DENSE_SCHUR &&
      options->dense_linear_algebra_library_type == LAPACK) {
    *error = "Can't use DENSE_SCHUR with LAPACK because "
        "LAPACK was not enabled when Ceres was built.";
    return NULL;
  }
#endif

#ifdef CERES_NO_SUITESPARSE
  if (options->linear_solver_type == SPARSE_NORMAL_CHOLESKY &&
      options->sparse_linear_algebra_library_type == SUITE_SPARSE) {
    *error = "Can't use SPARSE_NORMAL_CHOLESKY with SUITESPARSE because "
             "SuiteSparse was not enabled when Ceres was built.";
    return NULL;
  }

  if (options->preconditioner_type == CLUSTER_JACOBI) {
    *error = "CLUSTER_JACOBI preconditioner not supported. Please build Ceres "
        "with SuiteSparse support.";
    return NULL;
  }

  if (options->preconditioner_type == CLUSTER_TRIDIAGONAL) {
    *error = "CLUSTER_TRIDIAGONAL preconditioner not supported. Please build "
        "Ceres with SuiteSparse support.";
    return NULL;
  }
#endif

#ifdef CERES_NO_CXSPARSE
  if (options->linear_solver_type == SPARSE_NORMAL_CHOLESKY &&
      options->sparse_linear_algebra_library_type == CX_SPARSE) {
    *error = "Can't use SPARSE_NORMAL_CHOLESKY with CXSPARSE because "
             "CXSparse was not enabled when Ceres was built.";
    return NULL;
  }
#endif

  if (options->max_linear_solver_iterations <= 0) {
    *error = "Solver::Options::max_linear_solver_iterations is not positive.";
    return NULL;
  }
  if (options->min_linear_solver_iterations <= 0) {
    *error = "Solver::Options::min_linear_solver_iterations is not positive.";
    return NULL;
  }
  if (options->min_linear_solver_iterations >
      options->max_linear_solver_iterations) {
    *error = "Solver::Options::min_linear_solver_iterations > "
        "Solver::Options::max_linear_solver_iterations.";
    return NULL;
  }

  LinearSolver::Options linear_solver_options;
  linear_solver_options.min_num_iterations =
      options->min_linear_solver_iterations;
  linear_solver_options.max_num_iterations =
      options->max_linear_solver_iterations;
  linear_solver_options.type = options->linear_solver_type;
  linear_solver_options.preconditioner_type = options->preconditioner_type;
  linear_solver_options.visibility_clustering_type =
      options->visibility_clustering_type;
  linear_solver_options.sparse_linear_algebra_library_type =
      options->sparse_linear_algebra_library_type;
  linear_solver_options.dense_linear_algebra_library_type =
      options->dense_linear_algebra_library_type;
  linear_solver_options.use_postordering = options->use_postordering;
  linear_solver_options.dynamic_sparsity = options->dynamic_sparsity;

  // Ignore user's postordering preferences and force it to be true if
  // cholmod_camd is not available. This ensures that the linear
  // solver does not assume that a fill-reducing pre-ordering has been
  // done.
#if !defined(CERES_NO_SUITESPARSE) && defined(CERES_NO_CAMD)
  if (IsSchurType(linear_solver_options.type) &&
      options->sparse_linear_algebra_library_type == SUITE_SPARSE) {
    linear_solver_options.use_postordering = true;
  }
#endif

  linear_solver_options.num_threads = options->num_linear_solver_threads;
  options->num_linear_solver_threads = linear_solver_options.num_threads;

  OrderingToGroupSizes(options->linear_solver_ordering.get(),
                       &linear_solver_options.elimination_groups);
  // Schur type solvers expect at least two elimination groups. If
  // there is only one elimination group, then CreateReducedProgram
  // guarantees that this group only contains e_blocks. Thus we add a
  // dummy elimination group with zero blocks in it.
  if (IsSchurType(linear_solver_options.type) &&
      linear_solver_options.elimination_groups.size() == 1) {
    linear_solver_options.elimination_groups.push_back(0);
  }

  return LinearSolver::Create(linear_solver_options);
}

Evaluator* SolverImpl::CreateEvaluator(
    const Solver::Options& options,
    const ProblemImpl::ParameterMap& parameter_map,
    Program* program,
    string* error) {
  Evaluator::Options evaluator_options;
  evaluator_options.linear_solver_type = options.linear_solver_type;
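  // For Schur type solvers, the number of blocks to eliminate is the size of
  // the first elimination group in the linear solver ordering; otherwise no
  // blocks are eliminated.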
  evaluator_options.num_eliminate_blocks =
      (options.linear_solver_ordering->NumGroups() > 0 &&
       IsSchurType(options.linear_solver_type))
      ? (options.linear_solver_ordering
         ->group_to_elements().begin()
         ->second.size())
      : 0;
  evaluator_options.num_threads = options.num_threads;
  evaluator_options.dynamic_sparsity = options.dynamic_sparsity;
  return Evaluator::Create(evaluator_options, program, error);
}

CoordinateDescentMinimizer* SolverImpl::CreateInnerIterationMinimizer(
    const Solver::Options& options,
    const Program& program,
    const ProblemImpl::ParameterMap& parameter_map,
    Solver::Summary* summary) {
  summary->inner_iterations_given = true;

  scoped_ptr<CoordinateDescentMinimizer> inner_iteration_minimizer(
      new CoordinateDescentMinimizer);
  scoped_ptr<ParameterBlockOrdering> inner_iteration_ordering;
  ParameterBlockOrdering* ordering_ptr = NULL;

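  // If the user supplied an inner iteration ordering, validate it against the
  // program; otherwise construct a default ordering for the coordinate
  // descent minimizer.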
  if (options.inner_iteration_ordering.get() == NULL) {
    inner_iteration_ordering.reset(
        CoordinateDescentMinimizer::CreateOrdering(program));
    ordering_ptr = inner_iteration_ordering.get();
  } else {
    ordering_ptr = options.inner_iteration_ordering.get();
    if (!CoordinateDescentMinimizer::IsOrderingValid(program,
                                                     *ordering_ptr,
                                                     &summary->message)) {
      return NULL;
    }
  }

  if (!inner_iteration_minimizer->Init(program,
                                       parameter_map,
                                       *ordering_ptr,
                                       &summary->message)) {
    return NULL;
  }

  summary->inner_iterations_used = true;
  summary->inner_iteration_time_in_seconds = 0.0;
  OrderingToGroupSizes(ordering_ptr,
                       &(summary->inner_iteration_ordering_used));
  return inner_iteration_minimizer.release();
}

}  // namespace internal
}  // namespace ceres