Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

User-provided hook within the internal loop of non-linear optimizers #586

Merged
merged 4 commits into from
Nov 18, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions gtsam/nonlinear/NonlinearConjugateGradientOptimizer.h
Original file line number Diff line number Diff line change
Expand Up @@ -200,6 +200,10 @@ boost::tuple<V, int> nonlinearConjugateGradient(const S &system,
currentValues = system.advance(prevValues, alpha, direction);
currentError = system.error(currentValues);

// User hook:
if (params.iterationHook)
params.iterationHook(iteration, prevError, currentError);

// Maybe show output
if (params.verbosity >= NonlinearOptimizerParams::ERROR)
std::cout << "iteration: " << iteration << ", currentError: " << currentError << std::endl;
Expand Down
6 changes: 5 additions & 1 deletion gtsam/nonlinear/NonlinearOptimizer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,11 @@ void NonlinearOptimizer::defaultOptimize() {

// Update newError for either printouts or conditional-end checks:
newError = error();


// User hook:
if (params.iterationHook)
params.iterationHook(iterations(), currentError, newError);

// Maybe show output
if (params.verbosity >= NonlinearOptimizerParams::VALUES)
values().print("newValues");
Expand Down
2 changes: 1 addition & 1 deletion gtsam/nonlinear/NonlinearOptimizer.h
Original file line number Diff line number Diff line change
Expand Up @@ -81,7 +81,7 @@ class GTSAM_EXPORT NonlinearOptimizer {

public:
/** A shared pointer to this class */
typedef boost::shared_ptr<const NonlinearOptimizer> shared_ptr;
using shared_ptr = boost::shared_ptr<const NonlinearOptimizer>;

/// @name Standard interface
/// @{
Expand Down
60 changes: 44 additions & 16 deletions gtsam/nonlinear/NonlinearOptimizerParams.h
Original file line number Diff line number Diff line change
Expand Up @@ -38,21 +38,12 @@ class GTSAM_EXPORT NonlinearOptimizerParams {
SILENT, TERMINATION, ERROR, VALUES, DELTA, LINEAR
};

size_t maxIterations; ///< The maximum iterations to stop iterating (default 100)
double relativeErrorTol; ///< The maximum relative error decrease to stop iterating (default 1e-5)
double absoluteErrorTol; ///< The maximum absolute error decrease to stop iterating (default 1e-5)
double errorTol; ///< The maximum total error to stop iterating (default 0.0)
Verbosity verbosity; ///< The printing verbosity during optimization (default SILENT)
Ordering::OrderingType orderingType; ///< The method of ordering use during variable elimination (default COLAMD)

NonlinearOptimizerParams() :
maxIterations(100), relativeErrorTol(1e-5), absoluteErrorTol(1e-5), errorTol(
0.0), verbosity(SILENT), orderingType(Ordering::COLAMD),
linearSolverType(MULTIFRONTAL_CHOLESKY) {}

virtual ~NonlinearOptimizerParams() {
}
virtual void print(const std::string& str = "") const;
size_t maxIterations = 100; ///< The maximum iterations to stop iterating (default 100)
double relativeErrorTol = 1e-5; ///< The maximum relative error decrease to stop iterating (default 1e-5)
double absoluteErrorTol = 1e-5; ///< The maximum absolute error decrease to stop iterating (default 1e-5)
double errorTol = 0.0; ///< The maximum total error to stop iterating (default 0.0)
Verbosity verbosity = SILENT; ///< The printing verbosity during optimization (default SILENT)
Ordering::OrderingType orderingType = Ordering::COLAMD; ///< The method of ordering use during variable elimination (default COLAMD)

size_t getMaxIterations() const { return maxIterations; }
double getRelativeErrorTol() const { return relativeErrorTol; }
Expand All @@ -71,6 +62,37 @@ class GTSAM_EXPORT NonlinearOptimizerParams {
static Verbosity verbosityTranslator(const std::string &s) ;
static std::string verbosityTranslator(Verbosity value) ;

/** Type for an optional user-provided hook to be called after each
* internal optimizer iteration. See iterationHook below. */
using IterationHook = std::function<
void(size_t /*iteration*/, double/*errorBefore*/, double/*errorAfter*/)>;

/** Optional user-provided iteration hook to be called after each
* optimization iteration (Default: none).
* Note that `IterationHook` is defined as a std::function<> with this
* signature:
* \code
* void(size_t iteration, double errorBefore, double errorAfter)
* \endcode
* which allows binding by means of a reference to a regular function:
* \code
* void foo(size_t iteration, double errorBefore, double errorAfter);
* // ...
* lmOpts.iterationHook = &foo;
* \endcode
* or to a C++11 lambda (preferred if you need to capture additional
* context variables, such as the optimizer object itself, the factor graph,
* etc.):
* \code
* lmOpts.iterationHook = [&](size_t iter, double oldError, double newError)
* {
* // ...
* };
* \endcode
* or to the result of a properly-formed `std::bind` call.
*/
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Amazing!!!

IterationHook iterationHook;

/** See NonlinearOptimizerParams::linearSolverType */
enum LinearSolverType {
MULTIFRONTAL_CHOLESKY,
Expand All @@ -81,10 +103,16 @@ class GTSAM_EXPORT NonlinearOptimizerParams {
CHOLMOD, /* Experimental Flag */
};

LinearSolverType linearSolverType; ///< The type of linear solver to use in the nonlinear optimizer
LinearSolverType linearSolverType = MULTIFRONTAL_CHOLESKY; ///< The type of linear solver to use in the nonlinear optimizer
boost::optional<Ordering> ordering; ///< The optional variable elimination ordering, or empty to use COLAMD (default: empty)
IterativeOptimizationParameters::shared_ptr iterativeParams; ///< The container for iterativeOptimization parameters. used in CG Solvers.

NonlinearOptimizerParams() = default;
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I remember seeing a comment from @dellaert about having variables before constructors, so he'll be happy about this one. 😄

virtual ~NonlinearOptimizerParams() {
}

virtual void print(const std::string& str = "") const;

inline bool isMultifrontal() const {
return (linearSolverType == MULTIFRONTAL_CHOLESKY)
|| (linearSolverType == MULTIFRONTAL_QR);
Expand Down
52 changes: 52 additions & 0 deletions tests/testNonlinearOptimizer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -566,6 +566,58 @@ TEST( NonlinearOptimizer, logfile )
// EXPECT(actual.str()==expected.str());
}

/* ************************************************************************* */
// Verify that a user-supplied iterationHook fires on every Levenberg-Marquardt
// iteration and that the reported error decreases across accepted steps.
TEST( NonlinearOptimizer, iterationHook_LM )
{
  NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph());

  // Start far from the optimum so several iterations are required:
  Values initial;
  initial.insert(X(1), Point2(3, 3));

  // Levenberg-Marquardt
  LevenbergMarquardtParams lmParams;
  size_t lastIterCalled = 0;
  lmParams.iterationHook = [&](size_t iteration, double oldError, double newError)
  {
    // Record the latest iteration the hook was invoked for, and check that
    // each reported step reduced the total error:
    lastIterCalled = iteration;
    EXPECT(newError<oldError);

    // Uncomment for a per-iteration evolution printout:
    //std::cout << "iter: " << iteration << " error: " << oldError << " => " << newError <<"\n";
  };
  LevenbergMarquardtOptimizer(fg, initial, lmParams).optimize();

  // The hook must have been invoked a non-trivial number of times:
  EXPECT(lastIterCalled>5);
}
/* ************************************************************************* */
// Verify that the user-supplied iterationHook is also honored by the
// nonlinear conjugate-gradient optimizer's internal loop.
TEST( NonlinearOptimizer, iterationHook_CG )
{
NonlinearFactorGraph fg(example::createReallyNonlinearFactorGraph());

// Start far from the optimum so several iterations are required:
Point2 x0(3,3);
Values c0;
c0.insert(X(1), x0);

// Nonlinear conjugate gradient (note: NOT Levenberg-Marquardt)
NonlinearConjugateGradientOptimizer::Parameters cgParams;
size_t lastIterCalled = 0;
cgParams.iterationHook = [&](size_t iteration, double oldError, double newError)
{
// Tests: each reported iteration must have reduced the total error.
lastIterCalled = iteration;
EXPECT(newError<oldError);

// Example of evolution printout:
//std::cout << "iter: " << iteration << " error: " << oldError << " => " << newError <<"\n";
};
NonlinearConjugateGradientOptimizer(fg, c0, cgParams).optimize();

// The hook must have been invoked a non-trivial number of times:
EXPECT(lastIterCalled>5);
}


/* ************************************************************************* */
//// Minimal traits example
struct MyType : public Vector3 {
Expand Down