From e20494324f7c1ef27e2c92d684dd0a5dc8b45cf4 Mon Sep 17 00:00:00 2001
From: Gerry Chen
Date: Wed, 9 Oct 2019 15:58:57 -0400
Subject: [PATCH 1/3] Gaussian Factor Graph unittests and linearization (with
 Python bindings)

---
 .../gtsam/tests/test_GaussianFactorGraph.py |  91 ++++++++++
 gtsam.h                                     |   4 +
 gtsam/nonlinear/Marginals.cpp               |  30 +++-
 gtsam/nonlinear/Marginals.h                 |  26 ++-
 tests/testMarginals.cpp                     | 170 ++++++++++--------
 5 files changed, 242 insertions(+), 79 deletions(-)
 create mode 100644 cython/gtsam/tests/test_GaussianFactorGraph.py

diff --git a/cython/gtsam/tests/test_GaussianFactorGraph.py b/cython/gtsam/tests/test_GaussianFactorGraph.py
new file mode 100644
index 0000000000..2e7875af06
--- /dev/null
+++ b/cython/gtsam/tests/test_GaussianFactorGraph.py
@@ -0,0 +1,91 @@
+"""
+GTSAM Copyright 2010-2019, Georgia Tech Research Corporation,
+Atlanta, Georgia 30332-0415
+All Rights Reserved
+
+See LICENSE for the license information
+
+Unit tests for Linear Factor Graphs.
+Author: Frank Dellaert & Gerry Chen
+"""
+# pylint: disable=invalid-name, no-name-in-module, no-member
+
+from __future__ import print_function
+
+import unittest
+
+import gtsam
+from gtsam.utils.test_case import GtsamTestCase
+
+import numpy as np
+
+def create_graph():
+    """Create a basic linear factor graph for testing"""
+    graph = gtsam.GaussianFactorGraph()
+
+    x0 = gtsam.symbol(ord('x'), 0)
+    x1 = gtsam.symbol(ord('x'), 1)
+    x2 = gtsam.symbol(ord('x'), 2)
+
+    BETWEEN_NOISE = gtsam.noiseModel_Diagonal.Sigmas(np.ones(1))
+    PRIOR_NOISE = gtsam.noiseModel_Diagonal.Sigmas(np.ones(1))
+
+    graph.add(x1, np.eye(1), x0, -np.eye(1), np.ones(1), BETWEEN_NOISE)
+    graph.add(x2, np.eye(1), x1, -np.eye(1), 2*np.ones(1), BETWEEN_NOISE)
+    graph.add(x0, np.eye(1), np.zeros(1), PRIOR_NOISE)
+
+    return graph, (x0, x1, x2)
+
+class TestGaussianFactorGraph(GtsamTestCase):
+    """Tests for Gaussian Factor Graphs."""
+
+    def test_fg(self):
+        """Test solving a linear factor graph"""
+        graph, X = create_graph()
+        result = graph.optimize()
+
+        EXPECTEDX = [0, 1, 3]
+
+        # check solutions
+        self.assertAlmostEqual(EXPECTEDX[0], result.at(X[0]), delta=1e-8)
+        self.assertAlmostEqual(EXPECTEDX[1], result.at(X[1]), delta=1e-8)
+        self.assertAlmostEqual(EXPECTEDX[2], result.at(X[2]), delta=1e-8)
+
+    def test_convertNonlinear(self):
+        """Test converting a linear factor graph to a nonlinear one"""
+        graph, X = create_graph()
+
+        EXPECTEDM = [1, 2, 3]
+
+        # create nonlinear factor graph for marginalization
+        nfg = gtsam.LinearContainerFactor.ConvertLinearGraph(graph)
+        optimizer = gtsam.LevenbergMarquardtOptimizer(nfg, gtsam.Values())
+        nlresult = optimizer.optimizeSafely()
+
+        # marginalize
+        marginals = gtsam.Marginals(nfg, nlresult)
+        m = [marginals.marginalCovariance(x) for x in X]
+
+        # check linear marginalizations
+        self.assertAlmostEqual(EXPECTEDM[0], m[0], delta=1e-8)
+        self.assertAlmostEqual(EXPECTEDM[1], m[1], delta=1e-8)
+        self.assertAlmostEqual(EXPECTEDM[2], m[2], delta=1e-8)
+
+    def test_linearMarginalization(self):
+        """Marginalize a linear factor graph"""
+        graph, X = create_graph()
+        result = graph.optimize()
+
+        EXPECTEDM = [1, 2, 3]
+
+        # linear factor graph marginalize
+        marginals = gtsam.Marginals(graph, result)
+        m = [marginals.marginalCovariance(x) for x in X]
+
+        # check linear marginalizations
+        self.assertAlmostEqual(EXPECTEDM[0], m[0], delta=1e-8)
+        self.assertAlmostEqual(EXPECTEDM[1], m[1], delta=1e-8)
+        self.assertAlmostEqual(EXPECTEDM[2], m[2], delta=1e-8)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/gtsam.h b/gtsam.h
index bf35755809..1a11490842 100644
--- a/gtsam.h
+++ b/gtsam.h
@@ -2011,6 +2011,10 @@ class Values {
 
 class Marginals {
   Marginals(const gtsam::NonlinearFactorGraph& graph, const gtsam::Values& solution);
+  Marginals(const gtsam::GaussianFactorGraph& gfgraph,
+      const gtsam::Values& solution);
+  Marginals(const gtsam::GaussianFactorGraph& gfgraph,
+      const gtsam::VectorValues& solutionvec);
 
   void print(string s) const;
   Matrix marginalCovariance(size_t variable) const;
diff --git a/gtsam/nonlinear/Marginals.cpp b/gtsam/nonlinear/Marginals.cpp
index fcbbf2f44c..11e123f131 100644
--- a/gtsam/nonlinear/Marginals.cpp
+++ b/gtsam/nonlinear/Marginals.cpp
@@ -28,15 +28,35 @@ namespace gtsam {
 /* ************************************************************************* */
 Marginals::Marginals(const NonlinearFactorGraph& graph, const Values& solution, Factorization factorization,
                      EliminateableFactorGraph::OptionalOrdering ordering)
-{
+    : values_(solution), factorization_(factorization) {
   gttic(MarginalsConstructor);
-
-  // Linearize graph
   graph_ = *graph.linearize(solution);
+  computeBayesTree(ordering);
+}
+
+/* ************************************************************************* */
+Marginals::Marginals(const GaussianFactorGraph& graph, const VectorValues& solution, Factorization factorization,
+                     EliminateableFactorGraph::OptionalOrdering ordering)
+    : graph_(graph), factorization_(factorization) {
+  gttic(MarginalsConstructor);
+  Values vals;
+  for (const VectorValues::KeyValuePair& v: solution) {
+    vals.insert(v.first, v.second);
+  }
+  values_ = vals;
+  computeBayesTree(ordering);
+}
 
-  // Store values
-  values_ = solution;
+/* ************************************************************************* */
+Marginals::Marginals(const GaussianFactorGraph& graph, const Values& solution, Factorization factorization,
+                     EliminateableFactorGraph::OptionalOrdering ordering)
+    : graph_(graph), values_(solution), factorization_(factorization) {
+  gttic(MarginalsConstructor);
+  computeBayesTree(ordering);
+}
 
+/* ************************************************************************* */
+void Marginals::computeBayesTree(EliminateableFactorGraph::OptionalOrdering ordering) {
   // Compute BayesTree
   factorization_ = factorization;
   if(factorization_ == CHOLESKY)
diff --git a/gtsam/nonlinear/Marginals.h b/gtsam/nonlinear/Marginals.h
index e0a78042dd..54a290196d 100644
--- a/gtsam/nonlinear/Marginals.h
+++ b/gtsam/nonlinear/Marginals.h
@@ -51,7 +51,7 @@ class GTSAM_EXPORT Marginals {
   /// Default constructor only for Cython wrapper
   Marginals(){}
 
-  /** Construct a marginals class.
+  /** Construct a marginals class from a nonlinear factor graph.
    * @param graph The factor graph defining the full joint density on all variables.
    * @param solution The linearization point about which to compute Gaussian marginals (usually the MLE as obtained from a NonlinearOptimizer).
    * @param factorization The linear decomposition mode - either Marginals::CHOLESKY (faster and suitable for most problems) or Marginals::QR (slower but more numerically stable for poorly-conditioned problems).
@@ -60,6 +60,24 @@ class GTSAM_EXPORT Marginals {
   Marginals(const NonlinearFactorGraph& graph, const Values& solution, Factorization factorization = CHOLESKY,
       EliminateableFactorGraph::OptionalOrdering ordering = boost::none);
+
+  /** Construct a marginals class from a linear factor graph.
+   * @param graph The factor graph defining the full joint density on all variables.
+   * @param solution The solution point to compute Gaussian marginals.
+   * @param factorization The linear decomposition mode - either Marginals::CHOLESKY (faster and suitable for most problems) or Marginals::QR (slower but more numerically stable for poorly-conditioned problems).
+   * @param ordering An optional variable ordering for elimination.
+   */
+  Marginals(const GaussianFactorGraph& graph, const Values& solution, Factorization factorization = CHOLESKY,
+      EliminateableFactorGraph::OptionalOrdering ordering = boost::none);
+
+  /** Construct a marginals class from a linear factor graph.
+   * @param graph The factor graph defining the full joint density on all variables.
+   * @param solution The solution point to compute Gaussian marginals.
+   * @param factorization The linear decomposition mode - either Marginals::CHOLESKY (faster and suitable for most problems) or Marginals::QR (slower but more numerically stable for poorly-conditioned problems).
+   * @param ordering An optional variable ordering for elimination.
+   */
+  Marginals(const GaussianFactorGraph& graph, const VectorValues& solution, Factorization factorization = CHOLESKY,
+      EliminateableFactorGraph::OptionalOrdering ordering = boost::none);
 
   /** print */
   void print(const std::string& str = "Marginals: ", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;
@@ -81,6 +99,12 @@ class GTSAM_EXPORT Marginals {
 
   /** Optimize the bayes tree */
   VectorValues optimize() const;
+
+protected:
+
+  /** Compute the Bayes Tree as a helper function to the constructor */
+  void computeBayesTree(EliminateableFactorGraph::OptionalOrdering ordering);
+
 };
 
 /**
diff --git a/tests/testMarginals.cpp b/tests/testMarginals.cpp
index 4bdbd93879..3c35c6bc01 100644
--- a/tests/testMarginals.cpp
+++ b/tests/testMarginals.cpp
@@ -87,6 +87,12 @@ TEST(Marginals, planarSLAMmarginals) {
   soln.insert(x3, Pose2(4.0, 0.0, 0.0));
   soln.insert(l1, Point2(2.0, 2.0));
   soln.insert(l2, Point2(4.0, 2.0));
+  VectorValues soln_lin;
+  soln_lin.insert(x1, Vector3(0.0, 0.0, 0.0));
+  soln_lin.insert(x2, Vector3(2.0, 0.0, 0.0));
+  soln_lin.insert(x3, Vector3(4.0, 0.0, 0.0));
+  soln_lin.insert(l1, Vector2(2.0, 2.0));
+  soln_lin.insert(l2, Vector2(4.0, 2.0));
 
   Matrix expectedx1(3,3);
   expectedx1 <<
@@ -110,71 +116,80 @@ TEST(Marginals, planarSLAMmarginals) {
   Matrix expectedl2(2,2);
   expectedl2 <<
         0.293870968, -0.104516129,
-       -0.104516129,  0.391935484;
-
-  // Check marginals covariances for all variables (Cholesky mode)
-  Marginals marginals(graph, soln, Marginals::CHOLESKY);
-  EXPECT(assert_equal(expectedx1, marginals.marginalCovariance(x1), 1e-8));
-  EXPECT(assert_equal(expectedx2, marginals.marginalCovariance(x2), 1e-8));
-  EXPECT(assert_equal(expectedx3, marginals.marginalCovariance(x3), 1e-8));
-  EXPECT(assert_equal(expectedl1, marginals.marginalCovariance(l1), 1e-8));
-  EXPECT(assert_equal(expectedl2, marginals.marginalCovariance(l2), 1e-8));
-
-  // Check marginals covariances for all variables (QR mode)
+       -0.104516129,  0.391935484;
+
+  auto testMarginals = [&] (Marginals marginals) {
+    EXPECT(assert_equal(expectedx1, marginals.marginalCovariance(x1), 1e-8));
+    EXPECT(assert_equal(expectedx2, marginals.marginalCovariance(x2), 1e-8));
+    EXPECT(assert_equal(expectedx3, marginals.marginalCovariance(x3), 1e-8));
+    EXPECT(assert_equal(expectedl1, marginals.marginalCovariance(l1), 1e-8));
+    EXPECT(assert_equal(expectedl2, marginals.marginalCovariance(l2), 1e-8));
+  };
+
+  auto testJointMarginals = [&] (Marginals marginals) {
+    // Check joint marginals for 3 variables
+    Matrix expected_l2x1x3(8,8);
+    expected_l2x1x3 <<
+      0.293871159514111, -0.104516127560770,  0.090000180000270, -0.000000000000000, -0.020000000000000,  0.151935669757191, -0.104516127560770, -0.050967744878460,
+     -0.104516127560770,  0.391935664055174,  0.000000000000000,  0.090000180000270,  0.040000000000000,  0.007741936219615,  0.351935664055174,  0.056129031890193,
+      0.090000180000270,  0.000000000000000,  0.090000180000270, -0.000000000000000,  0.000000000000000,  0.090000180000270,  0.000000000000000,  0.000000000000000,
+     -0.000000000000000,  0.090000180000270, -0.000000000000000,  0.090000180000270,  0.000000000000000, -0.000000000000000,  0.090000180000270,  0.000000000000000,
+     -0.020000000000000,  0.040000000000000,  0.000000000000000,  0.000000000000000,  0.010000000000000,  0.000000000000000,  0.040000000000000,  0.010000000000000,
+      0.151935669757191,  0.007741936219615,  0.090000180000270, -0.000000000000000,  0.000000000000000,  0.160967924878730,  0.007741936219615,  0.004516127560770,
+     -0.104516127560770,  0.351935664055174,  0.000000000000000,  0.090000180000270,  0.040000000000000,  0.007741936219615,  0.351935664055174,  0.056129031890193,
+     -0.050967744878460,  0.056129031890193,  0.000000000000000,  0.000000000000000,  0.010000000000000,  0.004516127560770,  0.056129031890193,  0.027741936219615;
+    KeyVector variables {x1, l2, x3};
+    JointMarginal joint_l2x1x3 = marginals.jointMarginalCovariance(variables);
+    EXPECT(assert_equal(Matrix(expected_l2x1x3.block(0,0,2,2)), Matrix(joint_l2x1x3(l2,l2)), 1e-6));
+    EXPECT(assert_equal(Matrix(expected_l2x1x3.block(2,0,3,2)), Matrix(joint_l2x1x3(x1,l2)), 1e-6));
+    EXPECT(assert_equal(Matrix(expected_l2x1x3.block(5,0,3,2)), Matrix(joint_l2x1x3(x3,l2)), 1e-6));
+
+    EXPECT(assert_equal(Matrix(expected_l2x1x3.block(0,2,2,3)), Matrix(joint_l2x1x3(l2,x1)), 1e-6));
+    EXPECT(assert_equal(Matrix(expected_l2x1x3.block(2,2,3,3)), Matrix(joint_l2x1x3(x1,x1)), 1e-6));
+    EXPECT(assert_equal(Matrix(expected_l2x1x3.block(5,2,3,3)), Matrix(joint_l2x1x3(x3,x1)), 1e-6));
+
+    EXPECT(assert_equal(Matrix(expected_l2x1x3.block(0,5,2,3)), Matrix(joint_l2x1x3(l2,x3)), 1e-6));
+    EXPECT(assert_equal(Matrix(expected_l2x1x3.block(2,5,3,3)), Matrix(joint_l2x1x3(x1,x3)), 1e-6));
+    EXPECT(assert_equal(Matrix(expected_l2x1x3.block(5,5,3,3)), Matrix(joint_l2x1x3(x3,x3)), 1e-6));
+
+    // Check joint marginals for 2 variables (different code path than >2 variable case above)
+    Matrix expected_l2x1(5,5);
+    expected_l2x1 <<
+      0.293871159514111, -0.104516127560770,  0.090000180000270, -0.000000000000000, -0.020000000000000,
+     -0.104516127560770,  0.391935664055174,  0.000000000000000,  0.090000180000270,  0.040000000000000,
+      0.090000180000270,  0.000000000000000,  0.090000180000270, -0.000000000000000,  0.000000000000000,
+     -0.000000000000000,  0.090000180000270, -0.000000000000000,  0.090000180000270,  0.000000000000000,
+     -0.020000000000000,  0.040000000000000,  0.000000000000000,  0.000000000000000,  0.010000000000000;
+    variables.resize(2);
+    variables[0] = l2;
+    variables[1] = x1;
+    JointMarginal joint_l2x1 = marginals.jointMarginalCovariance(variables);
+    EXPECT(assert_equal(Matrix(expected_l2x1.block(0,0,2,2)), Matrix(joint_l2x1(l2,l2)), 1e-6));
+    EXPECT(assert_equal(Matrix(expected_l2x1.block(2,0,3,2)), Matrix(joint_l2x1(x1,l2)), 1e-6));
+    EXPECT(assert_equal(Matrix(expected_l2x1.block(0,2,2,3)), Matrix(joint_l2x1(l2,x1)), 1e-6));
+    EXPECT(assert_equal(Matrix(expected_l2x1.block(2,2,3,3)), Matrix(joint_l2x1(x1,x1)), 1e-6));
+
+    // Check joint marginal for 1 variable (different code path than >1 variable cases above)
+    variables.resize(1);
+    variables[0] = x1;
+    JointMarginal joint_x1 = marginals.jointMarginalCovariance(variables);
+    EXPECT(assert_equal(expectedx1, Matrix(joint_x1(x1,x1)), 1e-6));
+  };
+
+  Marginals marginals;
+
+  marginals = Marginals(graph, soln, Marginals::CHOLESKY);
+  testMarginals(marginals);
   marginals = Marginals(graph, soln, Marginals::QR);
-  EXPECT(assert_equal(expectedx1, marginals.marginalCovariance(x1), 1e-8));
-  EXPECT(assert_equal(expectedx2, marginals.marginalCovariance(x2), 1e-8));
-  EXPECT(assert_equal(expectedx3, marginals.marginalCovariance(x3), 1e-8));
-  EXPECT(assert_equal(expectedl1, marginals.marginalCovariance(l1), 1e-8));
-  EXPECT(assert_equal(expectedl2, marginals.marginalCovariance(l2), 1e-8));
-
-  // Check joint marginals for 3 variables
-  Matrix expected_l2x1x3(8,8);
-  expected_l2x1x3 <<
-    0.293871159514111, -0.104516127560770,  0.090000180000270, -0.000000000000000, -0.020000000000000,  0.151935669757191, -0.104516127560770, -0.050967744878460,
-   -0.104516127560770,  0.391935664055174,  0.000000000000000,  0.090000180000270,  0.040000000000000,  0.007741936219615,  0.351935664055174,  0.056129031890193,
-    0.090000180000270,  0.000000000000000,  0.090000180000270, -0.000000000000000,  0.000000000000000,  0.090000180000270,  0.000000000000000,  0.000000000000000,
-   -0.000000000000000,  0.090000180000270, -0.000000000000000,  0.090000180000270,  0.000000000000000, -0.000000000000000,  0.090000180000270,  0.000000000000000,
-   -0.020000000000000,  0.040000000000000,  0.000000000000000,  0.000000000000000,  0.010000000000000,  0.000000000000000,  0.040000000000000,  0.010000000000000,
-    0.151935669757191,  0.007741936219615,  0.090000180000270, -0.000000000000000,  0.000000000000000,  0.160967924878730,  0.007741936219615,  0.004516127560770,
-   -0.104516127560770,  0.351935664055174,  0.000000000000000,  0.090000180000270,  0.040000000000000,  0.007741936219615,  0.351935664055174,  0.056129031890193,
-   -0.050967744878460,  0.056129031890193,  0.000000000000000,  0.000000000000000,  0.010000000000000,  0.004516127560770,  0.056129031890193,  0.027741936219615;
-  KeyVector variables {x1, l2, x3};
-  JointMarginal joint_l2x1x3 = marginals.jointMarginalCovariance(variables);
-  EXPECT(assert_equal(Matrix(expected_l2x1x3.block(0,0,2,2)), Matrix(joint_l2x1x3(l2,l2)), 1e-6));
-  EXPECT(assert_equal(Matrix(expected_l2x1x3.block(2,0,3,2)), Matrix(joint_l2x1x3(x1,l2)), 1e-6));
-  EXPECT(assert_equal(Matrix(expected_l2x1x3.block(5,0,3,2)), Matrix(joint_l2x1x3(x3,l2)), 1e-6));
-
-  EXPECT(assert_equal(Matrix(expected_l2x1x3.block(0,2,2,3)), Matrix(joint_l2x1x3(l2,x1)), 1e-6));
-  EXPECT(assert_equal(Matrix(expected_l2x1x3.block(2,2,3,3)), Matrix(joint_l2x1x3(x1,x1)), 1e-6));
-  EXPECT(assert_equal(Matrix(expected_l2x1x3.block(5,2,3,3)), Matrix(joint_l2x1x3(x3,x1)), 1e-6));
-
-  EXPECT(assert_equal(Matrix(expected_l2x1x3.block(0,5,2,3)), Matrix(joint_l2x1x3(l2,x3)), 1e-6));
-  EXPECT(assert_equal(Matrix(expected_l2x1x3.block(2,5,3,3)), Matrix(joint_l2x1x3(x1,x3)), 1e-6));
-  EXPECT(assert_equal(Matrix(expected_l2x1x3.block(5,5,3,3)), Matrix(joint_l2x1x3(x3,x3)), 1e-6));
-
-  // Check joint marginals for 2 variables (different code path than >2 variable case above)
-  Matrix expected_l2x1(5,5);
-  expected_l2x1 <<
-    0.293871159514111, -0.104516127560770,  0.090000180000270, -0.000000000000000, -0.020000000000000,
-   -0.104516127560770,  0.391935664055174,  0.000000000000000,  0.090000180000270,  0.040000000000000,
-    0.090000180000270,  0.000000000000000,  0.090000180000270, -0.000000000000000,  0.000000000000000,
-   -0.000000000000000,  0.090000180000270, -0.000000000000000,  0.090000180000270,  0.000000000000000,
-   -0.020000000000000,  0.040000000000000,  0.000000000000000,  0.000000000000000,  0.010000000000000;
-  variables.resize(2);
-  variables[0] = l2;
-  variables[1] = x1;
-  JointMarginal joint_l2x1 = marginals.jointMarginalCovariance(variables);
-  EXPECT(assert_equal(Matrix(expected_l2x1.block(0,0,2,2)), Matrix(joint_l2x1(l2,l2)), 1e-6));
-  EXPECT(assert_equal(Matrix(expected_l2x1.block(2,0,3,2)), Matrix(joint_l2x1(x1,l2)), 1e-6));
-  EXPECT(assert_equal(Matrix(expected_l2x1.block(0,2,2,3)), Matrix(joint_l2x1(l2,x1)), 1e-6));
-  EXPECT(assert_equal(Matrix(expected_l2x1.block(2,2,3,3)), Matrix(joint_l2x1(x1,x1)), 1e-6));
-
-  // Check joint marginal for 1 variable (different code path than >1 variable cases above)
-  variables.resize(1);
-  variables[0] = x1;
-  JointMarginal joint_x1 = marginals.jointMarginalCovariance(variables);
-  EXPECT(assert_equal(expectedx1, Matrix(joint_l2x1(x1,x1)), 1e-6));
+  testMarginals(marginals);
+  testJointMarginals(marginals);
+
+  GaussianFactorGraph gfg = *graph.linearize(soln);
+  marginals = Marginals(gfg, soln_lin, Marginals::CHOLESKY);
+  testMarginals(marginals);
+  marginals = Marginals(gfg, soln_lin, Marginals::QR);
+  testMarginals(marginals);
+  testJointMarginals(marginals);
 }
 
 /* ************************************************************************* */
@@ -222,17 +237,26 @@ TEST(Marginals, order) {
       vals.at<Pose2>(3).bearing(vals.at<Point2>(101)),
       vals.at<Pose2>(3).range(vals.at<Point2>(101)), noiseModel::Unit::Create(2));
 
+  auto testMarginals = [&] (Marginals marginals, KeySet set) {
+    KeyVector keys(set.begin(), set.end());
+    JointMarginal joint = marginals.jointMarginalCovariance(keys);
+
+    LONGS_EQUAL(3, (long)joint(0,0).rows());
+    LONGS_EQUAL(3, (long)joint(1,1).rows());
+    LONGS_EQUAL(3, (long)joint(2,2).rows());
+    LONGS_EQUAL(3, (long)joint(3,3).rows());
+    LONGS_EQUAL(2, (long)joint(100,100).rows());
+    LONGS_EQUAL(2, (long)joint(101,101).rows());
+  };
+
   Marginals marginals(fg, vals);
   KeySet set = fg.keys();
-  KeyVector keys(set.begin(), set.end());
-  JointMarginal joint = marginals.jointMarginalCovariance(keys);
-
-  LONGS_EQUAL(3, (long)joint(0,0).rows());
-  LONGS_EQUAL(3, (long)joint(1,1).rows());
-  LONGS_EQUAL(3, (long)joint(2,2).rows());
-  LONGS_EQUAL(3, (long)joint(3,3).rows());
-  LONGS_EQUAL(2, (long)joint(100,100).rows());
-  LONGS_EQUAL(2, (long)joint(101,101).rows());
+  testMarginals(marginals, set);
+
+  GaussianFactorGraph gfg = *fg.linearize(vals);
+  marginals = Marginals(gfg, vals);
+  set = gfg.keys();
+  testMarginals(marginals, set);
 }
 
 /* ************************************************************************* */
From bc511a26eca5053097d1d09cd2b3931b1a95d282 Mon Sep 17 00:00:00 2001
From: Gerry Chen
Date: Wed, 9 Oct 2019 17:12:18 -0400
Subject: [PATCH 2/3] `optional` and `auto` modernization

---
 gtsam/nonlinear/Marginals.cpp | 17 ++++++++---------
 gtsam/nonlinear/Marginals.h   |  8 ++++----
 2 files changed, 12 insertions(+), 13 deletions(-)

diff --git a/gtsam/nonlinear/Marginals.cpp b/gtsam/nonlinear/Marginals.cpp
index 11e123f131..7f554b5e5e 100644
--- a/gtsam/nonlinear/Marginals.cpp
+++ b/gtsam/nonlinear/Marginals.cpp
@@ -27,7 +27,7 @@ namespace gtsam {
 
 /* ************************************************************************* */
 Marginals::Marginals(const NonlinearFactorGraph& graph, const Values& solution, Factorization factorization,
-                     EliminateableFactorGraph::OptionalOrdering ordering)
+                     boost::optional<Ordering> ordering)
     : values_(solution), factorization_(factorization) {
   gttic(MarginalsConstructor);
   graph_ = *graph.linearize(solution);
@@ -36,12 +36,12 @@ Marginals::Marginals(const NonlinearFactorGraph& graph, const Values& solution,
 
 /* ************************************************************************* */
 Marginals::Marginals(const GaussianFactorGraph& graph, const VectorValues& solution, Factorization factorization,
-                     EliminateableFactorGraph::OptionalOrdering ordering)
+                     boost::optional<Ordering> ordering)
     : graph_(graph), factorization_(factorization) {
   gttic(MarginalsConstructor);
   Values vals;
-  for (const VectorValues::KeyValuePair& v: solution) {
-    vals.insert(v.first, v.second);
+  for (const auto& keyValue: solution) {
+    vals.insert(keyValue.first, keyValue.second);
   }
   values_ = vals;
   computeBayesTree(ordering);
@@ -49,16 +49,15 @@ Marginals::Marginals(const GaussianFactorGraph& graph, const VectorValues& solut
 
 /* ************************************************************************* */
 Marginals::Marginals(const GaussianFactorGraph& graph, const Values& solution, Factorization factorization,
-                     EliminateableFactorGraph::OptionalOrdering ordering)
+                     boost::optional<Ordering> ordering)
     : graph_(graph), values_(solution), factorization_(factorization) {
   gttic(MarginalsConstructor);
   computeBayesTree(ordering);
 }
 
 /* ************************************************************************* */
-void Marginals::computeBayesTree(EliminateableFactorGraph::OptionalOrdering ordering) {
+void Marginals::computeBayesTree(boost::optional<Ordering> ordering) {
   // Compute BayesTree
-  factorization_ = factorization;
   if(factorization_ == CHOLESKY)
     bayesTree_ = *graph_.eliminateMultifrontal(ordering, EliminatePreferCholesky);
   else if(factorization_ == QR)
@@ -145,7 +144,7 @@ JointMarginal Marginals::jointMarginalInformation(const KeyVector& variables) co
   // Get dimensions from factor graph
   std::vector<size_t> dims;
   dims.reserve(variablesSorted.size());
-  for(Key key: variablesSorted) {
+  for(const auto& key: variablesSorted) {
     dims.push_back(values_.at(key).dim());
   }
 
@@ -162,7 +161,7 @@ VectorValues Marginals::optimize() const {
 void JointMarginal::print(const std::string& s, const KeyFormatter& formatter) const {
   cout << s << "Joint marginal on keys ";
   bool first = true;
-  for(Key key: keys_) {
+  for(const auto& key: keys_) {
     if(!first)
       cout << ", ";
     else
diff --git a/gtsam/nonlinear/Marginals.h b/gtsam/nonlinear/Marginals.h
index 54a290196d..4e59215448 100644
--- a/gtsam/nonlinear/Marginals.h
+++ b/gtsam/nonlinear/Marginals.h
@@ -58,7 +58,7 @@ class GTSAM_EXPORT Marginals {
    * @param ordering An optional variable ordering for elimination.
    */
   Marginals(const NonlinearFactorGraph& graph, const Values& solution, Factorization factorization = CHOLESKY,
-      EliminateableFactorGraph::OptionalOrdering ordering = boost::none);
+      boost::optional<Ordering> ordering = boost::none);
 
   /** Construct a marginals class from a linear factor graph.
    * @param graph The factor graph defining the full joint density on all variables.
@@ -67,7 +67,7 @@ class GTSAM_EXPORT Marginals {
    * @param ordering An optional variable ordering for elimination.
    */
   Marginals(const GaussianFactorGraph& graph, const Values& solution, Factorization factorization = CHOLESKY,
-      EliminateableFactorGraph::OptionalOrdering ordering = boost::none);
+      boost::optional<Ordering> ordering = boost::none);
 
   /** Construct a marginals class from a linear factor graph.
    * @param graph The factor graph defining the full joint density on all variables.
@@ -76,7 +76,7 @@ class GTSAM_EXPORT Marginals {
    * @param ordering An optional variable ordering for elimination.
    */
   Marginals(const GaussianFactorGraph& graph, const VectorValues& solution, Factorization factorization = CHOLESKY,
-      EliminateableFactorGraph::OptionalOrdering ordering = boost::none);
+      boost::optional<Ordering> ordering = boost::none);
 
   /** print */
   void print(const std::string& str = "Marginals: ", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;
@@ -103,7 +103,7 @@ class GTSAM_EXPORT Marginals {
 protected:
 
   /** Compute the Bayes Tree as a helper function to the constructor */
-  void computeBayesTree(EliminateableFactorGraph::OptionalOrdering ordering);
+  void computeBayesTree(boost::optional<Ordering> ordering);
 
 };
From 5900eaa8ec35467b41ca98fa6b5f289f8ed8a560 Mon Sep 17 00:00:00 2001
From: Gerry Chen
Date: Thu, 10 Oct 2019 01:02:38 -0400
Subject: [PATCH 3/3] Change ordering back to OptionalOrdering typedef

---
 gtsam/nonlinear/Marginals.cpp | 8 ++++----
 gtsam/nonlinear/Marginals.h   | 8 ++++----
 2 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/gtsam/nonlinear/Marginals.cpp b/gtsam/nonlinear/Marginals.cpp
index 7f554b5e5e..c24cb61433 100644
--- a/gtsam/nonlinear/Marginals.cpp
+++ b/gtsam/nonlinear/Marginals.cpp
@@ -27,7 +27,7 @@ namespace gtsam {
 
 /* ************************************************************************* */
 Marginals::Marginals(const NonlinearFactorGraph& graph, const Values& solution, Factorization factorization,
-                     boost::optional<Ordering> ordering)
+                     EliminateableFactorGraph::OptionalOrdering ordering)
     : values_(solution), factorization_(factorization) {
   gttic(MarginalsConstructor);
   graph_ = *graph.linearize(solution);
@@ -36,7 +36,7 @@ Marginals::Marginals(const NonlinearFactorGraph& graph, const Values& solution,
 
 /* ************************************************************************* */
 Marginals::Marginals(const GaussianFactorGraph& graph, const VectorValues& solution, Factorization factorization,
-                     boost::optional<Ordering> ordering)
+                     EliminateableFactorGraph::OptionalOrdering ordering)
     : graph_(graph), factorization_(factorization) {
   gttic(MarginalsConstructor);
   Values vals;
@@ -49,14 +49,14 @@ Marginals::Marginals(const GaussianFactorGraph& graph, const VectorValues& solut
 
 /* ************************************************************************* */
 Marginals::Marginals(const GaussianFactorGraph& graph, const Values& solution, Factorization factorization,
-                     boost::optional<Ordering> ordering)
+                     EliminateableFactorGraph::OptionalOrdering ordering)
     : graph_(graph), values_(solution), factorization_(factorization) {
   gttic(MarginalsConstructor);
   computeBayesTree(ordering);
 }
 
 /* ************************************************************************* */
-void Marginals::computeBayesTree(boost::optional<Ordering> ordering) {
+void Marginals::computeBayesTree(EliminateableFactorGraph::OptionalOrdering ordering) {
   // Compute BayesTree
   if(factorization_ == CHOLESKY)
     bayesTree_ = *graph_.eliminateMultifrontal(ordering, EliminatePreferCholesky);
   else if(factorization_ == QR)
diff --git a/gtsam/nonlinear/Marginals.h b/gtsam/nonlinear/Marginals.h
index 4e59215448..54a290196d 100644
--- a/gtsam/nonlinear/Marginals.h
+++ b/gtsam/nonlinear/Marginals.h
@@ -58,7 +58,7 @@ class GTSAM_EXPORT Marginals {
    * @param ordering An optional variable ordering for elimination.
    */
   Marginals(const NonlinearFactorGraph& graph, const Values& solution, Factorization factorization = CHOLESKY,
-      boost::optional<Ordering> ordering = boost::none);
+      EliminateableFactorGraph::OptionalOrdering ordering = boost::none);
 
   /** Construct a marginals class from a linear factor graph.
    * @param graph The factor graph defining the full joint density on all variables.
@@ -67,7 +67,7 @@ class GTSAM_EXPORT Marginals {
   * @param ordering An optional variable ordering for elimination.
   */
   Marginals(const GaussianFactorGraph& graph, const Values& solution, Factorization factorization = CHOLESKY,
-      boost::optional<Ordering> ordering = boost::none);
+      EliminateableFactorGraph::OptionalOrdering ordering = boost::none);
 
   /** Construct a marginals class from a linear factor graph.
    * @param graph The factor graph defining the full joint density on all variables.
@@ -76,7 +76,7 @@ class GTSAM_EXPORT Marginals {
    * @param ordering An optional variable ordering for elimination.
    */
   Marginals(const GaussianFactorGraph& graph, const VectorValues& solution, Factorization factorization = CHOLESKY,
-      boost::optional<Ordering> ordering = boost::none);
+      EliminateableFactorGraph::OptionalOrdering ordering = boost::none);
 
   /** print */
   void print(const std::string& str = "Marginals: ", const KeyFormatter& keyFormatter = DefaultKeyFormatter) const;
@@ -103,7 +103,7 @@ class GTSAM_EXPORT Marginals {
 protected:
 
   /** Compute the Bayes Tree as a helper function to the constructor */
-  void computeBayesTree(boost::optional<Ordering> ordering);
+  void computeBayesTree(EliminateableFactorGraph::OptionalOrdering ordering);
 
 };
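
Reviewer note: the snippet below is a condensed usage sketch of the Python API this series exposes, adapted from the new cython/gtsam/tests/test_GaussianFactorGraph.py in PATCH 1. It assumes the Cython toolbox is rebuilt with the gtsam.h changes so that gtsam.Marginals accepts a GaussianFactorGraph together with its VectorValues solution; the expected covariance values come from the test itself (EXPECTEDM = [1, 2, 3]).

    import numpy as np
    import gtsam

    # 1-D chain x0 -> x1 -> x2 with a prior on x0, all factors with unit-sigma noise
    graph = gtsam.GaussianFactorGraph()
    x0 = gtsam.symbol(ord('x'), 0)
    x1 = gtsam.symbol(ord('x'), 1)
    x2 = gtsam.symbol(ord('x'), 2)
    model = gtsam.noiseModel_Diagonal.Sigmas(np.ones(1))
    graph.add(x1, np.eye(1), x0, -np.eye(1), np.ones(1), model)
    graph.add(x2, np.eye(1), x1, -np.eye(1), 2*np.ones(1), model)
    graph.add(x0, np.eye(1), np.zeros(1), model)

    result = graph.optimize()                   # VectorValues solution of the linear graph
    marginals = gtsam.Marginals(graph, result)  # linear-graph constructor added in PATCH 1
    print(marginals.marginalCovariance(x1))     # 1x1 covariance; 2.0 for this chain (EXPECTEDM[1])

As in test_linearMarginalization, this path marginalizes the GaussianFactorGraph directly and avoids the LinearContainerFactor round trip that test_convertNonlinear exercises.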