Commit

Merge pull request #20 from varunagrawal/fix/hybrid-resize
Hybrid Resize
varunagrawal authored Feb 17, 2022
2 parents 9da0e55 + ae02498 commit 4fe4c2f
Showing 7 changed files with 180 additions and 81 deletions.
44 changes: 23 additions & 21 deletions gtsam/hybrid/GaussianHybridFactorGraph.cpp
@@ -37,7 +37,8 @@ template class EliminateableFactorGraph<GaussianHybridFactorGraph>;

void GaussianHybridFactorGraph::print(
const string& str, const gtsam::KeyFormatter& keyFormatter) const {
Base::print(str, keyFormatter);
std::cout << (str.empty() ? str : str + " ") << std::endl;
Base::print("", keyFormatter);
factorGraph_.print("GaussianGraph", keyFormatter);
}

@@ -100,19 +101,20 @@ ostream& operator<<(ostream& os,
}

// The function type that does a single elimination step on a variable.
pair<AbstractConditional::shared_ptr, boost::shared_ptr<Factor>> EliminateHybrid(
const GaussianHybridFactorGraph& factors, const Ordering& ordering) {
pair<AbstractConditional::shared_ptr, boost::shared_ptr<Factor>>
EliminateHybrid(const GaussianHybridFactorGraph& factors,
const Ordering& ordering) {
// STEP 1: SUM
// Create a new decision tree with all factors gathered at leaves.
auto sum = factors.sum();

// zero out all sums with null ptrs
auto zeroOut = [](const GaussianFactorGraph &gfg) {
bool hasNull = std::any_of(gfg.begin(),
gfg.end(),
[](const GaussianFactor::shared_ptr &ptr) { return !ptr; });
auto zeroOut = [](const GaussianFactorGraph& gfg) {
bool hasNull =
std::any_of(gfg.begin(), gfg.end(),
[](const GaussianFactor::shared_ptr& ptr) { return !ptr; });

return hasNull?GaussianFactorGraph():gfg;
return hasNull ? GaussianFactorGraph() : gfg;
};

// TODO(fan): Now let's assume that all continuous will be eliminated first!
@@ -123,8 +125,8 @@ pair<AbstractConditional::shared_ptr, boost::shared_ptr<Factor>> EliminateHybrid
dfg.push_back(factors.discreteGraph());

auto dbn = EliminateForMPE(dfg, ordering);
auto &df = dbn.first;
auto &newFactor = dbn.second;
auto& df = dbn.first;
auto& newFactor = dbn.second;
return {df, newFactor};
}

@@ -140,17 +142,17 @@ pair<AbstractConditional::shared_ptr, boost::shared_ptr<Factor>> EliminateHybrid

KeyVector keysOfEliminated; // Not the ordering
KeyVector keysOfSeparator; // TODO(frank): Is this just (keys - ordering)?
auto eliminate =
[&](const GaussianFactorGraph &graph) -> GaussianFactorGraph::EliminationResult {
if (graph.empty()) return {nullptr, nullptr};
auto result = EliminatePreferCholesky(graph, ordering);
if (keysOfEliminated.empty())
keysOfEliminated =
result.first->keys(); // Initialize the keysOfEliminated to be the
// keysOfEliminated of the GaussianConditional
if (keysOfSeparator.empty()) keysOfSeparator = result.second->keys();
return result;
};
auto eliminate = [&](const GaussianFactorGraph& graph)
-> GaussianFactorGraph::EliminationResult {
if (graph.empty()) return {nullptr, nullptr};
auto result = EliminatePreferCholesky(graph, ordering);
if (keysOfEliminated.empty())
keysOfEliminated =
result.first->keys(); // Initialize the keysOfEliminated to be the
// keysOfEliminated of the GaussianConditional
if (keysOfSeparator.empty()) keysOfSeparator = result.second->keys();
return result;
};
DecisionTree<Key, Pair> eliminationResults(sum, eliminate);

// STEP 3: Create result
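A minimal standard-C++ sketch (not GTSAM code; Factor and FactorGraph below are hypothetical stand-ins) of the zeroOut guard in the hunk above: a gathered factor graph that contains any null factor pointer is treated as empty before elimination.

#include <algorithm>
#include <iostream>
#include <memory>
#include <vector>

using Factor = int;  // stand-in for GaussianFactor
using FactorGraph = std::vector<std::shared_ptr<Factor>>;  // stand-in for GaussianFactorGraph

// Return an empty graph if any factor pointer is null, otherwise the graph itself.
FactorGraph zeroOut(const FactorGraph& gfg) {
  const bool hasNull =
      std::any_of(gfg.begin(), gfg.end(),
                  [](const std::shared_ptr<Factor>& ptr) { return !ptr; });
  return hasNull ? FactorGraph() : gfg;
}

int main() {
  FactorGraph withNull = {std::make_shared<Factor>(1), nullptr};
  FactorGraph allValid = {std::make_shared<Factor>(1), std::make_shared<Factor>(2)};
  std::cout << zeroOut(withNull).size() << " " << zeroOut(allValid).size() << "\n";  // prints "0 2"
}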
11 changes: 11 additions & 0 deletions gtsam/hybrid/HybridFactorGraph.h
@@ -188,6 +188,17 @@ class HybridFactorGraph : public FactorGraph<Factor> {
/// The total number of factors in the DC factor graph.
size_t nrDcFactors() const { return dcGraph_.size(); }

/** Directly resize the number of factors in the graph. If the new size is
* less than the original, factors at the end will be removed. If the new
* size is larger than the original, null factors will be appended.
*/
void resize(size_t size) override {
Base::resize(size);
factorGraph_.resize(size);
discreteGraph_.resize(size);
dcGraph_.resize(size);
}

/// Get all the discrete keys in the hybrid factor graph.
virtual DiscreteKeys discreteKeys() const {
DiscreteKeys result;
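The resize override added above keeps the nonlinear/discrete/DC sub-graphs the same length as the aggregate graph. A minimal standard-C++ sketch of that delegation pattern (Base and Hybrid below are hypothetical stand-ins, not the GTSAM classes):

#include <cstddef>
#include <iostream>
#include <vector>

struct Base {
  std::vector<int> factors;  // aggregate container, like FactorGraph<Factor>
  virtual void resize(std::size_t n) { factors.resize(n); }
  virtual ~Base() = default;
};

struct Hybrid : Base {
  // Stand-ins for factorGraph_, discreteGraph_, dcGraph_.
  std::vector<int> sub1, sub2, sub3;
  void resize(std::size_t n) override {
    Base::resize(n);  // resize the aggregate view
    sub1.resize(n);   // ...and keep every sub-container consistent
    sub2.resize(n);
    sub3.resize(n);
  }
};

int main() {
  Hybrid h;
  h.factors = {1, 2, 3};
  h.sub1 = {1};
  h.sub2 = {2};
  h.sub3 = {3};
  h.resize(0);  // clears everything, as the Resize tests below exercise
  std::cout << h.factors.size() << h.sub1.size() << h.sub2.size() << h.sub3.size() << "\n";  // 0000
}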
51 changes: 29 additions & 22 deletions gtsam/hybrid/IncrementalHybrid.cpp
@@ -18,8 +18,9 @@
*/

#include <gtsam/hybrid/IncrementalHybrid.h>
#include <unordered_set>

#include <algorithm>
#include <unordered_set>

void gtsam::IncrementalHybrid::update(gtsam::GaussianHybridFactorGraph graph,
const gtsam::Ordering &ordering,
@@ -32,23 +33,30 @@ void gtsam::IncrementalHybrid::update(gtsam::GaussianHybridFactorGraph graph,
for (auto &&conditional : *hybridBayesNet_) {
for (auto &key : conditional->frontals()) {
if (allVars.find(key) != allVars.end()) {
if (auto
gf = boost::dynamic_pointer_cast<GaussianMixture>(conditional)) {
if (auto gf =
boost::dynamic_pointer_cast<GaussianMixture>(conditional)) {
graph.push_back(gf);
} else if (auto df =
boost::dynamic_pointer_cast<DiscreteConditional>(conditional)) {
} else if (auto df = boost::dynamic_pointer_cast<DiscreteConditional>(
conditional)) {
graph.push_back(df);
}
break;
}
}
}
} else {
// Initialize an empty HybridBayesNet
hybridBayesNet_ = boost::make_shared<HybridBayesNet>();
}

// Eliminate partially.
std::tie(hybridBayesNet_, remainingFactorGraph_) =
HybridBayesNet::shared_ptr bayesNetFragment;
std::tie(bayesNetFragment, remainingFactorGraph_) =
graph.eliminatePartialSequential(ordering);

// Add the partial bayes net to the posterior bayes net.
hybridBayesNet_->push_back<HybridBayesNet>(*bayesNetFragment);

// Prune
if (maxNrLeaves) {
const auto N = *maxNrLeaves;
@@ -62,17 +70,15 @@ void gtsam::IncrementalHybrid::update(gtsam::GaussianHybridFactorGraph graph,
// Let's assume that the structure of the last discrete density will be the
// same as the last continuous
std::vector<double> probabilities;
// TODO(fan): The number of probabilities can be lower than the actual number of choices
discreteFactor->visit([&](const double &prob) {
probabilities.emplace_back(prob);
});
// TODO(fan): The number of probabilities can be lower than the actual
// number of choices
discreteFactor->visit(
[&](const double &prob) { probabilities.emplace_back(prob); });

if (probabilities.size() < N) return;

std::nth_element(probabilities.begin(),
probabilities.begin() + N,
probabilities.end(),
std::greater<double>{});
std::nth_element(probabilities.begin(), probabilities.begin() + N,
probabilities.end(), std::greater<double>{});

auto thresholdValue = probabilities[N - 1];

@@ -83,14 +89,16 @@ void gtsam::IncrementalHybrid::update(gtsam::GaussianHybridFactorGraph graph,
DecisionTree<Key, double> thresholded(*discreteFactor, threshold);

// Create a new factor with pruned tree
// DecisionTreeFactor newFactor(discreteFactor->discreteKeys(), thresholded);
// DecisionTreeFactor newFactor(discreteFactor->discreteKeys(),
// thresholded);
discreteFactor->root_ = thresholded.root_;

std::vector<std::pair<DiscreteValues, double>> assignments = discreteFactor->enumerate();
std::vector<std::pair<DiscreteValues, double>> assignments =
discreteFactor->enumerate();

// Loop over all assignments and create a vector of GaussianConditionals
std::vector<GaussianFactor::shared_ptr> prunedConditionals;
for (auto && av : assignments) {
for (auto &&av : assignments) {
const DiscreteValues &assignment = av.first;
const double value = av.second;

@@ -101,11 +109,10 @@ void gtsam::IncrementalHybrid::update(gtsam::GaussianHybridFactorGraph graph,
}
}

GaussianMixture::Factors prunedConditionalsTree(
lastDensity->discreteKeys(),
prunedConditionals
);
GaussianMixture::Factors prunedConditionalsTree(lastDensity->discreteKeys(),
prunedConditionals);

hybridBayesNet_->atGaussian(hybridBayesNet_->size() - 1)->factors_ = prunedConditionalsTree;
hybridBayesNet_->atGaussian(hybridBayesNet_->size() - 1)->factors_ =
prunedConditionalsTree;
}
}
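
A minimal standard-C++ sketch of the pruning step above: gather the leaf probabilities, locate the N-th largest one with std::nth_element, and zero out every leaf below that threshold. The probability values and N here are made up for illustration:

#include <algorithm>
#include <cstddef>
#include <functional>
#include <iostream>
#include <vector>

int main() {
  std::vector<double> probabilities = {0.05, 0.40, 0.10, 0.30, 0.15};
  const std::size_t N = 2;  // maxNrLeaves: keep at most the two most likely leaves

  // Partially sort in descending order so index N-1 holds the N-th largest value.
  std::vector<double> sorted = probabilities;
  std::nth_element(sorted.begin(), sorted.begin() + (N - 1), sorted.end(),
                   std::greater<double>{});
  const double thresholdValue = sorted[N - 1];  // 0.30

  // Zero out leaves below the threshold, mirroring the DecisionTree
  // `threshold` functor in the hunk above (ties at the threshold are kept).
  for (double& p : probabilities)
    if (p < thresholdValue) p = 0.0;

  for (double p : probabilities) std::cout << p << " ";  // 0 0.4 0 0.3 0
  std::cout << "\n";
}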
5 changes: 4 additions & 1 deletion gtsam/hybrid/NonlinearHybridFactorGraph.cpp
@@ -21,9 +21,11 @@ using namespace std;

namespace gtsam {

/* ************************************************************************* */
void NonlinearHybridFactorGraph::print(
const string& str, const gtsam::KeyFormatter& keyFormatter) const {
Base::print(str, keyFormatter);
std::cout << (str.empty() ? str : str + " ") << std::endl;
Base::print("", keyFormatter);
factorGraph_.print("NonlinearFactorGraph", keyFormatter);
}

@@ -32,6 +34,7 @@ bool NonlinearHybridFactorGraph::equals(const NonlinearHybridFactorGraph& other,
return Base::equals(other, tol);
}

/* ************************************************************************* */
GaussianHybridFactorGraph NonlinearHybridFactorGraph::linearize(
const Values& continuousValues) const {
// linearize the continuous factors
116 changes: 97 additions & 19 deletions gtsam/hybrid/tests/testHybridFactorGraph.cpp
@@ -18,8 +18,6 @@
* @date December 2021
*/

#include "Switching.h"

#include <gtsam/base/TestableAssertions.h>
#include <gtsam/base/utilities.h>
#include <gtsam/discrete/DiscreteBayesNet.h>
@@ -35,6 +33,8 @@

#include <numeric>

#include "Switching.h"

// Include for test suite
#include <CppUnitLite/TestHarness.h>

@@ -74,6 +74,75 @@ TEST(HybridFactorGraph, GaussianFactorGraph) {
EXPECT_LONGS_EQUAL(2, dcmfg.gaussianGraph().size());
}

/* ************************************************************************** */
/// Test that the resize method works correctly for a
/// NonlinearHybridFactorGraph.
TEST(NonlinearHybridFactorGraph, Resize) {
NonlinearHybridFactorGraph fg;
auto nonlinearFactor = boost::make_shared<BetweenFactor<double>>();
fg.push_back(nonlinearFactor);

auto discreteFactor = boost::make_shared<DecisionTreeFactor>();
fg.push_back(discreteFactor);

auto dcFactor = boost::make_shared<DCMixtureFactor<MotionModel>>();
fg.push_back(dcFactor);

EXPECT_LONGS_EQUAL(fg.dcGraph().size(), 1);
EXPECT_LONGS_EQUAL(fg.discreteGraph().size(), 1);
EXPECT_LONGS_EQUAL(fg.nonlinearGraph().size(), 1);

EXPECT_LONGS_EQUAL(fg.size(), 3);

fg.resize(0);
EXPECT_LONGS_EQUAL(fg.dcGraph().size(), 0);
EXPECT_LONGS_EQUAL(fg.discreteGraph().size(), 0);
EXPECT_LONGS_EQUAL(fg.nonlinearGraph().size(), 0);

EXPECT_LONGS_EQUAL(fg.size(), 0);
}

/* ************************************************************************** */
/// Test that the resize method works correctly for a
/// GaussianHybridFactorGraph.
TEST(GaussianHybridFactorGraph, Resize) {
NonlinearHybridFactorGraph nhfg;
auto nonlinearFactor = boost::make_shared<BetweenFactor<double>>(
X(0), X(1), 0.0, Isotropic::Sigma(1, 0.1));
nhfg.push_back(nonlinearFactor);
auto discreteFactor = boost::make_shared<DecisionTreeFactor>();
nhfg.push_back(discreteFactor);

KeyVector contKeys = {X(0), X(1)};
auto noise_model = noiseModel::Isotropic::Sigma(1, 1.0);
auto still = boost::make_shared<MotionModel>(X(0), X(1), 0.0, noise_model),
moving = boost::make_shared<MotionModel>(X(0), X(1), 1.0, noise_model);
std::vector<MotionModel::shared_ptr> components = {still, moving};
auto dcFactor = boost::make_shared<DCMixtureFactor<MotionModel>>(
contKeys, DiscreteKeys{gtsam::DiscreteKey(M(1), 2)}, components);
nhfg.push_back(dcFactor);

Values linearizationPoint;
linearizationPoint.insert<double>(X(0), 0);
linearizationPoint.insert<double>(X(1), 1);

// Generate `GaussianHybridFactorGraph` by linearizing
GaussianHybridFactorGraph fg = nhfg.linearize(linearizationPoint);

EXPECT_LONGS_EQUAL(fg.dcGraph().size(), 1);
EXPECT_LONGS_EQUAL(fg.discreteGraph().size(), 1);
EXPECT_LONGS_EQUAL(fg.gaussianGraph().size(), 1);

EXPECT_LONGS_EQUAL(fg.size(), 3);

fg.resize(0);
EXPECT_LONGS_EQUAL(fg.dcGraph().size(), 0);
EXPECT_LONGS_EQUAL(fg.discreteGraph().size(), 0);
EXPECT_LONGS_EQUAL(fg.gaussianGraph().size(), 0);

EXPECT_LONGS_EQUAL(fg.size(), 0);
}

/* ****************************************************************************
* Test push_back on HFG makes the correct distinction.
*/
@@ -278,7 +347,8 @@ TEST(DCGaussianElimination, EliminateHybrid_2_Variable) {
std::tie(abstractConditionalMixture, factorOnModes) =
EliminateHybrid(factors, ordering);

auto gaussianConditionalMixture = dynamic_pointer_cast<GaussianMixture>(abstractConditionalMixture);
auto gaussianConditionalMixture =
dynamic_pointer_cast<GaussianMixture>(abstractConditionalMixture);

CHECK(gaussianConditionalMixture);
EXPECT_LONGS_EQUAL(
@@ -367,11 +437,11 @@ TEST_UNSAFE(HybridFactorGraph, Partial_Elimination) {
// GTSAM_PRINT(*remainingFactorGraph); // HybridFactorGraph
EXPECT_LONGS_EQUAL(3, remainingFactorGraph->size());
EXPECT(remainingFactorGraph->discreteGraph().at(0)->keys() ==
KeyVector({M(1)}));
KeyVector({M(1)}));
EXPECT(remainingFactorGraph->discreteGraph().at(1)->keys() ==
KeyVector({M(2), M(1)}));
KeyVector({M(2), M(1)}));
EXPECT(remainingFactorGraph->discreteGraph().at(2)->keys() ==
KeyVector({M(2), M(1)}));
KeyVector({M(2), M(1)}));
}

/* ****************************************************************************/
@@ -425,13 +495,13 @@ TEST_UNSAFE(HybridFactorGraph, Full_Elimination) {
// P(m1 | m2)
EXPECT(hybridBayesNet->at(3)->frontals() == KeyVector{M(1)});
EXPECT(hybridBayesNet->at(3)->parents() == KeyVector({M(2)}));
EXPECT(dynamic_pointer_cast<DiscreteConditional>(hybridBayesNet->at(3))->equals(
*discreteBayesNet.at(0)));
EXPECT(dynamic_pointer_cast<DiscreteConditional>(hybridBayesNet->at(3))
->equals(*discreteBayesNet.at(0)));
// P(m2)
EXPECT(hybridBayesNet->at(4)->frontals() == KeyVector{M(2)});
EXPECT_LONGS_EQUAL(0, hybridBayesNet->at(4)->nrParents());
EXPECT(dynamic_pointer_cast<DiscreteConditional>(hybridBayesNet->at(4))->equals(
*discreteBayesNet.at(1)));
EXPECT(dynamic_pointer_cast<DiscreteConditional>(hybridBayesNet->at(4))
->equals(*discreteBayesNet.at(1)));
}

/* ****************************************************************************/
@@ -452,20 +522,28 @@ TEST(HybridFactorGraph, Printing) {
linearizedFactorGraph.eliminatePartialSequential(ordering);

string expected_hybridFactorGraph =
"size: 8\nDiscreteFactorGraph\n"
"\nsize: 8\nDiscreteFactorGraph\n"
"size: 2\nfactor 0: P( m1 ):\n"
" Leaf 0.5\n\nfactor 1: P( m2 | m1 ):\n"
" Choice(m2) \n 0 Choice(m1) \n 0 0 Leaf 0.3333\n 0 1 Leaf 0.6\n"
" 1 Choice(m1) \n 1 0 Leaf 0.6667\n 1 1 Leaf 0.4\n\nDCFactorGraph \n"
"size: 2\nfactor 0: [ x1 x2; m1 ]{\n Choice(m1) \n 0 Leaf Jacobian factor on 2 keys: \n"
" A[x1] = [\n\t-1\n]\n A[x2] = [\n\t1\n]\n b = [ -1 ]\n No noise model\n\n\n"
" 1 Leaf Jacobian factor on 2 keys: \n A[x1] = [\n\t-1\n]\n A[x2] = [\n\t1\n]\n b = [ -0 ]\n"
"size: 2\nfactor 0: [ x1 x2; m1 ]{\n Choice(m1) \n 0 Leaf Jacobian "
"factor on 2 keys: \n"
" A[x1] = [\n\t-1\n]\n A[x2] = [\n\t1\n]\n b = [ -1 ]\n No noise "
"model\n\n\n"
" 1 Leaf Jacobian factor on 2 keys: \n A[x1] = [\n\t-1\n]\n A[x2] = "
"[\n\t1\n]\n b = [ -0 ]\n"
" No noise model\n\n\n}\nfactor 1: [ x2 x3; m2 ]{\n Choice(m2) \n"
" 0 Leaf Jacobian factor on 2 keys: \n A[x2] = [\n\t-1\n]\n A[x3] = [\n\t1\n]\n b = [ -1 ]\n"
" No noise model\n\n\n 1 Leaf Jacobian factor on 2 keys: \n A[x2] = [\n\t-1\n]\n A[x3] = [\n\t1\n]\n"
" b = [ -0 ]\n No noise model\n\n\n}\nGaussianGraph \nsize: 4\nfactor 0: \n A[x1] = [\n\t10\n]\n"
" b = [ -10 ]\n No noise model\nfactor 1: \n A[x1] = [\n\t10\n]\n b = [ -10 ]\n No noise model\n"
"factor 2: \n A[x2] = [\n\t10\n]\n b = [ -10 ]\n No noise model\nfactor 3: \n A[x3] = [\n\t10\n]\n"
" 0 Leaf Jacobian factor on 2 keys: \n A[x2] = [\n\t-1\n]\n A[x3] = "
"[\n\t1\n]\n b = [ -1 ]\n"
" No noise model\n\n\n 1 Leaf Jacobian factor on 2 keys: \n A[x2] = "
"[\n\t-1\n]\n A[x3] = [\n\t1\n]\n"
" b = [ -0 ]\n No noise model\n\n\n}\nGaussianGraph \nsize: 4\nfactor "
"0: \n A[x1] = [\n\t10\n]\n"
" b = [ -10 ]\n No noise model\nfactor 1: \n A[x1] = [\n\t10\n]\n b "
"= [ -10 ]\n No noise model\n"
"factor 2: \n A[x2] = [\n\t10\n]\n b = [ -10 ]\n No noise "
"model\nfactor 3: \n A[x3] = [\n\t10\n]\n"
" b = [ -10 ]\n No noise model\n";
EXPECT(assert_print_equal(expected_hybridFactorGraph, linearizedFactorGraph));
