Commit 7f8bd8a
Merge pull request #1311 from borglab/hybrid/switching-0-index
varunagrawal authored Nov 2, 2022
2 parents 0b79399 + ebb29ef commit 7f8bd8a
Showing 8 changed files with 327 additions and 317 deletions.
42 changes: 26 additions & 16 deletions gtsam/hybrid/tests/Switching.h
@@ -134,26 +134,26 @@ struct Switching {
   Switching(size_t K, double between_sigma = 1.0, double prior_sigma = 0.1,
             std::vector<double> measurements = {})
       : K(K) {
-    // Create DiscreteKeys for binary K modes, modes[0] will not be used.
-    for (size_t k = 0; k <= K; k++) {
+    // Create DiscreteKeys for binary K modes.
+    for (size_t k = 0; k < K; k++) {
       modes.emplace_back(M(k), 2);
     }

     // If measurements are not provided, we just have the robot moving forward.
     if (measurements.size() == 0) {
-      for (size_t k = 1; k <= K; k++) {
-        measurements.push_back(k - 1);
+      for (size_t k = 0; k < K; k++) {
+        measurements.push_back(k);
       }
     }

     // Create hybrid factor graph.
     // Add a prior on X(1).
     auto prior = boost::make_shared<PriorFactor<double>>(
-        X(1), measurements.at(0), noiseModel::Isotropic::Sigma(1, prior_sigma));
+        X(0), measurements.at(0), noiseModel::Isotropic::Sigma(1, prior_sigma));
     nonlinearFactorGraph.push_nonlinear(prior);

     // Add "motion models".
-    for (size_t k = 1; k < K; k++) {
+    for (size_t k = 0; k < K - 1; k++) {
       KeyVector keys = {X(k), X(k + 1)};
       auto motion_models = motionModels(k, between_sigma);
       std::vector<NonlinearFactor::shared_ptr> components;
@@ -166,17 +166,17 @@ struct Switching {

     // Add measurement factors
     auto measurement_noise = noiseModel::Isotropic::Sigma(1, prior_sigma);
-    for (size_t k = 2; k <= K; k++) {
+    for (size_t k = 1; k < K; k++) {
       nonlinearFactorGraph.emplace_nonlinear<PriorFactor<double>>(
-          X(k), measurements.at(k - 1), measurement_noise);
+          X(k), measurements.at(k), measurement_noise);
     }

     // Add "mode chain"
     addModeChain(&nonlinearFactorGraph);

     // Create the linearization point.
-    for (size_t k = 1; k <= K; k++) {
-      linearizationPoint.insert<double>(X(k), static_cast<double>(k));
+    for (size_t k = 0; k < K; k++) {
+      linearizationPoint.insert<double>(X(k), static_cast<double>(k + 1));
     }

     // The ground truth is robot moving forward
@@ -195,23 +195,33 @@ struct Switching {
     return {still, moving};
   }

-  // Add "mode chain" to HybridNonlinearFactorGraph
+  /**
+   * @brief Add "mode chain" to HybridNonlinearFactorGraph from M(0) to M(K-2).
+   * E.g. if K=4, we want M0, M1 and M2.
+   *
+   * @param fg The nonlinear factor graph to which the mode chain is added.
+   */
   void addModeChain(HybridNonlinearFactorGraph *fg) {
-    auto prior = boost::make_shared<DiscreteDistribution>(modes[1], "1/1");
+    auto prior = boost::make_shared<DiscreteDistribution>(modes[0], "1/1");
     fg->push_discrete(prior);
-    for (size_t k = 1; k < K - 1; k++) {
+    for (size_t k = 0; k < K - 2; k++) {
       auto parents = {modes[k]};
       auto conditional = boost::make_shared<DiscreteConditional>(
           modes[k + 1], parents, "1/2 3/2");
       fg->push_discrete(conditional);
     }
   }

-  // Add "mode chain" to HybridGaussianFactorGraph
+  /**
+   * @brief Add "mode chain" to HybridGaussianFactorGraph from M(0) to M(K-2).
+   * E.g. if K=4, we want M0, M1 and M2.
+   *
+   * @param fg The gaussian factor graph to which the mode chain is added.
+   */
   void addModeChain(HybridGaussianFactorGraph *fg) {
-    auto prior = boost::make_shared<DiscreteDistribution>(modes[1], "1/1");
+    auto prior = boost::make_shared<DiscreteDistribution>(modes[0], "1/1");
     fg->push_discrete(prior);
-    for (size_t k = 1; k < K - 1; k++) {
+    for (size_t k = 0; k < K - 2; k++) {
       auto parents = {modes[k]};
       auto conditional = boost::make_shared<DiscreteConditional>(
           modes[k + 1], parents, "1/2 3/2");
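
For orientation (not part of the commit): a minimal standalone C++ sketch of the key layout the re-indexed fixture produces, here for K = 4. The states, modes, and mode-chain factorization follow the addModeChain docstring above; the printing program itself is illustrative only.

#include <cstddef>
#include <iostream>

int main() {
  const std::size_t K = 4;  // as in Switching(4)
  // Continuous states, one per time step, now 0-indexed: X(0) .. X(K-1).
  for (std::size_t k = 0; k < K; k++) std::cout << "X(" << k << ") ";
  std::cout << "\n";
  // Binary modes, one per motion segment: M(0) .. M(K-2).
  for (std::size_t k = 0; k + 1 < K; k++) std::cout << "M(" << k << ") ";
  std::cout << "\n";
  // Mode chain: uniform prior "1/1" on M(0), then P(M(k+1) | M(k)) = "1/2 3/2".
  std::cout << "P(M(0))";
  for (std::size_t k = 0; k + 2 < K; k++)
    std::cout << " P(M(" << k + 1 << ") | M(" << k << "))";
  std::cout << "\n";
  return 0;
}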
32 changes: 16 additions & 16 deletions gtsam/hybrid/tests/testHybridBayesNet.cpp
@@ -82,9 +82,9 @@ TEST(HybridBayesNet, Choose) {
       s.linearizedFactorGraph.eliminatePartialSequential(ordering);

   DiscreteValues assignment;
+  assignment[M(0)] = 1;
   assignment[M(1)] = 1;
-  assignment[M(2)] = 1;
-  assignment[M(3)] = 0;
+  assignment[M(2)] = 0;

   GaussianBayesNet gbn = hybridBayesNet->choose(assignment);

@@ -120,20 +120,20 @@ TEST(HybridBayesNet, OptimizeAssignment) {
       s.linearizedFactorGraph.eliminatePartialSequential(ordering);

   DiscreteValues assignment;
+  assignment[M(0)] = 1;
   assignment[M(1)] = 1;
   assignment[M(2)] = 1;
-  assignment[M(3)] = 1;

   VectorValues delta = hybridBayesNet->optimize(assignment);

   // The linearization point has the same value as the key index,
-  // e.g. X(1) = 1, X(2) = 2,
+  // e.g. X(0) = 1, X(1) = 2,
   // but the factors specify X(k) = k-1, so delta should be -1.
   VectorValues expected_delta;
+  expected_delta.insert(make_pair(X(0), -Vector1::Ones()));
   expected_delta.insert(make_pair(X(1), -Vector1::Ones()));
   expected_delta.insert(make_pair(X(2), -Vector1::Ones()));
   expected_delta.insert(make_pair(X(3), -Vector1::Ones()));
-  expected_delta.insert(make_pair(X(4), -Vector1::Ones()));

   EXPECT(assert_equal(expected_delta, delta));
 }
@@ -150,16 +150,16 @@ TEST(HybridBayesNet, Optimize) {
   HybridValues delta = hybridBayesNet->optimize();

   DiscreteValues expectedAssignment;
-  expectedAssignment[M(1)] = 1;
-  expectedAssignment[M(2)] = 0;
-  expectedAssignment[M(3)] = 1;
+  expectedAssignment[M(0)] = 1;
+  expectedAssignment[M(1)] = 0;
+  expectedAssignment[M(2)] = 1;
   EXPECT(assert_equal(expectedAssignment, delta.discrete()));

   VectorValues expectedValues;
-  expectedValues.insert(X(1), -0.999904 * Vector1::Ones());
-  expectedValues.insert(X(2), -0.99029 * Vector1::Ones());
-  expectedValues.insert(X(3), -1.00971 * Vector1::Ones());
-  expectedValues.insert(X(4), -1.0001 * Vector1::Ones());
+  expectedValues.insert(X(0), -0.999904 * Vector1::Ones());
+  expectedValues.insert(X(1), -0.99029 * Vector1::Ones());
+  expectedValues.insert(X(2), -1.00971 * Vector1::Ones());
+  expectedValues.insert(X(3), -1.0001 * Vector1::Ones());

   EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
 }
@@ -175,10 +175,10 @@ TEST(HybridBayesNet, OptimizeMultifrontal) {
   HybridValues delta = hybridBayesTree->optimize();

   VectorValues expectedValues;
-  expectedValues.insert(X(1), -0.999904 * Vector1::Ones());
-  expectedValues.insert(X(2), -0.99029 * Vector1::Ones());
-  expectedValues.insert(X(3), -1.00971 * Vector1::Ones());
-  expectedValues.insert(X(4), -1.0001 * Vector1::Ones());
+  expectedValues.insert(X(0), -0.999904 * Vector1::Ones());
+  expectedValues.insert(X(1), -0.99029 * Vector1::Ones());
+  expectedValues.insert(X(2), -1.00971 * Vector1::Ones());
+  expectedValues.insert(X(3), -1.0001 * Vector1::Ones());

   EXPECT(assert_equal(expectedValues, delta.continuous(), 1e-5));
 }
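
A quick sanity check on the expected deltas above (a standalone sketch, not from the commit): the fixture linearizes at X(k) = k + 1 while its default measurements put X(k) at k, so every continuous update is exactly -1.

#include <cassert>
#include <cstddef>

int main() {
  const std::size_t K = 4;
  for (std::size_t k = 0; k < K; k++) {
    const double lin_point = static_cast<double>(k + 1);  // linearizationPoint value
    const double measured = static_cast<double>(k);       // default measurement
    assert(measured - lin_point == -1.0);                 // the expected delta
  }
  return 0;
}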
8 changes: 4 additions & 4 deletions gtsam/hybrid/tests/testHybridBayesTree.cpp
@@ -60,26 +60,26 @@ TEST(HybridBayesTree, OptimizeAssignment) {
   isam.update(graph1);

   DiscreteValues assignment;
+  assignment[M(0)] = 1;
   assignment[M(1)] = 1;
   assignment[M(2)] = 1;
-  assignment[M(3)] = 1;

   VectorValues delta = isam.optimize(assignment);

   // The linearization point has the same value as the key index,
   // e.g. X(1) = 1, X(2) = 2,
   // but the factors specify X(k) = k-1, so delta should be -1.
   VectorValues expected_delta;
+  expected_delta.insert(make_pair(X(0), -Vector1::Ones()));
   expected_delta.insert(make_pair(X(1), -Vector1::Ones()));
   expected_delta.insert(make_pair(X(2), -Vector1::Ones()));
   expected_delta.insert(make_pair(X(3), -Vector1::Ones()));
-  expected_delta.insert(make_pair(X(4), -Vector1::Ones()));

   EXPECT(assert_equal(expected_delta, delta));

   // Create ordering.
   Ordering ordering;
-  for (size_t k = 1; k <= s.K; k++) ordering += X(k);
+  for (size_t k = 0; k < s.K; k++) ordering += X(k);

   HybridBayesNet::shared_ptr hybridBayesNet;
   HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
@@ -123,7 +123,7 @@ TEST(HybridBayesTree, Optimize) {

   // Create ordering.
   Ordering ordering;
-  for (size_t k = 1; k <= s.K; k++) ordering += X(k);
+  for (size_t k = 0; k < s.K; k++) ordering += X(k);

   HybridBayesNet::shared_ptr hybridBayesNet;
   HybridGaussianFactorGraph::shared_ptr remainingFactorGraph;
10 changes: 5 additions & 5 deletions gtsam/hybrid/tests/testHybridEstimation.cpp
@@ -82,9 +82,9 @@ TEST(HybridNonlinearISAM, Incremental) {
   HybridNonlinearFactorGraph graph;
   Values initial;

-  // Add the X(1) prior
+  // Add the X(0) prior
   graph.push_back(switching.nonlinearFactorGraph.at(0));
-  initial.insert(X(1), switching.linearizationPoint.at<double>(X(1)));
+  initial.insert(X(0), switching.linearizationPoint.at<double>(X(0)));

   HybridGaussianFactorGraph linearized;
   HybridGaussianFactorGraph bayesNet;
@@ -96,7 +96,7 @@ TEST(HybridNonlinearISAM, Incremental) {
     // Measurement
     graph.push_back(switching.nonlinearFactorGraph.at(k + K - 1));

-    initial.insert(X(k + 1), switching.linearizationPoint.at<double>(X(k + 1)));
+    initial.insert(X(k), switching.linearizationPoint.at<double>(X(k)));

    bayesNet = smoother.hybridBayesNet();
    linearized = *graph.linearize(initial);
@@ -111,13 +111,13 @@ TEST(HybridNonlinearISAM, Incremental) {

   DiscreteValues expected_discrete;
   for (size_t k = 0; k < K - 1; k++) {
-    expected_discrete[M(k + 1)] = discrete_seq[k];
+    expected_discrete[M(k)] = discrete_seq[k];
   }
   EXPECT(assert_equal(expected_discrete, delta.discrete()));

   Values expected_continuous;
   for (size_t k = 0; k < K; k++) {
-    expected_continuous.insert(X(k + 1), measurements[k]);
+    expected_continuous.insert(X(k), measurements[k]);
   }
   EXPECT(assert_equal(expected_continuous, result));
 }
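
Why at(k + K - 1) in the loop above is the measurement factor: the fixture pushes factors in a fixed order (one prior, then K - 1 motion models, then K - 1 measurement priors), so the measurement on X(k) sits at index k + K - 1. A standalone sketch of that index arithmetic (illustrative only; the example horizon K = 3 is an assumption, not taken from the test):

#include <cassert>
#include <cstddef>

int main() {
  const std::size_t K = 3;               // example horizon
  const std::size_t num_priors = 1;      // factor 0: prior on X(0)
  const std::size_t num_motion = K - 1;  // factors 1 .. K-1: motion models
  for (std::size_t k = 1; k < K; k++) {
    // Measurement priors on X(1) .. X(K-1) follow the motion models.
    const std::size_t measurement_index = num_priors + num_motion + (k - 1);
    assert(measurement_index == k + K - 1);
  }
  return 0;
}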
16 changes: 8 additions & 8 deletions gtsam/hybrid/tests/testHybridGaussianFactorGraph.cpp
@@ -531,34 +531,34 @@ TEST(HybridGaussianFactorGraph, Conditionals) {

   hfg.push_back(switching.linearizedFactorGraph.at(0));  // P(X1)
   Ordering ordering;
-  ordering.push_back(X(1));
+  ordering.push_back(X(0));
   HybridBayesNet::shared_ptr bayes_net = hfg.eliminateSequential(ordering);

   hfg.push_back(switching.linearizedFactorGraph.at(1));  // P(X1, X2 | M1)
   hfg.push_back(*bayes_net);
   hfg.push_back(switching.linearizedFactorGraph.at(2));  // P(X2, X3 | M2)
   hfg.push_back(switching.linearizedFactorGraph.at(5));  // P(M1)
+  ordering.push_back(X(1));
   ordering.push_back(X(2));
-  ordering.push_back(X(3));
+  ordering.push_back(M(0));
   ordering.push_back(M(1));
-  ordering.push_back(M(2));

   bayes_net = hfg.eliminateSequential(ordering);

   HybridValues result = bayes_net->optimize();

   Values expected_continuous;
-  expected_continuous.insert<double>(X(1), 0);
-  expected_continuous.insert<double>(X(2), 1);
-  expected_continuous.insert<double>(X(3), 2);
-  expected_continuous.insert<double>(X(4), 4);
+  expected_continuous.insert<double>(X(0), 0);
+  expected_continuous.insert<double>(X(1), 1);
+  expected_continuous.insert<double>(X(2), 2);
+  expected_continuous.insert<double>(X(3), 4);
   Values result_continuous =
       switching.linearizationPoint.retract(result.continuous());
   EXPECT(assert_equal(expected_continuous, result_continuous));

   DiscreteValues expected_discrete;
+  expected_discrete[M(0)] = 1;
   expected_discrete[M(1)] = 1;
-  expected_discrete[M(2)] = 1;
   EXPECT(assert_equal(expected_discrete, result.discrete()));
 }

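
The tests in this commit all build elimination orderings the same way: continuous keys first, then discrete modes. A minimal sketch of that pattern as a helper (hypothetical function, not part of the commit; assumes gtsam::symbol_shorthand):

#include <gtsam/inference/Ordering.h>
#include <gtsam/inference/Symbol.h>

using gtsam::symbol_shorthand::M;
using gtsam::symbol_shorthand::X;

// Hypothetical helper: eliminate continuous states before discrete modes.
gtsam::Ordering switchingOrdering(size_t K) {
  gtsam::Ordering ordering;
  for (size_t k = 0; k < K; k++) ordering.push_back(X(k));      // X(0) .. X(K-1)
  for (size_t k = 0; k + 1 < K; k++) ordering.push_back(M(k));  // M(0) .. M(K-2)
  return ordering;
}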