using namespace gtsam;

std::vector<double> measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6,
                                    7, 8, 9, 9, 9, 10, 11, 11, 11, 11};
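// Ground-truth discrete (mode) sequence: entry k is 1 when the measurement
// increases by one from step k to k+1 and 0 when it stays constant, matching
// the first differences of the measurements above.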
std::vector<size_t> discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0,
                                    1, 1, 1, 0, 0, 1, 1, 0, 0, 0};
    const size_t K, const double between_sigma, const double measurement_sigma,
    const std::string& transitionProbabilityTable,

                      transitionProbabilityTable);
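// Full batch estimation of the switching problem: the ordering below
// eliminates all continuous keys X(k) before the discrete mode keys M(k).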
TEST(HybridEstimation, Full) {

  for (size_t k = 0; k < K; k++) {
    hybridOrdering.push_back(X(k));
  }
  for (size_t k = 0; k < K - 1; k++) {
    hybridOrdering.push_back(M(k));
  }
  for (size_t k = 0; k < K - 1; k++) {

  Values expected_continuous;
  for (size_t k = 0; k < K; k++) {
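// Incremental smoothing: factors are added one time step at a time and the
// hybrid posterior is pruned to at most maxNrLeaves discrete hypotheses.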
TEST(HybridEstimation, IncrementalSmoother) {

  constexpr size_t maxNrLeaves = 3;
  for (size_t k = 1; k < K; k++) {

  for (size_t k = 0; k < K - 1; k++) {

  Values expected_continuous;
  for (size_t k = 0; k < K; k++) {
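// Same incremental setup as above, exercising pruning (maxNrLeaves = 3) and
// checking that the error of the pruned result remains valid.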
TEST(HybridEstimation, ValidPruningError) {

  constexpr size_t maxNrLeaves = 3;
  for (size_t k = 1; k < K; k++) {

  for (size_t k = 0; k < K - 1; k++) {

  Values expected_continuous;
  for (size_t k = 0; k < K; k++) {
  const size_t maxNrLeaves = 3;
  for (size_t k = 1; k < K; k++) {

  for (size_t k = 0; k < K - 1; k++) {

  Values expected_continuous;
  for (size_t k = 0; k < K; k++) {
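// Helper fragment: eliminate the continuous variables first, then evaluate the
// unnormalized probability ("probPrime") of each discrete assignment in order
// to build a decision tree (probPrimeTree) over the modes.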
  const auto [bayesNet, remainingGraph] =
      graph.eliminatePartialSequential(continuous);

  DiscreteKeys discrete_keys = last_conditional->discreteKeys();

  const std::vector<DiscreteValues> assignments =
      DiscreteValues::CartesianProduct(discrete_keys);

  std::reverse(discrete_keys.begin(), discrete_keys.end());

  vector<VectorValues::shared_ptr> vector_values;

    vector_values.push_back(std::make_shared<VectorValues>(values));

  std::vector<double> probPrimes;

    if (delta.size() == 0) {
      probPrimes.push_back(0.0);

  return probPrimeTree;
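// Check that the unnormalized probabilities obtained by first eliminating the
// continuous variables agree with the discrete posterior (K = 4 time steps).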
TEST(HybridEstimation, Probability) {

  constexpr size_t K = 4;
  double between_sigma = 1.0, measurement_sigma = 0.1;

      graph.eliminatePartialSequential(continuous_ordering);

  auto discreteBayesNet = discreteGraph->eliminateSequential(discrete_ordering);

  for (auto discrete_conditional : *discreteBayesNet) {
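// Same probability check using multifrontal elimination: the continuous Bayes
// tree is built first and the discrete Bayes tree is then grafted onto it by
// re-parenting cliques.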
TEST(HybridEstimation, ProbabilityMultifrontal) {

  constexpr size_t K = 4;

  double between_sigma = 1.0, measurement_sigma = 0.1;

  const auto [bayesTree, discreteGraph] =
      graph.eliminatePartialMultifrontal(continuous_ordering);

  Key last_continuous_key =
      continuous_ordering.at(continuous_ordering.size() - 1);
  auto last_conditional = (*bayesTree)[last_continuous_key]->conditional();
  DiscreteKeys discrete_keys = last_conditional->discreteKeys();

  auto discreteBayesTree = discreteGraph->eliminateMultifrontal(discrete);

  auto discrete_clique = (*discreteBayesTree)[discrete.at(0)];

  std::set<HybridBayesTreeClique::shared_ptr> clique_set;
  for (auto node : bayesTree->nodes()) {
    clique_set.insert(node.second);
  }
  for (auto clique : clique_set) {
    if (clique->conditional()->parents() ==
        discrete_clique->conditional()->frontals()) {
      discreteBayesTree->addClique(clique, discrete_clique);

      auto clique_it = std::find(clique->parent()->children.begin(),
                                 clique->parent()->children.end(), clique);
      clique->parent()->children.erase(clique_it);
      discreteBayesTree->addClique(clique, clique->parent());

  HybridValues hybrid_values = discreteBayesTree->optimize();
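// Helper fragment: a minimal hybrid factor graph in which the motion between
// X(0) and X(1) is either 0 (one mode) or 1 (the other mode), each expressed
// as a BetweenFactor<double> with the same noise model.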
  constexpr double sigma = 0.5;

  const auto zero_motion =
      std::make_shared<BetweenFactor<double>>(X(0), X(1), 0, noise_model);
  const auto one_motion =
      std::make_shared<BetweenFactor<double>>(X(0), X(1), 1, noise_model);
  std::vector<NoiseModelFactor::shared_ptr> components = {zero_motion,
                                                          one_motion};

  double z0 = 0.0, z1 = 1.0;
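// Regression test: eliminate the small two-mode graph sequentially (giving
// bn1 and bn2) and compare the resulting discrete conditionals dc1 and dc2
// against known-good values.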
TEST(HybridEstimation, EliminateSequentialRegression) {

  auto dc1 = bn1->back()->asDiscrete();

  auto dc2 = bn2->back()->asDiscrete();
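// Verify the elimination result by sampling: for any sample drawn from the
// Bayes net, the ratio bn->evaluate(sample) / fg->probPrime(sample) should be
// the same constant.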
TEST(HybridEstimation, CorrectnessViaSampling) {

  std::mt19937_64 rng(11);

  auto compute_ratio = [&](const HybridValues& sample) -> double {
    return bn->evaluate(sample) / fg->probPrime(sample);
  };

  double expected_ratio = compute_ratio(sample);

  constexpr int num_samples = 10;
  for (size_t i = 0; i < num_samples; i++) {
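// Mode selection: the motion between X(0) and X(1) is explained by either a
// tight (sigma = 0.5) or a loose (sigma = 5.0) zero-motion model; the result
// is cross-checked against a hand-built hybrid Gaussian Bayes net with the
// same parameters.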
TEST(HybridEstimation, ModeSelection) {

  auto measurement_model = noiseModel::Isotropic::Sigma(1, 0.1);
  auto motion_model = noiseModel::Isotropic::Sigma(1, 1.0);

  double noise_tight = 0.5, noise_loose = 5.0;

  auto model0 = std::make_shared<MotionModel>(
           X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_loose)),
       model1 = std::make_shared<MotionModel>(
           X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_tight));

      GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1));

      GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1));

  std::vector<std::pair<Vector, double>> parameters{{Z_1x1, noise_loose},
                                                    {Z_1x1, noise_tight}};
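// Same mode-selection test, but with 3-dimensional variables (Vector3
// between-factors and 3x3 measurement Jacobians).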
TEST(HybridEstimation, ModeSelection2) {

  double noise_tight = 0.5, noise_loose = 5.0;

      GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(0), Z_3x1, 0.1));

      GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(1), Z_3x1, 0.1));

  std::vector<std::pair<Vector, double>> parameters{{Z_3x1, noise_loose},
                                                    {Z_3x1, noise_tight}};

  auto measurement_model = noiseModel::Isotropic::Sigma(d, 0.1);
  auto motion_model = noiseModel::Isotropic::Sigma(d, 1.0);
  auto model0 = std::make_shared<BetweenFactor<Vector3>>(
           X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_loose)),
       model1 = std::make_shared<BetweenFactor<Vector3>>(
           X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_tight));