#include <CppUnitLite/TestHarness.h>

#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridNonlinearFactorGraph.h>
#include <gtsam/hybrid/HybridSmoother.h>
#include <gtsam/hybrid/MixtureFactor.h>
#include <gtsam/nonlinear/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h>

#include <bitset>

#include "Switching.h"

using namespace gtsam;
using symbol_shorthand::M;
using symbol_shorthand::X;
using symbol_shorthand::Z;
TEST(HybridEstimation, Full) {
  std::vector<double> measurements = {0, 1, 2, 2, 2, 3};
  const size_t K = measurements.size();

  // Ground-truth discrete sequence (one mode per transition).
  std::vector<size_t> discrete_seq = {1, 1, 0, 0, 1};

  // Switching example of a robot moving in 1D, with the given measurements
  // and uniform mode priors.
  Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1");
  // Eliminate all continuous variables first, then the discrete modes.
  Ordering hybridOrdering;
  for (size_t k = 0; k < K; k++) {
    hybridOrdering.push_back(X(k));
  }
  for (size_t k = 0; k < K - 1; k++) {
    hybridOrdering.push_back(M(k));
  }
  // The expected discrete sequence is the ground truth.
  DiscreteValues expected_discrete;
  for (size_t k = 0; k < K - 1; k++) {
    expected_discrete[M(k)] = discrete_seq[k];
  }
  Values expected_continuous;
  for (size_t k = 0; k < K; k++) {
    expected_continuous.insert(X(k), measurements[k]);
  }
  // ...
}
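// A minimal sketch of the elided checks, assuming the HybridBayesNet API used
// elsewhere in this file (eliminateSequential, optimize, retract); the names
// bayesNet, delta, and result are illustrative, not from the elided lines:
//
//   HybridBayesNet::shared_ptr bayesNet =
//       switching.linearizedFactorGraph.eliminateSequential(hybridOrdering);
//   HybridValues delta = bayesNet->optimize();
//   EXPECT(assert_equal(expected_discrete, delta.discrete()));
//   Values result = switching.linearizationPoint.retract(delta.continuous());
//   EXPECT(assert_equal(expected_continuous, result, 1e-7));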
TEST(HybridEstimation, IncrementalSmoother) {
  std::vector<double> measurements = {0, 1, 2, 2, 2, 2, 3, 4, 5, 6, 6,
                                      7, 8, 9, 9, 9, 10, 11, 11, 11, 11};
  const size_t K = measurements.size();

  // Ground-truth discrete sequence.
  std::vector<size_t> discrete_seq = {1, 1, 0, 0, 0, 1, 1, 1, 1, 0,
                                      1, 1, 1, 0, 0, 1, 1, 0, 0, 0};

  // Switching example of a robot moving in 1D, with the given measurements
  // and uniform mode priors.
  Switching switching(K, 1.0, 0.1, measurements, "1/1 1/1");
  HybridSmoother smoother;

  // Add the measurement and motion factors one time step at a time, pruning
  // the discrete hypotheses after each update.
  for (size_t k = 1; k < K; k++) {
    // ...
    // Keep at most 3 leaves (mode histories) after pruning.
    smoother.update(linearized, 3, ordering);
    // ...
  }
  // The expected discrete sequence is the ground truth.
  DiscreteValues expected_discrete;
  for (size_t k = 0; k < K - 1; k++) {
    expected_discrete[M(k)] = discrete_seq[k];
  }
  Values expected_continuous;
  for (size_t k = 0; k < K; k++) {
    expected_continuous.insert(X(k), measurements[k]);
  }
  // ...
}
/**
 * A function to get a specific 1D robot motion problem as a linearized factor
 * graph. This is the problem P(X | Z, M), i.e. estimating the continuous
 * positions given the measurements and the discrete sequence.
 */
GaussianFactorGraph::shared_ptr specificModesFactorGraph(
    size_t K, const std::vector<double>& measurements,
    const std::vector<size_t>& discrete_seq, double measurement_sigma = 0.1,
    double between_sigma = 1.0) {
  NonlinearFactorGraph graph;
  Values linearizationPoint;

  // Add measurement priors, linearizing about points away from ground truth.
  auto measurement_noise = noiseModel::Isotropic::Sigma(1, measurement_sigma);
  for (size_t k = 0; k < K; k++) {
    graph.emplace_shared<PriorFactor<double>>(X(k), measurements.at(k),
                                              measurement_noise);
    linearizationPoint.insert<double>(X(k), static_cast<double>(k + 1));
  }
  // Add the motion models, with each transition fixed to the given mode.
  auto motion_noise_model = noiseModel::Isotropic::Sigma(1, between_sigma);
  for (size_t k = 0; k < K - 1; k++) {
    auto motion_model = std::make_shared<MotionModel>(
        X(k), X(k + 1), discrete_seq.at(k), motion_noise_model);
    graph.push_back(motion_model);
  }

  return graph.linearize(linearizationPoint);
}
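// Usage sketch (illustrative, not part of the original tests): with the modes
// fixed, the problem is purely Gaussian and can be solved directly.
//
//   GaussianFactorGraph::shared_ptr fg =
//       specificModesFactorGraph(K, measurements, discrete_seq);
//   VectorValues delta = fg->optimize();  // mean of P(X | Z, M)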
/**
 * Get the discrete sequence from the integer x.
 * The K-1 bits of x, most-significant first, give the mode assignments.
 */
template <size_t K>
std::vector<size_t> getDiscreteSequence(size_t x) {
  std::bitset<K - 1> seq = x;
  std::vector<size_t> discrete_seq(K - 1);
  for (size_t i = 0; i < K - 1; i++) {
    // bitset indexes from the least significant bit, so store in reverse.
    discrete_seq[K - 2 - i] = seq[i];
  }
  return discrete_seq;
}
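// Example: getDiscreteSequence<4>(0b101) reads the K-1 = 3 bits of x = 5
// most-significant-first and returns {1, 0, 1}.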
/**
 * Helper method to get the tree of unnormalized probabilities as per the
 * elimination scheme.
 */
AlgebraicDecisionTree<Key> getProbPrimeTree(
    const HybridGaussianFactorGraph& graph) {
  // Eliminate all continuous variables first.
  Ordering continuous_ordering(graph.continuousKeySet());
  const auto [bayesNet, remainingGraph] =
      graph.eliminatePartialSequential(continuous_ordering);

  // The last continuous conditional has all the discrete keys.
  auto last_conditional = bayesNet->at(bayesNet->size() - 1);
  DiscreteKeys discrete_keys = last_conditional->discreteKeys();

  const std::vector<DiscreteValues> assignments =
      DiscreteValues::CartesianProduct(discrete_keys);
  std::reverse(discrete_keys.begin(), discrete_keys.end());

  // Back-substitute the continuous solution for every discrete assignment.
  std::vector<VectorValues::shared_ptr> vector_values;
  for (const DiscreteValues& assignment : assignments) {
    VectorValues values = bayesNet->optimize(assignment);
    vector_values.push_back(std::make_shared<VectorValues>(values));
  }
  DecisionTree<Key, VectorValues::shared_ptr> delta_tree(discrete_keys,
                                                         vector_values);

  // Record the unnormalized probability exp(-error) for each assignment.
  std::vector<double> probPrimes;
  for (const DiscreteValues& assignment : assignments) {
    VectorValues delta = *delta_tree(assignment);
    // An empty delta marks a pruned branch; assign zero probability.
    if (delta.size() == 0) {
      probPrimes.push_back(0.0);
      continue;
    }
    double error = graph.error({delta, assignment});
    probPrimes.push_back(std::exp(-error));
  }
  AlgebraicDecisionTree<Key> probPrimeTree(discrete_keys, probPrimes);
  return probPrimeTree;
}
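// Illustrative use: evaluating the returned tree at a mode assignment gives
// that branch's unnormalized posterior probability exp(-error):
//
//   AlgebraicDecisionTree<Key> tree = getProbPrimeTree(graph);
//   DiscreteValues assignment{{M(0), 1}, {M(1), 1}, {M(2), 0}};
//   double probPrime = tree(assignment);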
TEST(HybridEstimation, Probability) {
  constexpr size_t K = 4;
  std::vector<double> measurements = {0, 1, 2, 2};
  double between_sigma = 1.0, measurement_sigma = 0.1;

  // Switching example with uniform mode priors.
  Switching switching(K, between_sigma, measurement_sigma, measurements,
                      "1/1 1/1");
  auto graph = switching.linearizedFactorGraph;
  // Eliminate the continuous variables first...
  Ordering continuous_ordering(graph.continuousKeySet());
  auto [bayesNet, discreteGraph] =
      graph.eliminatePartialSequential(continuous_ordering);

  // ...then eliminate the remaining discrete graph.
  Ordering discrete_ordering;
  for (size_t k = 0; k < K - 1; k++) discrete_ordering.push_back(M(k));
  auto discreteBayesNet = discreteGraph->eliminateSequential(discrete_ordering);

  // Add the discrete conditionals to make the Bayes net complete.
  for (auto discrete_conditional : *discreteBayesNet) {
    bayesNet->add(discrete_conditional);
  }
  auto discreteConditional = discreteBayesNet->at(0)->asDiscrete();
  // ...

  HybridValues hybrid_values = bayesNet->optimize();

  // This is the correct sequence as designed.
  DiscreteValues discrete_seq;
  discrete_seq[M(0)] = 1;
  discrete_seq[M(1)] = 1;
  discrete_seq[M(2)] = 0;

  EXPECT(assert_equal(discrete_seq, hybrid_values.discrete()));
}
TEST(HybridEstimation, ProbabilityMultifrontal) {
  constexpr size_t K = 4;
  std::vector<double> measurements = {0, 1, 2, 2};
  double between_sigma = 1.0, measurement_sigma = 0.1;

  // Switching example with uniform mode priors.
  Switching switching(K, between_sigma, measurement_sigma, measurements,
                      "1/1 1/1");
  auto graph = switching.linearizedFactorGraph;
  // Eliminate the continuous variables into a Bayes tree.
  Ordering continuous_ordering(graph.continuousKeySet());
  const auto [bayesTree, discreteGraph] =
      graph.eliminatePartialMultifrontal(continuous_ordering);

  // The last continuous conditional has all the discrete keys as parents.
  Key last_continuous_key =
      continuous_ordering.at(continuous_ordering.size() - 1);
  auto last_conditional = (*bayesTree)[last_continuous_key]->conditional();
  DiscreteKeys discrete_keys = last_conditional->discreteKeys();

  // Eliminate the remaining discrete graph into a Bayes tree.
  Ordering discrete;
  for (size_t k = 0; k < K - 1; k++) discrete.push_back(M(k));
  auto discreteBayesTree = discreteGraph->eliminateMultifrontal(discrete);

  auto discrete_clique = (*discreteBayesTree)[discrete.at(0)];
  std::set<HybridBayesTreeClique::shared_ptr> clique_set;
  for (auto node : bayesTree->nodes()) {
    clique_set.insert(node.second);
  }

  // Graft the continuous cliques onto the discrete Bayes tree.
  for (auto clique : clique_set) {
    if (clique->conditional()->parents() ==
        discrete_clique->conditional()->frontals()) {
      discreteBayesTree->addClique(clique, discrete_clique);
    } else {
      // Remove the clique from its parent's children first, since addClique
      // will re-add it.
      auto clique_it = std::find(clique->parent()->children.begin(),
                                 clique->parent()->children.end(), clique);
      clique->parent()->children.erase(clique_it);
      discreteBayesTree->addClique(clique, clique->parent());
    }
  }
  HybridValues hybrid_values = discreteBayesTree->optimize();

  // This is the correct sequence as designed.
  DiscreteValues discrete_seq;
  discrete_seq[M(0)] = 1;
  discrete_seq[M(1)] = 1;
  discrete_seq[M(2)] = 0;

  EXPECT(assert_equal(discrete_seq, hybrid_values.discrete()));
}
static HybridNonlinearFactorGraph createHybridNonlinearFactorGraph() {
  HybridNonlinearFactorGraph nfg;

  constexpr double sigma = 0.5;  // measurement noise
  const auto noise_model = noiseModel::Isotropic::Sigma(1, sigma);

  // Measurement priors on X(0) and X(1).
  nfg.emplace_shared<PriorFactor<double>>(X(0), 0.0, noise_model);
  nfg.emplace_shared<PriorFactor<double>>(X(1), 1.0, noise_model);

  // Add a mixture factor over the motion from X(0) to X(1):
  // mode 0 stays put, mode 1 moves by 1.
  DiscreteKey m(M(0), 2);
  const auto zero_motion =
      std::make_shared<BetweenFactor<double>>(X(0), X(1), 0, noise_model);
  const auto one_motion =
      std::make_shared<BetweenFactor<double>>(X(0), X(1), 1, noise_model);
  nfg.emplace_shared<MixtureFactor>(
      KeyVector{X(0), X(1)}, DiscreteKeys{m},
      std::vector<NonlinearFactor::shared_ptr>{zero_motion, one_motion});

  return nfg;
}
static HybridGaussianFactorGraph::shared_ptr createHybridGaussianFactorGraph() {
  HybridNonlinearFactorGraph nfg = createHybridNonlinearFactorGraph();

  Values initial;
  double z0 = 0.0, z1 = 1.0;
  initial.insert<double>(X(0), z0);
  initial.insert<double>(X(1), z1);
  return nfg.linearize(initial);
}
TEST(HybridEstimation, eliminateSequentialRegression) {
  // Create the factor graph from the nonlinear factor graph.
  HybridGaussianFactorGraph::shared_ptr fg = createHybridGaussianFactorGraph();

  // Eliminate with the first ordering and grab the discrete posterior.
  // ...
  auto dc1 = bn1->back()->asDiscrete();
  // ...

  // Eliminate with a second ordering; the discrete posterior must agree.
  // ...
  auto dc2 = bn2->back()->asDiscrete();
  // ...
}
TEST(HybridEstimation, CorrectnessViaSampling) {
  // 1. Create the factor graph from the nonlinear factor graph.
  const auto fg = createHybridGaussianFactorGraph();

  // 2. Eliminate into a Bayes net.
  const HybridBayesNet::shared_ptr bn = fg->eliminateSequential();

  // Fixed seed for deterministic samples.
  std::mt19937_64 rng(11);

  // Ratio of the Bayes net density to the unnormalized factor-graph
  // probability; it should be the same constant for every sample.
  auto compute_ratio = [&](const HybridValues& sample) -> double {
    return bn->evaluate(sample) / fg->probPrime(sample);
  };

  HybridValues sample = bn->sample(&rng);
  double expected_ratio = compute_ratio(sample);

  // 3. Check that the ratio is constant across many samples.
  constexpr size_t num_samples = 10;
  for (size_t i = 0; i < num_samples; i++) {
    sample = bn->sample(&rng);
    EXPECT_DOUBLES_EQUAL(expected_ratio, compute_ratio(sample), 1e-6);
  }
}
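// Why the ratio is constant: elimination only refactors the joint, so for
// every hybrid value x, P_bn(x) = phi_fg(x) / Z for a single normalizer Z.
// The ratio evaluate(x) / probPrime(x) is then 1/Z regardless of x; any
// sample-dependent deviation indicates a bug in elimination or in the stored
// normalization constants.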
/**
 * Helper to build a GaussianMixtureFactor whose components have different
 * noise models, padding the looser component so that both share the tight
 * model's normalization constant.
 */
std::shared_ptr<GaussianMixtureFactor> mixedVarianceFactor(
    const MixtureFactor& mf, const Values& initial, const Key& mode,
    double noise_tight, double noise_loose, size_t d, size_t tight_index) {
  GaussianMixtureFactor::shared_ptr gmf = mf.linearize(initial);

  constexpr double log2pi = 1.8378770664093454835606594728112;
  // logConstant is the normalization constant of the tighter model.
  double logNormalizationConstant = log(1.0 / noise_tight);
  double logConstant = -0.5 * d * log2pi + logNormalizationConstant;

  auto func = [&](const Assignment<Key>& assignment,
                  const GaussianFactor::shared_ptr& gf) {
    if (assignment.at(mode) != tight_index) {
      double factor_log_constant = -0.5 * d * log2pi + log(1.0 / noise_loose);

      // Pad the loose component with a constant-error Jacobian that accounts
      // for the difference in normalization constants.
      GaussianFactorGraph _gfg;
      _gfg.push_back(gf);
      Vector c(d);
      for (size_t i = 0; i < d; i++) {
        c(i) = std::sqrt(2.0 * (logConstant - factor_log_constant));
      }
      _gfg.emplace_shared<JacobianFactor>(c);
      return std::make_shared<JacobianFactor>(_gfg);
    }
    return std::dynamic_pointer_cast<JacobianFactor>(gf);
  };
  auto updated_components = gmf->factors().apply(func);
  auto updated_gmf = std::make_shared<GaussianMixtureFactor>(
      gmf->continuousKeys(), gmf->discreteKeys(), updated_components);

  return updated_gmf;
}
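// Note on the padding: each of the d entries of c adds
// 0.5 * c(i)^2 = logConstant - factor_log_constant of extra error to the
// loose component, which is how the gap between the two models'
// log-normalization constants is folded into the factor itself (a sketch of
// the intent; the exact convention follows the code above).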
TEST(HybridEstimation, ModeSelection) {
  HybridNonlinearFactorGraph graph;
  Values initial;

  auto measurement_model = noiseModel::Isotropic::Sigma(1, 0.1);
  auto motion_model = noiseModel::Isotropic::Sigma(1, 1.0);

  // ...

  DiscreteKeys modes;
  modes.emplace_back(M(0), 2);

  // Noise model dimension and the competing sigmas.
  size_t d = 1;
  double noise_tight = 0.5, noise_loose = 5.0;

  auto model0 = std::make_shared<MotionModel>(
           X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_loose)),
       model1 = std::make_shared<MotionModel>(
           X(0), X(1), 0.0, noiseModel::Isotropic::Sigma(d, noise_tight));

  std::vector<NonlinearFactor::shared_ptr> components = {model0, model1};
  // ...
  // Check against the equivalent hybrid Bayes net on measurement Z(0).
  HybridBayesNet bn;
  const DiscreteKey mode(M(0), 2);

  bn.push_back(
      GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(0), Z_1x1, 0.1));
  bn.push_back(
      GaussianConditional::sharedMeanAndStddev(Z(0), -I_1x1, X(1), Z_1x1, 0.1));
  bn.emplace_back(new GaussianMixture(
      {Z(0)}, {X(0), X(1)}, {mode},
      {GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1),
                                                Z_1x1, noise_loose),
       GaussianConditional::sharedMeanAndStddev(Z(0), I_1x1, X(0), -I_1x1, X(1),
                                                Z_1x1, noise_tight)}));

  const VectorValues vv{{Z(0), Z_1x1}};
  auto fg = bn.toFactorGraph(vv);
  auto expected_posterior = fg.eliminateSequential();
  // ...
}
TEST(HybridEstimation, ModeSelection2) {
  // Noise model dimension and the competing sigmas.
  size_t d = 3;
  double noise_tight = 0.5, noise_loose = 5.0;

  // The 3D hybrid Bayes net on measurement Z(0).
  HybridBayesNet bn;
  const DiscreteKey mode(M(0), 2);

  bn.push_back(
      GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(0), Z_3x1, 0.1));
  bn.push_back(
      GaussianConditional::sharedMeanAndStddev(Z(0), -I_3x3, X(1), Z_3x1, 0.1));
  bn.emplace_back(new GaussianMixture(
      {Z(0)}, {X(0), X(1)}, {mode},
      {GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1),
                                                Z_3x1, noise_loose),
       GaussianConditional::sharedMeanAndStddev(Z(0), I_3x3, X(0), -I_3x3, X(1),
                                                Z_3x1, noise_tight)}));
  // ...
  // The equivalent nonlinear hybrid factor graph, as in ModeSelection.
  auto measurement_model = noiseModel::Isotropic::Sigma(d, 0.1);
  auto motion_model = noiseModel::Isotropic::Sigma(d, 1.0);

  // ...

  DiscreteKeys modes;
  modes.emplace_back(M(0), 2);

  auto model0 = std::make_shared<BetweenFactor<Vector3>>(
           X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_loose)),
       model1 = std::make_shared<BetweenFactor<Vector3>>(
           X(0), X(1), Z_3x1, noiseModel::Isotropic::Sigma(d, noise_tight));

  std::vector<NonlinearFactor::shared_ptr> components = {model0, model1};
  // ...
}
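/* ************************************************************************* */
// Run all the registered CppUnitLite tests.
int main() {
  TestResult tr;
  return TestRegistry::runAllTests(tr);
}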