HybridSmoother.cpp
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

#include <gtsam/hybrid/HybridSmoother.h>
#include <algorithm>
#include <unordered_set>

namespace gtsam {

/* ************************************************************************* */
Ordering HybridSmoother::getOrdering(
    const HybridGaussianFactorGraph &newFactors) {
  // Start with the conditionals of the current posterior, then add the new
  // factors, so the ordering accounts for all variables seen so far.
  HybridGaussianFactorGraph factors(hybridBayesNet());
  factors.push_back(newFactors);

  // Get all the discrete keys from the factors
  KeySet allDiscrete = factors.discreteKeySet();

  // Create KeyVector with continuous keys followed by discrete keys.
  KeyVector newKeysDiscreteLast;
  const KeySet newFactorKeys = newFactors.keys();
  // Insert continuous keys first.
  for (auto &k : newFactorKeys) {
    if (!allDiscrete.exists(k)) {
      newKeysDiscreteLast.push_back(k);
    }
  }

  // Insert discrete keys at the end
  std::copy(allDiscrete.begin(), allDiscrete.end(),
            std::back_inserter(newKeysDiscreteLast));

  const VariableIndex index(factors);

  // Get an ordering where the new keys are eliminated last
  Ordering ordering = Ordering::ColamdConstrainedLast(
      index, KeyVector(newKeysDiscreteLast.begin(), newKeysDiscreteLast.end()),
      true);
  return ordering;
}

/* ************************************************************************* */
void HybridSmoother::update(HybridGaussianFactorGraph graph,
                            std::optional<size_t> maxNrLeaves,
                            const std::optional<Ordering> given_ordering) {
  Ordering ordering;
  // If no ordering is provided, then we compute one.
  if (!given_ordering.has_value()) {
    ordering = this->getOrdering(graph);
  } else {
    ordering = *given_ordering;
  }

  // Add the necessary conditionals from the previous timestep(s).
  std::tie(graph, hybridBayesNet_) =
      addConditionals(graph, hybridBayesNet_, ordering);

  // Eliminate.
  HybridBayesNet bayesNetFragment = *graph.eliminateSequential(ordering);

  if (maxNrLeaves) {
    // `prune` sets the leaves with 0 probability in the discrete factor to
    // nullptr in all the conditionals with the same keys in bayesNetFragment.
    bayesNetFragment = bayesNetFragment.prune(*maxNrLeaves);
  }

  // Add the partial Bayes net to the posterior Bayes net.
  hybridBayesNet_.add(bayesNetFragment);
}

/* ************************************************************************* */
std::pair<HybridGaussianFactorGraph, HybridBayesNet>
HybridSmoother::addConditionals(const HybridGaussianFactorGraph &originalGraph,
                                const HybridBayesNet &originalHybridBayesNet,
                                const Ordering &ordering) const {
  HybridGaussianFactorGraph graph(originalGraph);
  HybridBayesNet hybridBayesNet(originalHybridBayesNet);

  // If hybridBayesNet is not empty,
  // it means we have conditionals to add to the factor graph.
  if (!hybridBayesNet.empty()) {
    // We add all relevant hybrid conditionals on the last continuous variable
    // in the previous `hybridBayesNet` to the graph

    // Conditionals to remove from the bayes net
    // since the conditional will be updated.
    std::vector<HybridConditional::shared_ptr> conditionals_to_erase;

    // New conditionals to add to the graph
    gtsam::HybridBayesNet newConditionals;

    // NOTE(Varun) Using a for-range loop doesn't work since some of the
    // conditionals are invalid pointers
    for (size_t i = 0; i < hybridBayesNet.size(); i++) {
      auto conditional = hybridBayesNet.at(i);

      for (auto &key : conditional->frontals()) {
        if (std::find(ordering.begin(), ordering.end(), key) !=
            ordering.end()) {
          newConditionals.push_back(conditional);
          conditionals_to_erase.push_back(conditional);

          break;
        }
      }
    }
    // Remove conditionals at the end so we don't affect the order in the
    // original bayes net.
    for (auto &&conditional : conditionals_to_erase) {
      auto it = find(hybridBayesNet.begin(), hybridBayesNet.end(), conditional);
      hybridBayesNet.erase(it);
    }

    graph.push_back(newConditionals);
  }
  return {graph, hybridBayesNet};
}

/* ************************************************************************* */
HybridGaussianConditional::shared_ptr HybridSmoother::gaussianMixture(
    size_t index) const {
  return hybridBayesNet_.at(index)->asHybrid();
}

/* ************************************************************************* */
const HybridBayesNet &HybridSmoother::hybridBayesNet() const {
  return hybridBayesNet_;
}

}  // namespace gtsam
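
Usage note: the smoother above is driven entirely through update() and queried through hybridBayesNet(). Below is a minimal caller sketch, not part of the GTSAM sources; the helper buildFactorsForStep() and the leaf cap of 10 are assumptions for illustration only.

#include <cstddef>

#include <gtsam/hybrid/HybridBayesNet.h>
#include <gtsam/hybrid/HybridGaussianFactorGraph.h>
#include <gtsam/hybrid/HybridSmoother.h>

// Hypothetical user-supplied helper: collects the continuous and discrete
// factors for time step t into a hybrid factor graph.
gtsam::HybridGaussianFactorGraph buildFactorsForStep(size_t t);

void runSmoother(size_t numSteps) {
  gtsam::HybridSmoother smoother;
  for (size_t t = 0; t < numSteps; ++t) {
    // New factors for this time step (assumed helper, see above).
    gtsam::HybridGaussianFactorGraph newFactors = buildFactorsForStep(t);

    // Incremental update: computes a constrained ordering, re-adds the
    // affected conditionals, eliminates, and prunes to at most 10 leaves.
    smoother.update(newFactors, 10);
  }

  // The accumulated posterior over all time steps, as a hybrid Bayes net.
  const gtsam::HybridBayesNet &posterior = smoother.hybridBayesNet();
  posterior.print("posterior");
}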