HybridSmoother.cpp
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

/**
 * @file    HybridSmoother.cpp
 * @brief   An incremental smoother for hybrid factor graphs
 * @author  Varun Agrawal
 */

#include <gtsam/hybrid/HybridSmoother.h>

#include <algorithm>
#include <unordered_set>

// #define DEBUG_SMOOTHER
namespace gtsam {

/* ************************************************************************* */
void HybridSmoother::reInitialize(HybridBayesNet &&hybridBayesNet) {
  hybridBayesNet_ = std::move(hybridBayesNet);
}

/* ************************************************************************* */
void HybridSmoother::reInitialize(HybridBayesNet &hybridBayesNet) {
  this->reInitialize(std::move(hybridBayesNet));
}

/* ************************************************************************* */
Ordering HybridSmoother::getOrdering(const HybridGaussianFactorGraph &factors,
                                     const KeySet &lastKeysToEliminate) {
  // Get all the discrete keys from the factors.
  KeySet allDiscrete = factors.discreteKeySet();

  // Create a KeyVector with continuous keys followed by discrete keys.
  KeyVector lastKeys;

  // Insert continuous keys first.
  for (auto &k : lastKeysToEliminate) {
    if (!allDiscrete.exists(k)) {
      lastKeys.push_back(k);
    }
  }

  // Insert discrete keys at the end.
  std::copy(allDiscrete.begin(), allDiscrete.end(),
            std::back_inserter(lastKeys));
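  // Constraining the discrete keys to be eliminated last yields the usual
  // hybrid factorization P(X | M) P(M): continuous conditionals given the
  // modes, followed by a purely discrete Bayes net over the modes.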

  // Get an ordering where the new keys are eliminated last.
  Ordering ordering = Ordering::ColamdConstrainedLast(
      factors, KeyVector(lastKeys.begin(), lastKeys.end()), true);

  return ordering;
}

/* ************************************************************************* */
Ordering HybridSmoother::maybeComputeOrdering(
    const HybridGaussianFactorGraph &updatedGraph,
    const std::optional<Ordering> givenOrdering) {
  Ordering ordering;
  // If no ordering is provided, compute one.
  if (!givenOrdering.has_value()) {
    // Get the keys from the new factors.
    // Scheme 1: empty set, 15sec/2000, 64sec/3000 (69s without TF).
    KeySet continuousKeysToInclude;
    // Scheme 2: all new factor keys, 8sec/2000, 160sec/3000:
    // continuousKeysToInclude = newFactors.keys();
    // Scheme 3: all keys, stopped after 80sec/2000:
    // continuousKeysToInclude = updatedGraph.keys();

    // Since updatedGraph now has all the connected conditionals,
    // we can get the correct ordering.
    ordering = this->getOrdering(updatedGraph, continuousKeysToInclude);
  } else {
    ordering = *givenOrdering;
  }

  return ordering;
}

/* ************************************************************************* */
HybridGaussianFactorGraph HybridSmoother::removeFixedValues(
    const HybridGaussianFactorGraph &graph,
    const HybridGaussianFactorGraph &newFactors) {
  // Initialize graph
  HybridGaussianFactorGraph updatedGraph(graph);

  for (DiscreteKey dkey : newFactors.discreteKeys()) {
    Key key = dkey.first;
    if (fixedValues_.find(key) != fixedValues_.end()) {
      // Add the corresponding discrete factor to reintroduce the information.
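      // The previously fixed assignment keeps most of the probability mass
      // (marginalThreshold_); the remainder is spread uniformly over the
      // other values of the discrete key.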
      std::vector<double> probabilities(
          dkey.second, (1 - *marginalThreshold_) / dkey.second);
      probabilities[fixedValues_[key]] = *marginalThreshold_;
      DecisionTreeFactor dtf({dkey}, probabilities);
      updatedGraph.push_back(dtf);

      // Remove fixed value
      fixedValues_.erase(key);
    }
  }

  return updatedGraph;
}

/* ************************************************************************* */
void HybridSmoother::update(const HybridNonlinearFactorGraph &newFactors,
                            const Values &initial,
                            std::optional<size_t> maxNrLeaves,
                            const std::optional<Ordering> given_ordering) {
  HybridGaussianFactorGraph linearizedFactors = *newFactors.linearize(initial);

  // Record the new nonlinear factors and the
  // linearization point for later relinearization.
  allFactors_.push_back(newFactors);
  linearizationPoint_.insert_or_assign(initial);

  const KeySet originalNewFactorKeys = newFactors.keys();
#ifdef DEBUG_SMOOTHER
  std::cout << "hybridBayesNet_ size before: " << hybridBayesNet_.size()
            << std::endl;
  std::cout << "newFactors size: " << newFactors.size() << std::endl;
#endif
  HybridGaussianFactorGraph updatedGraph;
  // Add the necessary conditionals from the previous timestep(s).
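  // Pulling these conditionals back into the factor graph lets them be
  // re-eliminated together with the new factors; conditionals not connected
  // to the new factors stay in hybridBayesNet_ untouched.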
  std::tie(updatedGraph, hybridBayesNet_) =
      addConditionals(linearizedFactors, hybridBayesNet_);
#ifdef DEBUG_SMOOTHER
  // Print the sizes of newFactors, updatedGraph, and hybridBayesNet_.
  std::cout << "updatedGraph size: " << updatedGraph.size() << std::endl;
  std::cout << "hybridBayesNet_ size after: " << hybridBayesNet_.size()
            << std::endl;
  std::cout << "total size: " << updatedGraph.size() + hybridBayesNet_.size()
            << std::endl;
#endif

  if (marginalThreshold_) {
    // Remove fixed values for discrete keys which are introduced in newFactors.
    updatedGraph = removeFixedValues(updatedGraph, newFactors);
  }

  Ordering ordering = this->maybeComputeOrdering(updatedGraph, given_ordering);

#if GTSAM_HYBRID_TIMING
  gttic_(HybridSmootherEliminate);
#endif
  // Eliminate.
  HybridBayesNet bayesNetFragment = *updatedGraph.eliminateSequential(ordering);
#if GTSAM_HYBRID_TIMING
  gttoc_(HybridSmootherEliminate);
#endif

#ifdef DEBUG_SMOOTHER_DETAIL
  for (auto conditional : bayesNetFragment) {
    auto e = std::dynamic_pointer_cast<HybridConditional::BaseConditional>(
        conditional);
    GTSAM_PRINT(*e);
  }
#endif

#ifdef DEBUG_SMOOTHER
  // Print discrete keys in the bayesNetFragment:
  std::cout << "Discrete keys in bayesNetFragment: ";
  for (auto &key : HybridFactorGraph(bayesNetFragment).discreteKeySet()) {
    std::cout << DefaultKeyFormatter(key) << " ";
  }
#endif

  /// Prune
  if (maxNrLeaves) {
#if GTSAM_HYBRID_TIMING
    gttic_(HybridSmootherPrune);
#endif
    // `prune` sets leaves that are 0 in the discrete factor to nullptr in
    // all the conditionals with the same keys in bayesNetFragment.
    DiscreteValues newlyFixedValues;
    bayesNetFragment = bayesNetFragment.prune(*maxNrLeaves, marginalThreshold_,
                                              &newlyFixedValues);
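    // Remember the values that pruning just fixed: if a later update touches
    // these discrete keys again, removeFixedValues() reintroduces the
    // corresponding information as a discrete factor.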
    fixedValues_.insert(newlyFixedValues);
#if GTSAM_HYBRID_TIMING
    gttoc_(HybridSmootherPrune);
#endif
  }

#ifdef DEBUG_SMOOTHER
  // Print discrete keys in the bayesNetFragment:
  std::cout << "\nAfter pruning: ";
  for (auto &key : HybridFactorGraph(bayesNetFragment).discreteKeySet()) {
    std::cout << DefaultKeyFormatter(key) << " ";
  }
  std::cout << std::endl << std::endl;
#endif

#ifdef DEBUG_SMOOTHER_DETAIL
  for (auto conditional : bayesNetFragment) {
    auto c = std::dynamic_pointer_cast<HybridConditional::BaseConditional>(
        conditional);
    GTSAM_PRINT(*c);
  }
#endif

  // Add the partial Bayes net to the posterior Bayes net.
  hybridBayesNet_.add(bayesNetFragment);
}

/* ************************************************************************* */
std::pair<HybridGaussianFactorGraph, HybridBayesNet>
HybridSmoother::addConditionals(const HybridGaussianFactorGraph &newFactors,
                                const HybridBayesNet &hybridBayesNet) const {
  HybridGaussianFactorGraph graph(newFactors);
  HybridBayesNet updatedHybridBayesNet(hybridBayesNet);

  KeySet involvedKeys = newFactors.keys();
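  // Predicate: is this key already in the (growing) set of keys connected to
  // the new factors?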
  auto involved = [](const KeySet &involvedKeys, const Key &key) {
    return involvedKeys.find(key) != involvedKeys.end();
  };

  // If hybridBayesNet is not empty,
  // it means we have conditionals to add to the factor graph.
  if (!hybridBayesNet.empty()) {
    // We add all relevant hybrid conditionals on the last continuous variable
    // in the previous `hybridBayesNet` to the graph.

    // New conditionals to add to the graph.
    HybridBayesNet newConditionals;

    // NOTE(Varun) Using a for-range loop doesn't work since some of the
    // conditionals are invalid pointers.

    // First, gather all the keys involved.
    // We do this by iterating over all conditionals and checking whether their
    // frontals are involved in the factor graph. If so, the parent keys are
    // marked as involved as well.
    for (size_t i = 0; i < hybridBayesNet.size(); i++) {
      auto conditional = hybridBayesNet.at(i);

      for (auto &key : conditional->frontals()) {
        if (involved(involvedKeys, key)) {
          // Add the conditional's parents to involvedKeys
          // so we add those conditionals too.
          for (auto &&parentKey : conditional->parents()) {
            involvedKeys.insert(parentKey);
          }
          // Break so we don't add parents twice.
          break;
        }
      }
    }
#ifdef DEBUG_SMOOTHER
    PrintKeySet(involvedKeys);
#endif

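    // Second pass: move every conditional whose frontals are now involved out
    // of the Bayes net and into the factor graph fragment, so that it is
    // re-eliminated together with the new factors.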
    for (size_t i = 0; i < hybridBayesNet.size(); i++) {
      auto conditional = hybridBayesNet.at(i);

      for (auto &key : conditional->frontals()) {
        if (involved(involvedKeys, key)) {
          newConditionals.push_back(conditional);

          // Remove the conditional from the updated Bayes net.
          auto it = std::find(updatedHybridBayesNet.begin(),
                              updatedHybridBayesNet.end(), conditional);
          updatedHybridBayesNet.erase(it);

          break;
        }
      }
    }

    graph.push_back(newConditionals);
  }

  return {graph, updatedHybridBayesNet};
}

/* ************************************************************************* */
HybridGaussianConditional::shared_ptr HybridSmoother::gaussianMixture(
    size_t index) const {
  return hybridBayesNet_.at(index)->asHybrid();
}

/* ************************************************************************* */
const HybridBayesNet &HybridSmoother::hybridBayesNet() const {
  return hybridBayesNet_;
}

/* ************************************************************************* */
HybridValues HybridSmoother::optimize() const {
  // Solve for the MPE of the discrete variables.
  DiscreteValues mpe = hybridBayesNet_.mpe();

  // Add fixed values to the MPE.
  mpe.insert(fixedValues_);

  // Given the MPE, compute the optimal continuous values.
  GaussianBayesNet gbn = hybridBayesNet_.choose(mpe);
  if (std::find(gbn.begin(), gbn.end(), nullptr) != gbn.end()) {
    throw std::runtime_error("At least one nullptr factor in hybridBayesNet_");
  }
  const VectorValues continuous = gbn.optimize();
  return HybridValues(continuous, mpe);
}

/* ************************************************************************* */
void HybridSmoother::relinearize(const std::optional<Ordering> givenOrdering) {
  HybridNonlinearFactorGraph allFactors = allFactors_.restrict(fixedValues_);
  HybridGaussianFactorGraph::shared_ptr linearized =
      allFactors.linearize(linearizationPoint_);

  // Compute ordering if not given.
  Ordering ordering = this->maybeComputeOrdering(*linearized, givenOrdering);

  HybridBayesNet::shared_ptr bayesNet =
      linearized->eliminateSequential(ordering);
  HybridValues delta = bayesNet->optimize();
  linearizationPoint_ = linearizationPoint_.retract(delta.continuous());

  reInitialize(std::move(*bayesNet));
}

/* ************************************************************************* */
Values HybridSmoother::linearizationPoint() const {
  return linearizationPoint_;
}

/* ************************************************************************* */
HybridNonlinearFactorGraph HybridSmoother::allFactors() const {
  return allFactors_;
}

}  // namespace gtsam
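
/* Usage sketch (illustrative only, not part of the original file). update(),
 * optimize(), and relinearize() match the definitions above; the constructor
 * argument and the variables `newFactors` and `initial` are assumptions made
 * purely for illustration.
 *
 *   HybridSmoother smoother(0.99);                // assumed: marginal-threshold constructor
 *   smoother.update(newFactors, initial, 10);     // eliminate; prune to at most 10 leaves
 *   HybridValues estimate = smoother.optimize();  // fixed + MPE modes, then continuous solve
 *   smoother.relinearize();                       // relinearize stored factors at the new estimate
 */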