GaussianBayesNet.cpp
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

/**
 * @file   GaussianBayesNet.cpp
 * @brief  Chordal Bayes Net, the result of eliminating a factor graph
 * @author Frank Dellaert
 */

#include <gtsam/base/timing.h>
#include <gtsam/inference/FactorGraph-inst.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianFactorGraph.h>

#include <fstream>
#include <iterator>

using namespace std;
using namespace gtsam;

namespace gtsam {

  // Instantiate base class
  template class FactorGraph<GaussianConditional>;

  /* ************************************************************************* */
  bool GaussianBayesNet::equals(const This& bn, double tol) const
  {
    return Base::equals(bn, tol);
  }

  /* ************************************************************************ */
  VectorValues GaussianBayesNet::optimize() const {
    VectorValues solution;  // no missing variables -> create an empty vector
    return optimize(solution);
  }

  VectorValues GaussianBayesNet::optimize(const VectorValues& given) const {
    VectorValues solution = given;
    // (R*x)./sigmas = y by solving x=inv(R)*(y.*sigmas)
    // solve each node in reverse topological sort order (parents first)
    for (auto it = std::make_reverse_iterator(end()); it != std::make_reverse_iterator(begin()); ++it) {
      // i^th part of R*x=y, x=inv(R)*y
      // (Rii*xi + R_i*x(i+1:))./si = yi =>
      // xi = inv(Rii)*(yi.*si - R_i*x(i+1:))
      solution.insert((*it)->solve(solution));
    }
    return solution;
  }
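
  // Example (illustrative sketch, not part of the original file): a
  // GaussianBayesNet is typically produced by sequential elimination of a
  // GaussianFactorGraph, after which optimize() recovers the mean by
  // back-substitution:
  //
  //   GaussianFactorGraph gfg = ...;                  // assumed to be given
  //   GaussianBayesNet bn = *gfg.eliminateSequential();
  //   VectorValues mean = bn.optimize();              // solves R*x = d
  //
  // The optimize(given) overload keeps the variables in `given` fixed and
  // back-substitutes only the remaining ones.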

  /* ************************************************************************ */
  VectorValues GaussianBayesNet::sample(std::mt19937_64* rng) const {
    VectorValues result;  // no missing variables -> create an empty vector
    return sample(result, rng);
  }

  VectorValues GaussianBayesNet::sample(const VectorValues& given,
                                        std::mt19937_64* rng) const {
    VectorValues result(given);
    // sample each node in reverse topological sort order (parents first)
    for (auto it = std::make_reverse_iterator(end()); it != std::make_reverse_iterator(begin()); ++it) {
      const VectorValues sampled = (*it)->sample(result, rng);
      result.insert(sampled);
    }
    return result;
  }
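
  // Example (illustrative sketch, not part of the original file): ancestral
  // sampling with a caller-provided generator; the variable names are
  // hypothetical.
  //
  //   std::mt19937_64 rng(42);
  //   VectorValues sampled = bn.sample(&rng);         // one joint sample
  //
  // Conditioning works the same way: bn.sample(given, &rng) keeps the values
  // in `given` fixed and samples only the remaining variables.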

  /* ************************************************************************ */
  VectorValues GaussianBayesNet::optimizeGradientSearch() const
  {
    gttic(GaussianBayesTree_optimizeGradientSearch);
    return GaussianFactorGraph(*this).optimizeGradientSearch();
  }

  /* ************************************************************************* */
  VectorValues GaussianBayesNet::gradient(const VectorValues& x0) const {
    return GaussianFactorGraph(*this).gradient(x0);
  }

  /* ************************************************************************* */
  VectorValues GaussianBayesNet::gradientAtZero() const {
    return GaussianFactorGraph(*this).gradientAtZero();
  }
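
  // Note (editorial sketch, grounded in the commented-out implementation kept
  // further below): optimizeGradientSearch() returns the steepest-descent
  // point of the quadratic error 0.5*|R*x - d|^2. With g = gradientAtZero()
  // and Rg = R*g, the minimizing step along the negative gradient is
  //
  //   step = -(g'g) / (Rg'Rg),   delta = step * g
  //
  // which is exactly what the retained commented-out code computes.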

  /* ************************************************************************* */
  double GaussianBayesNet::error(const VectorValues& x) const {
    double sum = 0.;
    for (const auto& gc : *this) {
      if (gc) sum += gc->error(x);
    }
    return sum;
  }

  /* ************************************************************************* */
  double GaussianBayesNet::logProbability(const VectorValues& x) const {
    double sum = 0.;
    for (const auto& gc : *this) {
      if (gc) sum += gc->logProbability(x);
    }
    return sum;
  }

  /* ************************************************************************* */
  double GaussianBayesNet::evaluate(const VectorValues& x) const {
    return exp(logProbability(x));
  }

  /* ************************************************************************* */
  VectorValues GaussianBayesNet::backSubstitute(const VectorValues& rhs) const
  {
    VectorValues result;
    // TODO this looks pretty sketchy. result is passed as the parents argument
    // as it's filled up by solving the gaussian conditionals.
    for (auto it = std::make_reverse_iterator(end()); it != std::make_reverse_iterator(begin()); ++it) {
      result.insert((*it)->solveOtherRHS(result, rhs));
    }
    return result;
  }


  /* ************************************************************************* */
  // gy=inv(L)*gx by solving L*gy=gx.
  // gy=inv(R'*inv(Sigma))*gx
  // gz'*R'=gx', gy = gz.*sigmas
  VectorValues GaussianBayesNet::backSubstituteTranspose(const VectorValues& gx) const
  {
    // Initialize gy from gx
    // TODO: used to insert zeros if gx did not have an entry for a variable in bn
    VectorValues gy = gx;

    // we loop from first-eliminated to last-eliminated
    // i^th part of L*gy=gx is done block-column by block-column of L
    for (const sharedConditional& cg : *this)
      cg->solveTransposeInPlace(gy);

    return gy;
  }

  //VectorValues GaussianBayesNet::optimizeGradientSearch() const
  //{
  //  gttic(Compute_Gradient);
  //  // Compute gradient (call gradientAtZero function, which is defined for various linear systems)
  //  VectorValues grad = gradientAtZero();
  //  double gradientSqNorm = grad.dot(grad);
  //  gttoc(Compute_Gradient);

  //  gttic(Compute_Rg);
  //  // Compute R * g
  //  Errors Rg = GaussianFactorGraph(*this) * grad;
  //  gttoc(Compute_Rg);

  //  gttic(Compute_minimizing_step_size);
  //  // Compute minimizing step size
  //  double step = -gradientSqNorm / dot(Rg, Rg);
  //  gttoc(Compute_minimizing_step_size);

  //  gttic(Compute_point);
  //  // Compute steepest descent point
  //  scal(step, grad);
  //  gttoc(Compute_point);

  //  return grad;
  //}

  /* ************************************************************************* */
  Ordering GaussianBayesNet::ordering() const {
    GaussianFactorGraph factorGraph(*this);
    auto keys = factorGraph.keys();
    // add frontal keys in order
    Ordering ordering;
    for (const sharedConditional& cg : *this)
      if (cg) {
        for (Key key : cg->frontals()) {
          ordering.push_back(key);
          keys.erase(key);
        }
      }
    // add remaining keys in case Bayes net is incomplete
    for (Key key : keys) ordering.push_back(key);
    return ordering;
  }

  /* ************************************************************************* */
  pair<Matrix, Vector> GaussianBayesNet::matrix(const Ordering& ordering) const {
    // Convert to a GaussianFactorGraph and use its machinery
    GaussianFactorGraph factorGraph(*this);
    return factorGraph.jacobian(ordering);
  }

  /* ************************************************************************* */
  pair<Matrix, Vector> GaussianBayesNet::matrix() const {
    // recursively call with default ordering
    const auto defaultOrdering = this->ordering();
    return matrix(defaultOrdering);
  }
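
  // Example (illustrative sketch, not part of the original file): matrix()
  // stacks the conditionals into a dense upper-triangular system (R, d) in
  // the Bayes net's own elimination ordering:
  //
  //   Matrix R;
  //   Vector d;
  //   std::tie(R, d) = bn.matrix();   // solving R*x = d reproduces optimize()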

  //VectorValues GaussianBayesNet::gradient(const VectorValues& x0) const
  //{
  //  return GaussianFactorGraph(*this).gradient(x0);
  //}

  //VectorValues GaussianBayesNet::gradientAtZero() const
  //{
  //  return GaussianFactorGraph(*this).gradientAtZero();
  //}

  /* ************************************************************************* */
  double GaussianBayesNet::determinant() const
  {
    return exp(logDeterminant());
  }

  /* ************************************************************************* */
  double GaussianBayesNet::logDeterminant() const {
    double logDet = 0.0;
    for (const sharedConditional& cg : *this) {
      logDet += cg->logDeterminant();
    }
    return logDet;
  }

  /* ************************************************************************* */
  double GaussianBayesNet::negLogConstant() const {
    /*
    normalization constant = 1.0 / sqrt((2*pi)^n * det(Sigma))
    negLogConstant = -log(normalizationConstant)
      = 0.5 * n*log(2*pi) + 0.5 * log(det(Sigma))

    log(det(Sigma)) = -2.0 * logDeterminant()
    thus, negLogConstant = 0.5*n*log(2*pi) - logDeterminant()

    BayesNet negLogConstant = sum(0.5*n_i*log(2*pi) - logDeterminant_i())
      = sum(0.5*n_i*log(2*pi)) - sum(logDeterminant_i())
      = sum(0.5*n_i*log(2*pi)) - bn->logDeterminant()
    */
    double negLogNormConst = 0.0;
    for (const sharedConditional& cg : *this) {
      negLogNormConst += cg->negLogConstant();
    }
    return negLogNormConst;
  }
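
  // Note (editorial, assuming the usual GTSAM Gaussian conventions): with
  // error(x) the whitened quadratic 0.5*|R*x - d|^2 summed over conditionals,
  // the scalar queries above are related by
  //
  //   logProbability(x) = -error(x) - negLogConstant()
  //   evaluate(x)       = exp(logProbability(x))
  //
  // so negLogConstant() is the negative log of the Gaussian normalization
  // constant 1/sqrt((2*pi)^n * det(Sigma)) accumulated over all conditionals.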

  /* ************************************************************************* */

} // namespace gtsam
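
A minimal end-to-end usage sketch (editorial addition, not part of GaussianBayesNet.cpp). It assumes a recent GTSAM 4.x API; the factor graph, keys, and numbers below are hypothetical and only illustrate how the functions documented above are typically called:

#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/NoiseModel.h>

#include <iostream>
#include <random>

using namespace gtsam;

int main() {
  // A tiny 1D chain x0 -- x1: a prior on x0 and one "odometry" factor.
  GaussianFactorGraph gfg;
  const Key x0 = Symbol('x', 0), x1 = Symbol('x', 1);
  const Matrix I = Matrix::Identity(1, 1);
  const auto unit = noiseModel::Unit::Create(1);
  gfg.add(x0, I, Vector::Zero(1), unit);                    // prior: x0 = 0
  gfg.add(x0, -I, x1, I, Vector::Constant(1, 1.0), unit);   // x1 - x0 = 1

  // Eliminate into a GaussianBayesNet and query it.
  GaussianBayesNet bn = *gfg.eliminateSequential();
  const VectorValues mean = bn.optimize();                  // back-substitution
  std::cout << "error at mean: " << bn.error(mean) << "\n";

  // Draw one joint sample around the mean.
  std::mt19937_64 rng(42);
  const VectorValues sampled = bn.sample(&rng);
  std::cout << "sampled x1: " << sampled.at(x1).transpose() << "\n";
  return 0;
}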