GaussianBayesNet.cpp
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

/**
 * @file   GaussianBayesNet.cpp
 * @brief  Chordal Bayes Net, the result of eliminating a factor graph
 * @author Frank Dellaert
 */

#include <gtsam/base/timing.h>
#include <gtsam/inference/FactorGraph-inst.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianFactorGraph.h>

#include <fstream>
#include <iterator>

using namespace std;
using namespace gtsam;

// In the wrappers we have no access to this, so have a default ready.
static std::mt19937_64 kRandomNumberGenerator(42);

namespace gtsam {

  // Instantiate base class
  template class FactorGraph<GaussianConditional>;

  /* ************************************************************************* */
  bool GaussianBayesNet::equals(const This& bn, double tol) const
  {
    return Base::equals(bn, tol);
  }

  /* ************************************************************************ */
  VectorValues GaussianBayesNet::optimize() const {
    VectorValues solution;  // no missing variables -> create an empty vector
    return optimize(solution);
  }

  VectorValues GaussianBayesNet::optimize(const VectorValues& given) const {
    VectorValues solution = given;
    // Solve (R*x)./sigmas = y, i.e. x = inv(R)*(y.*sigmas), one node at a
    // time, in reverse topological sort order (parents first):
    for (auto it = std::make_reverse_iterator(end());
         it != std::make_reverse_iterator(begin()); ++it) {
      // The i'th part of R*x=y is (Rii*xi + R_i*x(i+1:))./si = yi, so
      // xi = inv(Rii)*(yi.*si - R_i*x(i+1:))
      solution.insert((*it)->solve(solution));
    }
    return solution;
  }
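
  // Example (a sketch, not part of the library): back-substitution on the
  // two-variable net p(x0 | x1) p(x1). The keys 0 and 1 and all numbers are
  // hypothetical; the (key, d, R [, parent, S]) GaussianConditional
  // constructors are assumed.
  //
  //   GaussianBayesNet gbn;
  //   gbn.emplace_shared<GaussianConditional>(0, Vector1::Constant(9.0), I_1x1,
  //                                           1, I_1x1);  // x0 + x1 = 9
  //   gbn.emplace_shared<GaussianConditional>(1, Vector1::Constant(5.0), I_1x1);
  //   const VectorValues x = gbn.optimize();  // x1 = 5 first, then x0 = 4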

  /* ************************************************************************ */
  VectorValues GaussianBayesNet::sample(std::mt19937_64* rng) const {
    VectorValues result;  // no missing variables -> create an empty vector
    return sample(result, rng);
  }

  VectorValues GaussianBayesNet::sample(const VectorValues& given,
                                        std::mt19937_64* rng) const {
    VectorValues result(given);
    // sample each node in reverse topological sort order (parents first)
    for (auto it = std::make_reverse_iterator(end());
         it != std::make_reverse_iterator(begin()); ++it) {
      const VectorValues sampled = (*it)->sample(result, rng);
      result.insert(sampled);
    }
    return result;
  }

  /* ************************************************************************ */
  VectorValues GaussianBayesNet::sample() const {
    return sample(&kRandomNumberGenerator);
  }

  VectorValues GaussianBayesNet::sample(const VectorValues& given) const {
    return sample(given, &kRandomNumberGenerator);
  }
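
  // Example (a sketch): ancestral sampling with a caller-owned, seeded engine
  // for reproducible draws; the overloads without an rng argument fall back
  // to the static kRandomNumberGenerator above.
  //
  //   std::mt19937_64 rng(1234);
  //   const VectorValues draw = gbn.sample(&rng);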

  /* ************************************************************************ */
  VectorValues GaussianBayesNet::optimizeGradientSearch() const
  {
    gttic(GaussianBayesTree_optimizeGradientSearch);
    return GaussianFactorGraph(*this).optimizeGradientSearch();
  }

  /* ************************************************************************* */
  VectorValues GaussianBayesNet::gradient(const VectorValues& x0) const {
    return GaussianFactorGraph(*this).gradient(x0);
  }

  /* ************************************************************************* */
  VectorValues GaussianBayesNet::gradientAtZero() const {
    return GaussianFactorGraph(*this).gradientAtZero();
  }

  /* ************************************************************************* */
  double GaussianBayesNet::error(const VectorValues& x) const {
    double sum = 0.;
    for (const auto& gc : *this) {
      if (gc) sum += gc->error(x);
    }
    return sum;
  }

  /* ************************************************************************* */
  double GaussianBayesNet::logProbability(const VectorValues& x) const {
    double sum = 0.;
    for (const auto& gc : *this) {
      if (gc) sum += gc->logProbability(x);
    }
    return sum;
  }

  /* ************************************************************************* */
  double GaussianBayesNet::evaluate(const VectorValues& x) const {
    return exp(logProbability(x));
  }
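
  // Note: exp(logProbability(x)) under- or overflows double precision once
  // the log-probability is far from zero, as is typical in high dimensions,
  // so prefer logProbability(x) in numerical work.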

  /* ************************************************************************* */
  VectorValues GaussianBayesNet::backSubstitute(const VectorValues& rhs) const
  {
    VectorValues result;
    // TODO: this looks pretty sketchy. result is passed as the parents
    // argument as it's filled up by solving the Gaussian conditionals.
    for (auto it = std::make_reverse_iterator(end());
         it != std::make_reverse_iterator(begin()); ++it) {
      result.insert((*it)->solveOtherRHS(result, rhs));
    }
    return result;
  }

  /* ************************************************************************* */
  // gy=inv(L)*gx by solving L*gy=gx.
  // gy=inv(R'*inv(Sigma))*gx
  // gz'*R'=gx', gy = gz.*sigmas
  VectorValues GaussianBayesNet::backSubstituteTranspose(const VectorValues& gx) const
  {
    // Initialize gy from gx
    // TODO: used to insert zeros if gx did not have an entry for a variable in bn
    VectorValues gy = gx;

    // we loop from first-eliminated to last-eliminated
    // i'th part of L*gy=gx is done block-column by block-column of L
    for (const sharedConditional& cg : *this)
      cg->solveTransposeInPlace(gy);

    return gy;
  }
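
  // In matrix terms: elimination produced an upper-triangular R with R*x = d,
  // so with L = R' the loop above computes gy = inv(R')*gx by forward
  // substitution, one block column of L at a time.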

  /* ************************************************************************* */
  Ordering GaussianBayesNet::ordering() const {
    GaussianFactorGraph factorGraph(*this);
    auto keys = factorGraph.keys();
    // add frontal keys in order
    Ordering ordering;
    for (const sharedConditional& cg : *this)
      if (cg) {
        for (Key key : cg->frontals()) {
          ordering.push_back(key);
          keys.erase(key);
        }
      }
    // add remaining keys in case the Bayes net is incomplete
    for (Key key : keys) ordering.push_back(key);
    return ordering;
  }

  /* ************************************************************************* */
  pair<Matrix, Vector> GaussianBayesNet::matrix(const Ordering& ordering) const {
    // Convert to a GaussianFactorGraph and use its machinery
    GaussianFactorGraph factorGraph(*this);
    return factorGraph.jacobian(ordering);
  }

  /* ************************************************************************* */
  pair<Matrix, Vector> GaussianBayesNet::matrix() const {
    // call the overload above with the net's own elimination ordering
    const auto defaultOrdering = this->ordering();
    return matrix(defaultOrdering);
  }
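
  // Example (a sketch): recovering the dense system. With the net's own
  // elimination ordering, the stacked Jacobian R is upper triangular.
  //
  //   Matrix R; Vector d;
  //   std::tie(R, d) = gbn.matrix();  // R*x = d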

  /* ************************************************************************* */
  double GaussianBayesNet::determinant() const
  {
    return exp(logDeterminant());
  }

  /* ************************************************************************* */
  double GaussianBayesNet::logDeterminant() const {
    double logDet = 0.0;
    for (const sharedConditional& cg : *this) {
      logDet += cg->logDeterminant();
    }
    return logDet;
  }
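
  // Note: det(R) is the product of the determinants of the frontal blocks
  // Rii, so it under- or overflows quickly as the net grows; summing the
  // per-conditional log determinants, as above, stays stable, and
  // determinant() merely exponentiates the result.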

  /* ************************************************************************* */

}  // namespace gtsam