GaussianBayesNet.cpp
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */
#include <gtsam/base/timing.h>
#include <gtsam/inference/FactorGraph-inst.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianFactorGraph.h>

#include <boost/range/adaptor/reversed.hpp>
#include <fstream>
using namespace std;
using namespace gtsam;

namespace gtsam {

  // Instantiate base class
  template class FactorGraph<GaussianConditional>;

  /* ************************************************************************* */
  bool GaussianBayesNet::equals(const This& bn, double tol) const
  {
    return Base::equals(bn, tol);
  }

  /* ************************************************************************* */
  VectorValues GaussianBayesNet::optimize() const
  {
    VectorValues soln; // no missing variables -> just create an empty vector
    return optimize(soln);
  }

  /* ************************************************************************* */
  VectorValues GaussianBayesNet::optimize(
      const VectorValues& solutionForMissing) const {
    VectorValues soln(solutionForMissing); // possibly empty
    // (R*x)./sigmas = y by solving x=inv(R)*(y.*sigmas)
    // solve conditionals in reverse elimination order, so each variable's
    // parents are solved before the variable itself
    for (auto cg : boost::adaptors::reverse(*this)) {
      // i^th part of R*x=y, x=inv(R)*y
      // (Rii*xi + R_i*x(i+1:))./si = yi <-> xi = inv(Rii)*(yi.*si - R_i*x(i+1:))
      soln.insert(cg->solve(soln));
    }
    return soln;
  }
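
  /* Example (hedged usage sketch; `gfg` and its contents are hypothetical,
     not from this file): a GaussianBayesNet is typically produced by
     eliminating a GaussianFactorGraph, after which optimize() recovers the
     exact solution by back-substitution:

       GaussianFactorGraph gfg = ...;  // build linear factors elsewhere
       GaussianBayesNet::shared_ptr gbn = gfg.eliminateSequential();
       VectorValues solution = gbn->optimize();
  */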

  /* ************************************************************************* */
  VectorValues GaussianBayesNet::optimizeGradientSearch() const
  {
    gttic(GaussianBayesNet_optimizeGradientSearch);
    return GaussianFactorGraph(*this).optimizeGradientSearch();
  }

  /* ************************************************************************* */
  VectorValues GaussianBayesNet::gradient(const VectorValues& x0) const {
    return GaussianFactorGraph(*this).gradient(x0);
  }

  /* ************************************************************************* */
  VectorValues GaussianBayesNet::gradientAtZero() const {
    return GaussianFactorGraph(*this).gradientAtZero();
  }

  /* ************************************************************************* */
  double GaussianBayesNet::error(const VectorValues& x) const {
    return GaussianFactorGraph(*this).error(x);
  }
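
  /* Example (hedged sketch; `gbn` and `solution` are assumed to come from
     the optimize() example above): the gradient of 0.5*|Rx-d|^2 vanishes at
     the exact solution, which makes a cheap sanity check:

       VectorValues g = gbn->gradient(solution);  // ~ zero at the optimum
       double err = gbn->error(solution);         // ~ 0: R*x = d solves exactly
  */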

  /* ************************************************************************* */
  VectorValues GaussianBayesNet::backSubstitute(const VectorValues& rhs) const
  {
    VectorValues result;
    // TODO: this looks pretty sketchy. `result` is passed as the parents
    // argument, as it is filled up by solving the Gaussian conditionals.
    for (auto cg : boost::adaptors::reverse(*this)) {
      result.insert(cg->solveOtherRHS(result, rhs));
    }
    return result;
  }

  /* ************************************************************************* */
  // gy = inv(L)*gx by solving L*gy = gx.
  // gy = inv(R'*inv(Sigma))*gx
  // gz'*R' = gx', gy = gz.*sigmas
  VectorValues GaussianBayesNet::backSubstituteTranspose(const VectorValues& gx) const
  {
    // Initialize gy from gx
    // TODO: used to insert zeros if gx did not have an entry for a variable in bn
    VectorValues gy = gx;

    // we loop from first-eliminated to last-eliminated
    // i^th part of L*gy=gx is done block-column by block-column of L
    for (const sharedConditional& cg : *this)
      cg->solveTransposeInPlace(gy);

    return gy;
  }
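
  /* Example (hedged sketch; reuses the hypothetical `gbn` from above):
     backSubstituteTranspose solves R'*gy = gx, i.e. it applies the
     inverse-transpose of the square-root information matrix to a gradient:

       VectorValues gx = gbn->gradientAtZero();
       VectorValues gy = gbn->backSubstituteTranspose(gx);
  */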

  ///* ************************************************************************* */
  //VectorValues GaussianBayesNet::optimizeGradientSearch() const
  //{
  //  gttic(Compute_Gradient);
  //  // Compute gradient (call gradientAtZero function, which is defined for various linear systems)
  //  VectorValues grad = gradientAtZero();
  //  double gradientSqNorm = grad.dot(grad);
  //  gttoc(Compute_Gradient);

  //  gttic(Compute_Rg);
  //  // Compute R * g
  //  Errors Rg = GaussianFactorGraph(*this) * grad;
  //  gttoc(Compute_Rg);

  //  gttic(Compute_minimizing_step_size);
  //  // Compute minimizing step size
  //  double step = -gradientSqNorm / dot(Rg, Rg);
  //  gttoc(Compute_minimizing_step_size);

  //  gttic(Compute_point);
  //  // Compute steepest descent point
  //  scal(step, grad);
  //  gttoc(Compute_point);

  //  return grad;
  //}

  /* ************************************************************************* */
  Ordering GaussianBayesNet::ordering() const {
    GaussianFactorGraph factorGraph(*this);
    auto keys = factorGraph.keys();
    // add frontal keys in order
    Ordering ordering;
    for (const sharedConditional& cg : *this)
      if (cg) {
        for (Key key : cg->frontals()) {
          ordering.push_back(key);
          keys.erase(key);
        }
      }
    // add remaining keys in case Bayes net is incomplete
    for (Key key : keys) ordering.push_back(key);
    return ordering;
  }

  /* ************************************************************************* */
  pair<Matrix, Vector> GaussianBayesNet::matrix(const Ordering& ordering) const {
    // Convert to a GaussianFactorGraph and use its machinery
    GaussianFactorGraph factorGraph(*this);
    return factorGraph.jacobian(ordering);
  }

  /* ************************************************************************* */
  pair<Matrix, Vector> GaussianBayesNet::matrix() const {
    // delegate to the overload above with the default ordering
    const auto defaultOrdering = this->ordering();
    return matrix(defaultOrdering);
  }
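
  /* Example (hedged sketch; reuses the hypothetical `gbn` from above): with
     the default ordering, matrix() stacks the conditionals into the dense
     system (R, d) so that the optimized solution satisfies R*x = d:

       Matrix R; Vector d;
       std::tie(R, d) = gbn->matrix(gbn->ordering());
  */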

  /* ************************************************************************* */
  double GaussianBayesNet::determinant() const
  {
    return exp(logDeterminant());
  }

  /* ************************************************************************* */
  double GaussianBayesNet::logDeterminant() const {
    double logDet = 0.0;
    for (const sharedConditional& cg : *this) {
      if (cg->get_model()) {
        Vector diag = cg->R().diagonal();
        cg->get_model()->whitenInPlace(diag);
        logDet += diag.unaryExpr([](double x) { return log(x); }).sum();
      } else {
        logDet +=
            cg->R().diagonal().unaryExpr([](double x) { return log(x); }).sum();
      }
    }
    return logDet;
  }
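
  /* Note (hedged): since R is triangular, det(R) is the product of its
     diagonal entries, so logDeterminant() just sums their logs; computing
     determinant() as exp of that sum avoids the overflow/underflow a direct
     product over many variables could cause.

       double logDetR = gbn->logDeterminant();  // log det(R), numerically safe
  */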

  /* ************************************************************************* */
  void GaussianBayesNet::saveGraph(const std::string& s,
                                   const KeyFormatter& keyFormatter) const {
    std::ofstream of(s.c_str());
    of << "digraph G{\n";

    for (auto conditional : boost::adaptors::reverse(*this)) {
      GaussianConditional::Frontals frontals = conditional->frontals();
      Key me = frontals.front();
      GaussianConditional::Parents parents = conditional->parents();
      for (Key p : parents)
        of << keyFormatter(p) << "->" << keyFormatter(me) << std::endl;
    }

    of << "}";
    of.close();
  }
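
  /* Example (hedged sketch; the filename is made up and the default key
     formatter is assumed): saveGraph writes a Graphviz dot file with one
     edge per parent->frontal link, which can then be rendered from a shell:

       gbn->saveGraph("gbn.dot");
       // $ dot -Tpdf gbn.dot -o gbn.pdf
  */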

  /* ************************************************************************* */

} // namespace gtsam