testGaussianBayesNet.cpp
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010-2022, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */
/**
 * @file    testGaussianBayesNet.cpp
 * @brief   Unit tests for GaussianBayesNet
 */

#include <gtsam/base/Testable.h>
#include <gtsam/base/numericalDerivative.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/linear/GaussianBayesNet.h>
#include <gtsam/linear/GaussianDensity.h>
#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/JacobianFactor.h>

#include <CppUnitLite/TestHarness.h>

// STL/C++
#include <iostream>
#include <sstream>

using namespace std::placeholders;
using namespace std;
using namespace gtsam;
using symbol_shorthand::X;
static const Key _x_ = 11, _y_ = 22, _z_ = 33;

static GaussianBayesNet smallBayesNet = {
    std::make_shared<GaussianConditional>(_x_, Vector1::Constant(9), I_1x1, _y_, I_1x1),
    std::make_shared<GaussianConditional>(_y_, Vector1::Constant(5), I_1x1)};

static GaussianBayesNet noisyBayesNet = {
    std::make_shared<GaussianConditional>(_x_, Vector1::Constant(9), I_1x1, _y_, I_1x1,
                                          noiseModel::Isotropic::Sigma(1, 2.0)),
    std::make_shared<GaussianConditional>(_y_, Vector1::Constant(5), I_1x1,
                                          noiseModel::Isotropic::Sigma(1, 3.0))};
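
// For reference: smallBayesNet encodes p(x,y) = p(x|y) p(y), where p(x|y) is
// given by x + y = 9 with unit noise and p(y) is centered at 5, so the joint
// mean is y = 5, x = 4 (checked in the Optimize test below). noisyBayesNet is
// the same model, but with sigmas 2 and 3 on the two conditionals.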

/* ************************************************************************* */
TEST( GaussianBayesNet, Matrix )
{
  const auto [R, d] = smallBayesNet.matrix();  // find matrix and RHS
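  // matrix() stacks the conditionals into one upper-triangular system: the
  // p(x|y) row is [1 1 | 9] and the p(y) row is [0 1 | 5]; with unit noise
  // models, whitening leaves these rows unchanged.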

  Matrix R1 = (Matrix2() <<
      1.0, 1.0,
      0.0, 1.0
  ).finished();
  Vector d1 = Vector2(9.0, 5.0);

  EXPECT(assert_equal(R,R1));
  EXPECT(assert_equal(d,d1));
}

/* ************************************************************************* */
// Check that the evaluate function matches direct calculation with R.
TEST(GaussianBayesNet, Evaluate1) {
  // Let's evaluate at the mean
  const VectorValues mean = smallBayesNet.optimize();

  // We get the matrix, which has the noise model applied!
  const Matrix R = smallBayesNet.matrix().first;
  const Matrix invSigma = R.transpose() * R;

  // The Bayes net is a Gaussian density ~ exp (-0.5*(Rx-d)'*(Rx-d)),
  // which at the mean is 1.0! So the only thing we need to calculate is
  // the normalization constant 1.0/sqrt((2*pi*Sigma).det()). Since the
  // information matrix is inv(Sigma) = R'*R, that constant equals
  const double constant = sqrt((invSigma / (2 * M_PI)).determinant());
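
  // The constant factors over the conditionals, so -log(constant) should equal
  // the sum of the individual negLogConstant() values.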
  EXPECT_DOUBLES_EQUAL(-log(constant),
                       smallBayesNet.at(0)->negLogConstant() +
                           smallBayesNet.at(1)->negLogConstant(),
                       1e-9);

  const double actual = smallBayesNet.evaluate(mean);
  EXPECT_DOUBLES_EQUAL(constant, actual, 1e-9);
}

// Check the evaluate with non-unit noise.
TEST(GaussianBayesNet, Evaluate2) {
  // See comments in test above.
  const VectorValues mean = noisyBayesNet.optimize();
  const Matrix R = noisyBayesNet.matrix().first;
  const Matrix invSigma = R.transpose() * R;
  const double constant = sqrt((invSigma / (2 * M_PI)).determinant());
  const double actual = noisyBayesNet.evaluate(mean);
  EXPECT_DOUBLES_EQUAL(constant, actual, 1e-9);
}

/* ************************************************************************* */
TEST( GaussianBayesNet, NoisyMatrix )
{
  const auto [R, d] = noisyBayesNet.matrix();  // find matrix and RHS
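  // matrix() returns the whitened system: each conditional's row is divided by
  // its sigma (2 for the x row, 3 for the y row), giving R1 and d1 below.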

  Matrix R1 = (Matrix2() <<
      0.5, 0.5,
      0.0, 1./3.
  ).finished();
  Vector d1 = Vector2(9./2., 5./3.);

  EXPECT(assert_equal(R,R1));
  EXPECT(assert_equal(d,d1));
}

/* ************************************************************************* */
TEST(GaussianBayesNet, Optimize) {
  const VectorValues expected{{_x_, Vector1::Constant(4)},
                              {_y_, Vector1::Constant(5)}};
  const VectorValues actual = smallBayesNet.optimize();
  EXPECT(assert_equal(expected, actual));
}

/* ************************************************************************* */
TEST(GaussianBayesNet, NoisyOptimize) {
  const auto [R, d] = noisyBayesNet.matrix();  // find matrix and RHS
  const Vector x = R.inverse() * d;
  const VectorValues expected{{_x_, x.head(1)}, {_y_, x.tail(1)}};

  const VectorValues actual = noisyBayesNet.optimize();
  EXPECT(assert_equal(expected, actual));
}

/* ************************************************************************* */
TEST( GaussianBayesNet, optimizeIncomplete )
{
  static GaussianBayesNet incompleteBayesNet;
  incompleteBayesNet.emplace_shared<GaussianConditional>(
      _x_, Vector1::Constant(9), I_1x1, _y_, I_1x1);

  VectorValues solutionForMissing { {_y_, Vector1::Constant(5)} };

  VectorValues actual = incompleteBayesNet.optimize(solutionForMissing);

  VectorValues expected{{_x_, Vector1::Constant(4)},
                        {_y_, Vector1::Constant(5)}};

  EXPECT(assert_equal(expected,actual));
}

/* ************************************************************************* */
TEST( GaussianBayesNet, optimize3 )
{
  // Solve R*x = gx by back-substitution, but with a new right-hand side gx
  // supplied by the caller (not the d stored in the net): with R = [1 1; 0 1]
  // and gx = [4; 5], the solution is x = [-1; 5].
  VectorValues expected { {_x_, Vector1::Constant(-1)},
                          {_y_, Vector1::Constant(5)} };

  // Test different RHS version
  VectorValues gx{{_x_, Vector1::Constant(4)}, {_y_, Vector1::Constant(5)}};
  VectorValues actual = smallBayesNet.backSubstitute(gx);
  EXPECT(assert_equal(expected, actual));
}

/* ************************************************************************* */
namespace sampling {
static Matrix A1 = (Matrix(2, 2) << 1., 2., 3., 4.).finished();
static const Vector2 mean(20, 40), b(10, 10);
static const double sigma = 0.01;
static const GaussianBayesNet gbn = {
    GaussianConditional::sharedMeanAndStddev(X(0), A1, X(1), b, sigma),
    GaussianDensity::sharedMeanAndStddev(X(1), mean, sigma)};
}  // namespace sampling
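
// The net above is the generative model p(x0 | x1) = N(A1 * x1 + b, sigma^2 I),
// p(x1) = N(mean, sigma^2 I): ancestral sampling should draw x1 near `mean` and
// then x0 near A1 * mean + b, which is what the test below checks.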

/* ************************************************************************* */
TEST(GaussianBayesNet, sample) {
  using namespace sampling;

  auto actual = gbn.sample();
  EXPECT_LONGS_EQUAL(2, actual.size());
  EXPECT(assert_equal(mean, actual[X(1)], 50 * sigma));
  EXPECT(assert_equal(A1 * mean + b, actual[X(0)], 50 * sigma));

  // Use a specific random generator
  std::mt19937_64 rng(4242);
  auto actual3 = gbn.sample(&rng);
  EXPECT_LONGS_EQUAL(2, actual3.size());

  // regressions for the seeded draw
#if __APPLE__ || _WIN32
  EXPECT(assert_equal(Vector2(20.0129382, 40.0039798), actual3[X(1)], 1e-5));
  EXPECT(assert_equal(Vector2(110.032083, 230.039811), actual3[X(0)], 1e-5));
#elif __linux__
  EXPECT(assert_equal(Vector2(20.0070499, 39.9942591), actual3[X(1)], 1e-5));
  EXPECT(assert_equal(Vector2(109.976501, 229.990945), actual3[X(0)], 1e-5));
#endif
}

/* ************************************************************************* */
// Do Monte Carlo integration of square deviation, should be equal to 9.0.
TEST(GaussianBayesNet, MonteCarloIntegration) {
  GaussianBayesNet gbn;
  gbn.push_back(noisyBayesNet.at(1));

  double sum = 0.0;
  constexpr size_t N = 1000;
  // loop for N samples:
  for (size_t i = 0; i < N; i++) {
    const auto X_i = gbn.sample();
    sum += pow(X_i[_y_].x() - 5.0, 2.0);
  }
  // Expected is variance = 3*3
  EXPECT_DOUBLES_EQUAL(9.0, sum / N, 0.5);  // Pretty high.
}

/* ************************************************************************* */
TEST( GaussianBayesNet, ordering )
{
  const Ordering expected{_x_, _y_};
  const auto actual = noisyBayesNet.ordering();
  EXPECT(assert_equal(expected, actual));
}

/* ************************************************************************* */
TEST( GaussianBayesNet, MatrixStress )
{
  GaussianBayesNet bn;
  using GC = GaussianConditional;
  bn.emplace_shared<GC>(_x_, Vector2(1, 2), 1 * I_2x2, _y_, 2 * I_2x2, _z_, 3 * I_2x2);
  bn.emplace_shared<GC>(_y_, Vector2(3, 4), 4 * I_2x2, _z_, 5 * I_2x2);
  bn.emplace_shared<GC>(_z_, Vector2(5, 6), 6 * I_2x2);
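
  // Whatever column ordering we request, solving the dense system returned by
  // matrix(ordering) should reproduce the optimize() solution.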
  const VectorValues expected = bn.optimize();
  for (const auto& keys :
       {KeyVector({_x_, _y_, _z_}), KeyVector({_x_, _z_, _y_}),
        KeyVector({_y_, _x_, _z_}), KeyVector({_y_, _z_, _x_}),
        KeyVector({_z_, _x_, _y_}), KeyVector({_z_, _y_, _x_})}) {
    const Ordering ordering(keys);
    const auto [R, d] = bn.matrix(ordering);
    EXPECT(assert_equal(expected.vector(ordering), R.inverse() * d));
  }
}

/* ************************************************************************* */
TEST( GaussianBayesNet, backSubstituteTranspose )
{
  // Solve R'*y = x, i.e. y = inv(R')*x: with R' = [1 0; 1 1] and x = [2; 5],
  // the expected solution is y = [2; 3].
  const VectorValues x{{_x_, Vector1::Constant(2)},
                       {_y_, Vector1::Constant(5)}},
      expected{{_x_, Vector1::Constant(2)}, {_y_, Vector1::Constant(3)}};

  const VectorValues actual = smallBayesNet.backSubstituteTranspose(x);
  EXPECT(assert_equal(expected, actual));

  const auto ordering = noisyBayesNet.ordering();
  const Matrix R = smallBayesNet.matrix(ordering).first;
  const Vector expected_vector = R.transpose().inverse() * x.vector(ordering);
  EXPECT(assert_equal(expected_vector, actual.vector(ordering)));
}

/* ************************************************************************* */
TEST( GaussianBayesNet, backSubstituteTransposeNoisy )
{
  // Same as above but with the whitened noisy R: with x = [2; 5] the expected
  // solution of R'*y = x is y = [4; 9].
  VectorValues x{{_x_, Vector1::Constant(2)}, {_y_, Vector1::Constant(5)}},
      expected{{_x_, Vector1::Constant(4)}, {_y_, Vector1::Constant(9)}};

  VectorValues actual = noisyBayesNet.backSubstituteTranspose(x);
  EXPECT(assert_equal(expected, actual));

  const auto ordering = noisyBayesNet.ordering();
  const Matrix R = noisyBayesNet.matrix(ordering).first;
  const Vector expected_vector = R.transpose().inverse() * x.vector(ordering);
  EXPECT(assert_equal(expected_vector, actual.vector(ordering)));
}

/* ************************************************************************* */
// Tests computing Determinant
TEST( GaussianBayesNet, DeterminantTest )
{
  GaussianBayesNet cbn;
  cbn.emplace_shared<GaussianConditional>(
      0, Vector2(3.0, 4.0), (Matrix2() << 1.0, 3.0, 0.0, 4.0).finished(),
      1, (Matrix2() << 2.0, 1.0, 2.0, 3.0).finished(), noiseModel::Isotropic::Sigma(2, 2.0));

  cbn.emplace_shared<GaussianConditional>(
      1, Vector2(5.0, 6.0), (Matrix2() << 1.0, 1.0, 0.0, 3.0).finished(),
      2, (Matrix2() << 1.0, 0.0, 5.0, 2.0).finished(), noiseModel::Isotropic::Sigma(2, 2.0));

  cbn.emplace_shared<GaussianConditional>(
      3, Vector2(7.0, 8.0), (Matrix2() << 1.0, 1.0, 0.0, 5.0).finished(), noiseModel::Isotropic::Sigma(2, 2.0));
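
  // The determinant of the whitened upper-triangular system is the product of
  // the diagonal entries of the three R blocks, (1*4)*(1*3)*(1*5) = 60, with
  // every one of the six rows scaled by 1/sigma = 1/2, hence 60 / 2^6 = 60/64.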
  double expectedDeterminant = 60.0 / 64.0;
  double actualDeterminant = cbn.determinant();

  EXPECT_DOUBLES_EQUAL( expectedDeterminant, actualDeterminant, 1e-9);
}

/* ************************************************************************* */
namespace {
  // Error of the Bayes net viewed as a factor graph, 0.5 * |R*x - d|^2,
  // used below for numerical differentiation.
  double computeError(const GaussianBayesNet& gbn, const Vector10& values)
  {
    pair<Matrix,Vector> Rd = GaussianFactorGraph(gbn).jacobian();
    return 0.5 * (Rd.first * values - Rd.second).squaredNorm();
  }
}

/* ************************************************************************* */
TEST(GaussianBayesNet, ComputeSteepestDescentPoint) {

  // Create an arbitrary Bayes Net
  GaussianBayesNet gbn;
  gbn.emplace_shared<GaussianConditional>(
      0, Vector2(1.0,2.0), (Matrix2() << 3.0,4.0,0.0,6.0).finished(),
      3, (Matrix2() << 7.0,8.0,9.0,10.0).finished(),
      4, (Matrix2() << 11.0,12.0,13.0,14.0).finished());
  gbn.emplace_shared<GaussianConditional>(
      1, Vector2(15.0,16.0), (Matrix2() << 17.0,18.0,0.0,20.0).finished(),
      2, (Matrix2() << 21.0,22.0,23.0,24.0).finished(),
      4, (Matrix2() << 25.0,26.0,27.0,28.0).finished());
  gbn.emplace_shared<GaussianConditional>(
      2, Vector2(29.0,30.0), (Matrix2() << 31.0,32.0,0.0,34.0).finished(),
      3, (Matrix2() << 35.0,36.0,37.0,38.0).finished());
  gbn.emplace_shared<GaussianConditional>(
      3, Vector2(39.0,40.0), (Matrix2() << 41.0,42.0,0.0,44.0).finished(),
      4, (Matrix2() << 45.0,46.0,47.0,48.0).finished());
  gbn.emplace_shared<GaussianConditional>(
      4, Vector2(49.0,50.0), (Matrix2() << 51.0,52.0,0.0,54.0).finished());

  // Compute the Hessian numerically
  Matrix hessian = numericalHessian<Vector10>(
      std::bind(&computeError, gbn, std::placeholders::_1), Vector10::Zero());

  // Compute the gradient numerically
  Vector gradient = numericalGradient<Vector10>(
      std::bind(&computeError, gbn, std::placeholders::_1), Vector10::Zero());

  // Compute the gradient using dense matrices
  Matrix augmentedHessian = GaussianFactorGraph(gbn).augmentedHessian();
  LONGS_EQUAL(11, (long)augmentedHessian.cols());
  Vector denseMatrixGradient = -augmentedHessian.col(10).segment(0,10);
  EXPECT(assert_equal(gradient, denseMatrixGradient, 1e-5));
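
  // The steepest-descent (Cauchy) point minimizes the quadratic model along the
  // gradient direction: x = step * g with step = -|g|^2 / (g' * H * g).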
  // Compute the steepest descent point
  double step = -gradient.squaredNorm() / (gradient.transpose() * hessian * gradient)(0);
  Vector expected = gradient * step;

  // Compute the steepest descent point with optimizeGradientSearch (used by Dogleg)
  VectorValues actual = gbn.optimizeGradientSearch();

  // Check that points agree
  KeyVector keys {0, 1, 2, 3, 4};
  Vector actualAsVector = actual.vector(keys);
  EXPECT(assert_equal(expected, actualAsVector, 1e-5));

  // Check that point causes a decrease in error
  double origError = GaussianFactorGraph(gbn).error(VectorValues::Zero(actual));
  double newError = GaussianFactorGraph(gbn).error(actual);
  EXPECT(newError < origError);
}

/* ************************************************************************* */
TEST(GaussianBayesNet, Dot) {
  GaussianBayesNet fragment;
  DotWriter writer;
  writer.variablePositions.emplace(_x_, Vector2(10, 20));
  writer.variablePositions.emplace(_y_, Vector2(50, 20));

  auto position = writer.variablePos(_x_);
  CHECK(position);
  EXPECT(assert_equal(Vector2(10, 20), *position, 1e-5));

  string actual = noisyBayesNet.dot(DefaultKeyFormatter, writer);
  EXPECT(actual ==
         "digraph {\n"
         " size=\"5,5\";\n"
         "\n"
         " var11[label=\"11\", pos=\"10,20!\"];\n"
         " var22[label=\"22\", pos=\"50,20!\"];\n"
         "\n"
         " var22->var11\n"
         "}");
}

/* ************************************************************************* */
int main() {
  TestResult tr;
  return TestRegistry::runAllTests(tr);
}
/* ************************************************************************* */