Go to the documentation of this file.
using namespace gtsam;
// Expected canonical ordering of the 3x3 planar graph (excerpt; remaining keys
// not shown in this listing):
Ordering expected{
    key(3, 3), key(2, 3), key(1, 3),
    key(3, 2), key(2, 2),
double total_error = 0.;

// From a later test: eliminate the spanning-tree part Ab1 (as split off by
// splitOffPlanarTree) with ordering ord to obtain the Gaussian Bayes net Rc1.
auto Rc1 = *Ab1.eliminateSequential(ord);
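The Ab1, ord, and Rc1 above presumably come from a setup along the following lines. This is only a sketch built from the helpers listed in the cross-references further down; the names Ab, xtrue, Ab1, and Ab2 are illustrative and assume those helpers are in scope.

  // Sketch: build the 3x3 planar example, split off a spanning tree, and
  // eliminate the tree part with the canonical planar ordering.
  const auto [Ab, xtrue] = planarGraph(3);            // planar graph + ground truth
  const auto [Ab1, Ab2] = splitOffPlanarTree(3, Ab);  // spanning tree, remaining constraints
  const Ordering ord = planarOrdering(3);
  auto Rc1 = *Ab1.eliminateSequential(ord);           // Gaussian Bayes net of the tree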
// Dense Abar for the 3x3 planar system: 13 factors x 2 rows, 9 nodes x 2 columns,
// with the identity block for the spanning-tree rows on top.
Matrix Abar(13 * 2, 9 * 2);
Abar.topRows(9 * 2) = Matrix::Identity(9 * 2, 9 * 2);
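The excerpt only fills the identity block; the remaining bottom rows would hold the constraint block expressed in the tree-preconditioned variables, conceptually A2 * R1^{-1}. A sketch, assuming dense matrices A2dense (8 x 18) and R1dense (18 x 18) have been formed elsewhere:

  // Sketch only: A2dense and R1dense are assumed dense versions of the
  // constraint rows and of the triangular factor of the spanning tree.
  Abar.bottomRows(4 * 2) = A2dense * R1dense.inverse();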
const double alpha = 0.5;

for (size_t i = 0; i < 13; i++) {
// Indicator-style error vectors: ee1 is ones on the 18 spanning-tree rows,
// ee2 is ones on the 8 constraint rows.
Vector ee1(13 * 2), ee2(13 * 2);
ee1 << Vector::Ones(9 * 2), Vector::Zero(4 * 2);
ee2 << Vector::Zero(9 * 2), Vector::Ones(4 * 2);
Vector expected_y = alpha * Abar.transpose() * ee1;

expected_y = alpha * Abar.transpose() * ee2;
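expected_y is the dense reference value; the quantity under test would be the sparse equivalent computed by transposeMultiplyAdd (signature in the cross-references below). A sketch, assuming 'system' is the SubgraphPreconditioner being exercised and e1 is the Errors list whose stacked entries equal ee1:

  VectorValues y = system.zero();             // start from the all-zero estimate
  system.transposeMultiplyAdd(alpha, e1, y);  // y += alpha * Abar' * e1
  // The stacked blocks of y should then match expected_y.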
Vector expected_g = Vector::Zero(18);
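Since expected_g is all zeros, the test point is evidently one where the preconditioned gradient vanishes. A sketch of such a check, assuming the same 'system' object and the gradient() member listed below; the exact evaluation point is an assumption:

  VectorValues y0 = system.zero();
  VectorValues actual_g = system.gradient(y0);
  // Each 2-vector block of actual_g is expected to equal the corresponding
  // block of expected_g, i.e. zero.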
Cross-referenced symbols (from the generated tooltips):

  static int runAllTests(TestResult &result)
  #define EXPECT(condition)
  #define DOUBLES_EQUAL(expected, actual, threshold)
  #define LONGS_EQUAL(expected, actual)
  bool assert_equal(const Matrix &expected, const Matrix &actual, double tol)
  TEST(SubgraphPreconditioner, planarOrdering)
  Ordering planarOrdering(size_t N)
  std::pair< GaussianFactorGraph, VectorValues > planarGraph(size_t N)
  std::pair< GaussianFactorGraph, GaussianFactorGraph > splitOffPlanarTree(size_t N, const GaussianFactorGraph &original)
  static double error(const GaussianFactorGraph &fg, const VectorValues &x)
  double error(const VectorValues &y) const
  VectorValues gradient(const VectorValues &y) const
  VectorValues x(const VectorValues &y) const
  VectorValues zero() const
  VectorValues optimize() const
  void transposeMultiplyAdd(double alpha, const Errors &e, VectorValues &y) const
  Vector conjugateGradientDescent(const System &Ab, const Vector &x, const ConjugateGradientParameters &parameters)
  V conjugateGradients(const S &Ab, V x, const ConjugateGradientParameters &parameters, bool steepest)
  static ConjugateGradientParameters parameters

Referenced header briefs:

  Linear Factor Graph where all factors are Gaussians.
  Variable ordering for the elimination algorithm.
  Create small example with two poses and one landmark.
  Iterative methods, implementation.
  Some functions to compute numerical derivatives.
  Utility functions for loading datasets.
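The conjugate-gradient entry points listed above are what the later tests drive. A minimal usage sketch, assuming 'Ab' is a System instance (a dense A, b pair) built beforehand and that default ConjugateGradientParameters are acceptable:

  ConjugateGradientParameters parameters;
  Vector x0 = Vector::Zero(9 * 2);                           // zero start for the 3x3 grid
  Vector xCG = conjugateGradientDescent(Ab, x0, parameters);
  // xCG should approach the exact solution of the planar system.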
gtsam
autogenerated on Sun Dec 22 2024 04:17:31