/* ************************************************************************* */
bool BatchFixedLagSmoother::equals(const FixedLagSmoother& rhs, double tol) const {
  const BatchFixedLagSmoother* e = dynamic_cast<const BatchFixedLagSmoother*>(&rhs);
  return e != nullptr && FixedLagSmoother::equals(*e, tol)
      && factors_.equals(e->factors_, tol) && theta_.equals(e->theta_, tol);
}

/* ************************************************************************* */
Matrix BatchFixedLagSmoother::marginalCovariance(Key /*key*/) const {
  throw std::runtime_error(
      "BatchFixedLagSmoother::marginalCovariance not implemented");
}
/* ************************************************************************* */
FixedLagSmoother::Result BatchFixedLagSmoother::update(
    const NonlinearFactorGraph& newFactors, const Values& newTheta,
    const KeyTimestampMap& timestamps, const FactorIndices& factorsToRemove) {

  // Augment the system with the new variables and factors
  gttic(augment_system);
  theta_.insert(newTheta);
  for (const auto key : newTheta.keys()) {
    ordering_.push_back(key);
  }
  delta_.insert(newTheta.zeroVectors());
  insertFactors(newFactors);
  gttoc(augment_system);

  // Remove any factors the caller asked to delete
  for (const size_t i : factorsToRemove) {
    if (factors_[i]) factors_[i].reset();
  }

  // Update the timestamps associated with the keys
  updateKeyTimestampMap(timestamps);

  // Find the variables that have fallen outside the smoothing lag
  double current_timestamp = getCurrentTimestamp();
  KeyVector marginalizableKeys = findKeysBefore(current_timestamp - smootherLag_);

  // Reorder so the marginalizable variables are eliminated first, then optimize
  reorder(marginalizableKeys);
  Result result;
  if (factors_.size() > 0) {
    result = optimize();
  }

  // Marginalize out the old variables
  if (marginalizableKeys.size() > 0) {
    marginalize(marginalizableKeys);
  }
  return result;
}
/* ************************************************************************* */
void BatchFixedLagSmoother::insertFactors(const NonlinearFactorGraph& newFactors) {
  for (const auto& factor : newFactors) {
    size_t index;
    // Reuse an empty slot in the factor graph if one is available
    if (availableSlots_.size() > 0) {
      index = availableSlots_.front();
      availableSlots_.pop();
      factors_.replace(index, factor);
    } else {
      index = factors_.size();
      factors_.push_back(factor);
    }
    // Update the factor index for every key in this factor
    for (Key key : *factor) {
      factorIndex_[key].insert(index);
    }
  }
}
/* ************************************************************************* */
void BatchFixedLagSmoother::removeFactors(const set<size_t>& deleteFactors) {
  for (size_t slot : deleteFactors) {
    if (factors_.at(slot)) {
      // Remove references to this factor from the factor index
      for (Key key : *(factors_.at(slot))) {
        factorIndex_[key].erase(slot);
      }
      // Remove the factor from the graph and recycle its slot
      factors_.remove(slot);
      availableSlots_.push(slot);
    } else {
      cout << "Attempting to remove a factor from slot " << slot
           << ", but it is already nullptr." << endl;
    }
  }
}
/* ************************************************************************* */
void BatchFixedLagSmoother::eraseKeys(const KeyVector& keys) {
  for (Key key : keys) {
    // Erase the key from the values, the factor index, and the linearized keys
    theta_.erase(key);
    factorIndex_.erase(key);
    if (linearValues_.exists(key)) {
      linearValues_.erase(key);
    }
  }
  eraseKeyTimestampMap(keys);
  // Remove the erased keys from the ordering
  for (Key key : keys) {
    ordering_.erase(find(ordering_.begin(), ordering_.end(), key));
  }
}
/* ************************************************************************* */
void BatchFixedLagSmoother::reorder(const KeyVector& marginalizeKeys) {
  // Constrained COLAMD: place the marginalizable keys first so they are eliminated first
  ordering_ = Ordering::ColamdConstrainedFirst(factors_, marginalizeKeys);
}
/* ************************************************************************* */
FixedLagSmoother::Result BatchFixedLagSmoother::optimize() {

  // Create the result structure and read the optimizer settings
  Result result;
  double lambda = parameters_.lambdaInitial;
  double lambdaFactor = parameters_.lambdaFactor;
  double lambdaUpperBound = parameters_.lambdaUpperBound;
  double lambdaLowerBound = 1.0e-10;
  size_t maxIterations = parameters_.maxIterations;
  double relativeErrorTol = parameters_.relativeErrorTol;
  double absoluteErrorTol = parameters_.absoluteErrorTol;
  double errorTol = parameters_.errorTol;

  // Evaluate the error at the current linearization point plus delta
  Values evalpoint = theta_.retract(delta_);
  result.error = factors_.error(evalpoint);
  if (result.error <= errorTol) {
    return result;
  }

  // Custom Levenberg-Marquardt loop so the linearization points can be controlled
  double previousError;
  VectorValues newDelta;
  do {
    previousError = result.error;
    gttic(optimizer_iteration);

    // Linearize the graph about the current linearization point
    GaussianFactorGraph linearFactorGraph = *factors_.linearize(theta_);

    // Keep increasing lambda until the error decreases
    while (true) {

      // Damp the system with prior factors at the current solution, sigma = 1/sqrt(lambda)
      GaussianFactorGraph dampedFactorGraph(linearFactorGraph);
      dampedFactorGraph.reserve(linearFactorGraph.size() + delta_.size());
      double sigma = 1.0 / std::sqrt(lambda);
      for (const auto& key_value : delta_) {
        size_t dim = key_value.second.size();
        Matrix A = Matrix::Identity(dim, dim);
        SharedDiagonal model = noiseModel::Isotropic::Sigma(dim, sigma);
        dampedFactorGraph.emplace_shared<JacobianFactor>(key_value.first, A,
            key_value.second, model);
      }
      result.intermediateSteps++;

      // Solve the damped system and evaluate the new nonlinear error
      newDelta = dampedFactorGraph.optimize(ordering_,
          parameters_.getEliminationFunction());
      evalpoint = theta_.retract(newDelta);
      gttic(compute_error);
      double error = factors_.error(evalpoint);
      gttoc(compute_error);

      if (error < result.error) {
        // Accept the step: update the error, the linearization point, and the deltas
        result.error = error;
        theta_ = evalpoint;
        delta_.setZero();
        // Restore the linearization point and delta of variables held constant
        if (enforceConsistency_ && (linearValues_.size() > 0)) {
          theta_.update(linearValues_);
          for (const auto key : linearValues_.keys()) {
            delta_.at(key) = newDelta.at(key);
          }
        }
        // Decrease lambda for the next iteration and stop the lambda search
        lambda /= lambdaFactor;
        if (lambda < lambdaLowerBound) {
          lambda = lambdaLowerBound;
        }
        break;
      } else if (lambda >= lambdaUpperBound) {
        // Reject the step; the maximum lambda has been reached, so give up
        if (parameters_.verbosity >= NonlinearOptimizerParams::TERMINATION
            || parameters_.verbosityLM == LevenbergMarquardtParams::SUMMARY) {
          cout << "Warning: Levenberg-Marquardt giving up because "
                  "cannot decrease error with maximum lambda" << endl;
        }
        break;
      } else {
        // Reject the step and increase lambda
        lambda *= lambdaFactor;
      }
    }

    gttoc(optimizer_iteration);
    result.iterations++;
  } while (result.iterations < maxIterations
      && !checkConvergence(relativeErrorTol, absoluteErrorTol, errorTol,
          previousError, result.error, NonlinearOptimizerParams::SILENT));

  return result;
}
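// Note on the damping above: each added prior has A = I, b = the current delta, and an
// isotropic sigma of 1/sqrt(lambda), so it contributes (lambda/2) * ||delta_new - delta_current||^2
// to the damped objective. This is the standard Levenberg-Marquardt penalty that pulls
// the step toward the current estimate; larger lambda means a more conservative step.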
/* ************************************************************************* */
void BatchFixedLagSmoother::marginalize(const KeyVector& marginalizeKeys) {
  // Identify all of the factors involving any marginalized variable; these must be removed
  set<size_t> removedFactorSlots;
  const VariableIndex variableIndex(factors_);
  for (Key key : marginalizeKeys) {
    const auto& slots = variableIndex[key];
    removedFactorSlots.insert(slots.begin(), slots.end());
  }

  // Collect the removed factors into a separate graph
  NonlinearFactorGraph removedFactors;
  for (size_t slot : removedFactorSlots) {
    if (factors_.at(slot)) {
      removedFactors.push_back(factors_.at(slot));
    }
  }

  // Compute marginal factors on the remaining keys
  NonlinearFactorGraph marginalFactors = CalculateMarginalFactors(
      removedFactors, theta_, marginalizeKeys, parameters_.getEliminationFunction());

  // Remove the old factors and keys, then insert the new marginal factors
  removeFactors(removedFactorSlots);
  eraseKeys(marginalizeKeys);
  insertFactors(marginalFactors);
}
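// Note on the marginalization above: the removed factors are linearized at theta_ and
// partially eliminated (see CalculateMarginalFactors below); the resulting Gaussian
// marginals are wrapped as LinearContainerFactor objects so they can be inserted back
// into the nonlinear graph with theta_ as their fixed linearization point.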
/* ************************************************************************* */
void BatchFixedLagSmoother::PrintKeySet(const set<Key>& keys, const string& label) {
  cout << label;
  for (Key key : keys) cout << " " << DefaultKeyFormatter(key);
  cout << endl;
}

/* ************************************************************************* */
void BatchFixedLagSmoother::PrintKeySet(const gtsam::KeySet& keys, const string& label) {
  cout << label;
  for (Key key : keys) cout << " " << DefaultKeyFormatter(key);
  cout << endl;
}

/* ************************************************************************* */
void BatchFixedLagSmoother::PrintSymbolicFactor(const NonlinearFactor::shared_ptr& factor) {
  cout << "f(";
  if (factor) {
    for (Key key : factor->keys()) cout << " " << DefaultKeyFormatter(key);
  } else {
    cout << " nullptr";
  }
  cout << " )" << endl;
}

/* ************************************************************************* */
void BatchFixedLagSmoother::PrintSymbolicFactor(const GaussianFactor::shared_ptr& factor) {
  cout << "f(";
  for (Key key : factor->keys()) cout << " " << DefaultKeyFormatter(key);
  cout << " )" << endl;
}

/* ************************************************************************* */
void BatchFixedLagSmoother::PrintSymbolicGraph(const NonlinearFactorGraph& graph,
    const string& label) {
  cout << label << endl;
  for (const auto& factor : graph) PrintSymbolicFactor(factor);
}

/* ************************************************************************* */
void BatchFixedLagSmoother::PrintSymbolicGraph(const GaussianFactorGraph& graph,
    const string& label) {
  cout << label << endl;
  for (const auto& factor : graph) PrintSymbolicFactor(factor);
}
/* ************************************************************************* */
GaussianFactorGraph BatchFixedLagSmoother::CalculateMarginalFactors(
    const GaussianFactorGraph& graph, const KeyVector& keys,
    const GaussianFactorGraph::Eliminate& eliminateFunction) {
  if (keys.size() == 0) {
    return graph;  // nothing to marginalize
  }
  // Eliminate the given keys; the remaining factors are the marginals
  return *graph.eliminatePartialMultifrontal(Ordering(keys), eliminateFunction).second;
}

/* ************************************************************************* */
NonlinearFactorGraph BatchFixedLagSmoother::CalculateMarginalFactors(
    const NonlinearFactorGraph& graph, const Values& theta, const KeyVector& keys,
    const GaussianFactorGraph::Eliminate& eliminateFunction) {
  if (keys.size() == 0) {
    return graph;
  }
  // Linearize, marginalize in the linear graph, then wrap back into nonlinear factors
  const auto linearFactorGraph = graph.linearize(theta);
  const auto marginalLinearFactors =
      CalculateMarginalFactors(*linearFactorGraph, keys, eliminateFunction);
  return LinearContainerFactor::ConvertLinearGraph(marginalLinearFactors, theta);
}
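/* ************************************************************************* */
// Usage sketch (illustrative only, not part of this translation unit). It assumes the
// public API declared in BatchFixedLagSmoother.h (update / calculateEstimate) together
// with the standard GTSAM Pose2 and PriorFactor types; the variable names are hypothetical.
//
//   BatchFixedLagSmoother smoother(2.0);  // keep a 2.0 second smoothing window
//
//   NonlinearFactorGraph newFactors;
//   Values newValues;
//   FixedLagSmoother::KeyTimestampMap newTimestamps;
//
//   // Prior on the first pose, stamped at t = 0.0
//   auto priorNoise = noiseModel::Diagonal::Sigmas(Vector3(0.3, 0.3, 0.1));
//   newFactors.push_back(PriorFactor<Pose2>(0, Pose2(0, 0, 0), priorNoise));
//   newValues.insert(0, Pose2(0, 0, 0));
//   newTimestamps[0] = 0.0;
//
//   // Incorporate the new factors, optimize, and marginalize out anything older than the lag
//   smoother.update(newFactors, newValues, newTimestamps);
//   Values estimate = smoother.calculateEstimate();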