#include <fstream>
#include <iostream>
#include <iomanip>
#include <vector>
#include <map>
#include <list>

#include "isam/util.h"
#include "isam/SparseSystem.h"
#include "isam/OptimizationInterface.h"
#include "isam/covariance.h"

#include "isam/Slam.h"

using namespace std;
using namespace Eigen;

namespace isam {

int Node::_next_id = 0;
int Factor::_next_id = 0;

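// Static counters used to assign unique, consecutive ids to nodes and factors.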
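// Small RAII helper: frees an array of SparseVector pointers when it goes out
// of scope. The SparseVector objects themselves are passed to the SparseSystem
// constructed from them (which presumably takes ownership), so only the
// pointer array is deleted here.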
struct DeleteOnReturn
{
  SparseVector** _ptr;
  explicit DeleteOnReturn(SparseVector** ptr) : _ptr(ptr) {}
  ~DeleteOnReturn() { delete [] _ptr; }
};

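// Assign each node its starting index in the stacked estimate vector, so that
// Jacobian columns can be matched to nodes.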
void Slam::update_starts() {
  int start = 0;
  const list<Node*>& nodes = get_nodes();
  for (list<Node*>::const_iterator it = nodes.begin(); it!=nodes.end(); it++) {
    Node* node = *it;
    node->_start = start;
    start += node->dim();
  }
}

Slam::Slam()
  : Graph(),
    _step(0), _prop(Properties()),
    _covariances(this),
    _require_batch(true), _cost_func(NULL),
    _dim_nodes(0), _dim_measure(0),
    _num_new_measurements(0), _num_new_rows(0),
    _opt(*this)
{
}

Slam::~Slam()
{
}

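// Serialize the graph to the given file; aborts if the file cannot be opened.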
void Slam::save(const string fname) const {
  ofstream out(fname.c_str(), ios::out | ios::binary);
  require(out, "Slam.save: Cannot open output file.");
  write(out);
  out.close();
}

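// Add a node (variable) to the graph and grow the state dimension accordingly.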
void Slam::add_node(Node* node) {
  Graph::add_node(node);
  _dim_nodes += node->dim();
}

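// Add a factor (measurement): initialize its linearization point, attach the
// optional robust cost function, and record it as new for incremental updates.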
void Slam::add_factor(Factor* factor) {
  factor->initialize_internal();
  factor->set_cost_function(&_cost_func);
  Graph::add_factor(factor);
  _num_new_measurements++;
  _num_new_rows += factor->dim();
  _dim_measure += factor->dim();
}

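// Removing a node also removes all factors attached to it. A copy of the
// factor list is iterated, because remove_factor modifies the original list.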
void Slam::remove_node(Node* node) {
  // work on a copy, as remove_factor modifies the node's factor list
  list<Factor*> factors = node->factors();
  for (list<Factor*>::iterator factor = factors.begin(); factor!=factors.end(); factor++) {
    remove_factor(*factor);
  }
  _dim_nodes -= node->dim();
  Graph::remove_node(node);
  _require_batch = true;  // removal invalidates the incremental factorization
}

void Slam::remove_factor(Factor* factor) {
  // detach the factor from every node it connects
  vector<Node*> nodes = factor->nodes();
  for (vector<Node*>::iterator node = nodes.begin(); node!=nodes.end(); node++) {
    (*node)->remove_factor(factor);
  }
  _dim_measure -= factor->dim();
  Graph::remove_factor(factor);
  _require_batch = true;
}

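// Fold any new measurements into the existing factorization; falls back to a
// full batch step if a node or factor was removed since the last one.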
void Slam::incremental_update()
{
  if (_require_batch)
  {
    batch_optimization_step();
  }
  else if (_num_new_measurements > 0)
  {
    // Jacobian of the new measurements only
    SparseSystem jac_new = jacobian_partial(_num_new_measurements);
    _opt.augment_sparse_linear_system(jac_new, _prop);
    _num_new_measurements = 0;
    _num_new_rows = 0;
  }
}

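// Relinearize all factors and recompute the factorization from scratch.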
void Slam::batch_optimization_step()
{
  _require_batch = false;
  _num_new_measurements = 0;
  _num_new_rows = 0;
  _opt.relinearize(_prop);
}

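// Main entry point, called once per step. Depending on the mod_update,
// mod_batch and mod_solve properties, this performs a full batch step, an
// incremental update, and/or a solve for the current estimate.
//
// Typical client loop (a sketch only; the concrete node and factor types
// depend on the application, and new_measurements_available() is a
// placeholder):
//
//   Slam slam;
//   while (new_measurements_available()) {
//     // slam.add_node(...); slam.add_factor(...);
//     slam.update();
//   }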
UpdateStats Slam::update()
{
  UpdateStats stats;
  stats.batch = false;
  stats.solve = false;
  if (_step%_prop.mod_update == 0)
  {
    if (_step%_prop.mod_batch == 0)
    {
      // batch solve: relinearize and refactor
      if (!_prop.quiet)
      {
        cout << endl;
        cout << "step " << _step;
      }
      batch_optimization_step();
      stats.batch = true;
    }
    else
    {
      // incremental update
      if (!_prop.quiet)
      {
        cout << ".";
        cout.flush();
      }
      incremental_update();
      if (_step%_prop.mod_solve == 0)
      {
        stats.solve = true;
        _opt.update_estimate(_prop);
      }
    }
  }
  _step++;
  stats.step = _step;

  return stats;
}

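// Perform a full batch optimization; returns the number of iterations used.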
int Slam::batch_optimization()
{
  int num_iterations = 0;

  // first erase any nodes and factors that were marked for deletion
  int variables_deleted;
  int measurements_deleted;
  erase_marked(variables_deleted, measurements_deleted);
  _dim_nodes -= variables_deleted;
  _dim_measure -= measurements_deleted;

  _opt.batch_optimize(_prop, &num_iterations);
  return num_iterations;
}

void Slam::set_cost_function(cost_func_t func) {
  _cost_func = func;
}

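// Apply the exponential-map update x to all nodes, each node consuming its
// dim() entries of x in graph order.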
void Slam::apply_exmap(const Eigen::VectorXd& x) {
  int pos = 0;
  for (list<Node*>::iterator node = _nodes.begin(); node != _nodes.end(); node++) {
    int dim = (*node)->dim();
    VectorXd xi = x.segment(pos, dim);
    (*node)->apply_exmap(xi);
    pos += dim;
  }
}

void Slam::self_exmap(const Eigen::VectorXd& x) {
  int pos = 0;
  for (list<Node*>::iterator node = _nodes.begin(); node != _nodes.end(); node++) {
    int dim = (*node)->dim();
    VectorXd xi = x.segment(pos, dim);
    (*node)->self_exmap(xi);
    pos += dim;
  }
}

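// Copy or swap the linearization point and the current estimate of all nodes.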
void Slam::linpoint_to_estimate() {
  for (list<Node*>::iterator node = _nodes.begin(); node!=_nodes.end(); node++) {
    (*node)->linpoint_to_estimate();
  }
}

void Slam::estimate_to_linpoint() {
  for (list<Node*>::iterator node = _nodes.begin(); node!=_nodes.end(); node++) {
    (*node)->estimate_to_linpoint();
  }
}

void Slam::swap_estimates() {
  for (list<Node*>::iterator node = _nodes.begin(); node!=_nodes.end(); node++) {
    (*node)->swap_estimates();
  }
}

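// Stack the weighted residuals of all factors, evaluated at the current
// estimate or at the linearization point depending on the selector.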
VectorXd Slam::weighted_errors(Selector s) {
  VectorXd werrors(_dim_measure);
  const list<Factor*>& factors = get_factors();
  int start = 0;
  for (list<Factor*>::const_iterator it = factors.begin(); it!=factors.end(); it++) {
    int dim = (*it)->dim();
    werrors.segment(start, dim) = (*it)->error(s);
    start += dim;
  }
  return werrors;
}

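// Weighted sum of squared errors (chi-square) over all factors.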
double Slam::chi2(Selector s) {
  return weighted_errors(s).squaredNorm();
}

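// Weighted sum of squared errors restricted to the last_n most recently
// added factors, evaluated at the current estimate.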
double Slam::local_chi2(int last_n) {
  // walk the factor list backwards, filling werrors from the back
  VectorXd werrors(_dim_measure);
  const list<Factor*>& factors = get_factors();
  int start = _dim_measure;
  int n = 0;
  for (list<Factor*>::const_reverse_iterator it = factors.rbegin();
       it!=factors.rend() && n<last_n;
       it++, n++) {
    int dim = (*it)->dim();
    start -= dim;
    werrors.segment(start, dim) = (*it)->error(ESTIMATE);
  }
  return werrors.tail(_dim_measure-start).squaredNorm();
}

double Slam::normalized_chi2() {
  // chi2 divided by the degrees of freedom (measurement dimensions minus
  // variable dimensions)
  return chi2() / (double)(_dim_measure - _dim_nodes);
}

const SparseSystem& Slam::get_R() const {
  return _R;
}

// step size for the central-difference numerical Jacobian below
const double epsilon = 0.0001;

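// Build the measurement Jacobian column by column using central differences:
// each variable component is perturbed by +/-epsilon and only the factors
// attached to that node are re-evaluated.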
SparseSystem Slam::jacobian_numerical_columnwise() {
  int num_rows = _dim_measure;
  DeleteOnReturn rows_ptr(new SparseVector* [num_rows]);
  SparseVector** rows = rows_ptr._ptr;
  // assign each factor its row offset and allocate its sparse rows
  int pos = 0;
  for (list<Factor*>::const_iterator it = get_factors().begin();
       it!=get_factors().end();
       it++) {
    (*it)->_start = pos;
    int dimtotal = 0;
    for (vector<Node*>::const_iterator it2 = (*it)->nodes().begin();
         it2!=(*it)->nodes().end();
         it2++) {
      dimtotal += (*it2)->dim();
    }
    for (int i=0; i<(*it)->dim(); i++, pos++) {
      rows[pos] = new SparseVector(dimtotal);
    }
  }

  VectorXd y_plus(num_rows);
  VectorXd y_minus(num_rows);
  int col = 0;
  for (list<Node*>::const_iterator it = get_nodes().begin(); it!=get_nodes().end(); it++) {
    Node* node = *it;
    int dim_node = node->dim();
    VectorXd delta(dim_node);
    delta.setZero();
    VectorXd original = node->vector0();
    for (int c=0; c<dim_node; c++, col++) {
      // evaluate the attached factors at +epsilon
      delta(c) = epsilon;
      node->self_exmap(delta);
      int row = 0;
      for (list<Factor*>::const_iterator it_factor = node->factors().begin();
           it_factor!=node->factors().end();
           it_factor++) {
        Factor* factor = *it_factor;
        int dim_factor = factor->dim();
        y_plus.segment(row, dim_factor) = factor->evaluate();
        row += dim_factor;
      }
      node->update0(original);
      // evaluate the attached factors at -epsilon
      delta(c) = - epsilon;
      node->self_exmap(delta);
      row = 0;
      for (list<Factor*>::const_iterator it_factor = node->factors().begin();
           it_factor!=node->factors().end();
           it_factor++) {
        Factor* factor = *it_factor;
        int dim_factor = factor->dim();
        y_minus.segment(row, dim_factor) = factor->evaluate();
        row += dim_factor;
      }
      node->update0(original);
      delta(c) = 0.;
      // central difference
      VectorXd diff = (y_plus.head(row) - y_minus.head(row)) / (epsilon + epsilon);
      // scatter the non-zero entries of this column into the sparse rows
      row = 0;
      for (list<Factor*>::const_iterator it_factor = node->factors().begin();
           it_factor!=node->factors().end();
           it_factor++) {
        for (int r=0; r<(*it_factor)->dim(); r++, row++) {
          if (diff(row)!=0.) {
            int offset = (*it_factor)->_start;
            rows[offset+r]->append(col, diff(row));
          }
        }
      }
    }
  }
  VectorXd rhs = weighted_errors(LINPOINT);
  return SparseSystem(num_rows, _dim_nodes, rows, rhs);
}

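// Return the full measurement Jacobian, either assembled per factor or
// computed numerically if forced by the properties.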
SparseSystem Slam::jacobian() {
  if (_prop.force_numerical_jacobian) {
    return jacobian_numerical_columnwise();
  } else {
    return jacobian_partial(-1);
  }
}

const Covariances& Slam::covariances() {
  return _covariances;
}

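// Assemble the Jacobian of the last_n most recently added factors (all
// factors for last_n = -1), using each factor's own jacobian_internal().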
SparseSystem Slam::jacobian_partial(int last_n) {
  update_starts();

  int num_rows = _dim_measure;
  if (last_n > 0) {
    num_rows = _num_new_rows;
  }
  DeleteOnReturn rows_ptr(new SparseVector*[num_rows]);
  SparseVector** rows = rows_ptr._ptr;

  VectorXd rhs(num_rows);
  int row = 0;
  const list<Factor*>& factors = get_factors();
  list<Factor*>::const_iterator it = factors.begin();
  if (last_n != -1) {
    // skip all but the last_n most recently added factors
    for (int n = num_factors(); n>last_n; n--, it++);
  }
  for (; it!=factors.end(); it++) {
    Factor* factor = *it;
    Jacobian jac = factor->jacobian_internal(_prop.force_numerical_jacobian);
    VectorXd jac_rhs = jac.rhs();
    for (int r=0; r<jac_rhs.rows(); r++) {
      rhs(row+r) = jac_rhs(r);
      rows[row+r] = new SparseVector(jac.dimtotal());
    }
    // scatter each term's block into the sparse rows at the node's column offset
    for (Terms::const_iterator term = jac.terms().begin(); term!=jac.terms().end(); term++) {
      int offset = term->node()->_start;
      int nr = term->term().rows();
      for (int r=0; r<nr; r++) {
        rows[row+r]->set(offset, term->term().row(r));
      }
    }
    row += factor->dim();
  }
  return SparseSystem(num_rows, _dim_nodes, rows, rhs);
}

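// Print summary statistics of the current system and its factorization R.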
void Slam::print_stats() {
  double nnz = _R.nnz();
  double max_per_col = _R.max_nz();
  double dim = _dim_nodes;
  double per_col = nnz/dim;
  double fill_in = 100. * nnz/(dim*dim);  // fill-in as a percentage
  cout << "iSAM statistics:" << endl;
  cout << "  Normalized chi-square value: " << normalized_chi2() << endl;
  cout << "  Weighted sum of squared errors: " << chi2() << endl;
  cout << "  Number of nodes: " << num_nodes() << endl;
  cout << "  Number of factors: " << num_factors() << endl;
  cout << "  Number of variables: " << _dim_nodes << endl;
  cout << "  Number of measurements: " << _dim_measure << endl;
  cout << "  Number of non-zero entries: " << nnz << endl;
  cout << "    max per column: " << max_per_col << endl;
  cout << "    avg per column: " << per_col << endl;
  cout << "    fill in: " << fill_in << "%" << endl;
}

} // namespace isam