00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016
00017 #include "graph_optimizer2d_hchol.h"
00018 #include <iostream>
00019 #include <iomanip>
00020 #include <algorithm>
00021 #include <iterator>
00022 #include <sys/time.h>
00023 #include <hogman_minimal/stuff/macros.h>
00024 #include <hogman_minimal/stuff/os_specific.h>
00025 #include <assert.h>
00026
00027
00028 namespace AISNavigation{
00029 using namespace std;
00030
00031 template <typename PG>
00032 HCholOptimizer<PG>::HCholOptimizer(double maxDistance){
00033 _lowerOptimizer=0;
00034 _upperOptimizer=0;
00035 _cachedChi=0.;
00036 _lastOptChi=0;
00037 _online=false;
00038 _maxDistance=_maxDistance;
00039 _gnuplot=false;
00040
00041 _translationalPropagationError=0.05;
00042 _rotationalPropagationError=0.05;
00043
00044 _globalIncrementalIterations=5;
00045 _downIncrementalIterations=2;
00046 _edgeAnnotationIncrementalIterations=5;
00047 _propagateDown=false;
00048 }
00049
00050 template <typename PG>
00051 HCholOptimizer<PG>::HCholOptimizer(HCholOptimizer<PG>* ll, double maxDistance){
00052 _upperOptimizer=0;
00053 _lowerOptimizer=ll;
00054 _cachedChi=1.;
00055 _lastOptChi=1.;
00056 HCholOptimizer<PG> *lho=dynamic_cast<HCholOptimizer<PG> *>(ll);
00057 if (lho)
00058 lho->_upperOptimizer=this;
00059 _maxDistance=maxDistance;
00060 _gnuplot=false;
00061 _online=false;
00062
00063 _translationalPropagationError=0.1;
00064 _rotationalPropagationError=0.05;
00065 _globalIncrementalIterations=3;
00066 _downIncrementalIterations=3;
00067 _edgeAnnotationIncrementalIterations=5;
00068 _propagateDown=false;
00069 }
00070
00071 template <typename PG>
00072 HCholOptimizer<PG>::HCholOptimizer(int nLevels, double maxDistance){
00073 _lowerOptimizer=0;
00074 _upperOptimizer=0;
00075 _cachedChi=0.;
00076 _lastOptChi=0;
00077 _online=false;
00078 _maxDistance=_maxDistance;
00079 _gnuplot=false;
00080 HCholOptimizer<PG>* lopt=this;
00081 for (int i=1; i<nLevels; i++){
00082 lopt=new HCholOptimizer<PG>(lopt, maxDistance);
00083 }
00084
00085 _translationalPropagationError=0.1;
00086 _rotationalPropagationError=0.05;
00087 _globalIncrementalIterations=3;
00088 _downIncrementalIterations=3;
00089 _edgeAnnotationIncrementalIterations=5;
00090 _propagateDown=false;
00091 }
00092
00093 template <typename PG>
00094 HCholOptimizer<PG>::~HCholOptimizer(){
00095 if (_upperOptimizer){
00096 delete _upperOptimizer;
00097 _upperOptimizer=0;
00098 }
00099 }
00100
00101 template <typename PG>
00102 void HCholOptimizer<PG>::clear(){
00103 if (_upperOptimizer)
00104 _upperOptimizer->clear();
00105 PG::clear();
00106 _rootIDs.clear();
00107 }
00108
00109
00110 template <typename PG>
00111 int HCholOptimizer<PG>::nLevels() const {
00112 assert(! _lowerOptimizer);
00113 const HCholOptimizer<PG>* opt=this;
00114 int l=0;
00115 while (opt){
00116 opt=opt->_upperOptimizer;
00117 l++;
00118 }
00119 return l;
00120 }
00121
00122 template <typename PG>
00123 HCholOptimizer<PG>* HCholOptimizer<PG>::level(int i) {
00124 HCholOptimizer<PG>* opt=this;
00125 while (i!=0 && opt){
00126 if (i<0){
00127 opt=opt->_lowerOptimizer;
00128 i++;
00129 } else {
00130 opt=opt->_upperOptimizer;
00131 i--;
00132 }
00133 }
00134 return opt;
00135 }
00136
// Re-estimates the mean and information matrix of the hierarchical edge <e>
// on this level by optimizing, on the level below, the subgraph formed by
// the children of the edge's two endpoints.
//   e                    edge of THIS (upper) level to annotate
//   iterations           iterations forwarded to optimizeSubset
//   lambda               damping factor forwarded to optimizeSubset
//   initWithObservations if true, the subset is (re)initialized from the
//                        observations during optimizeSubset
// No-op on the bottom level (there is no finer graph to probe).
template <typename PG>
void HCholOptimizer<PG>::annotateHiearchicalEdge(typename PG::Edge* e, int iterations, double lambda, bool initWithObservations){
  if (!_lowerOptimizer)
    return;
  HVertex* from = dynamic_cast< HVertex* >( e->from() );
  HVertex* to = dynamic_cast< HVertex* >( e->to() );
  assert (from && to);
  // Union of both endpoints' children: the lower-level vertices involved in
  // estimating this edge.
  Graph::VertexSet jointSet;
  std::set_union(from->children().begin(),
                 from->children().end(),
                 to->children().begin(),
                 to->children().end(),
                 std::insert_iterator<Graph::VertexSet>(jointSet, jointSet.end()));
  typename PG::InformationType covariance = PG::InformationType::eye(1.0);
  int otherId=to->id();
  // The subset optimization below is only a probe: back up the lower-level
  // poses now and restore them afterwards so the actual solution is untouched.
  _lowerOptimizer->backupSubset(jointSet);
  // Optimize the subset with from's lower root as the fixed gauge; the
  // covariance of vertex <otherId> (to's root) w.r.t. that root is written
  // into <covariance>.
  _lowerOptimizer->optimizeSubset(from->lowerRoot(), jointSet, iterations, lambda, initWithObservations, otherId, &covariance);
  // Relative transformation between the two lower roots after the probe
  // optimization becomes the new edge mean.
  typename PG::TransformationType mean=from->lowerRoot()->transformation.inverse()*to->lowerRoot()->transformation;
  _lowerOptimizer->restoreSubset(jointSet);
  if (this->verbose()){
    cerr << "u[" << jointSet.size() << "] ";
  }

  // From here on <covariance> is reused to hold the INFORMATION matrix.
  // A negative determinant signals a non-positive-definite covariance
  // estimate; in that case a fixed fallback information matrix (1e9 * I)
  // is used instead of inverting it.
  if (covariance.det()<0){
    cerr << "![" << covariance.det() << "] " << endl;
    covariance = typename PG::InformationType().eye(1.)*1e9;
  } else
    covariance=covariance.inverse();

  refineEdge(e, mean, covariance);
}
00175
// Like annotateHiearchicalEdge, but descends all the way to the BOTTOM level:
// starting from the endpoints of the top-level edge <e>, it expands the set
// of children level by level until it reaches the finest graph, then runs a
// probe optimization there to obtain <mean> and <info> for the edge.
//   mean, info           output: relative transformation and information
//                        matrix estimated on the bottom level
//   e                    edge of the TOP level to annotate
//   iterations, lambda, initWithObservations  forwarded to optimizeSubset
// Must be called on the top level of the hierarchy (asserted below) and is a
// no-op if there is no level below.
template <typename PG>
void HCholOptimizer<PG>::annotateHiearchicalEdgeOnDenseGraph(typename PG::TransformationType& mean, typename PG::InformationType& info,
                                                             typename PG::Edge* e, int iterations, double lambda, bool initWithObservations){
  if (!_lowerOptimizer)
    return;
  assert (! _upperOptimizer);

  HVertex* from = dynamic_cast< HVertex* >( e->from() );
  HVertex* to = dynamic_cast< HVertex* >( e->to() );

  assert (from && to);
  // Cursors that descend the hierarchy in lockstep with optAux.
  HVertex* fromAux = dynamic_cast< HVertex* >( e->from() );
  HVertex* toAux = dynamic_cast< HVertex* >( e->to() );
  HCholOptimizer<PG>* optAux=this;

  // Expand jointSet level by level: replace each vertex by its children
  // until optAux points at the bottom level.
  Graph::VertexSet jointSet;
  jointSet.insert(fromAux);
  jointSet.insert(toAux);
  int level=0;
  while (optAux->_lowerOptimizer) {

    Graph::VertexSet jointAuxSet;
    for (Graph::VertexSet::iterator it=jointSet.begin(); it!=jointSet.end(); it++){
      HVertex* v = dynamic_cast< HVertex* >(*it);
      // NOTE(review): the inner iterator shadows the outer `it`; harmless
      // here because the outer `it` is not used inside, but worth renaming.
      for (typename HVertexSet::iterator it=v->children().begin(); it!=v->children().end(); it++){

        jointAuxSet.insert(*it);
      }
    }

    fromAux=dynamic_cast<HVertex*>(fromAux->lowerRoot());
    toAux=dynamic_cast<HVertex*>(toAux->lowerRoot());
    optAux=optAux->_lowerOptimizer;
    jointSet=jointAuxSet;
    level++;
  }

  // NOTE(review): empty loop — presumably leftover debug output; it has no
  // effect and could be removed.
  for (Graph::VertexSet::iterator it=jointSet.begin(); it!=jointSet.end(); it++){

  }

  assert (toAux->_optimizer==optAux);

  // NOTE(review): unlike annotateHiearchicalEdge, <covariance> is not
  // pre-initialized here — assumes optimizeSubset always writes it; confirm.
  typename PG::InformationType covariance;
  int otherId=toAux->id();
  // Probe optimization on the bottom level: back up poses, optimize the
  // expanded subset with fromAux as the fixed gauge, read off the mean,
  // then restore the original poses.
  optAux->backupSubset(jointSet);
  optAux->optimizeSubset(fromAux, jointSet, iterations, lambda, initWithObservations, otherId, &covariance);
  mean=fromAux->transformation.inverse()*toAux->transformation;
  optAux->restoreSubset(jointSet);
  // Negative determinant => covariance not positive definite; fall back to a
  // fixed information matrix instead of inverting.
  if (covariance.det()<0){
    cerr << "![" << covariance.det() << "] " << endl;
    info=typename PG::InformationType::eye(1e9);
  } else
    info=covariance.inverse();
}
00231
00232 template <typename PG>
00233 void HCholOptimizer<PG>::computeTopLevelDenseGraph(CholOptimizer<PG>* chol, int iterations, int lambda, int initWithObservations){
00234 if (_upperOptimizer){
00235 _upperOptimizer->computeTopLevelDenseGraph(chol, iterations, lambda, initWithObservations);
00236 return;
00237 }
00238 chol->clear();
00239 for (typename PG::VertexIDMap::iterator it=this->vertices().begin(); it!=this->vertices().end(); it++){
00240 HVertex* v = dynamic_cast< HVertex* >(it->second);
00241 typename PG::Vertex* vc= chol->addVertex(v->id());
00242 vc->transformation=v->transformation;
00243 }
00244 for (typename PG::EdgeSet::iterator it=this->edges().begin(); it!=this->edges().end(); it++){
00245 typename PG::Edge* e=dynamic_cast<typename PG::Edge*>(*it);
00246 typename PG::TransformationType mean;
00247 typename PG::InformationType info;
00248 annotateHiearchicalEdgeOnDenseGraph(mean, info, e, iterations, lambda, initWithObservations);
00249 typename PG::Vertex* from=dynamic_cast<typename PG::Vertex*>(chol->vertex(e->from()->id()));
00250 typename PG::Vertex* to=dynamic_cast<typename PG::Vertex*>(chol->vertex(e->to()->id()));
00251 chol->addEdge(from, to, mean, info);
00252 }
00253
00254
00255
00256
00257
00258 }
00259
00260 template <typename PG>
00261 int HCholOptimizer<PG>::optimize(int iterations, bool online){
00262 if (! _upperOptimizer){
00263 CholOptimizer<PG>::optimize(iterations, online);
00264 return iterations;
00265 }
00266 assert(!_lowerOptimizer);
00267 if (! online){
00268 _online=false;
00269 updateStructure(false);
00270 HCholOptimizer<PG>* uopt=this;
00271 while(uopt->_upperOptimizer){
00272 uopt=uopt->_upperOptimizer;
00273 }
00274 for (int i=0; i<iterations; i++){
00275 uopt->vCycle(3,0.,1.,false);
00276 }
00277 } else {
00278 optimizePendingIncremental();
00279 }
00280 return 1;
00281 }
00282
00283 }