00001 /* 00002 * HypothesesTree.cpp 00003 * 00004 * Created on: March, 2011 00005 * Author: Jos Elfring, Sjoerd van den Dries 00006 * Affiliation: Eindhoven University of Technology 00007 */ 00008 00009 #include "wire/logic/HypothesesTree.h" 00010 00011 #include "wire/logic/Assignment.h" 00012 #include "wire/logic/AssignmentSet.h" 00013 #include "wire/logic/AssignmentMatrix.h" 00014 #include "wire/logic/Hypothesis.h" 00015 00016 #include "wire/storage/KnowledgeDatabase.h" 00017 #include "wire/storage/ObjectStorage.h" 00018 #include "wire/storage/SemanticObject.h" 00019 00020 #include "wire/core/Evidence.h" 00021 #include "wire/core/EvidenceSet.h" 00022 #include "wire/core/Property.h" 00023 #include "wire/core/ClassModel.h" 00024 00025 #include <queue> 00026 #include <cassert> 00027 #include <float.h> 00028 00029 using namespace std; 00030 00031 #ifdef MHF_MEASURE_TIME 00032 #include <time.h> 00033 #endif 00034 00035 //#define DEBUG_INFO(_msg, ...) printf(_msg, ##__VA_ARGS__) 00036 #define DEBUG_INFO(_msg, ...) 

namespace mhf {

/* ****************************************************************************** */
/* *                        CONSTRUCTOR / DESTRUCTOR                            * */
/* ****************************************************************************** */

// Builds the hypothesis tree with a single empty root hypothesis (probability 1),
// which is also the initial MAP hypothesis and the only leaf.
//   num_max_hyps       - upper bound on the number of leaf hypotheses kept per update
//   max_min_prob_ratio - a new leaf is only generated while its probability exceeds
//                        (best leaf probability) * max_min_prob_ratio
HypothesisTree::HypothesisTree(int num_max_hyps, double max_min_prob_ratio) : n_updates_(0), t_last_update_(-1),
    tree_height_(0), num_max_hyps_(num_max_hyps), max_min_prob_ratio_(max_min_prob_ratio) {

    // create empty hypothesis (contains no objects) with timestep 0
    Hypothesis* empty_hyp = new Hypothesis(t_last_update_, 1.0);

    // add empty hypothesis to leaf list and set as root
    leafs_.push_back(empty_hyp);
    root_ = empty_hyp;
    MAP_hypothesis_ = empty_hyp;
}

// The tree owns every hypothesis: deleting the root's children recursively
// releases all descendants (including the current leafs), then the root itself.
HypothesisTree::~HypothesisTree() {
    root_->deleteChildren();
    delete root_;
}

/* ****************************************************************************** */
/* *                          PUBLIC MHT OPERATIONS                             * */
/* ****************************************************************************** */

// One full MHT update step for a batch of evidence:
//   1. associate each evidence item with stored objects (ObjectStorage::match)
//   2. expand every leaf hypothesis with all feasible assignment sets
//   3. prune improbable branches
//   4. materialize the surviving leafs' assignments into objects
//   5. compact the tree (drop inactive hypotheses' contents, collapse single paths)
// An empty evidence set is a no-op.
void HypothesisTree::addEvidence(const EvidenceSet& ev_set) {
    DEBUG_INFO("HypothesesTree::processMeasurements\n");

    if (ev_set.size() == 0) {
        return;
    }

#ifdef MHF_MEASURE_TIME
    timespec t_start_total, t_end_total;
    clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &t_start_total);
#endif

    // let each evidence item be matched against the currently stored objects
    for(EvidenceSet::const_iterator it_ev = ev_set.begin(); it_ev != ev_set.end(); ++it_ev) {
        ObjectStorage::getInstance().match(**it_ev);
    }

    t_last_update_ = ev_set.getTimestamp();

    expandTree(ev_set);

    pruneTree(ev_set.getTimestamp());

    applyAssignments();

    // clear old hypotheses leafs
    // The hypotheses will still be there to form a tree, but do not contain any objects anymore
    root_->clearInactive();

    // collapse chains of single-child hypotheses; the root may change here
    root_ = root_->deleteSinglePaths();

    tree_height_ = root_->calculateHeigth();

    DEBUG_INFO("*** Free memory: assignment matrices ***\n");

    ++n_updates_;

#ifdef MHF_MEASURE_TIME
    clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &t_end_total);
    printf("Total update took %f seconds.\n", (t_end_total.tv_sec - t_start_total.tv_sec) + double(t_end_total.tv_nsec - t_start_total.tv_nsec) / 1e9);
#endif

    DEBUG_INFO("HypothesesTree::processMeasurements - end\n");
}

/* ****************************************************************************** */
/* *                        PROTECTED MHT OPERATIONS                            * */
/* ****************************************************************************** */

// Comparator for the priority queue in expandTree: orders assignment sets so that
// the set with the HIGHEST probability is at the top (max-heap behavior).
struct compareAssignmentSets {
    bool operator()(const AssignmentSet* a1, const AssignmentSet* a2) const {
        return a1->getProbability() < a2->getProbability();
    }
};

// Commits the assignments of every surviving leaf hypothesis, i.e. actually
// creates/updates the objects each leaf's assignment set describes.
void HypothesisTree::applyAssignments() {
    DEBUG_INFO("applyAssignments - begin\n");

    // iterate over all leaf hypotheses
    for (list<Hypothesis*>::iterator it = leafs_.begin(); it != leafs_.end(); ++it) {
        DEBUG_INFO(" materializing hyp %p, with parent %p\n", (*it), (*it)->getParent());
        (*it)->applyAssignments();
    }

    DEBUG_INFO("applyAssignments - end\n");
}

// Expands every current leaf hypothesis with the evidence in ev_set:
// builds NEW/CLUTTER assignments per evidence item, enumerates assignment sets
// best-first via a priority queue, and turns each valid set into a child
// hypothesis. Generation stops when num_max_hyps_ leafs exist or the next set's
// probability drops below (best leaf) * max_min_prob_ratio_. Afterwards the new
// leaf probabilities are normalized.
void HypothesisTree::expandTree(const EvidenceSet& ev_set) {

    DEBUG_INFO("expandTree - start\n");

    //** Create new objects based on measurements

    // Ownership note: these raw Assignment pointers are handed to every leaf via
    // addPotentialAssignment; they are not deleted in this function.
    list<Assignment*> new_assignments;
    list<Assignment*> clutter_assignments;
    for(EvidenceSet::const_iterator it_ev = ev_set.begin(); it_ev != ev_set.end(); ++it_ev) {
        // new: evidence explained as a newly appearing object
        new_assignments.push_back(new Assignment(Assignment::NEW, *it_ev, 0, KnowledgeDatabase::getInstance().getProbabilityNew(**it_ev)));

        // clutter: evidence explained as a false detection
        clutter_assignments.push_back(new Assignment(Assignment::CLUTTER, *it_ev, 0, KnowledgeDatabase::getInstance().getProbabilityClutter(**it_ev)));
    }

#ifdef MHF_MEASURE_TIME
    timespec t_start, t_end;
    clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &t_start);
#endif

    //** expand all current leaf hypotheses

    // max-heap: most probable assignment set on top (see compareAssignmentSets)
    priority_queue<AssignmentSet*, vector<AssignmentSet*>, compareAssignmentSets > assignment_sets;

    DEBUG_INFO(" - Create assignment matrices and assignment sets\n");

    for (list<Hypothesis*>::iterator it_hyp = leafs_.begin(); it_hyp != leafs_.end(); ++it_hyp) {
        Hypothesis* hyp = *it_hyp;

        for(list<Assignment*>::iterator it_ass = new_assignments.begin(); it_ass != new_assignments.end(); ++it_ass) {
            hyp->addPotentialAssignment(*it_ass);
        }

        for(list<Assignment*>::iterator it_ass = clutter_assignments.begin(); it_ass != clutter_assignments.end(); ++it_ass) {
            hyp->addPotentialAssignment(*it_ass);
        }

        // sort assignment matrix
        hyp->getAssignmentMatrix()->sortAssignments();

        // create empty assignment set
        AssignmentSet* ass_set = new AssignmentSet(hyp, hyp->getAssignmentMatrix());
        assignment_sets.push(ass_set);
    }

    DEBUG_INFO(" - Generate hypotheses\n");

    // probability floor; raised once the first (= best) child hypothesis is found
    double min_prob = 0;

    // set all current leafs to inactive
    for (list<Hypothesis*>::iterator it_hyp = leafs_.begin(); it_hyp != leafs_.end(); ++it_hyp) {
        (*it_hyp)->setInactive();
    }

    leafs_.clear();

    int n_iterations = 0;

    // Best-first enumeration: because the queue is ordered by probability, the
    // first valid set produces the most probable child, which fixes min_prob and
    // the MAP hypothesis for this update.
    // NOTE(review): leafs_.size() (unsigned) is compared against the int member
    // num_max_hyps_ — fine for positive values, but a signed/unsigned mismatch.
    while(!assignment_sets.empty() && leafs_.size() < num_max_hyps_ && assignment_sets.top()->getProbability() > min_prob) {
        // assignment_sets.top()->print();

        ++n_iterations;
        DEBUG_INFO(" #assignment sets = %i\n", (int)assignment_sets.size());
        AssignmentSet* ass_set = assignment_sets.top();

        DEBUG_INFO(" inspecting assignment set %p with probability %.16f\n", ass_set, ass_set->getProbability());

        assignment_sets.pop();
        Hypothesis* hyp = ass_set->getHypothesis();

        if (ass_set->isValid()) {
            /* ************ assignment set is complete! Create hypothesis ************ */

            Hypothesis* hyp_child = new Hypothesis(ev_set.getTimestamp(), ass_set->getProbability());
            hyp_child->setAssignments(ass_set);
            hyp->addChildHypothesis(hyp_child);

            if (leafs_.empty()) {
                // first hypothesis found (and therefore the best one)
                min_prob = hyp_child->getProbability() * max_min_prob_ratio_;

                MAP_hypothesis_ = hyp_child;
            }

            /*
            if (leafs_.size() <= 3) {
                ass_set->print();
            }
            */

            //printf("%i: new leaf with prob %f\n", leafs_.size(), hyp_child->probability_);

            DEBUG_INFO(" NEW LEAF: %p\n", hyp_child);
            leafs_.push_back(hyp_child);
            // NOTE(review): n_old_leafs is not declared anywhere in this scope; this
            // only compiles because DEBUG_INFO currently expands to nothing. Enabling
            // the printf variant of DEBUG_INFO will break the build here — fix first.
            DEBUG_INFO(" #leafs = %i, #old leafs = %i\n", (int)leafs_.size(), n_old_leafs);

            /* ************************************************************************* */
        }

        // even an invalid (incomplete) set is expanded into its successor sets
        list<AssignmentSet*> child_assignment_sets;
        ass_set->expand(child_assignment_sets);
        for(list<AssignmentSet*>::iterator it_child = child_assignment_sets.begin(); it_child != child_assignment_sets.end(); ++it_child) {
            assignment_sets.push(*it_child);
        }

    }

    DEBUG_INFO(" - Free memory (remaining assignment sets)\n");

    assert(leafs_.size() > 0);

    // delete remaining assignment sets (the ones that where not used to generate hypotheses)
    while(!assignment_sets.empty()) {
        delete assignment_sets.top();
        assignment_sets.pop();
    }

    DEBUG_INFO(" ... done\n");

    ++tree_height_;

    normalizeProbabilities();

#ifdef MHF_MEASURE_TIME
    clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &t_end);
    printf("Expansion of hypotheses took %f seconds.\n", (t_end.tv_sec - t_start.tv_sec) + double(t_end.tv_nsec - t_start.tv_nsec) / 1e9);
#endif

    DEBUG_INFO("expandTree - done\n");
}

// Rescales all leaf probabilities so they sum to 1, then propagates branch
// probabilities up from the leafs through the whole tree.
// NOTE(review): if all leaf probabilities are 0, p_total is 0 and the division
// below yields NaN — presumably prevented upstream; verify.
void HypothesisTree::normalizeProbabilities() {
    // Calculate sum of all probabilities
    double p_total = 0;
    for(list<Hypothesis* >::iterator it_hyp = leafs_.begin(); it_hyp != leafs_.end(); ++it_hyp) {
        p_total += (*it_hyp)->getProbability();
    }

    // Normalize all probabilities
    for(list<Hypothesis* >::iterator it_hyp = leafs_.begin(); it_hyp != leafs_.end(); ++it_hyp) {
        (*it_hyp)->setProbability((*it_hyp)->getProbability() / p_total);
    }

    root_->calculateBranchProbabilities();
}

// Prunes improbable branches: per internal node, keeps the most probable child
// and removes siblings whose probability is zero, or falls below a height-dependent
// fraction of the best child's probability. Deep nodes (height > 6) are instead
// pruned by a similarity test. Rebuilds the leaf list and renormalizes afterwards.
// Note: the timestamp parameter is currently unused.
void HypothesisTree::pruneTree(const Time& timestamp) {
    //return;


    DEBUG_INFO("pruneTree - begin\n");

    DEBUG_INFO(" old #leafs = %i\n", (int)leafs_.size());

    // determine best branch leaf per hypothesis

    DEBUG_INFO(" - determine best leaf per branch\n");
    root_->determineBestLeaf();
    DEBUG_INFO(" ... done\n");

    // pruning threshold ratio per tree height: the deeper (older) the node, the
    // more aggressive the pruning; indexed by hyp->getHeight() for heights 0..6.
    // NOTE(review): 1e-5 appears twice (indices 3 and 4) — looks like one of them
    // was meant to be a different value; confirm against the original tuning.
    double prob_ratios[] = {1e-8, 1e-7, 1e-6, 1e-5, 1e-5, 1e-4, 1};

    // iterative depth-first traversal (stack of hypotheses still to visit)
    list<Hypothesis*> hyp_stack;
    hyp_stack.push_back(root_);

    while(!hyp_stack.empty()) {
        Hypothesis* hyp = hyp_stack.front();
        hyp_stack.pop_front();

        list<Hypothesis*>& children = hyp->getChildHypotheses();
        if (!children.empty()) {

            // determine best branch of root hypothesis (highest sum of leaf probabilities)
            Hypothesis* best_child = *children.begin();
            for (list<Hypothesis*>::const_iterator it_child = children.begin(); it_child != children.end(); ++it_child) {
                if ((*it_child)->getProbability() > best_child->getProbability()) {
                    best_child = *it_child;
                }
                //printf(" (%p, %f)", *it_child, (*it_child)->getProbability());
            }

            double prob_ratio = 0;

            if (hyp->getHeight() > 6) {
                prob_ratio = 0;
            } else {
                prob_ratio = prob_ratios[hyp->getHeight()];
            }

            double min_prob = best_child->getProbability() * prob_ratio;

            // erase-while-iterating: it_child is advanced via erase() on prune,
            // via ++ otherwise — the best child is always kept.
            for (list<Hypothesis*>::iterator it_child = children.begin(); it_child != children.end();) {
                bool prune_child = false;

                if (*it_child != best_child) {
                    if ((*it_child)->getProbability() == 0) {
                        prune_child = true;
                    } else if (hyp->getHeight() > 6) {
                        DEBUG_INFO(" - Determine hyp similarity between %p and %p\n", best_child->getBestLeaf(), (*it_child)->getBestLeaf());
                        // NOTE(review): the similarity computation is stubbed out —
                        // similarity is hard-coded to 1, so this branch ALWAYS prunes
                        // non-best children at height > 6. Confirm this is intended.
                        double similarity = 1;
                        DEBUG_INFO(" ... done\n");

                        //printf(" similarity = %f\n", similarity);

                        prune_child = (similarity > 0.5);
                    } else if ((*it_child)->getProbability() < min_prob) {
                        prune_child = true;
                    }
                }

                if (prune_child) {
                    // prune hypothesis and all child hypothesis
                    (*it_child)->deleteChildren();
                    delete (*it_child);
                    it_child = children.erase(it_child);
                } else {
                    hyp_stack.push_front(*it_child);
                    ++it_child;
                }
            }

        }
    }

    // clear leaf list and add new leafs of tree
    leafs_.clear();
    root_->findActiveLeafs(leafs_);

    normalizeProbabilities();

    DEBUG_INFO(" #leafs after pruning = %i\n", (int)leafs_.size());

    DEBUG_INFO("pruneTree - end\n");
}

/* ****************************************************************************** */
/* *                                GETTERS                                     * */
/* ****************************************************************************** */

// Returns all current leaf hypotheses (the live hypothesis set).
const list<Hypothesis*>& HypothesisTree::getHypotheses() const {
    return leafs_;
}

// Returns the current height of the hypothesis tree.
int HypothesisTree::getHeight() const {
    return tree_height_;
}


// Returns the maximum a-posteriori hypothesis (most probable leaf of the last update).
const Hypothesis& HypothesisTree::getMAPHypothesis() const {
    return *MAP_hypothesis_;
}

// Returns the objects of the MAP hypothesis — the system's single best world estimate.
const list<SemanticObject*>& HypothesisTree::getMAPObjects() const {
    DEBUG_INFO("getMAPObjects - begin\n");
    return getMAPHypothesis().getObjects();
}


/* ****************************************************************************** */
/* *                             PRINT METHODS                                  * */
/* ****************************************************************************** */

// Prints a short summary (leaf count, MAP probability, tree height) to stdout.
void HypothesisTree::showStatistics() {
    cout << " Number of hypotheses = " << leafs_.size() << endl;
    cout << " Max probability      = " << getMAPHypothesis().getProbability() << endl;
    cout << " Tree height          = " << tree_height_ << endl;
}

}