#include "rtabmap/core/VWDictionary.h"
#include "VisualWord.h"

#include "rtabmap/core/Signature.h"
#include "rtabmap/core/DBDriver.h"
#include "rtabmap/core/Parameters.h"

#include "rtabmap/utilite/UtiLite.h"

#include <opencv2/gpu/gpu.hpp>

#include <fstream>
#include <string>

namespace rtabmap
{

const int VWDictionary::ID_START = 1;
const int VWDictionary::ID_INVALID = 0;

VWDictionary::VWDictionary(const ParametersMap & parameters) :
    _totalActiveReferences(0),
    _incrementalDictionary(Parameters::defaultKpIncrementalDictionary()),
    _nndrRatio(Parameters::defaultKpNndrRatio()),
    _dictionaryPath(Parameters::defaultKpDictionaryPath()),
    _newWordsComparedTogether(Parameters::defaultKpNewWordsComparedTogether()),
    _lastWordId(0),
    _flannIndex(new cv::flann::Index()),
    _strategy(kNNBruteForce)
{
    this->setNNStrategy((NNStrategy)Parameters::defaultKpNNStrategy());
    this->parseParameters(parameters);
}

VWDictionary::~VWDictionary()
{
    this->clear();
    delete _flannIndex;
}

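// Apply run-time parameters. Only the keys present in the map are updated;
// everything else keeps its current value.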
void VWDictionary::parseParameters(const ParametersMap & parameters)
{
    ParametersMap::const_iterator iter;
    Parameters::parse(parameters, Parameters::kKpNndrRatio(), _nndrRatio);
    Parameters::parse(parameters, Parameters::kKpNewWordsComparedTogether(), _newWordsComparedTogether);

    UASSERT_MSG(_nndrRatio > 0.0f, uFormat("String=%s value=%f", uContains(parameters, Parameters::kKpNndrRatio())?parameters.at(Parameters::kKpNndrRatio()).c_str():"", _nndrRatio).c_str());

    std::string dictionaryPath = _dictionaryPath;
    bool incrementalDictionary = _incrementalDictionary;
    if((iter=parameters.find(Parameters::kKpDictionaryPath())) != parameters.end())
    {
        dictionaryPath = (*iter).second.c_str();
    }
    if((iter=parameters.find(Parameters::kKpIncrementalDictionary())) != parameters.end())
    {
        incrementalDictionary = uStr2Bool((*iter).second.c_str());
    }

    if((iter=parameters.find(Parameters::kKpNNStrategy())) != parameters.end())
    {
        NNStrategy nnStrategy = (NNStrategy)std::atoi((*iter).second.c_str());
        this->setNNStrategy(nnStrategy);
    }

    if(incrementalDictionary)
    {
        this->setIncrementalDictionary();
    }
    else
    {
        this->setFixedDictionary(dictionaryPath);
    }
}

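// Switch to incremental mode. Words already loaded from a fixed dictionary
// are kept and simply become part of the incremental one.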
void VWDictionary::setIncrementalDictionary()
{
    if(!_incrementalDictionary)
    {
        _incrementalDictionary = true;
        if(_visualWords.size())
        {
            UWARN("Incremental dictionary set: already loaded visual words (%d) from the fixed dictionary will be included in the incremental one.", (int)_visualWords.size());
        }
    }
    _dictionaryPath = "";
}

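// Load a fixed (non-incremental) dictionary from a text file. The first line
// gives the descriptor dimension; each following line is "id value1 ... valueN".
// The call is rejected with an error if incremental words are already present.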
void VWDictionary::setFixedDictionary(const std::string & dictionaryPath)
{
    if(!dictionaryPath.empty())
    {
        if((!_incrementalDictionary && _dictionaryPath.compare(dictionaryPath) != 0) ||
           _visualWords.size() == 0)
        {
            std::ifstream file;
            file.open(dictionaryPath.c_str(), std::ifstream::in);
            if(file.good())
            {
                UDEBUG("Deleting old dictionary and loading the new one from \"%s\"", dictionaryPath.c_str());
                UTimer timer;

                // First line is the header; take the first numeric token as the descriptor dimension.
                std::string str;
                std::list<std::string> strList;
                std::getline(file, str);
                strList = uSplitNumChar(str);
                unsigned int dimension = 0;
                for(std::list<std::string>::iterator iter = strList.begin(); iter != strList.end(); ++iter)
                {
                    if(uIsDigit(iter->at(0)))
                    {
                        dimension = std::atoi(iter->c_str());
                        break;
                    }
                }

                if(dimension == 0 || dimension > 1000)
                {
                    UERROR("Invalid dictionary file, visual word dimension (%d) is not valid, \"%s\"", dimension, dictionaryPath.c_str());
                }
                else
                {
                    // Parse one visual word per line: "id value1 ... valueN".
                    while(file.good())
                    {
                        std::getline(file, str);
                        strList = uSplit(str);
                        if(strList.size() == dimension+1)
                        {
                            // First token is the visual word id.
                            std::list<std::string>::iterator iter = strList.begin();
                            int id = std::atoi(iter->c_str());
                            cv::Mat descriptor(1, dimension, CV_32F);
                            ++iter;
                            unsigned int i=0;

                            // Remaining tokens are the descriptor values.
                            for(;i<dimension && iter != strList.end(); ++i, ++iter)
                            {
                                descriptor.at<float>(i) = uStr2Float(*iter);
                            }
                            if(i != dimension)
                            {
                                UERROR("Invalid dictionary line \"%s\": only %d values parsed (expected %d).", str.c_str(), i, dimension);
                            }

                            VisualWord * vw = new VisualWord(id, descriptor, 0);
                            _visualWords.insert(_visualWords.end(), std::pair<int, VisualWord*>(id, vw));
                            _notIndexedWords.insert(_notIndexedWords.end(), id);
                        }
                        else
                        {
                            UWARN("Cannot parse line \"%s\"", str.c_str());
                        }
                    }
                    this->update();
                    _incrementalDictionary = false;
                }

                UDEBUG("Time changing dictionary = %fs", timer.ticks());
            }
            else
            {
                UERROR("Cannot open dictionary file \"%s\"", dictionaryPath.c_str());
            }
            file.close();
        }
        else if(!_incrementalDictionary)
        {
            UDEBUG("Dictionary \"%s\" already loaded...", dictionaryPath.c_str());
        }
        else
        {
            UERROR("Cannot change to a fixed dictionary if there are already words (%d) in the incremental one.", (int)_visualWords.size());
        }
    }
    else if(_visualWords.size() == 0)
    {
        _incrementalDictionary = false;
    }
    else if(_incrementalDictionary)
    {
        UWARN("Cannot change to fixed dictionary, %d words already loaded as incremental", (int)_visualWords.size());
    }
    _dictionaryPath = dictionaryPath;
}

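// Select the nearest neighbor search strategy. Falls back to CPU brute force
// when a GPU strategy is requested without a CUDA device, and refuses
// kNNFlannKdTree when the OpenCV nonfree module is not available.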
void VWDictionary::setNNStrategy(NNStrategy strategy)
{
    if(strategy!=kNNUndef)
    {
        if(strategy == kNNBruteForceGPU && !cv::gpu::getCudaEnabledDeviceCount())
        {
            UERROR("Nearest neighbor strategy \"kNNBruteForceGPU\" chosen but no CUDA devices found! Doing \"kNNBruteForce\" instead.");
            strategy = kNNBruteForce;
        }

        if(RTABMAP_NONFREE == 0 && strategy == kNNFlannKdTree)
        {
            UWARN("KdTree (%d) nearest neighbor is not available because RTAB-Map isn't built "
                  "with OpenCV nonfree module (KdTree only used for SURF/SIFT features). "
                  "NN strategy is not modified (current=%d).", (int)kNNFlannKdTree, (int)_strategy);
        }
        else
        {
            _strategy = strategy;
        }
    }
}

int VWDictionary::getLastIndexedWordId() const
{
    if(_mapIndexId.size())
    {
        return _mapIndexId.rbegin()->second;
    }
    else
    {
        return 0;
    }
}

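// Rebuild the search index. All indexed descriptors are copied row by row into
// _dataTree and the FLANN index is recreated from scratch; _mapIndexId maps a
// row of _dataTree back to the corresponding word id.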
void VWDictionary::update()
{
    ULOGGER_DEBUG("");
    if(!_incrementalDictionary && !_notIndexedWords.size())
    {
        // Fixed dictionary already indexed, nothing to do.
        return;
    }

    if(_notIndexedWords.size() || _visualWords.size() == 0 || _removedIndexedWords.size())
    {
        _mapIndexId.clear();
        int oldSize = _dataTree.rows;
        _dataTree = cv::Mat();
        _flannIndex->release();

        if(_visualWords.size())
        {
            UTimer timer;
            timer.start();

            int type = _visualWords.begin()->second->getDescriptor().type();
            int dim = _visualWords.begin()->second->getDescriptor().cols;

            UASSERT(type == CV_32F || type == CV_8U);
            UASSERT(dim > 0);

            // Copy all descriptors into a single matrix.
            _dataTree = cv::Mat(_visualWords.size(), dim, type);
            std::map<int, VisualWord*>::const_iterator iter = _visualWords.begin();
            for(unsigned int i=0; i < _visualWords.size(); ++i, ++iter)
            {
                UASSERT(iter->second->getDescriptor().cols == dim);
                UASSERT(iter->second->getDescriptor().type() == type);

                iter->second->getDescriptor().copyTo(_dataTree.row(i));
                _mapIndexId.insert(_mapIndexId.end(), std::pair<int, int>(i, iter->second->id()));
            }

            ULOGGER_DEBUG("_mapIndexId.size() = %d, words.size()=%d, _dim=%d", (int)_mapIndexId.size(), (int)_visualWords.size(), dim);
            ULOGGER_DEBUG("copying data = %f s", timer.ticks());

            switch(_strategy)
            {
            case kNNFlannNaive:
                _flannIndex->build(_dataTree, cv::flann::LinearIndexParams(), type == CV_32F?cvflann::FLANN_DIST_L2:cvflann::FLANN_DIST_HAMMING);
                break;
            case kNNFlannKdTree:
                UASSERT_MSG(type == CV_32F, "To use KdTree dictionary, float descriptors are required!");
                _flannIndex->build(_dataTree, cv::flann::KDTreeIndexParams(), cvflann::FLANN_DIST_L2);
                break;
            case kNNFlannLSH:
                UASSERT_MSG(type == CV_8U, "To use LSH dictionary, binary descriptors are required!");
                _flannIndex->build(_dataTree, cv::flann::LshIndexParams(12, 20, 2), cvflann::FLANN_DIST_HAMMING);
                break;
            default:
                break;
            }

            ULOGGER_DEBUG("Time to create kd tree = %f s", timer.ticks());
        }
        UDEBUG("Dictionary updated! (size=%d->%d added=%d removed=%d)",
                oldSize, _dataTree.rows, (int)_notIndexedWords.size(), (int)_removedIndexedWords.size());
    }
    else
    {
        UDEBUG("Dictionary has not changed, so no need to update it! (size=%d)", _dataTree.rows);
    }
    _notIndexedWords.clear();
    _removedIndexedWords.clear();
}

void VWDictionary::clear()
{
    ULOGGER_DEBUG("");
    if(_visualWords.size() && _incrementalDictionary)
    {
        UWARN("Visual dictionary should already be empty here (%d words still in dictionary).", (int)_visualWords.size());
    }
    if(_notIndexedWords.size())
    {
        UWARN("Not indexed words should be empty here (%d words still not indexed)", (int)_notIndexedWords.size());
    }
    for(std::map<int, VisualWord *>::iterator i=_visualWords.begin(); i!=_visualWords.end(); ++i)
    {
        delete (*i).second;
    }
    _visualWords.clear();
    _notIndexedWords.clear();
    _removedIndexedWords.clear();
    _totalActiveReferences = 0;
    _lastWordId = 0;
    _dataTree = cv::Mat();
    _mapIndexId.clear();
    _unusedWords.clear();
    _flannIndex->release();
}

int VWDictionary::getNextId()
{
    return ++_lastWordId;
}

void VWDictionary::addWordRef(int wordId, int signatureId)
{
    if(signatureId > 0 && wordId > 0)
    {
        VisualWord * vw = 0;
        vw = uValue(_visualWords, wordId, vw);
        if(vw)
        {
            vw->addRef(signatureId);
            _totalActiveReferences += 1;

            _unusedWords.erase(vw->id());
        }
        else
        {
            UERROR("Word %d not found!", wordId);
        }
    }
}

void VWDictionary::removeAllWordRef(int wordId, int signatureId)
{
    VisualWord * vw = 0;
    vw = uValue(_visualWords, wordId, vw);
    if(vw)
    {
        _totalActiveReferences -= vw->removeAllRef(signatureId);
        if(vw->getReferences().size() == 0)
        {
            _unusedWords.insert(std::pair<int, VisualWord*>(vw->id(), vw));
        }
    }
}

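// Quantize the descriptors of a signature against the dictionary. For each
// descriptor, the two nearest words are searched (in the indexed dictionary
// and, optionally, among the words created earlier for the same signature);
// the NNDR test decides whether an existing word is referenced or a new word
// is created. Returns the list of word ids assigned to the descriptors.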
std::list<int> VWDictionary::addNewWords(const cv::Mat & descriptors,
        int signatureId)
{
    UASSERT(signatureId > 0);

    UDEBUG("id=%d descriptors=%d", signatureId, descriptors.rows);
    UTimer timer;
    std::list<int> wordIds;
    if(descriptors.rows == 0 || descriptors.cols == 0)
    {
        UERROR("Descriptors size is null!");
        return wordIds;
    }
    int dim = 0;
    int type = -1;
    if(_visualWords.size())
    {
        dim = _visualWords.begin()->second->getDescriptor().cols;
        type = _visualWords.begin()->second->getDescriptor().type();
        UASSERT(type == CV_32F || type == CV_8U);
    }

    if(dim && dim != descriptors.cols)
    {
        UERROR("Descriptors (size=%d) are not the same size as already added words in dictionary (size=%d)", descriptors.cols, dim);
        return wordIds;
    }
    dim = descriptors.cols;

    if(type>=0 && type != descriptors.type())
    {
        UERROR("Descriptors (type=%d) are not the same type as already added words in dictionary (type=%d)", descriptors.type(), type);
        return wordIds;
    }
    type = descriptors.type();

    if(!_incrementalDictionary && _visualWords.empty())
    {
        UERROR("Dictionary mode is set to fixed but no words are in it!");
        return wordIds;
    }

    int dupWordsCountFromDict = 0;
    int dupWordsCountFromLast = 0;

    unsigned int k = 2; // k nearest neighbors

    cv::Mat newWords;
    std::vector<int> newWordsId;

    cv::Mat results;
    cv::Mat dists;
    std::vector<std::vector<cv::DMatch> > matches;
    bool bruteForce = false;

    UTimer timerLocal;
    timerLocal.start();

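    // Search the indexed part of the dictionary: FLANN strategies use the
    // prebuilt index, brute-force strategies match directly against _dataTree
    // (on CPU or GPU). Integer (Hamming) distances are converted to float.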
    if(!_dataTree.empty() && _dataTree.rows >= (int)k)
    {
        UDEBUG("newPts.total()=%d ", descriptors.rows);

        if(_strategy == kNNFlannNaive || _strategy == kNNFlannKdTree || _strategy == kNNFlannLSH)
        {
            _flannIndex->knnSearch(descriptors, results, dists, k);
        }
        else if(_strategy == kNNBruteForce)
        {
            bruteForce = true;
            cv::BFMatcher matcher(type==CV_8U?cv::NORM_HAMMING:cv::NORM_L2SQR);
            matcher.knnMatch(descriptors, _dataTree, matches, k);
        }
        else if(_strategy == kNNBruteForceGPU)
        {
            bruteForce = true;
            cv::gpu::GpuMat newDescriptorsGpu(descriptors);
            cv::gpu::GpuMat lastDescriptorsGpu(_dataTree);
            if(type==CV_8U)
            {
                cv::gpu::BruteForceMatcher_GPU<cv::Hamming> gpuMatcher;
                gpuMatcher.knnMatch(newDescriptorsGpu, lastDescriptorsGpu, matches, k);
            }
            else
            {
                cv::gpu::BruteForceMatcher_GPU<cv::L2<float> > gpuMatcher;
                gpuMatcher.knnMatch(newDescriptorsGpu, lastDescriptorsGpu, matches, k);
            }
        }
        else
        {
            UFATAL("Unknown nearest neighbor strategy %d!", (int)_strategy);
        }

        if(dists.type() == CV_32S)
        {
            cv::Mat temp;
            dists.convertTo(temp, CV_32F);
            dists = temp;
        }

        UDEBUG("Time to find nn = %f s", timerLocal.ticks());
    }

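    // Process each descriptor: gather the candidate matches (from the indexed
    // dictionary and from the words created so far for this signature) sorted
    // by distance, then decide between referencing an existing word or
    // creating a new one.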
    for(int i = 0; i < descriptors.rows; ++i)
    {
        std::multimap<float, int> fullResults; // distance -> word id, merged from all searches
        if(!bruteForce && dists.cols)
        {
            for(int j=0; j<dists.cols; ++j)
            {
                if(results.at<int>(i,j) >= 0)
                {
                    float d = dists.at<float>(i,j);
                    fullResults.insert(std::pair<float, int>(d, uValue(_mapIndexId, results.at<int>(i,j))));
                }
            }
        }
        else if(bruteForce && matches.size())
        {
            for(unsigned int j=0; j<matches.at(i).size(); ++j)
            {
                if(matches.at(i).at(j).trainIdx >= 0)
                {
                    float d = matches.at(i).at(j).distance;
                    fullResults.insert(std::pair<float, int>(d, uValue(_mapIndexId, matches.at(i).at(j).trainIdx)));
                }
            }
        }

        // Compare also with the words added earlier for the same signature.
        if(_newWordsComparedTogether && newWords.rows)
        {
            cv::flann::Index linearSearch;
            linearSearch.build(newWords, cv::flann::LinearIndexParams(), type == CV_32F?cvflann::FLANN_DIST_L2:cvflann::FLANN_DIST_HAMMING);
            cv::Mat resultsLinear;
            cv::Mat distsLinear;
            linearSearch.knnSearch(descriptors.row(i), resultsLinear, distsLinear, newWords.rows>1?2:1);

            if(distsLinear.type() == CV_32S)
            {
                cv::Mat temp;
                distsLinear.convertTo(temp, CV_32F);
                distsLinear = temp;
            }
            if(resultsLinear.cols)
            {
                for(int j=0; j<resultsLinear.cols; ++j)
                {
                    if(resultsLinear.at<int>(0,j) >= 0)
                    {
                        float d = distsLinear.at<float>(0,j);
                        fullResults.insert(std::pair<float, int>(d, newWordsId[resultsLinear.at<int>(0,j)]));
                    }
                }
            }
        }

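        // Incremental mode: apply the nearest neighbor distance ratio (NNDR)
        // test. If the best distance is not clearly smaller than the second
        // one (best > _nndrRatio * second), the match is rejected and a new
        // word is created; otherwise the matched word gets a new reference.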
        if(_incrementalDictionary)
        {
            bool badDist = false;
            if(fullResults.size() == 0)
            {
                badDist = true;
            }
            if(!badDist)
            {
                if(fullResults.size() >= 2)
                {
                    // Apply NNDR
                    if(fullResults.begin()->first > _nndrRatio * (++fullResults.begin())->first)
                    {
                        badDist = true; // Rejected
                    }
                }
                else
                {
                    badDist = true; // Rejected
                }
            }

            if(badDist)
            {
                VisualWord * vw = new VisualWord(getNextId(), descriptors.row(i), signatureId);
                _visualWords.insert(_visualWords.end(), std::pair<int, VisualWord *>(vw->id(), vw));
                _notIndexedWords.insert(_notIndexedWords.end(), vw->id());
                newWords.push_back(vw->getDescriptor());
                newWordsId.push_back(vw->id());
                wordIds.push_back(vw->id());
                UASSERT(vw->id()>0);
            }
            else
            {
                if(_notIndexedWords.find(fullResults.begin()->second) != _notIndexedWords.end())
                {
                    ++dupWordsCountFromLast;
                }
                else
                {
                    ++dupWordsCountFromDict;
                }

                this->addWordRef(fullResults.begin()->second, signatureId);
                wordIds.push_back(fullResults.begin()->second);
                UASSERT(fullResults.begin()->second>0);
            }
        }
        else if(fullResults.size())
        {
            // Fixed dictionary: just take the nearest word.
            ++dupWordsCountFromDict;
            this->addWordRef(fullResults.begin()->second, signatureId);
            wordIds.push_back(fullResults.begin()->second);
            UASSERT(fullResults.begin()->second>0);
        }
    }
    ULOGGER_DEBUG("naive search and add ref/words time = %f s", timerLocal.ticks());

    ULOGGER_DEBUG("%d new words added...", (int)_notIndexedWords.size());
    ULOGGER_DEBUG("%d duplicated words added (from current image = %d)...",
            dupWordsCountFromDict+dupWordsCountFromLast, dupWordsCountFromLast);
    UDEBUG("total time %fs", timer.ticks());

    _totalActiveReferences += _notIndexedWords.size();
    return wordIds;
}

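// Find the nearest existing word for each given visual word without modifying
// the dictionary. Both the indexed words and the not-yet-indexed ones are
// searched; in incremental mode the same NNDR test as in addNewWords() is
// applied. Returns one word id per input (0 when no acceptable match).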
std::vector<int> VWDictionary::findNN(const std::list<VisualWord *> & vws) const
{
    UTimer timer;
    timer.start();
    std::vector<int> resultIds(vws.size(), 0);
    unsigned int k=2; // k nearest neighbors

    if(_visualWords.size() && vws.size())
    {
        int dim = _visualWords.begin()->second->getDescriptor().cols;
        int type = _visualWords.begin()->second->getDescriptor().type();

        if(dim != (*vws.begin())->getDescriptor().cols)
        {
            UERROR("Descriptors (size=%d) are not the same size as already added words in dictionary (size=%d)", (*vws.begin())->getDescriptor().cols, dim);
            return resultIds;
        }

        if(type != (*vws.begin())->getDescriptor().type())
        {
            UERROR("Descriptors (type=%d) are not the same type as already added words in dictionary (type=%d)", (*vws.begin())->getDescriptor().type(), type);
            return resultIds;
        }

        std::vector<std::vector<cv::DMatch> > matches;
        bool bruteForce = false;
        cv::Mat results;
        cv::Mat dists;

        // Pack the query descriptors into a single matrix.
        int index = 0;
        VisualWord * vw;
        cv::Mat query(vws.size(), dim, type);
        for(std::list<VisualWord *>::const_iterator iter=vws.begin(); iter!=vws.end(); ++iter, ++index)
        {
            vw = *iter;
            UASSERT(vw);
            UASSERT(vw->getDescriptor().cols == dim);
            UASSERT(vw->getDescriptor().type() == type);

            vw->getDescriptor().copyTo(query.row(index));
        }
        ULOGGER_DEBUG("Preparation time = %fs", timer.ticks());

        if(!_dataTree.empty() && _dataTree.rows >= (int)k)
        {
            UDEBUG("newPts.total()=%d ", (int)query.total());

            if(_strategy == kNNFlannNaive || _strategy == kNNFlannKdTree || _strategy == kNNFlannLSH)
            {
                _flannIndex->knnSearch(query, results, dists, k);
            }
            else if(_strategy == kNNBruteForce)
            {
                bruteForce = true;
                cv::BFMatcher matcher(type==CV_8U?cv::NORM_HAMMING:cv::NORM_L2SQR);
                matcher.knnMatch(query, _dataTree, matches, k);
            }
            else if(_strategy == kNNBruteForceGPU)
            {
                bruteForce = true;
                cv::gpu::GpuMat newDescriptorsGpu(query);
                cv::gpu::GpuMat lastDescriptorsGpu(_dataTree);
                if(type==CV_8U)
                {
                    cv::gpu::BruteForceMatcher_GPU<cv::Hamming> gpuMatcher;
                    gpuMatcher.knnMatch(newDescriptorsGpu, lastDescriptorsGpu, matches, k);
                }
                else
                {
                    cv::gpu::BruteForceMatcher_GPU<cv::L2<float> > gpuMatcher;
                    gpuMatcher.knnMatch(newDescriptorsGpu, lastDescriptorsGpu, matches, k);
                }
            }
            else
            {
                UFATAL("Unknown nearest neighbor strategy %d!", (int)_strategy);
            }

            if(dists.type() == CV_32S)
            {
                cv::Mat temp;
                dists.convertTo(temp, CV_32F);
                dists = temp;
            }
        }
        ULOGGER_DEBUG("Search dictionary time = %fs", timer.ticks());

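        // Words added since the last update() are not in the FLANN index yet,
        // so search them separately with a linear (brute force) index.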
        cv::Mat resultsNotIndexed;
        cv::Mat distsNotIndexed;
        std::map<int, int> mapIndexIdNotIndexed;
        if(_notIndexedWords.size())
        {
            cv::Mat dataNotIndexed = cv::Mat::zeros(_notIndexedWords.size(), dim, type);
            unsigned int index = 0;
            VisualWord * vw;
            for(std::set<int>::iterator iter = _notIndexedWords.begin(); iter != _notIndexedWords.end(); ++iter, ++index)
            {
                vw = _visualWords.at(*iter);
                UASSERT(vw != 0 && vw->getDescriptor().cols == dim && vw->getDescriptor().type() == type);
                vw->getDescriptor().copyTo(dataNotIndexed.row(index));
                mapIndexIdNotIndexed.insert(mapIndexIdNotIndexed.end(), std::pair<int,int>(index, vw->id()));
            }

            ULOGGER_DEBUG("Searching in words not indexed...");
            cv::flann::Index linearSearch;
            linearSearch.build(dataNotIndexed, cv::flann::LinearIndexParams(), type == CV_32F?cvflann::FLANN_DIST_L2:cvflann::FLANN_DIST_HAMMING);
            linearSearch.knnSearch(query, resultsNotIndexed, distsNotIndexed, _notIndexedWords.size()>1?2:1);

            if(distsNotIndexed.type() == CV_32S)
            {
                cv::Mat temp;
                distsNotIndexed.convertTo(temp, CV_32F);
                distsNotIndexed = temp;
            }
        }
        ULOGGER_DEBUG("Search not yet indexed words time = %fs", timer.ticks());

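        // Merge the results of both searches for each query word and keep the
        // best candidate that passes the NNDR test (incremental mode) or
        // simply the nearest word (fixed mode).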
        for(unsigned int i=0; i<vws.size(); ++i)
        {
            std::multimap<float, int> fullResults; // distance -> word id
            if(!bruteForce && dists.cols)
            {
                for(int j=0; j<dists.cols; ++j)
                {
                    if(results.at<int>(i,j) > 0)
                    {
                        float d = dists.at<float>(i,j);
                        fullResults.insert(std::pair<float, int>(d, uValue(_mapIndexId, results.at<int>(i,j))));
                    }
                }
            }
            else if(bruteForce && matches.size())
            {
                for(unsigned int j=0; j<matches.at(i).size(); ++j)
                {
                    if(matches.at(i).at(j).trainIdx > 0)
                    {
                        float d = matches.at(i).at(j).distance;
                        fullResults.insert(std::pair<float, int>(d, uValue(_mapIndexId, matches.at(i).at(j).trainIdx)));
                    }
                }
            }

            for(int j=0; j<distsNotIndexed.cols; ++j)
            {
                if(resultsNotIndexed.at<int>(i,j) > 0)
                {
                    float d = distsNotIndexed.at<float>(i,j);
                    fullResults.insert(std::pair<float, int>(d, uValue(mapIndexIdNotIndexed, resultsNotIndexed.at<int>(i,j))));
                }
            }

            if(_incrementalDictionary)
            {
                bool badDist = false;
                if(fullResults.size() == 0)
                {
                    badDist = true;
                }
                if(!badDist)
                {
                    if(fullResults.size() >= 2)
                    {
                        // Apply NNDR
                        if(fullResults.begin()->first > _nndrRatio * (++fullResults.begin())->first)
                        {
                            badDist = true; // Rejected
                        }
                    }
                    else
                    {
                        badDist = true; // Rejected
                    }
                }

                if(!badDist)
                {
                    resultIds[i] = fullResults.begin()->second;
                }
            }
            else if(fullResults.size())
            {
                // Fixed dictionary: just take the nearest word.
                resultIds[i] = fullResults.begin()->second;
            }
        }
        ULOGGER_DEBUG("badDist check time = %fs", timer.ticks());
    }
    return resultIds;
}

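// Insert an externally created word into the dictionary; it will be added to
// the search index on the next update().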
void VWDictionary::addWord(VisualWord * vw)
{
    if(vw)
    {
        _visualWords.insert(std::pair<int, VisualWord *>(vw->id(), vw));
        _notIndexedWords.insert(vw->id());
        if(vw->getReferences().size())
        {
            _totalActiveReferences += uSum(uValues(vw->getReferences()));
        }
        else
        {
            _unusedWords.insert(std::pair<int, VisualWord *>(vw->id(), vw));
        }
    }
}

const VisualWord * VWDictionary::getWord(int id) const
{
    return uValue(_visualWords, id, (VisualWord *)0);
}

VisualWord * VWDictionary::getUnusedWord(int id) const
{
    return uValue(_unusedWords, id, (VisualWord *)0);
}

std::vector<VisualWord*> VWDictionary::getUnusedWords() const
{
    if(!_incrementalDictionary)
    {
        ULOGGER_WARN("This method does nothing on a fixed dictionary");
        return std::vector<VisualWord*>();
    }
    return uValues(_unusedWords);
}

std::vector<int> VWDictionary::getUnusedWordIds() const
{
    if(!_incrementalDictionary)
    {
        ULOGGER_WARN("This method does nothing on a fixed dictionary");
        return std::vector<int>();
    }
    return uKeys(_unusedWords);
}

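// Remove the given words from the dictionary without deleting them; the caller
// keeps ownership of the pointers. Words that were already indexed are
// remembered so that the index is rebuilt on the next update().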
void VWDictionary::removeWords(const std::vector<VisualWord*> & words)
{
    for(unsigned int i=0; i<words.size(); ++i)
    {
        _visualWords.erase(words[i]->id());
        _unusedWords.erase(words[i]->id());
        if(_notIndexedWords.erase(words[i]->id()) == 0)
        {
            _removedIndexedWords.insert(words[i]->id());
        }
    }
}

void VWDictionary::deleteUnusedWords()
{
    std::vector<VisualWord*> unusedWords = uValues(_unusedWords);
    removeWords(unusedWords);
    for(unsigned int i=0; i<unusedWords.size(); ++i)
    {
        delete unusedWords[i];
    }
}

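// Export the dictionary to two text files: one with the signature references
// of each word and one with its descriptor values. Descriptors are written as
// floats, so the export is meaningful for CV_32F descriptors only.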
void VWDictionary::exportDictionary(const char * fileNameReferences, const char * fileNameDescriptors) const
{
    FILE* foutRef = 0;
    FILE* foutDesc = 0;
#ifdef _MSC_VER
    fopen_s(&foutRef, fileNameReferences, "w");
    fopen_s(&foutDesc, fileNameDescriptors, "w");
#else
    foutRef = fopen(fileNameReferences, "w");
    foutDesc = fopen(fileNameDescriptors, "w");
#endif

    if(foutRef)
    {
        fprintf(foutRef, "WordID SignaturesID...\n");
    }
    if(foutDesc)
    {
        if(_visualWords.begin() == _visualWords.end())
        {
            fprintf(foutDesc, "WordID Descriptors...\n");
        }
        else
        {
            fprintf(foutDesc, "WordID Descriptors...%d\n", (*_visualWords.begin()).second->getDescriptor().cols);
        }
    }

    for(std::map<int, VisualWord *>::const_iterator iter=_visualWords.begin(); iter!=_visualWords.end(); ++iter)
    {
        // References
        if(foutRef)
        {
            fprintf(foutRef, "%d ", (*iter).first);
            const std::map<int, int> ref = (*iter).second->getReferences();
            for(std::map<int, int>::const_iterator jter=ref.begin(); jter!=ref.end(); ++jter)
            {
                for(int i=0; i<(*jter).second; ++i)
                {
                    fprintf(foutRef, "%d ", (*jter).first);
                }
            }
            fprintf(foutRef, "\n");
        }

        // Descriptors
        if(foutDesc)
        {
            fprintf(foutDesc, "%d ", (*iter).first);
            const float * desc = (const float *)(*iter).second->getDescriptor().data;
            int dim = (*iter).second->getDescriptor().cols;

            for(int i=0; i<dim; i++)
            {
                fprintf(foutDesc, "%f ", desc[i]);
            }
            fprintf(foutDesc, "\n");
        }
    }

    if(foutRef)
        fclose(foutRef);
    if(foutDesc)
        fclose(foutDesc);
}

} // namespace rtabmap