VWDictionary.cpp
00001 /*
00002 Copyright (c) 2010-2016, Mathieu Labbe - IntRoLab - Universite de Sherbrooke
00003 All rights reserved.
00004 
00005 Redistribution and use in source and binary forms, with or without
00006 modification, are permitted provided that the following conditions are met:
00007     * Redistributions of source code must retain the above copyright
00008       notice, this list of conditions and the following disclaimer.
00009     * Redistributions in binary form must reproduce the above copyright
00010       notice, this list of conditions and the following disclaimer in the
00011       documentation and/or other materials provided with the distribution.
00012     * Neither the name of the Universite de Sherbrooke nor the
00013       names of its contributors may be used to endorse or promote products
00014       derived from this software without specific prior written permission.
00015 
00016 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
00017 ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
00018 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
00019 DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
00020 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
00021 (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
00022 LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
00023 ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
00024 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
00025 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
00026 */
00027 
00028 #include "rtabmap/core/VWDictionary.h"
00029 #include "rtabmap/core/VisualWord.h"
00030 
00031 #include "rtabmap/core/Signature.h"
00032 #include "rtabmap/core/DBDriver.h"
00033 #include "rtabmap/core/Parameters.h"
00034 
00035 #include "rtabmap/utilite/UtiLite.h"
00036 
00037 #include <opencv2/opencv_modules.hpp>
00038 
00039 #if CV_MAJOR_VERSION < 3
00040 #include <opencv2/gpu/gpu.hpp>
00041 #else
00042 #include <opencv2/core/cuda.hpp>
00043 #ifdef HAVE_OPENCV_CUDAFEATURES2D
00044 #include <opencv2/cudafeatures2d.hpp>
00045 #endif
00046 #endif
00047 
00048 #include "rtflann/flann.hpp"
00049 
00050 #include <fstream>
00051 #include <string>
00052 
00053 namespace rtabmap
00054 {
00055 
00056 class FlannIndex
00057 {
00058 public:
00059         FlannIndex():
00060                 index_(0),
00061                 nextIndex_(0),
00062                 featuresType_(0),
00063                 featuresDim_(0),
00064                 isLSH_(false),
00065                 useDistanceL1_(false)
00066         {
00067         }
00068         virtual ~FlannIndex()
00069         {
00070                 this->release();
00071         }
00072 
00073         void release()
00074         {
00075                 if(index_)
00076                 {
00077                         if(featuresType_ == CV_8UC1)
00078                         {
00079                                 delete (rtflann::Index<rtflann::Hamming<unsigned char> >*)index_;
00080                         }
00081                         else
00082                         {
00083                                 if(useDistanceL1_)
00084                                 {
00085                                         delete (rtflann::Index<rtflann::L1<float> >*)index_;
00086                                 }
00087                                 else
00088                                 {
00089                                         delete (rtflann::Index<rtflann::L2<float> >*)index_;
00090                                 }
00091                         }
00092                         index_ = 0;
00093                 }
00094                 nextIndex_ = 0;
00095                 isLSH_ = false;
00096                 addedDescriptors_.clear();
00097                 removedIndexes_.clear();
00098         }
00099 
00100         unsigned int indexedFeatures() const
00101         {
00102                 if(!index_)
00103                 {
00104                         return 0;
00105                 }
00106                 if(featuresType_ == CV_8UC1)
00107                 {
00108                         return ((const rtflann::Index<rtflann::Hamming<unsigned char> >*)index_)->size();
00109                 }
00110                 else
00111                 {
00112                         if(useDistanceL1_)
00113                         {
00114                                 return ((const rtflann::Index<rtflann::L1<float> >*)index_)->size();
00115                         }
00116                         else
00117                         {
00118                                 return ((const rtflann::Index<rtflann::L2<float> >*)index_)->size();
00119                         }
00120                 }
00121         }
00122 
00123         // Returns the approximate memory used by the index, in KB
00124         unsigned int memoryUsed() const
00125         {
00126                 if(!index_)
00127                 {
00128                         return 0;
00129                 }
00130                 if(featuresType_ == CV_8UC1)
00131                 {       
00132                         return ((const rtflann::Index<rtflann::Hamming<unsigned char> >*)index_)->usedMemory()/1000;
00133                 }
00134                 else
00135                 {
00136                         if(useDistanceL1_)
00137                         {
00138                                 return ((const rtflann::Index<rtflann::L1<float> >*)index_)->usedMemory()/1000;
00139                         }
00140                         else
00141                         {
00142                                 return ((const rtflann::Index<rtflann::L2<float> >*)index_)->usedMemory()/1000;
00143                         }
00144                 }
00145         }
00146 
00147         // Note that useDistanceL1 doesn't have any effect if LSH is used
00148         void build(
00149                         const cv::Mat & features,
00150                         const rtflann::IndexParams& params,
00151                         bool useDistanceL1)
00152         {
00153                 this->release();
00154                 UASSERT(index_ == 0);
00155                 UASSERT(features.type() == CV_32FC1 || features.type() == CV_8UC1);
00156                 featuresType_ = features.type();
00157                 featuresDim_ = features.cols;
00158                 useDistanceL1_ = useDistanceL1;
00159 
00160                 if(featuresType_ == CV_8UC1)
00161                 {
00162                         rtflann::Matrix<unsigned char> dataset(features.data, features.rows, features.cols);
00163                         index_ = new rtflann::Index<rtflann::Hamming<unsigned char> >(dataset, params);
00164                         ((rtflann::Index<rtflann::Hamming<unsigned char> >*)index_)->buildIndex();
00165                 }
00166                 else
00167                 {
00168                         rtflann::Matrix<float> dataset((float*)features.data, features.rows, features.cols);
00169                         if(useDistanceL1_)
00170                         {
00171                                 index_ = new rtflann::Index<rtflann::L1<float> >(dataset, params);
00172                                 ((rtflann::Index<rtflann::L1<float> >*)index_)->buildIndex();
00173                         }
00174                         else
00175                         {
00176                                 index_ = new rtflann::Index<rtflann::L2<float> >(dataset, params);
00177                                 ((rtflann::Index<rtflann::L2<float> >*)index_)->buildIndex();
00178                         }
00179                 }
00180 
00181                 if(features.rows == 1)
00182                 {
00183                         // incremental FLANN
00184                         addedDescriptors_.insert(std::make_pair(nextIndex_, features));
00185                 }
00186                 // else assume that the features are kept in memory outside this class (e.g., dataTree_)
00187 
00188                 nextIndex_ = features.rows;
00189         }
00190 
00191         bool isBuilt()
00192         {
00193                 return index_!=0;
00194         }
00195 
00196         int featuresType() const {return featuresType_;}
00197         int featuresDim() const {return featuresDim_;}
00198 
00199         unsigned int addPoint(const cv::Mat & feature)
00200         {
00201                 if(!index_)
00202                 {
00203                         UERROR("Flann index not yet created!");
00204                         return 0;
00205                 }
00206                 UASSERT(feature.type() == featuresType_);
00207                 UASSERT(feature.cols == featuresDim_);
00208                 UASSERT(feature.rows == 1);
00209                 if(featuresType_ == CV_8UC1)
00210                 {
00211                         rtflann::Matrix<unsigned char> point(feature.data, feature.rows, feature.cols);
00212                         rtflann::Index<rtflann::Hamming<unsigned char> > * index = (rtflann::Index<rtflann::Hamming<unsigned char> >*)index_;
00213                         index->addPoints(point, 0);
00214                         // Rebuild index if it doubles in size
00215                         if(index->sizeAtBuild() * 2 < index->size()+index->removedCount())
00216                         {
00217                                         // clean up features that were removed from the index
00218                                 for(std::list<int>::iterator iter=removedIndexes_.begin(); iter!=removedIndexes_.end(); ++iter)
00219                                 {
00220                                         addedDescriptors_.erase(*iter);
00221                                 }
00222                                 removedIndexes_.clear();
00223                                 index->buildIndex();
00224                         }
00225                 }
00226                 else
00227                 {
00228                         rtflann::Matrix<float> point((float*)feature.data, feature.rows, feature.cols);
00229                         if(useDistanceL1_)
00230                         {
00231                                 rtflann::Index<rtflann::L1<float> > * index = (rtflann::Index<rtflann::L1<float> >*)index_;
00232                                 index->addPoints(point, 0);
00233                                 // Rebuild index if it doubles in size
00234                                 if(index->sizeAtBuild() * 2 < index->size()+index->removedCount())
00235                                 {
00236                                                 // clean up features that were removed from the index
00237                                         for(std::list<int>::iterator iter=removedIndexes_.begin(); iter!=removedIndexes_.end(); ++iter)
00238                                         {
00239                                                 addedDescriptors_.erase(*iter);
00240                                         }
00241                                         removedIndexes_.clear();
00242                                         index->buildIndex();
00243                                 }
00244                         }
00245                         else
00246                         {
00247                                 rtflann::Index<rtflann::L2<float> > * index = (rtflann::Index<rtflann::L2<float> >*)index_;
00248                                 index->addPoints(point, 0);
00249                                 // Rebuild index if it doubles in size
00250                                 if(index->sizeAtBuild() * 2 < index->size()+index->removedCount())
00251                                 {
00252                                                 // clean up features that were removed from the index
00253                                         for(std::list<int>::iterator iter=removedIndexes_.begin(); iter!=removedIndexes_.end(); ++iter)
00254                                         {
00255                                                 addedDescriptors_.erase(*iter);
00256                                         }
00257                                         removedIndexes_.clear();
00258                                         index->buildIndex();
00259                                 }
00260                         }
00261                 }
00262 
00263                 addedDescriptors_.insert(std::make_pair(nextIndex_, feature));
00264 
00265                 return nextIndex_++;
00266         }
00267 
00268         void removePoint(unsigned int index)
00269         {
00270                 if(!index_)
00271                 {
00272                         UERROR("Flann index not yet created!");
00273                         return;
00274                 }
00275 
00276                 // If a Segmentation fault occurs in removePoint(), verify that you have this fix in your installed "flann/algorithms/nn_index.h":
00277                 // 707 - if (ids_[id]==id) {
00278                 // 707 + if (id < ids_.size() && ids_[id]==id) {
00279                 // ref: https://github.com/mariusmuja/flann/commit/23051820b2314f07cf40ba633a4067782a982ff3#diff-33762b7383f957c2df17301639af5151
00280 
00281                 if(featuresType_ == CV_8UC1)
00282                 {
00283                         ((rtflann::Index<rtflann::Hamming<unsigned char> >*)index_)->removePoint(index);
00284                 }
00285                 else if(useDistanceL1_)
00286                 {
00287                         ((rtflann::Index<rtflann::L1<float> >*)index_)->removePoint(index);
00288                 }
00289                 else
00290                 {
00291                         ((rtflann::Index<rtflann::L2<float> >*)index_)->removePoint(index);
00292                 }
00293 
00294                 removedIndexes_.push_back(index);
00295         }
00296 
00297         void knnSearch(
00298                         const cv::Mat & query,
00299                         cv::Mat & indices,
00300                         cv::Mat & dists,
00301                 int knn,
00302                 const rtflann::SearchParams& params=rtflann::SearchParams())
00303         {
00304                 if(!index_)
00305                 {
00306                         UERROR("Flann index not yet created!");
00307                         return;
00308                 }
00309                 indices.create(query.rows, knn, CV_32S);
00310                 dists.create(query.rows, knn, featuresType_ == CV_8UC1?CV_32S:CV_32F);
00311 
00312                 rtflann::Matrix<int> indicesF((int*)indices.data, indices.rows, indices.cols);
00313 
00314                 if(featuresType_ == CV_8UC1)
00315                 {
00316                         rtflann::Matrix<unsigned int> distsF((unsigned int*)dists.data, dists.rows, dists.cols);
00317                         rtflann::Matrix<unsigned char> queryF(query.data, query.rows, query.cols);
00318                         ((rtflann::Index<rtflann::Hamming<unsigned char> >*)index_)->knnSearch(queryF, indicesF, distsF, knn, params);
00319                 }
00320                 else
00321                 {
00322                         rtflann::Matrix<float> distsF((float*)dists.data, dists.rows, dists.cols);
00323                         rtflann::Matrix<float> queryF((float*)query.data, query.rows, query.cols);
00324                         if(useDistanceL1_)
00325                         {
00326                                 ((rtflann::Index<rtflann::L1<float> >*)index_)->knnSearch(queryF, indicesF, distsF, knn, params);
00327                         }
00328                         else
00329                         {
00330                                 ((rtflann::Index<rtflann::L2<float> >*)index_)->knnSearch(queryF, indicesF, distsF, knn, params);
00331                         }
00332                 }
00333         }
00334 
00335 private:
00336         void * index_;
00337         unsigned int nextIndex_;
00338         int featuresType_;
00339         int featuresDim_;
00340         bool isLSH_;
00341         bool useDistanceL1_; // true=MANHATTAN_L1, false=EUCLIDEAN_L2
00342 
00343         // keep feature in memory until the tree is rebuilt
00344         // (in case the word is deleted when removed from the VWDictionary)
00345         std::map<int, cv::Mat> addedDescriptors_;
00346         std::list<int> removedIndexes_;
00347 };
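// A minimal usage sketch of the FlannIndex wrapper above (the descriptor size
// and variable names are made up for illustration): build the index once, then
// add/remove points incrementally and query with knnSearch().
//
//   FlannIndex index;
//   cv::Mat initial(100, 64, CV_32FC1);          // e.g., 100 float descriptors (filled elsewhere)
//   index.build(initial, rtflann::KDTreeIndexParams(), false); // false = L2 distance
//   cv::Mat feature(1, 64, CV_32FC1);            // one new descriptor
//   unsigned int id = index.addPoint(feature);   // internal index of the new point
//   cv::Mat indices, dists;
//   index.knnSearch(feature, indices, dists, 2); // 2 nearest neighbors
//   index.removePoint(id);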
00348 
00349 const int VWDictionary::ID_START = 1;
00350 const int VWDictionary::ID_INVALID = 0;
00351 
00352 VWDictionary::VWDictionary(const ParametersMap & parameters) :
00353         _totalActiveReferences(0),
00354         _incrementalDictionary(Parameters::defaultKpIncrementalDictionary()),
00355         _incrementalFlann(Parameters::defaultKpIncrementalFlann()),
00356         _nndrRatio(Parameters::defaultKpNndrRatio()),
00357         _dictionaryPath(Parameters::defaultKpDictionaryPath()),
00358         _newWordsComparedTogether(Parameters::defaultKpNewWordsComparedTogether()),
00359         _lastWordId(0),
00360         useDistanceL1_(false),
00361         _flannIndex(new FlannIndex()),
00362         _strategy(kNNBruteForce)
00363 {
00364         this->setNNStrategy((NNStrategy)Parameters::defaultKpNNStrategy());
00365         this->parseParameters(parameters);
00366 }
00367 
00368 VWDictionary::~VWDictionary()
00369 {
00370         this->clear();
00371         delete _flannIndex;
00372 }
00373 
00374 void VWDictionary::parseParameters(const ParametersMap & parameters)
00375 {
00376         ParametersMap::const_iterator iter;
00377         Parameters::parse(parameters, Parameters::kKpNndrRatio(), _nndrRatio);
00378         Parameters::parse(parameters, Parameters::kKpNewWordsComparedTogether(), _newWordsComparedTogether);
00379         Parameters::parse(parameters, Parameters::kKpIncrementalFlann(), _incrementalFlann);
00380 
00381         UASSERT_MSG(_nndrRatio > 0.0f, uFormat("String=%s value=%f", uContains(parameters, Parameters::kKpNndrRatio())?parameters.at(Parameters::kKpNndrRatio()).c_str():"", _nndrRatio).c_str());
00382 
00383         std::string dictionaryPath = _dictionaryPath;
00384         bool incrementalDictionary = _incrementalDictionary;
00385         if((iter=parameters.find(Parameters::kKpDictionaryPath())) != parameters.end())
00386         {
00387                 dictionaryPath = (*iter).second.c_str();
00388         }
00389         if((iter=parameters.find(Parameters::kKpIncrementalDictionary())) != parameters.end())
00390         {
00391                 incrementalDictionary = uStr2Bool((*iter).second.c_str());
00392         }
00393 
00394         // Verifying hypotheses strategy
00395         if((iter=parameters.find(Parameters::kKpNNStrategy())) != parameters.end())
00396         {
00397                 NNStrategy nnStrategy = (NNStrategy)std::atoi((*iter).second.c_str());
00398                 this->setNNStrategy(nnStrategy);
00399         }
00400 
00401         if(incrementalDictionary)
00402         {
00403                 this->setIncrementalDictionary();
00404         }
00405         else
00406         {
00407                 this->setFixedDictionary(dictionaryPath);
00408         }
00409 
00410 }
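// A hedged example of how the dictionary can be configured through these
// parameters (the values below are illustrative, not the defaults):
//
//   ParametersMap params;
//   params.insert(ParametersMap::value_type(Parameters::kKpNNStrategy(), "1"));   // numeric value of the desired NNStrategy
//   params.insert(ParametersMap::value_type(Parameters::kKpNndrRatio(), "0.8"));
//   params.insert(ParametersMap::value_type(Parameters::kKpIncrementalDictionary(), "true"));
//   VWDictionary dictionary(params); // the constructor calls parseParameters(params)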
00411 
00412 void VWDictionary::setIncrementalDictionary()
00413 {
00414         if(!_incrementalDictionary)
00415         {
00416                 _incrementalDictionary = true;
00417                 if(_visualWords.size())
00418                 {
00419                         UWARN("Incremental dictionary set: already loaded visual words (%d) from the fixed dictionary will be included in the incremental one.", _visualWords.size());
00420                 }
00421         }
00422         _dictionaryPath = "";
00423 }
00424 
00425 void VWDictionary::setFixedDictionary(const std::string & dictionaryPath)
00426 {
00427         if(!dictionaryPath.empty())
00428         {
00429                 if((!_incrementalDictionary && _dictionaryPath.compare(dictionaryPath) != 0) ||
00430                    _visualWords.size() == 0)
00431                 {
00432                         std::ifstream file;
00433                         file.open(dictionaryPath.c_str(), std::ifstream::in);
00434                         if(file.good())
00435                         {
00436                                 UDEBUG("Deleting old dictionary and loading the new one from \"%s\"", dictionaryPath.c_str());
00437                                 UTimer timer;
00438 
00439                                 // first line is the header
00440                                 std::string str;
00441                                 std::list<std::string> strList;
00442                                 std::getline(file, str);
00443                                 strList = uSplitNumChar(str);
00444                                 unsigned int dimension  = 0;
00445                                 for(std::list<std::string>::iterator iter = strList.begin(); iter != strList.end(); ++iter)
00446                                 {
00447                                         if(uIsDigit(iter->at(0)))
00448                                         {
00449                                                 dimension = std::atoi(iter->c_str());
00450                                                 break;
00451                                         }
00452                                 }
00453 
00454                                 if(dimension == 0 || dimension > 1000)
00455                                 {
00456                                         UERROR("Invalid dictionary file: visual word dimension (%d) is out of range, \"%s\"", dimension, dictionaryPath.c_str());
00457                                 }
00458                                 else
00459                                 {
00460                                         // Process all words
00461                                         while(file.good())
00462                                         {
00463                                                 std::getline(file, str);
00464                                                 strList = uSplit(str);
00465                                                 if(strList.size() == dimension+1)
00466                                                 {
00467                                                         //first one is the visual word id
00468                                                         std::list<std::string>::iterator iter = strList.begin();
00469                                                         int id = std::atoi(iter->c_str());
00470                                                         cv::Mat descriptor(1, dimension, CV_32F);
00471                                                         ++iter;
00472                                                         unsigned int i=0;
00473 
00474                                                         //get descriptor
00475                                                         for(;i<dimension && iter != strList.end(); ++i, ++iter)
00476                                                         {
00477                                                                 descriptor.at<float>(i) = uStr2Float(*iter);
00478                                                         }
00479                                                         if(i != dimension)
00480                                                         {
00481                                                                 UERROR("Word %d has only %d values, expected %d (file=\"%s\")", id, i, dimension, dictionaryPath.c_str());
00482                                                         }
00483 
00484                                                         VisualWord * vw = new VisualWord(id, descriptor, 0);
00485                                                         _visualWords.insert(_visualWords.end(), std::pair<int, VisualWord*>(id, vw));
00486                                                         _notIndexedWords.insert(_notIndexedWords.end(), id);
00487                                                 }
00488                                                 else
00489                                                 {
00490                                                         UWARN("Cannot parse line \"%s\"", str.c_str());
00491                                                 }
00492                                         }
00493                                         this->update();
00494                                         _incrementalDictionary = false;
00495                                 }
00496 
00497 
00498                                 UDEBUG("Time changing dictionary = %fs", timer.ticks());
00499                         }
00500                         else
00501                         {
00502                                 UERROR("Cannot open dictionary file \"%s\"", dictionaryPath.c_str());
00503                         }
00504                         file.close();
00505                 }
00506                 else if(!_incrementalDictionary)
00507                 {
00508                         UDEBUG("Dictionary \"%s\" already loaded...", dictionaryPath.c_str());
00509                 }
00510                 else
00511                 {
00512                         UERROR("Cannot change to a fixed dictionary if there are already words (%d) in the incremental one.", _visualWords.size());
00513                 }
00514         }
00515         else if(_visualWords.size() == 0)
00516         {
00517                 _incrementalDictionary = false;
00518         }
00519         else if(_incrementalDictionary)
00520         {
00521                 UWARN("Cannot change to fixed dictionary, %d words already loaded as incremental", (int)_visualWords.size());
00522         }
00523         _dictionaryPath = dictionaryPath;
00524 }
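// The fixed dictionary file parsed above is plain text: the first line is a
// header whose first numeric token is the descriptor dimension, and every
// following line is "<word id> <v1> <v2> ... <v_dim>". A minimal sketch with
// made-up values for 4-dimensional descriptors:
//
//   words 4
//   1 0.12 0.34 0.56 0.78
//   2 0.21 0.43 0.65 0.87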
00525 
00526 void VWDictionary::setNNStrategy(NNStrategy strategy)
00527 {
00528         if(strategy!=kNNUndef)
00529         {
00530 #if CV_MAJOR_VERSION < 3
00531 #ifdef HAVE_OPENCV_GPU
00532                 if(strategy == kNNBruteForceGPU && !cv::gpu::getCudaEnabledDeviceCount())
00533                 {
00534                         UERROR("Nearest neighbor strategy \"kNNBruteForceGPU\" chosen but no CUDA devices found! Doing \"kNNBruteForce\" instead.");
00535                         strategy = kNNBruteForce;
00536                 }
00537 #else
00538                 if(strategy == kNNBruteForceGPU)
00539                 {
00540                         UERROR("Nearest neighbor strategy \"kNNBruteForceGPU\" chosen but OpenCV is not built with the GPU/CUDA module! Doing \"kNNBruteForce\" instead.");
00541                         strategy = kNNBruteForce;
00542                 }
00543 #endif
00544 #else
00545 #ifdef HAVE_OPENCV_CUDAFEATURES2D
00546                 if(strategy == kNNBruteForceGPU && !cv::cuda::getCudaEnabledDeviceCount())
00547                 {
00548                         UERROR("Nearest neighbor strategy \"kNNBruteForceGPU\" chosen but no CUDA devices found! Doing \"kNNBruteForce\" instead.");
00549                         strategy = kNNBruteForce;
00550                 }
00551 #else
00552                 if(strategy == kNNBruteForceGPU)
00553                 {
00554                         UERROR("Nearest neighbor strategy \"kNNBruteForceGPU\" chosen but the OpenCV cudafeatures2d module was not found! Doing \"kNNBruteForce\" instead.");
00555                         strategy = kNNBruteForce;
00556                 }
00557 #endif
00558 #endif
00559 
00560                 bool update = _strategy != strategy;
00561                 _strategy = strategy;
00562                 if(update)
00563                 {
00564                         _dataTree = cv::Mat();
00565                         _notIndexedWords = uKeysSet(_visualWords);
00566                         _removedIndexedWords.clear();
00567                         this->update();
00568                 }
00569         }
00570 }
00571 
00572 int VWDictionary::getLastIndexedWordId() const
00573 {
00574         if(_mapIndexId.size())
00575         {
00576                 return _mapIndexId.rbegin()->second;
00577         }
00578         else
00579         {
00580                 return 0;
00581         }
00582 }
00583 
00584 unsigned int VWDictionary::getIndexedWordsCount() const
00585 {
00586         return _flannIndex->indexedFeatures();
00587 }
00588 
00589 unsigned int VWDictionary::getIndexMemoryUsed() const
00590 {
00591         return _flannIndex->memoryUsed();
00592 }
00593 
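// update() synchronizes the nearest neighbor index with _visualWords. It either
// (1) updates the incremental FLANN index in place (removing _removedIndexedWords
// and inserting _notIndexedWords), (2) appends the new descriptors to _dataTree
// when a brute-force strategy is used and no word was removed, or (3) rebuilds
// _dataTree and the FLANN index from scratch.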
00594 void VWDictionary::update()
00595 {
00596         ULOGGER_DEBUG("");
00597         if(!_incrementalDictionary && !_notIndexedWords.size())
00598         {
00599                 // No need to update the search index if we
00600                 // use a fixed dictionary and the index is
00601                 // already built
00602                 return;
00603         }
00604 
00605         if(_notIndexedWords.size() || _visualWords.size() == 0 || _removedIndexedWords.size())
00606         {
00607                 if(_incrementalFlann &&
00608                    _strategy < kNNBruteForce &&
00609                    _visualWords.size())
00610                 {
00611                         ULOGGER_DEBUG("Incremental FLANN: Removing %d words...", (int)_removedIndexedWords.size());
00612                         for(std::set<int>::iterator iter=_removedIndexedWords.begin(); iter!=_removedIndexedWords.end(); ++iter)
00613                         {
00614                                 UASSERT(uContains(_mapIdIndex, *iter));
00615                                 UASSERT(uContains(_mapIndexId, _mapIdIndex.at(*iter)));
00616                                 _flannIndex->removePoint(_mapIdIndex.at(*iter));
00617                                 _mapIndexId.erase(_mapIdIndex.at(*iter));
00618                                 _mapIdIndex.erase(*iter);
00619                         }
00620                         ULOGGER_DEBUG("Incremental FLANN: Removing %d words... done!", (int)_removedIndexedWords.size());
00621 
00622                         if(_notIndexedWords.size())
00623                         {
00624                                 ULOGGER_DEBUG("Incremental FLANN: Inserting %d words...", (int)_notIndexedWords.size());
00625                                 for(std::set<int>::iterator iter=_notIndexedWords.begin(); iter!=_notIndexedWords.end(); ++iter)
00626                                 {
00627                                         VisualWord* w = uValue(_visualWords, *iter, (VisualWord*)0);
00628                                         UASSERT(w);
00629 
00630                                         cv::Mat descriptor;
00631                                         if(w->getDescriptor().type() == CV_8U)
00632                                         {
00633                                                 useDistanceL1_ = true;
00634                                                 if(_strategy == kNNFlannKdTree || _strategy == kNNFlannNaive)
00635                                                 {
00636                                                         w->getDescriptor().convertTo(descriptor, CV_32F);
00637                                                 }
00638                                                 else
00639                                                 {
00640                                                         descriptor = w->getDescriptor();
00641                                                 }
00642                                         }
00643                                         else
00644                                         {
00645                                                 descriptor = w->getDescriptor();
00646                                         }
00647 
00648                                         int index = 0;
00649                                         if(!_flannIndex->isBuilt())
00650                                         {
00651                                                 UDEBUG("Building FLANN index...");
00652                                                 switch(_strategy)
00653                                                 {
00654                                                 case kNNFlannNaive:
00655                                                         _flannIndex->build(descriptor, rtflann::LinearIndexParams(), useDistanceL1_);
00656                                                         break;
00657                                                 case kNNFlannKdTree:
00658                                                         UASSERT_MSG(descriptor.type() == CV_32F, "To use KdTree dictionary, float descriptors are required!");
00659                                                         _flannIndex->build(descriptor, rtflann::KDTreeIndexParams(), useDistanceL1_);
00660                                                         break;
00661                                                 case kNNFlannLSH:
00662                                                         UASSERT_MSG(descriptor.type() == CV_8U, "To use LSH dictionary, binary descriptors are required!");
00663                                                         _flannIndex->build(descriptor, rtflann::LshIndexParams(12, 20, 2), useDistanceL1_);
00664                                                         break;
00665                                                 default:
00666                                                         UFATAL("Not supposed to be here!");
00667                                                         break;
00668                                                 }
00669                                                 UDEBUG("Building FLANN index... done!");
00670                                         }
00671                                         else
00672                                         {
00673                                                 UASSERT(descriptor.cols == _flannIndex->featuresDim());
00674                                                 UASSERT(descriptor.type() == _flannIndex->featuresType());
00675                                                 index = _flannIndex->addPoint(descriptor);
00676                                         }
00677                                         std::pair<std::map<int, int>::iterator, bool> inserted;
00678                                         inserted = _mapIndexId.insert(std::pair<int, int>(index, w->id()));
00679                                         UASSERT(inserted.second);
00680                                         inserted = _mapIdIndex.insert(std::pair<int, int>(w->id(), index));
00681                                         UASSERT(inserted.second);
00682                                 }
00683                                 ULOGGER_DEBUG("Incremental FLANN: Inserting %d words... done!", (int)_notIndexedWords.size());
00684                         }
00685                 }
00686                 else if(_strategy >= kNNBruteForce &&
00687                                 _notIndexedWords.size() &&
00688                                 _removedIndexedWords.size() == 0 &&
00689                                 _visualWords.size() &&
00690                                 _dataTree.rows)
00691                 {
00692                         // just add the words that are not yet indexed
00693                         int i = _dataTree.rows;
00694                         _dataTree.reserve(_dataTree.rows + _notIndexedWords.size());
00695                         for(std::set<int>::iterator iter=_notIndexedWords.begin(); iter!=_notIndexedWords.end(); ++iter)
00696                         {
00697                                 VisualWord* w = uValue(_visualWords, *iter, (VisualWord*)0);
00698                                 UASSERT(w);
00699                                 UASSERT(w->getDescriptor().cols == _dataTree.cols);
00700                                 UASSERT(w->getDescriptor().type() == _dataTree.type());
00701                                 _dataTree.push_back(w->getDescriptor());
00702                                 _mapIndexId.insert(_mapIndexId.end(), std::pair<int, int>(i, w->id()));
00703                                 std::pair<std::map<int, int>::iterator, bool> inserted = _mapIdIndex.insert(std::pair<int, int>(w->id(), i));
00704                                 UASSERT(inserted.second);
00705                                 ++i;
00706                         }
00707                 }
00708                 else
00709                 {
00710                         _mapIndexId.clear();
00711                         _mapIdIndex.clear();
00712                         _dataTree = cv::Mat();
00713                         _flannIndex->release();
00714 
00715                         if(_visualWords.size())
00716                         {
00717                                 UTimer timer;
00718                                 timer.start();
00719 
00720                                 int type;
00721                                 if(_visualWords.begin()->second->getDescriptor().type() == CV_8U)
00722                                 {
00723                                         useDistanceL1_ = true;
00724                                         if(_strategy == kNNFlannKdTree || _strategy == kNNFlannNaive)
00725                                         {
00726                                                 type = CV_32F;
00727                                         }
00728                                         else
00729                                         {
00730                                                 type = _visualWords.begin()->second->getDescriptor().type();
00731                                         }
00732                                 }
00733                                 else
00734                                 {
00735                                         type = _visualWords.begin()->second->getDescriptor().type();
00736                                 }
00737                                 int dim = _visualWords.begin()->second->getDescriptor().cols;
00738 
00739                                 UASSERT(type == CV_32F || type == CV_8U);
00740                                 UASSERT(dim > 0);
00741 
00742                                 // Create the data matrix
00743                                 _dataTree = cv::Mat(_visualWords.size(), dim, type); // SURF descriptors are CV_32F
00744                                 std::map<int, VisualWord*>::const_iterator iter = _visualWords.begin();
00745                                 for(unsigned int i=0; i < _visualWords.size(); ++i, ++iter)
00746                                 {
00747                                         cv::Mat descriptor;
00748                                         if(iter->second->getDescriptor().type() == CV_8U)
00749                                         {
00750                                                 if(_strategy == kNNFlannKdTree || _strategy == kNNFlannNaive)
00751                                                 {
00752                                                         iter->second->getDescriptor().convertTo(descriptor, CV_32F);
00753                                                 }
00754                                                 else
00755                                                 {
00756                                                         descriptor = iter->second->getDescriptor();
00757                                                 }
00758                                         }
00759                                         else
00760                                         {
00761                                                 descriptor = iter->second->getDescriptor();
00762                                         }
00763 
00764                                         UASSERT(descriptor.cols == dim);
00765                                         UASSERT(descriptor.type() == type);
00766 
00767                                         descriptor.copyTo(_dataTree.row(i));
00768                                         _mapIndexId.insert(_mapIndexId.end(), std::pair<int, int>(i, iter->second->id()));
00769                                         _mapIdIndex.insert(_mapIdIndex.end(), std::pair<int, int>(iter->second->id(), i));
00770                                 }
00771 
00772                                 ULOGGER_DEBUG("_mapIndexId.size() = %d, words.size()=%d, _dim=%d",_mapIndexId.size(), _visualWords.size(), dim);
00773                                 ULOGGER_DEBUG("copying data = %f s", timer.ticks());
00774 
00775                                 switch(_strategy)
00776                                 {
00777                                 case kNNFlannNaive:
00778                                         _flannIndex->build(_dataTree, rtflann::LinearIndexParams(), useDistanceL1_);
00779                                         break;
00780                                 case kNNFlannKdTree:
00781                                         UASSERT_MSG(type == CV_32F, "To use KdTree dictionary, float descriptors are required!");
00782                                         _flannIndex->build(_dataTree, rtflann::KDTreeIndexParams(), useDistanceL1_);
00783                                         break;
00784                                 case kNNFlannLSH:
00785                                         UASSERT_MSG(type == CV_8U, "To use LSH dictionary, binary descriptors are required!");
00786                                         _flannIndex->build(_dataTree, rtflann::LshIndexParams(12, 20, 2), useDistanceL1_);
00787                                         break;
00788                                 default:
00789                                         break;
00790                                 }
00791 
00792                                 ULOGGER_DEBUG("Time to create kd tree = %f s", timer.ticks());
00793                         }
00794                 }
00795                 UDEBUG("Dictionary updated! (size=%d added=%d removed=%d)",
00796                                 _dataTree.rows, _notIndexedWords.size(), _removedIndexedWords.size());
00797         }
00798         else
00799         {
00800                 UDEBUG("Dictionary has not changed, so no need to update it! (size=%d)", _dataTree.rows);
00801         }
00802         _notIndexedWords.clear();
00803         _removedIndexedWords.clear();
00804         UDEBUG("");
00805 }
00806 
00807 void VWDictionary::clear(bool printWarningsIfNotEmpty)
00808 {
00809         ULOGGER_DEBUG("");
00810         if(printWarningsIfNotEmpty)
00811         {
00812                 if(_visualWords.size() && _incrementalDictionary)
00813                 {
00814                         UWARN("Visual dictionary should already be empty here (%d words still in dictionary).", (int)_visualWords.size());
00815                 }
00816                 if(_notIndexedWords.size())
00817                 {
00818                         UWARN("The list of not-indexed words should be empty here (%d words still not indexed)", (int)_notIndexedWords.size());
00819                 }
00820         }
00821         for(std::map<int, VisualWord *>::iterator i=_visualWords.begin(); i!=_visualWords.end(); ++i)
00822         {
00823                 delete (*i).second;
00824         }
00825         _visualWords.clear();
00826         _notIndexedWords.clear();
00827         _removedIndexedWords.clear();
00828         _totalActiveReferences = 0;
00829         _lastWordId = 0;
00830         _dataTree = cv::Mat();
00831         _mapIndexId.clear();
00832         _mapIdIndex.clear();
00833         _unusedWords.clear();
00834         _flannIndex->release();
00835         useDistanceL1_ = false;
00836 }
00837 
00838 int VWDictionary::getNextId()
00839 {
00840         return ++_lastWordId;
00841 }
00842 
00843 void VWDictionary::addWordRef(int wordId, int signatureId)
00844 {
00845         if(signatureId > 0 && wordId > 0)
00846         {
00847                 VisualWord * vw = 0;
00848                 vw = uValue(_visualWords, wordId, vw);
00849                 if(vw)
00850                 {
00851                         vw->addRef(signatureId);
00852                         _totalActiveReferences += 1;
00853 
00854                         _unusedWords.erase(vw->id());
00855                 }
00856                 else
00857                 {
00858                         UERROR("Not found word %d", wordId);
00859                 }
00860         }
00861 }
00862 
00863 void VWDictionary::removeAllWordRef(int wordId, int signatureId)
00864 {
00865         VisualWord * vw = 0;
00866         vw = uValue(_visualWords, wordId, vw);
00867         if(vw)
00868         {
00869                 _totalActiveReferences -= vw->removeAllRef(signatureId);
00870                 if(vw->getReferences().size() == 0)
00871                 {
00872                         _unusedWords.insert(std::pair<int, VisualWord*>(vw->id(), vw));
00873                 }
00874         }
00875 }
00876 
00877 std::list<int> VWDictionary::addNewWords(const cv::Mat & descriptorsIn,
00878                                                            int signatureId)
00879 {
00880         UASSERT(signatureId > 0);
00881 
00882         UDEBUG("id=%d descriptors=%d", signatureId, descriptorsIn.rows);
00883         UTimer timer;
00884         std::list<int> wordIds;
00885         if(descriptorsIn.rows == 0 || descriptorsIn.cols == 0)
00886         {
00887                 UERROR("Descriptors size is null!");
00888                 return wordIds;
00889         }
00890 
00891         if(!_incrementalDictionary && _visualWords.empty())
00892         {
00893                 UERROR("Dictionary mode is set to fixed but no words are in it!");
00894                 return wordIds;
00895         }
00896 
00897         // verify that the new descriptors have the same size and type as the words already in the dictionary
00898         int dim = 0;
00899         int type = -1;
00900         if(_visualWords.size())
00901         {
00902                 dim = _visualWords.begin()->second->getDescriptor().cols;
00903                 type = _visualWords.begin()->second->getDescriptor().type();
00904                 UASSERT(type == CV_32F || type == CV_8U);
00905         }
00906         if(dim && dim != descriptorsIn.cols)
00907         {
00908                 UERROR("Descriptors (size=%d) are not the same size as already added words in dictionary(size=%d)", descriptorsIn.cols, dim);
00909                 return wordIds;
00910         }
00911         if(type>=0 && type != descriptorsIn.type())
00912         {
00913                 UERROR("Descriptors (type=%d) are not the same type as already added words in dictionary(type=%d)", descriptorsIn.type(), type);
00914                 return wordIds;
00915         }
00916 
00917         // now compare with the actual index
00918         cv::Mat descriptors;
00919         if(descriptorsIn.type() == CV_8U)
00920         {
00921                 useDistanceL1_ = true;
00922                 if(_strategy == kNNFlannKdTree || _strategy == kNNFlannNaive)
00923                 {
00924                         descriptorsIn.convertTo(descriptors, CV_32F);
00925                 }
00926                 else
00927                 {
00928                         descriptors = descriptorsIn;
00929                 }
00930         }
00931         else
00932         {
00933                 descriptors = descriptorsIn;
00934         }
00935         dim = 0;
00936         type = -1;
00937         if(_dataTree.rows || _flannIndex->isBuilt())
00938         {
00939                 dim = _flannIndex->isBuilt()?_flannIndex->featuresDim():_dataTree.cols;
00940                 type = _flannIndex->isBuilt()?_flannIndex->featuresType():_dataTree.type();
00941                 UASSERT(type == CV_32F || type == CV_8U);
00942         }
00943 
00944         if(dim && dim != descriptors.cols)
00945         {
00946                 UERROR("Descriptors (size=%d) are not the same size as already added words in dictionary(size=%d)", descriptors.cols, dim);
00947                 return wordIds;
00948         }
00949 
00950         if(type>=0 && type != descriptors.type())
00951         {
00952                 UERROR("Descriptors (type=%d) are not the same type as already added words in dictionary(type=%d)", descriptors.type(), type);
00953                 return wordIds;
00954         }
00955 
00956         int dupWordsCountFromDict= 0;
00957         int dupWordsCountFromLast= 0;
00958 
00959         unsigned int k=2; // k nearest neighbors
00960 
00961         cv::Mat newWords;
00962         std::vector<int> newWordsId;
00963 
00964         cv::Mat results;
00965         cv::Mat dists;
00966         std::vector<std::vector<cv::DMatch> > matches;
00967         bool bruteForce = false;
00968 
00969         UTimer timerLocal;
00970         timerLocal.start();
00971 
00972         if(_flannIndex->isBuilt() || (!_dataTree.empty() && _dataTree.rows >= (int)k))
00973         {
00974                 //Find nearest neighbors
00975                 UDEBUG("newPts.total()=%d ", descriptors.rows);
00976 
00977                 if(_strategy == kNNFlannNaive || _strategy == kNNFlannKdTree || _strategy == kNNFlannLSH)
00978                 {
00979                         _flannIndex->knnSearch(descriptors, results, dists, k);
00980                 }
00981                 else if(_strategy == kNNBruteForce)
00982                 {
00983                         bruteForce = true;
00984                         cv::BFMatcher matcher(descriptors.type()==CV_8U?cv::NORM_HAMMING:cv::NORM_L2SQR);
00985                         matcher.knnMatch(descriptors, _dataTree, matches, k);
00986                 }
00987                 else if(_strategy == kNNBruteForceGPU)
00988                 {
00989                         bruteForce = true;
00990 #if CV_MAJOR_VERSION < 3
00991 #ifdef HAVE_OPENCV_GPU
00992                         cv::gpu::GpuMat newDescriptorsGpu(descriptors);
00993                         cv::gpu::GpuMat lastDescriptorsGpu(_dataTree);
00994                         if(descriptors.type()==CV_8U)
00995                         {
00996                                 cv::gpu::BruteForceMatcher_GPU<cv::Hamming> gpuMatcher;
00997                                 gpuMatcher.knnMatch(newDescriptorsGpu, lastDescriptorsGpu, matches, k);
00998                         }
00999                         else
01000                         {
01001                                 cv::gpu::BruteForceMatcher_GPU<cv::L2<float> > gpuMatcher;
01002                                 gpuMatcher.knnMatch(newDescriptorsGpu, lastDescriptorsGpu, matches, k);
01003                         }
01004 #else
01005                         UERROR("Cannot use brute Force GPU because OpenCV is not built with gpu module.");
01006 #endif
01007 #else
01008 #ifdef HAVE_OPENCV_CUDAFEATURES2D
01009                         cv::cuda::GpuMat newDescriptorsGpu(descriptors);
01010                         cv::cuda::GpuMat lastDescriptorsGpu(_dataTree);
01011                         cv::Ptr<cv::cuda::DescriptorMatcher> gpuMatcher;
01012                         if(descriptors.type()==CV_8U)
01013                         {
01014                                 gpuMatcher = cv::cuda::DescriptorMatcher::createBFMatcher(cv::NORM_HAMMING);
01015                                 gpuMatcher->knnMatch(newDescriptorsGpu, lastDescriptorsGpu, matches, k);
01016                         }
01017                         else
01018                         {
01019                                 gpuMatcher = cv::cuda::DescriptorMatcher::createBFMatcher(cv::NORM_L2);
01020                                 gpuMatcher->knnMatch(newDescriptorsGpu, lastDescriptorsGpu, matches, k);
01021                         }
01022 #else
01023                         UERROR("Cannot use brute Force GPU because OpenCV is not built with cuda module.");
01024 #endif
01025 #endif
01026                 }
01027                 else
01028                 {
01029                         UFATAL("");
01030                 }
01031 
01032                 // In case of binary descriptors
01033                 if(dists.type() == CV_32S)
01034                 {
01035                         cv::Mat temp;
01036                         dists.convertTo(temp, CV_32F);
01037                         dists = temp;
01038                 }
01039 
01040                 UDEBUG("Time to find nn = %f s", timerLocal.ticks());
01041         }
01042 
01043         // Process results
01044         for(int i = 0; i < descriptors.rows; ++i)
01045         {
01046                 std::multimap<float, int> fullResults; // Contains results from the kd-tree search and the naive search in new words
01047                 if(!bruteForce && dists.cols)
01048                 {
01049                         for(int j=0; j<dists.cols; ++j)
01050                         {
01051                                 float d = dists.at<float>(i,j);
01052                                 int id = uValue(_mapIndexId, results.at<int>(i,j));
01053                                 if(d >= 0.0f && id > 0)
01054                                 {
01055                                         fullResults.insert(std::pair<float, int>(d, id));
01056                                 }
01057                                 else
01058                                 {
01059                                         break;
01060                                 }
01061                         }
01062                 }
01063                 else if(bruteForce && matches.size())
01064                 {
01065                         for(unsigned int j=0; j<matches.at(i).size(); ++j)
01066                         {
01067                                 float d = matches.at(i).at(j).distance;
01068                                 int id = uValue(_mapIndexId, matches.at(i).at(j).trainIdx);
01069                                 if(d >= 0.0f && id > 0)
01070                                 {
01071                                         fullResults.insert(std::pair<float, int>(d, id));
01072                                 }
01073                                 else
01074                                 {
01075                                         break;
01076                                 }
01077                         }
01078                 }
01079 
01080                 // Check if this descriptor matches with a word from the last signature (a word not already added to the tree)
01081                 if(_newWordsComparedTogether && newWords.rows)
01082                 {
01083                         std::vector<std::vector<cv::DMatch> > matchesNewWords;
01084                         cv::BFMatcher matcher(descriptors.type()==CV_8U?cv::NORM_HAMMING:useDistanceL1_?cv::NORM_L1:cv::NORM_L2SQR);
01085                         UASSERT(descriptors.cols == newWords.cols && descriptors.type() == newWords.type());
01086                         matcher.knnMatch(descriptors.row(i), newWords, matchesNewWords, newWords.rows>1?2:1);
01087                         UASSERT(matchesNewWords.size() == 1);
01088                         for(unsigned int j=0; j<matchesNewWords.at(0).size(); ++j)
01089                         {
01090                                 float d = matchesNewWords.at(0).at(j).distance;
01091                                 int id = newWordsId[matchesNewWords.at(0).at(j).trainIdx];
01092                                 if(d >= 0.0f && id > 0)
01093                                 {
01094                                         fullResults.insert(std::pair<float, int>(d, id));
01095                                 }
01096                                 else
01097                                 {
01098                                         break;
01099                                 }
01100                         }
01101                 }
01102 
01103                 if(_incrementalDictionary)
01104                 {
01105                         bool badDist = false;
01106                         if(fullResults.size() == 0)
01107                         {
01108                                 badDist = true;
01109                         }
01110                         if(!badDist)
01111                         {
01112                                 if(fullResults.size() >= 2)
01113                                 {
01114                                         // Apply the NNDR (nearest neighbor distance ratio) test: keep the match only if the best distance is at most _nndrRatio times the second-best distance
01115                                         if(fullResults.begin()->first > _nndrRatio * (++fullResults.begin())->first)
01116                                         {
01117                                                 badDist = true; // Rejected
01118                                         }
01119                                 }
01120                                 else
01121                                 {
01122                                         badDist = true; // Rejected
01123                                 }
01124                         }
01125 
01126                         if(badDist)
01127                         {
01128                                 // use original descriptor
01129                                 VisualWord * vw = new VisualWord(getNextId(), descriptorsIn.row(i), signatureId);
01130                                 _visualWords.insert(_visualWords.end(), std::pair<int, VisualWord *>(vw->id(), vw));
01131                                 _notIndexedWords.insert(_notIndexedWords.end(), vw->id());
01132                                 newWords.push_back(descriptors.row(i));
01133                                 newWordsId.push_back(vw->id());
01134                                 wordIds.push_back(vw->id());
01135                                 UASSERT(vw->id()>0);
01136                         }
01137                         else
01138                         {
01139                                 if(_notIndexedWords.find(fullResults.begin()->second) != _notIndexedWords.end())
01140                                 {
01141                                         ++dupWordsCountFromLast;
01142                                 }
01143                                 else
01144                                 {
01145                                         ++dupWordsCountFromDict;
01146                                 }
01147 
01148                                 this->addWordRef(fullResults.begin()->second, signatureId);
01149                                 wordIds.push_back(fullResults.begin()->second);
01150                         }
01151                 }
01152                 else if(fullResults.size())
01153                 {
01154                         // If the dictionary is not incremental, just take the nearest word
01155                         ++dupWordsCountFromDict;
01156                         this->addWordRef(fullResults.begin()->second, signatureId);
01157                         wordIds.push_back(fullResults.begin()->second);
01158                         UASSERT(fullResults.begin()->second>0);
01159                 }
01160         }
01161         ULOGGER_DEBUG("naive search and add ref/words time = %f s", timerLocal.ticks());
01162 
01163         ULOGGER_DEBUG("%d new words added...", (int)_notIndexedWords.size());
01164         ULOGGER_DEBUG("%d duplicated words added (from current image = %d)...",
01165                         dupWordsCountFromDict+dupWordsCountFromLast, dupWordsCountFromLast);
01166         UDEBUG("total time %fs", timer.ticks());
01167 
01168         _totalActiveReferences += _notIndexedWords.size();
01169         return wordIds;
01170 }
01171 
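// Usage sketch (illustrative only; assumes "dict" is an already populated
// VWDictionary and "descriptor" is a single-row cv::Mat of the same type and
// size as the dictionary's descriptors):
//
//   std::list<VisualWord *> vws;
//   vws.push_back(new VisualWord(0, descriptor, 0)); // temporary query word
//   std::vector<int> ids = dict.findNN(vws);         // ids[0] == 0 means no acceptable match
//   delete vws.front();
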
01172 std::vector<int> VWDictionary::findNN(const std::list<VisualWord *> & vws) const
01173 {
01174         UTimer timer;
01175         timer.start();
01176 
01177         if(_visualWords.size() && vws.size())
01178         {
01179                 int type = (*vws.begin())->getDescriptor().type();
01180                 int dim = (*vws.begin())->getDescriptor().cols;
01181 
01182                 if(dim != _visualWords.begin()->second->getDescriptor().cols)
01183                 {
01184                         UERROR("Descriptors (size=%d) are not the same size as already added words in dictionary (size=%d)", dim, _visualWords.begin()->second->getDescriptor().cols);
01185                         return std::vector<int>(vws.size(), 0);
01186                 }
01187 
01188                 if(type != _visualWords.begin()->second->getDescriptor().type())
01189                 {
01190                         UERROR("Descriptors (type=%d) are not the same type as already added words in dictionary (type=%d)", type, _visualWords.begin()->second->getDescriptor().type());
01191                         return std::vector<int>(vws.size(), 0);
01192                 }
01193 
01194                 // fill the request matrix
01195                 int index = 0;
01196                 VisualWord * vw;
01197                 cv::Mat query(vws.size(), dim, type);
01198                 for(std::list<VisualWord *>::const_iterator iter=vws.begin(); iter!=vws.end(); ++iter, ++index)
01199                 {
01200                         vw = *iter;
01201                         UASSERT(vw);
01202 
01203                         UASSERT(vw->getDescriptor().cols == dim);
01204                         UASSERT(vw->getDescriptor().type() == type);
01205 
01206                         vw->getDescriptor().copyTo(query.row(index));
01207                 }
01208                 ULOGGER_DEBUG("Preparation time = %fs", timer.ticks());
01209 
01210                 return findNN(query);
01211         }
01212         return std::vector<int>(vws.size(), 0);
01213 }
01214 std::vector<int> VWDictionary::findNN(const cv::Mat & queryIn) const
01215 {
01216         UTimer timer;
01217         timer.start();
01218         std::vector<int> resultIds(queryIn.rows, 0);
01219         unsigned int k=2; // number of nearest neighbors (two are needed for the NNDR test)
01220 
01221         if(_visualWords.size() && queryIn.rows)
01222         {
01223                 // verify we have the same features
01224                 int dim = _visualWords.begin()->second->getDescriptor().cols;
01225                 int type = _visualWords.begin()->second->getDescriptor().type();
01226                 UASSERT(type == CV_32F || type == CV_8U);
01227 
01228                 if(dim != queryIn.cols)
01229                 {
01230                         UERROR("Descriptors (size=%d) are not the same size as already added words in dictionary (size=%d)", queryIn.cols, dim);
01231                         return resultIds;
01232                 }
01233                 if(type != queryIn.type())
01234                 {
01235                         UERROR("Descriptors (type=%d) are not the same type as already added words in dictionary (type=%d)", queryIn.type(), type);
01236                         return resultIds;
01237                 }
01238 
01239                 // now compare with the actual index
01240                 cv::Mat query;
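                // The FLANN kd-tree and naive (linear) indices work on float vectors
                // with L1/L2 distances, so binary (CV_8U) descriptors are converted to
                // CV_32F for those strategies; LSH and the brute-force matchers use the
                // Hamming distance directly on the binary data.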
01241                 if(queryIn.type() == CV_8U)
01242                 {
01243                         if(_strategy == kNNFlannKdTree || _strategy == kNNFlannNaive)
01244                         {
01245                                 queryIn.convertTo(query, CV_32F);
01246                         }
01247                         else
01248                         {
01249                                 query = queryIn;
01250                         }
01251                 }
01252                 else
01253                 {
01254                         query = queryIn;
01255                 }
01256                 dim = 0;
01257                 type = -1;
01258                 if(_dataTree.rows || _flannIndex->isBuilt())
01259                 {
01260                         dim = _flannIndex->isBuilt()?_flannIndex->featuresDim():_dataTree.cols;
01261                         type = _flannIndex->isBuilt()?_flannIndex->featuresType():_dataTree.type();
01262                         UASSERT(type == CV_32F || type == CV_8U);
01263                 }
01264 
01265                 if(dim && dim != query.cols)
01266                 {
01267                         UERROR("Descriptors (size=%d) are not the same size as already added words in dictionary (size=%d)", query.cols, dim);
01268                         return resultIds;
01269                 }
01270 
01271                 if(type>=0 && type != query.type())
01272                 {
01273                         UERROR("Descriptors (type=%d) are not the same type as already added words in dictionary (type=%d)", query.type(), type);
01274                         return resultIds;
01275                 }
01276 
01277                 std::vector<std::vector<cv::DMatch> > matches;
01278                 bool bruteForce = false;
01279                 cv::Mat results;
01280                 cv::Mat dists;
01281 
01282                 if(_flannIndex->isBuilt() || (!_dataTree.empty() && _dataTree.rows >= (int)k))
01283                 {
01284                         //Find nearest neighbors
01285                         UDEBUG("query.rows=%d ", query.rows);
01286 
01287                         if(_strategy == kNNFlannNaive || _strategy == kNNFlannKdTree || _strategy == kNNFlannLSH)
01288                         {
01289                                 _flannIndex->knnSearch(query, results, dists, k);
01290                         }
01291                         else if(_strategy == kNNBruteForce)
01292                         {
01293                                 bruteForce = true;
01294                                 cv::BFMatcher matcher(query.type()==CV_8U?cv::NORM_HAMMING:cv::NORM_L2SQR);
01295                                 matcher.knnMatch(query, _dataTree, matches, k);
01296                         }
01297                         else if(_strategy == kNNBruteForceGPU)
01298                         {
01299                                 bruteForce = true;
01300 #if CV_MAJOR_VERSION < 3
01301 #ifdef HAVE_OPENCV_GPU
01302                                 cv::gpu::GpuMat newDescriptorsGpu(query);
01303                                 cv::gpu::GpuMat lastDescriptorsGpu(_dataTree);
01304                                 if(query.type()==CV_8U)
01305                                 {
01306                                         cv::gpu::BruteForceMatcher_GPU<cv::Hamming> gpuMatcher;
01307                                         gpuMatcher.knnMatch(newDescriptorsGpu, lastDescriptorsGpu, matches, k);
01308                                 }
01309                                 else
01310                                 {
01311                                         cv::gpu::BruteForceMatcher_GPU<cv::L2<float> > gpuMatcher;
01312                                         gpuMatcher.knnMatch(newDescriptorsGpu, lastDescriptorsGpu, matches, k);
01313                                 }
01314 #else
01315                                 UERROR("Cannot use brute-force GPU matching because OpenCV is not built with the gpu module.");
01316 #endif
01317 #else
01318 #ifdef HAVE_OPENCV_CUDAFEATURES2D
01319                                 cv::cuda::GpuMat newDescriptorsGpu(query);
01320                                 cv::cuda::GpuMat lastDescriptorsGpu(_dataTree);
01321                                 cv::Ptr<cv::cuda::DescriptorMatcher> gpuMatcher;
01322                                 if(query.type()==CV_8U)
01323                                 {
01324                                         gpuMatcher = cv::cuda::DescriptorMatcher::createBFMatcher(cv::NORM_HAMMING);
01325                                         gpuMatcher->knnMatch(newDescriptorsGpu, lastDescriptorsGpu, matches, k); // synchronous call so "matches" is filled directly
01326                                 }
01327                                 else
01328                                 {
01329                                         gpuMatcher = cv::cuda::DescriptorMatcher::createBFMatcher(cv::NORM_L2);
01330                                         gpuMatcher->knnMatch(newDescriptorsGpu, lastDescriptorsGpu, matches, k);
01331                                 }
01332 #else
01333                                 UERROR("Cannot use brute-force GPU matching because OpenCV is not built with the cudafeatures2d module.");
01334 #endif
01335 #endif
01336                         }
01337                         else
01338                         {
01339                                 UFATAL("Unknown nearest neighbor strategy %d!", (int)_strategy);
01340                         }
01341 
01342                         // In case of binary descriptors, the FLANN Hamming search returns integer distances; convert them to float for the processing below
01343                         if(dists.type() == CV_32S)
01344                         {
01345                                 cv::Mat temp;
01346                                 dists.convertTo(temp, CV_32F);
01347                                 dists = temp;
01348                         }
01349                 }
01350                 ULOGGER_DEBUG("Search dictionary time = %fs", timer.ticks());
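                // Words added since the last index update are not part of the FLANN
                // index yet, so they are matched separately with a brute-force matcher
                // here and merged with the index results into "fullResults" below.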
01351 
01352                 std::map<int, int> mapIndexIdNotIndexed;
01353                 std::vector<std::vector<cv::DMatch> > matchesNotIndexed;
01354                 if(_notIndexedWords.size())
01355                 {
01356                         cv::Mat dataNotIndexed = cv::Mat::zeros(_notIndexedWords.size(), query.cols, query.type());
01357                         unsigned int index = 0;
01358                         VisualWord * vw;
01359                         for(std::set<int>::iterator iter = _notIndexedWords.begin(); iter != _notIndexedWords.end(); ++iter, ++index)
01360                         {
01361                                 vw = _visualWords.at(*iter);
01362 
01363                                 cv::Mat descriptor;
01364                                 if(vw->getDescriptor().type() == CV_8U)
01365                                 {
01366                                         if(_strategy == kNNFlannKdTree || _strategy == kNNFlannNaive)
01367                                         {
01368                                                 vw->getDescriptor().convertTo(descriptor, CV_32F);
01369                                         }
01370                                         else
01371                                         {
01372                                                 descriptor = vw->getDescriptor();
01373                                         }
01374                                 }
01375                                 else
01376                                 {
01377                                         descriptor = vw->getDescriptor();
01378                                 }
01379 
01380                                 UASSERT(vw != 0 && descriptor.cols == query.cols && descriptor.type() == query.type());
01381                                 descriptor.copyTo(dataNotIndexed.row(index)); // use the (possibly converted) descriptor so the type matches the query
01382                                 mapIndexIdNotIndexed.insert(mapIndexIdNotIndexed.end(), std::pair<int,int>(index, vw->id()));
01383                         }
01384 
01385                         // Find nearest neighbor
01386                         ULOGGER_DEBUG("Searching in words not indexed...");
01387                         cv::BFMatcher matcher(query.type()==CV_8U?cv::NORM_HAMMING:useDistanceL1_?cv::NORM_L1:cv::NORM_L2SQR);
01388                         matcher.knnMatch(query, dataNotIndexed, matchesNotIndexed, dataNotIndexed.rows>1?2:1);
01389                 }
01390                 ULOGGER_DEBUG("Search not yet indexed words time = %fs", timer.ticks());
01391 
01392                 for(int i=0; i<query.rows; ++i)
01393                 {
01394                         std::multimap<float, int> fullResults; // Contains results from the index search [and the brute-force search in not-yet-indexed words]
01395                         if(!bruteForce && dists.cols)
01396                         {
01397                                 for(int j=0; j<dists.cols; ++j)
01398                                 {
01399                                         float d = dists.at<float>(i,j);
01400                                         int id = uValue(_mapIndexId, results.at<int>(i,j));
01401                                         if(d >= 0.0f && id > 0)
01402                                         {
01403                                                 fullResults.insert(std::pair<float, int>(d, id));
01404                                         }
01405                                 }
01406                         }
01407                         else if(bruteForce && matches.size())
01408                         {
01409                                 for(unsigned int j=0; j<matches.at(i).size(); ++j)
01410                                 {
01411                                         float d = matches.at(i).at(j).distance;
01412                                         int id = uValue(_mapIndexId, matches.at(i).at(j).trainIdx);
01413                                         if(d >= 0.0f && id > 0)
01414                                         {
01415                                                 fullResults.insert(std::pair<float, int>(d, id));
01416                                         }
01417                                 }
01418                         }
01419 
01420                         // not indexed..
01421                         if(matchesNotIndexed.size())
01422                         {
01423                                 for(unsigned int j=0; j<matchesNotIndexed.at(i).size(); ++j)
01424                                 {
01425                                         float d = matchesNotIndexed.at(i).at(j).distance;
01426                                         int id = uValue(mapIndexIdNotIndexed, matchesNotIndexed.at(i).at(j).trainIdx);
01427                                         if(d >= 0.0f && id > 0)
01428                                         {
01429                                                 fullResults.insert(std::pair<float, int>(d, id));
01430                                         }
01431                                         else
01432                                         {
01433                                                 break;
01434                                         }
01435                                 }
01436                         }
01437 
01438                         if(_incrementalDictionary)
01439                         {
01440                                 bool badDist = false;
01441                                 if(fullResults.size() == 0)
01442                                 {
01443                                         badDist = true;
01444                                 }
01445                                 if(!badDist)
01446                                 {
01447                                         if(fullResults.size() >= 2)
01448                                         {
01449                                                 // Apply NNDR
01450                                                 if(fullResults.begin()->first > _nndrRatio * (++fullResults.begin())->first)
01451                                                 {
01452                                                         badDist = true; // Rejected
01453                                                 }
01454                                         }
01455                                         else
01456                                         {
01457                                                 badDist = true; // Rejected
01458                                         }
01459                                 }
01460 
01461                                 if(!badDist)
01462                                 {
01463                                         resultIds[i] = fullResults.begin()->second; // Accepted
01464                                 }
01465                         }
01466                         else if(fullResults.size())
01467                         {
01468                                 //Just take the nearest if the dictionary is not incremental
01469                                 resultIds[i] = fullResults.begin()->second; // Accepted
01470                         }
01471                 }
01472                 ULOGGER_DEBUG("badDist check time = %fs", timer.ticks());
01473         }
01474         return resultIds;
01475 }
01476 
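// addWord() registers an externally created word (e.g., one loaded from the
// database): the word stays in _notIndexedWords until the nearest neighbor
// index is rebuilt, and a word without references is also tracked in
// _unusedWords so that it can be cleaned up later (see deleteUnusedWords()).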
01477 void VWDictionary::addWord(VisualWord * vw)
01478 {
01479         if(vw)
01480         {
01481                 _visualWords.insert(std::pair<int, VisualWord *>(vw->id(), vw));
01482                 _notIndexedWords.insert(vw->id());
01483                 if(vw->getReferences().size())
01484                 {
01485                         _totalActiveReferences += uSum(uValues(vw->getReferences()));
01486                 }
01487                 else
01488                 {
01489                         _unusedWords.insert(std::pair<int, VisualWord *>(vw->id(), vw));
01490                 }
01491         }
01492 }
01493 
01494 const VisualWord * VWDictionary::getWord(int id) const
01495 {
01496         return uValue(_visualWords, id, (VisualWord *)0);
01497 }
01498 
01499 VisualWord * VWDictionary::getUnusedWord(int id) const
01500 {
01501         return uValue(_unusedWords, id, (VisualWord *)0);
01502 }
01503 
01504 std::vector<VisualWord*> VWDictionary::getUnusedWords() const
01505 {
01506         if(!_incrementalDictionary)
01507         {
01508                 ULOGGER_WARN("This method does nothing on a fixed dictionary");
01509                 return std::vector<VisualWord*>();
01510         }
01511         return uValues(_unusedWords);
01512 }
01513 
01514 std::vector<int> VWDictionary::getUnusedWordIds() const
01515 {
01516         if(!_incrementalDictionary)
01517         {
01518                 ULOGGER_WARN("This method does nothing on a fixed dictionary");
01519                 return std::vector<int>();
01520         }
01521         return uKeys(_unusedWords);
01522 }
01523 
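// Note: removeWords() only erases the dictionary entries; the VisualWord
// objects themselves are not deleted, so the caller keeps ownership of the
// pointers (deleteUnusedWords() below also frees them).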
01524 void VWDictionary::removeWords(const std::vector<VisualWord*> & words)
01525 {
01526         for(unsigned int i=0; i<words.size(); ++i)
01527         {
01528                 _visualWords.erase(words[i]->id());
01529                 _unusedWords.erase(words[i]->id());
01530                 if(_notIndexedWords.erase(words[i]->id()) == 0)
01531                 {
01532                         _removedIndexedWords.insert(words[i]->id());
01533                 }
01534         }
01535 }
01536 
01537 void VWDictionary::deleteUnusedWords()
01538 {
01539         std::vector<VisualWord*> unusedWords = uValues(_unusedWords);
01540         removeWords(unusedWords);
01541         for(unsigned int i=0; i<unusedWords.size(); ++i)
01542         {
01543                 delete unusedWords[i];
01544         }
01545 }
01546 
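// Output format sketch (illustrative values): the references file starts with
// "WordID SignaturesID..." followed by one line per word, e.g. "12 1 1 3 " for
// a word 12 referenced twice by signature 1 and once by signature 3; the
// descriptors file starts with "WordID Descriptors...<dim>" followed by one
// line per word containing <dim> float values.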
01547 void VWDictionary::exportDictionary(const char * fileNameReferences, const char * fileNameDescriptors) const
01548 {
01549         UDEBUG("");
01550         if(_visualWords.empty())
01551         {
01552                 UWARN("Dictionary is empty, cannot export it!");
01553                 return;
01554         }
01555         if(_visualWords.begin()->second->getDescriptor().type() != CV_32FC1)
01556         {
01557                 UERROR("Exporting binary descriptors is not implemented!");
01558                 return;
01559         }
01560     FILE* foutRef = 0;
01561     FILE* foutDesc = 0;
01562 #ifdef _MSC_VER
01563     fopen_s(&foutRef, fileNameReferences, "w");
01564     fopen_s(&foutDesc, fileNameDescriptors, "w");
01565 #else
01566     foutRef = fopen(fileNameReferences, "w");
01567     foutDesc = fopen(fileNameDescriptors, "w");
01568 #endif
01569 
01570     if(foutRef)
01571     {
01572         fprintf(foutRef, "WordID SignaturesID...\n");
01573     }
01574         if(foutDesc)
01575         {
01576                 if(_visualWords.begin() == _visualWords.end())
01577                 {
01578                         fprintf(foutDesc, "WordID Descriptors...\n");
01579                 }
01580                 else
01581                 {
01582                         UDEBUG("");
01583                         fprintf(foutDesc, "WordID Descriptors...%d\n", (*_visualWords.begin()).second->getDescriptor().cols);
01584                 }
01585         }
01586 
01587         UDEBUG("Export %d words...", (int)_visualWords.size());
01588     for(std::map<int, VisualWord *>::const_iterator iter=_visualWords.begin(); iter!=_visualWords.end(); ++iter)
01589     {
01590         // References
01591         if(foutRef)
01592         {
01593                         fprintf(foutRef, "%d ", (*iter).first);
01594                         const std::map<int, int> ref = (*iter).second->getReferences();
01595                         for(std::map<int, int>::const_iterator jter=ref.begin(); jter!=ref.end(); ++jter)
01596                         {
01597                                 for(int i=0; i<(*jter).second; ++i)
01598                                 {
01599                                         fprintf(foutRef, "%d ", (*jter).first);
01600                                 }
01601                         }
01602                         fprintf(foutRef, "\n");
01603         }
01604 
01605         //Descriptors
01606         if(foutDesc)
01607         {
01608                         fprintf(foutDesc, "%d ", (*iter).first);
01609                         const float * desc = (const float *)(*iter).second->getDescriptor().data;
01610                         int dim = (*iter).second->getDescriptor().cols;
01611 
01612                         for(int i=0; i<dim; i++)
01613                         {
01614                                 fprintf(foutDesc, "%f ", desc[i]);
01615                         }
01616                         fprintf(foutDesc, "\n");
01617         }
01618     }
01619 
01620         if(foutRef)
01621                 fclose(foutRef);
01622         if(foutDesc)
01623                 fclose(foutDesc);
01624 }
01625 
01626 } // namespace rtabmap

