VWDictionary.cpp
1 /*
2 Copyright (c) 2010-2016, Mathieu Labbe - IntRoLab - Universite de Sherbrooke
3 All rights reserved.
4 
5 Redistribution and use in source and binary forms, with or without
6 modification, are permitted provided that the following conditions are met:
7  * Redistributions of source code must retain the above copyright
8  notice, this list of conditions and the following disclaimer.
9  * Redistributions in binary form must reproduce the above copyright
10  notice, this list of conditions and the following disclaimer in the
11  documentation and/or other materials provided with the distribution.
12  * Neither the name of the Universite de Sherbrooke nor the
13  names of its contributors may be used to endorse or promote products
14  derived from this software without specific prior written permission.
15 
16 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
17 ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19 DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
20 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
21 (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
22 LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
23 ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 */
27 
28 #include "rtabmap/core/VWDictionary.h"
29 #include "VisualWord.h"
30 
31 #include "rtabmap/core/Signature.h"
32 #include "rtabmap/core/DBDriver.h"
33 #include "rtabmap/core/Parameters.h"
34 #include "rtabmap/core/FlannIndex.h"
35 
36 #include "rtabmap/utilite/UtiLite.h"
37 
38 #include <opencv2/opencv_modules.hpp>
39 
40 #if CV_MAJOR_VERSION < 3
41 #ifdef HAVE_OPENCV_GPU
42 #include <opencv2/gpu/gpu.hpp>
43 #endif
44 #else
45 #include <opencv2/core/cuda.hpp>
46 #ifdef HAVE_OPENCV_CUDAFEATURES2D
47 #include <opencv2/cudafeatures2d.hpp>
48 #endif
49 #endif
50 
51 #include <fstream>
52 #include <string>
53 
54 #define KDTREE_SIZE 4
55 #define KNN_CHECKS 32
56 
57 namespace rtabmap
58 {
59 
60 const int VWDictionary::ID_START = 1;
61 const int VWDictionary::ID_INVALID = 0;
62 
63 VWDictionary::VWDictionary(const ParametersMap & parameters) :
64  _totalActiveReferences(0),
65  _incrementalDictionary(Parameters::defaultKpIncrementalDictionary()),
66  _incrementalFlann(Parameters::defaultKpIncrementalFlann()),
67  _rebalancingFactor(Parameters::defaultKpFlannRebalancingFactor()),
68  _byteToFloat(Parameters::defaultKpByteToFloat()),
69  _nndrRatio(Parameters::defaultKpNndrRatio()),
70  _newDictionaryPath(Parameters::defaultKpDictionaryPath()),
71  _newWordsComparedTogether(Parameters::defaultKpNewWordsComparedTogether()),
72  _lastWordId(0),
73  useDistanceL1_(false),
74  _flannIndex(new FlannIndex()),
75  _strategy(kNNBruteForce)
76 {
77  this->setNNStrategy((NNStrategy)Parameters::defaultKpNNStrategy());
78  this->parseParameters(parameters);
79 }
80 
81 VWDictionary::~VWDictionary()
82 {
83  this->clear();
84  delete _flannIndex;
85 }
86 
87 void VWDictionary::parseParameters(const ParametersMap & parameters)
88 {
89  ParametersMap::const_iterator iter;
90  Parameters::parse(parameters, Parameters::kKpNndrRatio(), _nndrRatio);
91  Parameters::parse(parameters, Parameters::kKpNewWordsComparedTogether(), _newWordsComparedTogether);
92  Parameters::parse(parameters, Parameters::kKpIncrementalFlann(), _incrementalFlann);
93  Parameters::parse(parameters, Parameters::kKpFlannRebalancingFactor(), _rebalancingFactor);
94  bool byteToFloat = _byteToFloat;
95  Parameters::parse(parameters, Parameters::kKpByteToFloat(), _byteToFloat);
96 
97  UASSERT_MSG(_nndrRatio > 0.0f, uFormat("String=%s value=%f", uContains(parameters, Parameters::kKpNndrRatio())?parameters.at(Parameters::kKpNndrRatio()).c_str():"", _nndrRatio).c_str());
98 
99  bool incrementalDictionary = _incrementalDictionary;
100  if((iter=parameters.find(Parameters::kKpDictionaryPath())) != parameters.end())
101  {
102  _newDictionaryPath = (*iter).second.c_str();
103  }
104  if((iter=parameters.find(Parameters::kKpIncrementalDictionary())) != parameters.end())
105  {
106  incrementalDictionary = uStr2Bool((*iter).second.c_str());
107  }
108 
109  // Verifying hypotheses strategy
110  bool treeUpdated = false;
111  if((iter=parameters.find(Parameters::kKpNNStrategy())) != parameters.end())
112  {
113  NNStrategy nnStrategy = (NNStrategy)std::atoi((*iter).second.c_str());
114  treeUpdated = this->setNNStrategy(nnStrategy);
115  }
116  if(!treeUpdated && byteToFloat!=_byteToFloat && _strategy == kNNFlannKdTree)
117  {
118  UINFO("KDTree: Binary to Float conversion approach has changed, re-initialize kd-tree.");
119  _dataTree = cv::Mat();
120  _notIndexedWords = uKeysSet(_visualWords);
121  _removedIndexedWords.clear();
122  this->update();
123  }
124 
125  if(incrementalDictionary)
126  {
127  this->setIncrementalDictionary();
128  }
129  _incrementalDictionary = incrementalDictionary;
130 }
131 
132 void VWDictionary::setIncrementalDictionary()
133 {
134  if(!_incrementalDictionary)
135  {
136  _incrementalDictionary = true;
137  if(_visualWords.size())
138  {
139  UWARN("Incremental dictionary set: already loaded visual words (%d) from the fixed dictionary will be included in the incremental one.", _visualWords.size());
140  }
141  }
142  _dictionaryPath = "";
143  _newDictionaryPath = "";
144 }
145 
146 void VWDictionary::setFixedDictionary(const std::string & dictionaryPath)
147 {
148  UDEBUG("");
149  if(!dictionaryPath.empty())
150  {
151  if((!_incrementalDictionary && _dictionaryPath.compare(dictionaryPath) != 0) ||
152  _visualWords.size() == 0)
153  {
154  UINFO("incremental=%d, oldPath=%s newPath=%s, visual words=%d",
155  _incrementalDictionary?1:0, _dictionaryPath.c_str(), dictionaryPath.c_str(), (int)_visualWords.size());
156 
157  if(UFile::getExtension(dictionaryPath).compare("db") == 0)
158  {
159  UWARN("Loading fixed vocabulary \"%s\", this may take a while...", dictionaryPath.c_str());
160  DBDriver * driver = DBDriver::create();
161  if(driver->openConnection(dictionaryPath, false))
162  {
163  driver->load(this, false);
164  for(std::map<int, VisualWord*>::iterator iter=_visualWords.begin(); iter!=_visualWords.end(); ++iter)
165  {
166  iter->second->setSaved(true);
167  }
169  driver->closeConnection(false);
170  }
171  else
172  {
173  UERROR("Could not load dictionary from database %s", dictionaryPath.c_str());
174  }
175  delete driver;
176  }
177  else
178  {
179  UWARN("Loading fixed vocabulary \"%s\", this may take a while...", dictionaryPath.c_str());
180  std::ifstream file;
181  file.open(dictionaryPath.c_str(), std::ifstream::in);
182  if(file.good())
183  {
184  UDEBUG("Deleting old dictionary and loading the new one from \"%s\"", dictionaryPath.c_str());
185  UTimer timer;
186 
187  // first line is the header
188  std::string str;
189  std::list<std::string> strList;
190  std::getline(file, str);
191  strList = uSplitNumChar(str);
192  int dimension = 0;
193  for(std::list<std::string>::iterator iter = strList.begin(); iter != strList.end(); ++iter)
194  {
195  if(uIsDigit(iter->at(0)))
196  {
197  dimension = std::atoi(iter->c_str());
198  break;
199  }
200  }
201  UDEBUG("descriptor dimension = %d", dimension);
202 
203  if(dimension <= 0 || dimension > 1000)
204  {
205  UERROR("Invalid dictionary file, visual word dimension (%d) is not valid, \"%s\"", dimension, dictionaryPath.c_str());
206  }
207  else
208  {
209  // Process all words
210  while(file.good())
211  {
212  std::getline(file, str);
213  strList = uSplit(str);
214  if((int)strList.size() == dimension+1)
215  {
216  //first one is the visual word id
217  std::list<std::string>::iterator iter = strList.begin();
218  int id = std::atoi(iter->c_str());
219  cv::Mat descriptor(1, dimension, CV_32F);
220  ++iter;
221  int i=0;
222 
223  //get descriptor
224  for(;i<dimension && iter != strList.end(); ++i, ++iter)
225  {
226  descriptor.at<float>(i) = uStr2Float(*iter);
227  }
228  if(i != dimension)
229  {
230  UERROR("Loaded word does not have the same size (%d) as the descriptor size previously detected (%d).", i, dimension);
231  }
232 
233  VisualWord * vw = new VisualWord(id, descriptor, 0);
234  vw->setSaved(true);
235  _visualWords.insert(_visualWords.end(), std::pair<int, VisualWord*>(id, vw));
236  _notIndexedWords.insert(_notIndexedWords.end(), id);
237  _unusedWords.insert(_unusedWords.end(), std::pair<int, VisualWord*>(id, vw));
238  }
239  else if(!str.empty())
240  {
241  UWARN("Cannot parse line \"%s\"", str.c_str());
242  }
243  }
244  if(_visualWords.size())
245  {
246  UWARN("Loaded %d words!", (int)_visualWords.size());
247  }
248  }
249  }
250  else
251  {
252  UERROR("Cannot open dictionary file \"%s\"", dictionaryPath.c_str());
253  }
254  file.close();
255  }
256 
257  if(_visualWords.size() == 0)
258  {
260  UWARN("No words loaded (%d), cannot set a fixed dictionary.", (int)_visualWords.size());
261  }
262  else
263  {
264  _dictionaryPath = dictionaryPath;
265  _newDictionaryPath = dictionaryPath;
266  _incrementalDictionary = false;
267  this->update();
268  UWARN("Loaded %d words!", (int)_visualWords.size());
269  }
270  }
271  else if(!_incrementalDictionary)
272  {
273  UDEBUG("Dictionary \"%s\" already loaded...", dictionaryPath.c_str());
274  }
275  else
276  {
277  UERROR("Cannot change to a fixed dictionary if there are already words (%d) in the incremental one.", _visualWords.size());
278  }
279  }
280  else if(_incrementalDictionary && _visualWords.size())
281  {
282  UWARN("Cannot change to fixed dictionary, %d words already loaded as incremental", (int)_visualWords.size());
283  }
284  else
285  {
286  _incrementalDictionary = false;
287  }
288  _dictionaryPath = dictionaryPath;
289  _newDictionaryPath = dictionaryPath;
290 }
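
Illustrative note (not part of the original file): based on the parser above, a plain-text fixed dictionary appears to use the same layout that exportDictionary() writes further below, i.e. a header line whose first numeric token is the descriptor dimension, followed by one line per word containing the word id and then that many float values. A hypothetical 4-dimensional vocabulary could look like:

WordID Descriptors...4
1 0.12 0.50 0.03 0.97
2 0.88 0.10 0.44 0.21
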
291 
292 bool VWDictionary::setNNStrategy(NNStrategy strategy)
293 {
294 #if CV_MAJOR_VERSION < 3
295 #ifdef HAVE_OPENCV_GPU
296  if(strategy == kNNBruteForceGPU && !cv::gpu::getCudaEnabledDeviceCount())
297  {
298  UERROR("Nearest neighbor strategy \"kNNBruteForceGPU\" chosen but no CUDA devices found! Doing \"kNNBruteForce\" instead.");
299  strategy = kNNBruteForce;
300  }
301 #else
302  if(strategy == kNNBruteForceGPU)
303  {
304  UERROR("Nearest neighbor strategy \"kNNBruteForceGPU\" chosen but OpenCV is not built with GPU/cuda module! Doing \"kNNBruteForce\" instead.");
305  strategy = kNNBruteForce;
306  }
307 #endif
308 #else
309 #ifdef HAVE_OPENCV_CUDAFEATURES2D
310  if(strategy == kNNBruteForceGPU && !cv::cuda::getCudaEnabledDeviceCount())
311  {
312  UERROR("Nearest neighbor strategy \"kNNBruteForceGPU\" chosen but no CUDA devices found! Doing \"kNNBruteForce\" instead.");
313  strategy = kNNBruteForce;
314  }
315 #else
316  if(strategy == kNNBruteForceGPU)
317  {
318  UERROR("Nearest neighbor strategy \"kNNBruteForceGPU\" chosen but OpenCV cudafeatures2d module is not found! Doing \"kNNBruteForce\" instead.");
319  strategy = kNNBruteForce;
320  }
321 #endif
322 #endif
323 
324  if(strategy>=kNNUndef)
325  {
326  UERROR("Nearest neighbor strategy \"%d\" chosen but this strategy cannot be used with a dictionary! Doing \"kNNBruteForce\" instead.", strategy);
327  strategy = kNNBruteForce;
328  }
329 
330  bool update = _strategy != strategy;
331  _strategy = strategy;
332  if(update)
333  {
334  if(_notIndexedWords.size() != _visualWords.size() || !_dataTree.empty())
335  {
336  UINFO("Nearest neighbor strategy has changed, re-initialize search tree.");
337  }
338  _dataTree = cv::Mat();
339  _notIndexedWords = uKeysSet(_visualWords);
340  _removedIndexedWords.clear();
341  this->update();
342  return true;
343  }
344  return false;
345 }
346 
347 int VWDictionary::getLastIndexedWordId() const
348 {
349  if(_mapIndexId.size())
350  {
351  return _mapIndexId.rbegin()->second;
352  }
353  else
354  {
355  return 0;
356  }
357 }
358 
359 unsigned int VWDictionary::getIndexedWordsCount() const
360 {
361  return _flannIndex->indexedFeatures();
362 }
363 
364 unsigned int VWDictionary::getIndexMemoryUsed() const
365 {
366  return _flannIndex->memoryUsed();
367 }
368 
369 unsigned long VWDictionary::getMemoryUsed() const
370 {
371  long memoryUsage = sizeof(VWDictionary);
372  memoryUsage += getIndexMemoryUsed();
373  memoryUsage += _dataTree.total()*_dataTree.elemSize();
374  if(!_visualWords.empty())
375  {
376  memoryUsage += _visualWords.size()*(sizeof(int) + _visualWords.rbegin()->second->getMemoryUsed() + sizeof(std::map<int, VisualWord *>::iterator)) + sizeof(std::map<int, VisualWord *>);
377  if(_dataTree.empty() &&
378  _visualWords.begin()->second->getDescriptor().type() == CV_8U &&
379  _strategy == kNNFlannKdTree)
380  {
381  // Binary descriptors were converted to float, and not included in _dataTree
382  memoryUsage += _visualWords.size() * _visualWords.begin()->second->getDescriptor().total() * sizeof(float) * (_byteToFloat?1:8);
383  }
384  }
385  if(!_unusedWords.empty())
386  {
387  // they are the same words as in _visualWords, so just add the pointer size
388  memoryUsage += _unusedWords.size()*(sizeof(int) + sizeof(VisualWord *)+sizeof(std::map<int, VisualWord *>::iterator)) + sizeof(std::map<int, VisualWord *>);
389  }
390  memoryUsage += _mapIndexId.size() * (sizeof(int)*2+sizeof(std::map<int ,int>::iterator)) + sizeof(std::map<int ,int>);
391  memoryUsage += _mapIdIndex.size() * (sizeof(int)*2+sizeof(std::map<int ,int>::iterator)) + sizeof(std::map<int ,int>);
392  memoryUsage += _notIndexedWords.size() * (sizeof(int)+sizeof(std::set<int>::iterator)) + sizeof(std::set<int>);
393  memoryUsage += _removedIndexedWords.size() * (sizeof(int)+sizeof(std::set<int>::iterator)) + sizeof(std::set<int>);
394  return memoryUsage;
395 }
396 
397 cv::Mat VWDictionary::convertBinTo32F(const cv::Mat & descriptorsIn, bool byteToFloat)
398 {
399  if(byteToFloat)
400  {
401  // Old approach
402  cv::Mat descriptorsOut;
403  descriptorsIn.convertTo(descriptorsOut, CV_32F);
404  return descriptorsOut;
405  }
406  else
407  {
408  // New approach
409  UASSERT(descriptorsIn.type() == CV_8UC1);
410  cv::Mat descriptorsOut(descriptorsIn.rows, descriptorsIn.cols*8, CV_32FC1);
411  for(int i=0; i<descriptorsIn.rows; ++i)
412  {
413  const unsigned char * ptrIn = descriptorsIn.ptr(i);
414  float * ptrOut = descriptorsOut.ptr<float>(i);
415  for(int j=0; j<descriptorsIn.cols; ++j)
416  {
417  int jo = j*8;
418  ptrOut[jo] = (ptrIn[j] & 1) == 1?1.0f:0.0f;
419  ptrOut[jo+1] = (ptrIn[j] & (1<<1)) != 0?1.0f:0.0f;
420  ptrOut[jo+2] = (ptrIn[j] & (1<<2)) != 0?1.0f:0.0f;
421  ptrOut[jo+3] = (ptrIn[j] & (1<<3)) != 0?1.0f:0.0f;
422  ptrOut[jo+4] = (ptrIn[j] & (1<<4)) != 0?1.0f:0.0f;
423  ptrOut[jo+5] = (ptrIn[j] & (1<<5)) != 0?1.0f:0.0f;
424  ptrOut[jo+6] = (ptrIn[j] & (1<<6)) != 0?1.0f:0.0f;
425  ptrOut[jo+7] = (ptrIn[j] & (1<<7)) != 0?1.0f:0.0f;
426  }
427  }
428  return descriptorsOut;
429  }
430 }
431 
432 cv::Mat VWDictionary::convert32FToBin(const cv::Mat & descriptorsIn, bool byteToFloat)
433 {
434  if(byteToFloat)
435  {
436  // Old approach
437  cv::Mat descriptorsOut;
438  descriptorsIn.convertTo(descriptorsOut, CV_8UC1);
439  return descriptorsOut;
440  }
441  else
442  {
443  // New approach
444  UASSERT(descriptorsIn.type() == CV_32FC1 && descriptorsIn.cols % 8 == 0);
445  cv::Mat descriptorsOut(descriptorsIn.rows, descriptorsIn.cols/8, CV_8UC1);
446  for(int i=0; i<descriptorsIn.rows; ++i)
447  {
448  const float * ptrIn = descriptorsIn.ptr<float>(i);
449  unsigned char * ptrOut = descriptorsOut.ptr(i);
450  for(int j=0; j<descriptorsOut.cols; ++j)
451  {
452  int jo = j*8;
453  ptrOut[j] =
454  (unsigned char)(ptrIn[jo] == 0?0:1) |
455  (ptrIn[jo+1] == 0?0:(1<<1)) |
456  (ptrIn[jo+2] == 0?0:(1<<2)) |
457  (ptrIn[jo+3] == 0?0:(1<<3)) |
458  (ptrIn[jo+4] == 0?0:(1<<4)) |
459  (ptrIn[jo+5] == 0?0:(1<<5)) |
460  (ptrIn[jo+6] == 0?0:(1<<6)) |
461  (ptrIn[jo+7] == 0?0:(1<<7));
462  }
463  }
464  return descriptorsOut;
465  }
466 }
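
Illustrative sketch (not part of the original file): round-tripping a binary descriptor through the two helpers above, assuming OpenCV and the rtabmap headers are available. With byteToFloat=false each byte is expanded into 8 floats of 0/1, which is why Hamming distance on the original bytes becomes an L1 distance on the expanded vectors.

#include <opencv2/core.hpp>
#include <rtabmap/core/VWDictionary.h>

void convertRoundTripExample()
{
	// A fake 32-byte binary descriptor (ORB-like): 1 row, 32 CV_8U columns.
	cv::Mat bin(1, 32, CV_8UC1);
	cv::randu(bin, 0, 256);

	// Expand to 1x256 CV_32F, one 0.0/1.0 entry per bit.
	cv::Mat asFloat = rtabmap::VWDictionary::convertBinTo32F(bin, false);

	// Pack the bits back; the round trip recovers the original bytes exactly.
	cv::Mat packed = rtabmap::VWDictionary::convert32FToBin(asFloat, false);
	CV_Assert(cv::countNonZero(bin != packed) == 0);
}
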
467 
468 void VWDictionary::update()
469 {
470  ULOGGER_DEBUG("incremental=%d", _incrementalDictionary?1:0);
471  if(!_incrementalDictionary)
472  {
473  // reload the fixed dictionary if it has been cleared or not yet initialized
474  this->setFixedDictionary(_newDictionaryPath);
475 
476  if(!_dataTree.empty() || _flannIndex->isBuilt())
477  {
478  // No need to update the search index if we
479  // use a fixed dictionary and the index is
480  // already built
481  return;
482  }
483  }
484 
485  if(_notIndexedWords.size() || _visualWords.size() == 0 || _removedIndexedWords.size())
486  {
487  if(_incrementalFlann &&
488  _strategy < kNNBruteForce &&
489  _visualWords.size())
490  {
491  ULOGGER_DEBUG("Incremental FLANN: Removing %d words...", (int)_removedIndexedWords.size());
492  for(std::set<int>::iterator iter=_removedIndexedWords.begin(); iter!=_removedIndexedWords.end(); ++iter)
493  {
494  UASSERT(uContains(_mapIdIndex, *iter));
495  UASSERT(uContains(_mapIndexId, _mapIdIndex.at(*iter)));
496  _flannIndex->removePoint(_mapIdIndex.at(*iter));
497  _mapIndexId.erase(_mapIdIndex.at(*iter));
498  _mapIdIndex.erase(*iter);
499  }
500  ULOGGER_DEBUG("Incremental FLANN: Removing %d words... done!", (int)_removedIndexedWords.size());
501 
502  if(_notIndexedWords.size())
503  {
504  ULOGGER_DEBUG("Incremental FLANN: Inserting %d words...", (int)_notIndexedWords.size());
505  for(std::set<int>::iterator iter=_notIndexedWords.begin(); iter!=_notIndexedWords.end(); ++iter)
506  {
507  VisualWord * w = _visualWords.at(*iter);
508  UASSERT(w);
509 
510  cv::Mat descriptor;
511  if(w->getDescriptor().type() == CV_8U)
512  {
513  useDistanceL1_ = true;
514  if(_strategy == kNNFlannKdTree)
515  {
516  descriptor = convertBinTo32F(w->getDescriptor(), _byteToFloat);
517  }
518  else
519  {
520  descriptor = w->getDescriptor();
521  }
522  }
523  else
524  {
525  descriptor = w->getDescriptor();
526  }
527 
528  int index = 0;
529  if(!_flannIndex->isBuilt())
530  {
531  UDEBUG("Building FLANN index...");
532  switch(_strategy)
533  {
534  case kNNFlannNaive:
535  _flannIndex->buildLinearIndex(descriptor, useDistanceL1_, _rebalancingFactor);
536  break;
537  case kNNFlannKdTree:
538  UASSERT_MSG(descriptor.type() == CV_32F, "To use KdTree dictionary, float descriptors are required!");
539  _flannIndex->buildKDTreeIndex(descriptor, KDTREE_SIZE, useDistanceL1_, _rebalancingFactor);
540  break;
541  case kNNFlannLSH:
542  UASSERT_MSG(descriptor.type() == CV_8U, "To use LSH dictionary, binary descriptors are required!");
543  _flannIndex->buildLSHIndex(descriptor, 12, 20, 2, _rebalancingFactor);
544  break;
545  default:
546  UFATAL("Not supposed to be here!");
547  break;
548  }
549  UDEBUG("Building FLANN index... done!");
550  }
551  else
552  {
553  UASSERT(descriptor.type() == _flannIndex->featuresType());
554  UASSERT(descriptor.cols == _flannIndex->featuresDim());
555  UASSERT(descriptor.rows == 1);
556  index = _flannIndex->addPoints(descriptor).front();
557  }
558  std::pair<std::map<int, int>::iterator, bool> inserted;
559  inserted = _mapIndexId.insert(std::pair<int, int>(index, w->id()));
560  UASSERT(inserted.second);
561  inserted = _mapIdIndex.insert(std::pair<int, int>(w->id(), index));
562  UASSERT(inserted.second);
563  }
564  ULOGGER_DEBUG("Incremental FLANN: Inserting %d words... done!", (int)_notIndexedWords.size());
565  }
566  }
567  else if(_strategy >= kNNBruteForce &&
568  _notIndexedWords.size() &&
569  _removedIndexedWords.size() == 0 &&
570  _visualWords.size() &&
571  _dataTree.rows)
572  {
573  //just add not indexed words
574  int i = _dataTree.rows;
575  _dataTree.reserve(_dataTree.rows + _notIndexedWords.size());
576  for(std::set<int>::iterator iter=_notIndexedWords.begin(); iter!=_notIndexedWords.end(); ++iter)
577  {
578  VisualWord * w = _visualWords.at(*iter);
579  UASSERT(w);
580  UASSERT(w->getDescriptor().cols == _dataTree.cols);
581  UASSERT(w->getDescriptor().type() == _dataTree.type());
582  _dataTree.push_back(w->getDescriptor());
583  _mapIndexId.insert(_mapIndexId.end(), std::pair<int, int>(i, w->id()));
584  std::pair<std::map<int, int>::iterator, bool> inserted = _mapIdIndex.insert(std::pair<int, int>(w->id(), i));
585  UASSERT(inserted.second);
586  ++i;
587  }
588  }
589  else
590  {
591  _mapIndexId.clear();
592  _mapIdIndex.clear();
593  _dataTree = cv::Mat();
594  _flannIndex->release();
595 
596  if(_visualWords.size())
597  {
598  UTimer timer;
599  timer.start();
600 
601  int dim = _visualWords.begin()->second->getDescriptor().cols;
602  int type;
603  if(_visualWords.begin()->second->getDescriptor().type() == CV_8U)
604  {
605  useDistanceL1_ = true;
606  if(_strategy == kNNFlannKdTree)
607  {
608  type = CV_32F;
609  if(!_byteToFloat)
610  {
611  dim *= 8;
612  }
613  }
614  else
615  {
616  type = _visualWords.begin()->second->getDescriptor().type();
617  }
618  }
619  else
620  {
621  type = _visualWords.begin()->second->getDescriptor().type();
622  }
623 
624  UASSERT(type == CV_32F || type == CV_8U);
625  UASSERT(dim > 0);
626 
627  // Create the data matrix
628  _dataTree = cv::Mat(_visualWords.size(), dim, type); // SURF descriptors are CV_32F
629  std::map<int, VisualWord*>::const_iterator iter = _visualWords.begin();
630  for(unsigned int i=0; i < _visualWords.size(); ++i, ++iter)
631  {
632  cv::Mat descriptor;
633  if(iter->second->getDescriptor().type() == CV_8U)
634  {
635  if(_strategy == kNNFlannKdTree)
636  {
637  descriptor = convertBinTo32F(iter->second->getDescriptor(), _byteToFloat);
638  }
639  else
640  {
641  descriptor = iter->second->getDescriptor();
642  }
643  }
644  else
645  {
646  descriptor = iter->second->getDescriptor();
647  }
648 
649  UASSERT_MSG(descriptor.type() == type, uFormat("%d vs %d", descriptor.type(), type).c_str());
650  UASSERT_MSG(descriptor.cols == dim, uFormat("%d vs %d", descriptor.cols, dim).c_str());
651 
652  descriptor.copyTo(_dataTree.row(i));
653  _mapIndexId.insert(_mapIndexId.end(), std::pair<int, int>(i, iter->second->id()));
654  _mapIdIndex.insert(_mapIdIndex.end(), std::pair<int, int>(iter->second->id(), i));
655  }
656 
657  ULOGGER_DEBUG("_mapIndexId.size() = %d, words.size()=%d, _dim=%d",_mapIndexId.size(), _visualWords.size(), dim);
658  ULOGGER_DEBUG("copying data = %f s", timer.ticks());
659 
660  switch(_strategy)
661  {
662  case kNNFlannNaive:
663  _flannIndex->buildLinearIndex(_dataTree, useDistanceL1_, _rebalancingFactor);
664  break;
665  case kNNFlannKdTree:
666  UASSERT_MSG(type == CV_32F, "To use KdTree dictionary, float descriptors are required!");
667  _flannIndex->buildKDTreeIndex(_dataTree, KDTREE_SIZE, useDistanceL1_, _rebalancingFactor);
668  break;
669  case kNNFlannLSH:
670  UASSERT_MSG(type == CV_8U, "To use LSH dictionary, binary descriptors are required!");
671  _flannIndex->buildLSHIndex(_dataTree, 12, 20, 2, _rebalancingFactor);
672  break;
673  default:
674  break;
675  }
676 
677  ULOGGER_DEBUG("Time to create kd tree = %f s", timer.ticks());
678  }
679  }
680  UDEBUG("Dictionary updated! (size=%d added=%d removed=%d)",
681  _dataTree.rows, _notIndexedWords.size(), _removedIndexedWords.size());
682  }
683  else
684  {
685  UDEBUG("Dictionary has not changed, so no need to update it! (size=%d)", _dataTree.rows);
686  }
687  _notIndexedWords.clear();
688  _removedIndexedWords.clear();
689  UDEBUG("");
690 }
691 
692 void VWDictionary::clear(bool printWarningsIfNotEmpty)
693 {
694  ULOGGER_DEBUG("");
695  if(printWarningsIfNotEmpty)
696  {
697  if(_visualWords.size())
698  {
699  UWARN("Visual dictionary should already be empty here (%d words still in dictionary).", (int)_visualWords.size());
700  }
701  if(_notIndexedWords.size())
702  {
703  UWARN("Not indexed words should be empty here (%d words still not indexed)", (int)_notIndexedWords.size());
704  }
705  }
706  for(std::map<int, VisualWord *>::iterator i=_visualWords.begin(); i!=_visualWords.end(); ++i)
707  {
708  delete (*i).second;
709  }
710  _visualWords.clear();
711  _notIndexedWords.clear();
712  _removedIndexedWords.clear();
713  _totalActiveReferences = 0;
714  _lastWordId = 0;
715  _dataTree = cv::Mat();
716  _mapIndexId.clear();
717  _mapIdIndex.clear();
718  _unusedWords.clear();
719  _flannIndex->release();
720  useDistanceL1_ = false;
721 }
722 
723 int VWDictionary::getNextId()
724 {
725  return ++_lastWordId;
726 }
727 
728 void VWDictionary::addWordRef(int wordId, int signatureId)
729 {
730  VisualWord * vw = 0;
731  vw = uValue(_visualWords, wordId, vw);
732  if(vw)
733  {
734  vw->addRef(signatureId);
735  ++_totalActiveReferences;
736 
737  _unusedWords.erase(vw->id());
738  }
739  else
740  {
741  UERROR("Word %d not found (dict size=%d)", wordId, (int)_visualWords.size());
742  }
743 }
744 
745 void VWDictionary::removeAllWordRef(int wordId, int signatureId)
746 {
747  VisualWord * vw = 0;
748  vw = uValue(_visualWords, wordId, vw);
749  if(vw)
750  {
751  _totalActiveReferences -= vw->removeAllRef(signatureId);
752  if(vw->getReferences().size() == 0)
753  {
754  _unusedWords.insert(std::pair<int, VisualWord*>(vw->id(), vw));
755  }
756  }
757 }
758 
759 std::list<int> VWDictionary::addNewWords(
760  const cv::Mat & descriptorsIn,
761  int signatureId)
762 {
763  UDEBUG("id=%d descriptors=%d", signatureId, descriptorsIn.rows);
764  UTimer timer;
765  std::list<int> wordIds;
766  if(descriptorsIn.rows == 0 || descriptorsIn.cols == 0)
767  {
768  UERROR("Descriptors size is null!");
769  return wordIds;
770  }
771 
772  if(!_incrementalDictionary && _visualWords.empty())
773  {
774  UERROR("Dictionary mode is set to fixed but no words are in it!");
775  return wordIds;
776  }
777 
778  // verify we have the same features
779  int dim = 0;
780  int type = -1;
781  if(_visualWords.size())
782  {
783  dim = _visualWords.begin()->second->getDescriptor().cols;
784  type = _visualWords.begin()->second->getDescriptor().type();
785  UASSERT(type == CV_32F || type == CV_8U);
786  }
787  if(dim && dim != descriptorsIn.cols)
788  {
789  UERROR("Descriptors (size=%d) are not the same size as already added words in dictionary(size=%d)", descriptorsIn.cols, dim);
790  return wordIds;
791  }
792  if(type>=0 && type != descriptorsIn.type())
793  {
794  UERROR("Descriptors (type=%d) are not the same type as already added words in dictionary(type=%d)", descriptorsIn.type(), type);
795  return wordIds;
796  }
797 
798  // now compare with the actual index
799  cv::Mat descriptors;
800  if(descriptorsIn.type() == CV_8U)
801  {
802  useDistanceL1_ = true;
803  if(_strategy == kNNFlannKdTree)
804  {
805  descriptors = convertBinTo32F(descriptorsIn, _byteToFloat);
806  }
807  else
808  {
809  descriptors = descriptorsIn;
810  }
811  }
812  else
813  {
814  descriptors = descriptorsIn;
815  }
816  dim = 0;
817  type = -1;
818  if(_dataTree.rows || _flannIndex->isBuilt())
819  {
820  dim = _flannIndex->isBuilt()?_flannIndex->featuresDim():_dataTree.cols;
821  type = _flannIndex->isBuilt()?_flannIndex->featuresType():_dataTree.type();
822  UASSERT(type == CV_32F || type == CV_8U);
823  }
824 
825  if(dim && dim != descriptors.cols)
826  {
827  UERROR("Descriptors (size=%d) are not the same size as already added words in dictionary(size=%d)", descriptors.cols, dim);
828  return wordIds;
829  }
830 
831  if(type>=0 && type != descriptors.type())
832  {
833  UERROR("Descriptors (type=%d) are not the same type as already added words in dictionary(type=%d)", descriptors.type(), type);
834  return wordIds;
835  }
836 
837  int dupWordsCountFromDict= 0;
838  int dupWordsCountFromLast= 0;
839 
840  unsigned int k=2; // k nearest neighbors
841 
842  cv::Mat newWords;
843  std::vector<int> newWordsId;
844 
845  cv::Mat results;
846  cv::Mat dists;
847  std::vector<std::vector<cv::DMatch> > matches;
848  bool bruteForce = false;
849  bool isL2NotSqr = false;
850 
851  UTimer timerLocal;
852  timerLocal.start();
853 
854  if(_flannIndex->isBuilt() || (!_dataTree.empty() && _dataTree.rows >= (int)k))
855  {
856  //Find nearest neighbors
857  UDEBUG("newPts.total()=%d ", descriptors.rows);
858 
859  if(_strategy < kNNBruteForce)
860  {
861  _flannIndex->knnSearch(descriptors, results, dists, k, KNN_CHECKS);
862  }
863  else if(_strategy == kNNBruteForce)
864  {
865  bruteForce = true;
866  cv::BFMatcher matcher(descriptors.type()==CV_8U?cv::NORM_HAMMING:cv::NORM_L2SQR);
867  matcher.knnMatch(descriptors, _dataTree, matches, k);
868  }
869  else if(_strategy == kNNBruteForceGPU)
870  {
871  bruteForce = true;
872 #if CV_MAJOR_VERSION < 3
873 #ifdef HAVE_OPENCV_GPU
874  cv::gpu::GpuMat newDescriptorsGpu(descriptors);
875  cv::gpu::GpuMat lastDescriptorsGpu(_dataTree);
876  if(descriptors.type()==CV_8U)
877  {
878  cv::gpu::BruteForceMatcher_GPU<cv::Hamming> gpuMatcher;
879  gpuMatcher.knnMatch(newDescriptorsGpu, lastDescriptorsGpu, matches, k);
880  }
881  else
882  {
883  cv::gpu::BruteForceMatcher_GPU<cv::L2<float> > gpuMatcher;
884  gpuMatcher.knnMatch(newDescriptorsGpu, lastDescriptorsGpu, matches, k);
885  isL2NotSqr = true;
886  }
887 #else
888  UERROR("Cannot use brute force GPU matching because OpenCV is not built with the gpu module.");
889 #endif
890 #else
891 #ifdef HAVE_OPENCV_CUDAFEATURES2D
892  cv::cuda::GpuMat newDescriptorsGpu(descriptors);
893  cv::cuda::GpuMat lastDescriptorsGpu(_dataTree);
894  cv::Ptr<cv::cuda::DescriptorMatcher> gpuMatcher;
895  if(descriptors.type()==CV_8U)
896  {
897  gpuMatcher = cv::cuda::DescriptorMatcher::createBFMatcher(cv::NORM_HAMMING);
898  gpuMatcher->knnMatch(newDescriptorsGpu, lastDescriptorsGpu, matches, k);
899  }
900  else
901  {
902  gpuMatcher = cv::cuda::DescriptorMatcher::createBFMatcher(cv::NORM_L2);
903  gpuMatcher->knnMatch(newDescriptorsGpu, lastDescriptorsGpu, matches, k);
904  isL2NotSqr = true;
905  }
906 #else
907  UERROR("Cannot use brute force GPU matching because OpenCV is not built with the cudafeatures2d module.");
908 #endif
909 #endif
910  }
911  else
912  {
913  UFATAL("");
914  }
915 
916  // In case of binary descriptors
917  if(dists.type() == CV_32S)
918  {
919  cv::Mat temp;
920  dists.convertTo(temp, CV_32F);
921  dists = temp;
922  }
923 
924  UDEBUG("Time to find nn = %f s", timerLocal.ticks());
925  }
926 
927  // Process results
928  for(int i = 0; i < descriptors.rows; ++i)
929  {
930  std::multimap<float, int> fullResults; // Contains results from the kd-tree search and the naive search in new words
931  if(!bruteForce && dists.cols)
932  {
933  for(int j=0; j<dists.cols; ++j)
934  {
935  float d = dists.at<float>(i,j);
936  int index;
937  if (sizeof(size_t) == 8)
938  {
939  index = *((size_t*)&results.at<double>(i, j));
940  }
941  else
942  {
943  index = *((size_t*)&results.at<int>(i, j));
944  }
945  int id = uValue(_mapIndexId, index);
946  if(d >= 0.0f && id != 0)
947  {
948  fullResults.insert(std::pair<float, int>(d, id));
949  }
950  else
951  {
952  break;
953  }
954  }
955  }
956  else if(bruteForce && matches.size())
957  {
958  for(unsigned int j=0; j<matches.at(i).size(); ++j)
959  {
960  float d = matches.at(i).at(j).distance;
961  int id = uValue(_mapIndexId, matches.at(i).at(j).trainIdx);
962  if(d >= 0.0f && id != 0)
963  {
964  if(isL2NotSqr)
965  {
966  // Make it compatible with L2SQR format of flann
967  d*=d;
968  }
969  fullResults.insert(std::pair<float, int>(d, id));
970  }
971  else
972  {
973  break;
974  }
975  }
976  }
977 
978  // Check if this descriptor matches with a word from the last signature (a word not already added to the tree)
979  if(_newWordsComparedTogether && newWords.rows)
980  {
981  std::vector<std::vector<cv::DMatch> > matchesNewWords;
982  cv::BFMatcher matcher(descriptors.type()==CV_8U?cv::NORM_HAMMING:useDistanceL1_?cv::NORM_L1:cv::NORM_L2SQR);
983  UASSERT(descriptors.cols == newWords.cols && descriptors.type() == newWords.type());
984  matcher.knnMatch(descriptors.row(i), newWords, matchesNewWords, newWords.rows>1?2:1);
985  UASSERT(matchesNewWords.size() == 1);
986  for(unsigned int j=0; j<matchesNewWords.at(0).size(); ++j)
987  {
988  float d = matchesNewWords.at(0).at(j).distance;
989  int id = newWordsId[matchesNewWords.at(0).at(j).trainIdx];
990  if(d >= 0.0f && id != 0)
991  {
992  fullResults.insert(std::pair<float, int>(d, id));
993  }
994  else
995  {
996  break;
997  }
998  }
999  }
1000 
1001  if(_incrementalDictionary)
1002  {
1003  bool badDist = false;
1004  if(fullResults.size() == 0)
1005  {
1006  badDist = true;
1007  }
1008  if(!badDist)
1009  {
1010  if(fullResults.size() >= 2)
1011  {
1012  // Apply NNDR
1013  if(fullResults.begin()->first > _nndrRatio * (++fullResults.begin())->first)
1014  {
1015  badDist = true; // Rejected
1016  }
1017  }
1018  else
1019  {
1020  badDist = true; // Rejected
1021  }
1022  }
1023 
1024  if(badDist)
1025  {
1026  // use original descriptor
1027  VisualWord * vw = new VisualWord(getNextId(), descriptorsIn.row(i), signatureId);
1028  _visualWords.insert(_visualWords.end(), std::pair<int, VisualWord *>(vw->id(), vw));
1029  _notIndexedWords.insert(_notIndexedWords.end(), vw->id());
1030  newWords.push_back(descriptors.row(i));
1031  newWordsId.push_back(vw->id());
1032  wordIds.push_back(vw->id());
1033  UASSERT(vw->id()>0);
1034  }
1035  else
1036  {
1037  if(_notIndexedWords.find(fullResults.begin()->second) != _notIndexedWords.end())
1038  {
1039  ++dupWordsCountFromLast;
1040  }
1041  else
1042  {
1043  ++dupWordsCountFromDict;
1044  }
1045 
1046  this->addWordRef(fullResults.begin()->second, signatureId);
1047  wordIds.push_back(fullResults.begin()->second);
1048  }
1049  }
1050  else if(fullResults.size())
1051  {
1052  // If the dictionary is not incremental, just take the nearest word
1053  ++dupWordsCountFromDict;
1054  this->addWordRef(fullResults.begin()->second, signatureId);
1055  wordIds.push_back(fullResults.begin()->second);
1056  UASSERT(fullResults.begin()->second>0);
1057  }
1058  }
1059  ULOGGER_DEBUG("naive search and add ref/words time = %f s", timerLocal.ticks());
1060 
1061  ULOGGER_DEBUG("%d new words added...", _notIndexedWords.size());
1062  ULOGGER_DEBUG("%d duplicated words added (from current image = %d)...",
1063  dupWordsCountFromDict+dupWordsCountFromLast, dupWordsCountFromLast);
1064  UDEBUG("total time %fs", timer.ticks());
1065 
1066  _totalActiveReferences += _notIndexedWords.size();
1067  return wordIds;
1068 }
1069 
1070 std::vector<int> VWDictionary::findNN(const std::list<VisualWord *> & vws) const
1071 {
1072  UTimer timer;
1073  timer.start();
1074 
1075  if(_visualWords.size() && vws.size())
1076  {
1077  int type = (*vws.begin())->getDescriptor().type();
1078  int dim = (*vws.begin())->getDescriptor().cols;
1079 
1080  if(dim != _visualWords.begin()->second->getDescriptor().cols)
1081  {
1082  UERROR("Descriptors (size=%d) are not the same size as already added words in dictionary(size=%d)", (*vws.begin())->getDescriptor().cols, dim);
1083  return std::vector<int>(vws.size(), 0);
1084  }
1085 
1086  if(type != _visualWords.begin()->second->getDescriptor().type())
1087  {
1088  UERROR("Descriptors (type=%d) are not the same type as already added words in dictionary(type=%d)", (*vws.begin())->getDescriptor().type(), type);
1089  return std::vector<int>(vws.size(), 0);
1090  }
1091 
1092  // fill the request matrix
1093  int index = 0;
1094  VisualWord * vw;
1095  cv::Mat query(vws.size(), dim, type);
1096  for(std::list<VisualWord *>::const_iterator iter=vws.begin(); iter!=vws.end(); ++iter, ++index)
1097  {
1098  vw = *iter;
1099  UASSERT(vw);
1100 
1101  UASSERT(vw->getDescriptor().cols == dim);
1102  UASSERT(vw->getDescriptor().type() == type);
1103 
1104  vw->getDescriptor().copyTo(query.row(index));
1105  }
1106  ULOGGER_DEBUG("Preparation time = %fs", timer.ticks());
1107 
1108  return findNN(query);
1109  }
1110  return std::vector<int>(vws.size(), 0);
1111 }
1112 std::vector<int> VWDictionary::findNN(const cv::Mat & queryIn) const
1113 {
1114  UTimer timer;
1115  timer.start();
1116  std::vector<int> resultIds(queryIn.rows, 0);
1117  unsigned int k=2; // k nearest neighbor
1118 
1119  if(_visualWords.size() && queryIn.rows)
1120  {
1121  // verify we have the same features
1122  int dim = _visualWords.begin()->second->getDescriptor().cols;
1123  int type = _visualWords.begin()->second->getDescriptor().type();
1124  UASSERT(type == CV_32F || type == CV_8U);
1125 
1126  if(dim != queryIn.cols)
1127  {
1128  UERROR("Descriptors (size=%d) are not the same size as already added words in dictionary(size=%d)", queryIn.cols, dim);
1129  return resultIds;
1130  }
1131  if(type != queryIn.type())
1132  {
1133  UERROR("Descriptors (type=%d) are not the same type as already added words in dictionary(type=%d)", queryIn.type(), type);
1134  return resultIds;
1135  }
1136 
1137  // now compare with the actual index
1138  cv::Mat query;
1139  if(queryIn.type() == CV_8U)
1140  {
1141  if(_strategy == kNNFlannKdTree)
1142  {
1143  query = convertBinTo32F(queryIn, _byteToFloat);
1144  }
1145  else
1146  {
1147  query = queryIn;
1148  }
1149  }
1150  else
1151  {
1152  query = queryIn;
1153  }
1154  dim = 0;
1155  type = -1;
1156  if(_dataTree.rows || _flannIndex->isBuilt())
1157  {
1158  dim = _flannIndex->isBuilt()?_flannIndex->featuresDim():_dataTree.cols;
1159  type = _flannIndex->isBuilt()?_flannIndex->featuresType():_dataTree.type();
1160  UASSERT(type == CV_32F || type == CV_8U);
1161  }
1162 
1163  if(dim && dim != query.cols)
1164  {
1165  UERROR("Descriptors (size=%d) are not the same size as already added words in dictionary(size=%d)", query.cols, dim);
1166  return resultIds;
1167  }
1168 
1169  if(type>=0 && type != query.type())
1170  {
1171  UERROR("Descriptors (type=%d) are not the same type as already added words in dictionary(type=%d)", query.type(), type);
1172  return resultIds;
1173  }
1174 
1175  std::vector<std::vector<cv::DMatch> > matches;
1176  bool bruteForce = false;
1177  bool isL2NotSqr = false;
1178  cv::Mat results;
1179  cv::Mat dists;
1180 
1181  if(_flannIndex->isBuilt() || (!_dataTree.empty() && _dataTree.rows >= (int)k))
1182  {
1183  //Find nearest neighbors
1184  UDEBUG("query.rows=%d ", query.rows);
1185 
1186  if(_strategy < kNNBruteForce)
1187  {
1188  _flannIndex->knnSearch(query, results, dists, k, KNN_CHECKS);
1189  }
1190  else if(_strategy == kNNBruteForce)
1191  {
1192  bruteForce = true;
1193  cv::BFMatcher matcher(query.type()==CV_8U?cv::NORM_HAMMING:cv::NORM_L2SQR);
1194  matcher.knnMatch(query, _dataTree, matches, k);
1195  }
1196  else if(_strategy == kNNBruteForceGPU)
1197  {
1198  bruteForce = true;
1199 #if CV_MAJOR_VERSION < 3
1200 #ifdef HAVE_OPENCV_GPU
1201  cv::gpu::GpuMat newDescriptorsGpu(query);
1202  cv::gpu::GpuMat lastDescriptorsGpu(_dataTree);
1203  if(query.type()==CV_8U)
1204  {
1205  cv::gpu::BruteForceMatcher_GPU<cv::Hamming> gpuMatcher;
1206  gpuMatcher.knnMatch(newDescriptorsGpu, lastDescriptorsGpu, matches, k);
1207  }
1208  else
1209  {
1210  cv::gpu::BruteForceMatcher_GPU<cv::L2<float> > gpuMatcher;
1211  gpuMatcher.knnMatch(newDescriptorsGpu, lastDescriptorsGpu, matches, k);
1212  isL2NotSqr = true;
1213  }
1214 #else
1215  UERROR("Cannot use brute force GPU matching because OpenCV is not built with the gpu module.");
1216 #endif
1217 #else
1218 #ifdef HAVE_OPENCV_CUDAFEATURES2D
1219  cv::cuda::GpuMat newDescriptorsGpu(query);
1220  cv::cuda::GpuMat lastDescriptorsGpu(_dataTree);
1221  cv::cuda::GpuMat matchesGpu;
1222  cv::Ptr<cv::cuda::DescriptorMatcher> gpuMatcher;
1223  if(query.type()==CV_8U)
1224  {
1225  gpuMatcher = cv::cuda::DescriptorMatcher::createBFMatcher(cv::NORM_HAMMING);
1226  gpuMatcher->knnMatchAsync(newDescriptorsGpu, lastDescriptorsGpu, matchesGpu, k);
1227  }
1228  else
1229  {
1230  gpuMatcher = cv::cuda::DescriptorMatcher::createBFMatcher(cv::NORM_L2);
1231  gpuMatcher->knnMatchAsync(newDescriptorsGpu, lastDescriptorsGpu, matchesGpu, k);
1232  isL2NotSqr = true;
1233  }
1234  gpuMatcher->knnMatchConvert(matchesGpu, matches);
1235 #else
1236  UERROR("Cannot use brute force GPU matching because OpenCV is not built with the cudafeatures2d module.");
1237 #endif
1238 #endif
1239  }
1240  else
1241  {
1242  UFATAL("");
1243  }
1244 
1245  // In case of binary descriptors
1246  if(dists.type() == CV_32S)
1247  {
1248  cv::Mat temp;
1249  dists.convertTo(temp, CV_32F);
1250  dists = temp;
1251  }
1252  }
1253  ULOGGER_DEBUG("Search dictionary time = %fs", timer.ticks());
1254 
1255  std::map<int, int> mapIndexIdNotIndexed;
1256  std::vector<std::vector<cv::DMatch> > matchesNotIndexed;
1257  if(!_notIndexedWords.empty())
1258  {
1259  cv::Mat dataNotIndexed = cv::Mat::zeros(_notIndexedWords.size(), query.cols, query.type());
1260  unsigned int index = 0;
1261  VisualWord * vw;
1262  for(std::set<int>::iterator iter = _notIndexedWords.begin(); iter != _notIndexedWords.end(); ++iter, ++index)
1263  {
1264  vw = _visualWords.at(*iter);
1265 
1266  cv::Mat descriptor;
1267  if(vw->getDescriptor().type() == CV_8U)
1268  {
1269  if(_strategy == kNNFlannKdTree)
1270  {
1271  descriptor = convertBinTo32F(vw->getDescriptor(), _byteToFloat);
1272  }
1273  else
1274  {
1275  descriptor = vw->getDescriptor();
1276  }
1277  }
1278  else
1279  {
1280  descriptor = vw->getDescriptor();
1281  }
1282  UASSERT(vw != 0 && descriptor.cols == query.cols && descriptor.type() == query.type());
1283  descriptor.copyTo(dataNotIndexed.row(index));
1284  mapIndexIdNotIndexed.insert(mapIndexIdNotIndexed.end(), std::pair<int,int>(index, vw->id()));
1285  }
1286  // Find nearest neighbor
1287  ULOGGER_DEBUG("Searching in words not indexed...");
1288  cv::BFMatcher matcher(query.type()==CV_8U?cv::NORM_HAMMING:useDistanceL1_?cv::NORM_L1:cv::NORM_L2SQR);
1289  matcher.knnMatch(query, dataNotIndexed, matchesNotIndexed, dataNotIndexed.rows>1?2:1);
1290  }
1291  ULOGGER_DEBUG("Search not yet indexed words time = %fs", timer.ticks());
1292 
1293  for(int i=0; i<query.rows; ++i)
1294  {
1295  std::multimap<float, int> fullResults; // Contains results from the kd-tree search [and the naive search in new words]
1296  if(!bruteForce && dists.cols)
1297  {
1298  for(int j=0; j<dists.cols; ++j)
1299  {
1300  float d = dists.at<float>(i,j);
1301  int index;
1302 
1303  if (sizeof(size_t) == 8)
1304  {
1305  index = *((size_t*)&results.at<double>(i, j));
1306  }
1307  else
1308  {
1309  index = *((size_t*)&results.at<int>(i, j));
1310  }
1311  int id = uValue(_mapIndexId, index);
1312  if(d >= 0.0f && id != 0)
1313  {
1314  fullResults.insert(std::pair<float, int>(d, id));
1315  }
1316  }
1317  }
1318  else if(bruteForce && matches.size())
1319  {
1320  for(unsigned int j=0; j<matches.at(i).size(); ++j)
1321  {
1322  float d = matches.at(i).at(j).distance;
1323  int id = uValue(_mapIndexId, matches.at(i).at(j).trainIdx);
1324  if(d >= 0.0f && id != 0)
1325  {
1326  if(isL2NotSqr)
1327  {
1328  // make it compatible with L2SQR from FLANN
1329  d*=d;
1330  }
1331  fullResults.insert(std::pair<float, int>(d, id));
1332  }
1333  }
1334  }
1335 
1336  // not indexed..
1337  if(matchesNotIndexed.size())
1338  {
1339  for(unsigned int j=0; j<matchesNotIndexed.at(i).size(); ++j)
1340  {
1341  float d = matchesNotIndexed.at(i).at(j).distance;
1342  int id = uValue(mapIndexIdNotIndexed, matchesNotIndexed.at(i).at(j).trainIdx);
1343  if(d >= 0.0f && id != 0)
1344  {
1345  fullResults.insert(std::pair<float, int>(d, id));
1346  }
1347  else
1348  {
1349  break;
1350  }
1351  }
1352  }
1353 
1354  if(_incrementalDictionary)
1355  {
1356  bool badDist = false;
1357  if(fullResults.size() == 0)
1358  {
1359  badDist = true;
1360  }
1361  if(!badDist)
1362  {
1363  if(fullResults.size() >= 2)
1364  {
1365  // Apply NNDR
1366  if(fullResults.begin()->first > _nndrRatio * (++fullResults.begin())->first)
1367  {
1368  badDist = true; // Rejected
1369  }
1370  }
1371  else
1372  {
1373  badDist = true; // Rejected
1374  }
1375  }
1376 
1377  if(!badDist)
1378  {
1379  resultIds[i] = fullResults.begin()->second; // Accepted
1380  }
1381  }
1382  else if(fullResults.size())
1383  {
1384  //Just take the nearest if the dictionary is not incremental
1385  resultIds[i] = fullResults.begin()->second; // Accepted
1386  }
1387  }
1388  ULOGGER_DEBUG("badDist check time = %fs", timer.ticks());
1389  }
1390  return resultIds;
1391 }
1392 
1393 void VWDictionary::addWord(VisualWord * vw)
1394 {
1395  if(vw)
1396  {
1397  _visualWords.insert(std::pair<int, VisualWord *>(vw->id(), vw));
1398  _notIndexedWords.insert(vw->id());
1399  if(vw->getReferences().size())
1400  {
1401  _totalActiveReferences += uSum(uValues(vw->getReferences()));
1402  }
1403  else
1404  {
1405  _unusedWords.insert(std::pair<int, VisualWord *>(vw->id(), vw));
1406  }
1407  if(_lastWordId < vw->id())
1408  {
1409  _lastWordId = vw->id();
1410  }
1411  }
1412 }
1413 
1414 const VisualWord * VWDictionary::getWord(int id) const
1415 {
1416  return uValue(_visualWords, id, (VisualWord *)0);
1417 }
1418 
1419 VisualWord * VWDictionary::getUnusedWord(int id) const
1420 {
1421  return uValue(_unusedWords, id, (VisualWord *)0);
1422 }
1423 
1424 std::vector<VisualWord*> VWDictionary::getUnusedWords() const
1425 {
1426  return uValues(_unusedWords);
1427 }
1428 
1429 std::vector<int> VWDictionary::getUnusedWordIds() const
1430 {
1431  return uKeys(_unusedWords);
1432 }
1433 
1434 void VWDictionary::removeWords(const std::vector<VisualWord*> & words)
1435 {
1436  //UDEBUG("Removing %d words from dictionary (current size=%d)", (int)words.size(), (int)_visualWords.size());
1437  for(unsigned int i=0; i<words.size(); ++i)
1438  {
1439  _visualWords.erase(words[i]->id());
1440  _unusedWords.erase(words[i]->id());
1441  if(_notIndexedWords.erase(words[i]->id()) == 0)
1442  {
1443  _removedIndexedWords.insert(words[i]->id());
1444  }
1445  }
1446 }
1447 
1448 void VWDictionary::deleteUnusedWords()
1449 {
1450  std::vector<VisualWord*> unusedWords = uValues(_unusedWords);
1451  removeWords(unusedWords);
1452  for(unsigned int i=0; i<unusedWords.size(); ++i)
1453  {
1454  delete unusedWords[i];
1455  }
1456 }
1457 
1458 void VWDictionary::exportDictionary(const char * fileNameReferences, const char * fileNameDescriptors) const
1459 {
1460  UDEBUG("");
1461  if(_visualWords.empty())
1462  {
1463  UWARN("Dictionary is empty, cannot export it!");
1464  return;
1465  }
1466  if(_visualWords.begin()->second->getDescriptor().type() != CV_32FC1)
1467  {
1468  UERROR("Exporting binary descriptors is not implemented!");
1469  return;
1470  }
1471  FILE* foutRef = 0;
1472  FILE* foutDesc = 0;
1473 #ifdef _MSC_VER
1474  fopen_s(&foutRef, fileNameReferences, "w");
1475  fopen_s(&foutDesc, fileNameDescriptors, "w");
1476 #else
1477  foutRef = fopen(fileNameReferences, "w");
1478  foutDesc = fopen(fileNameDescriptors, "w");
1479 #endif
1480 
1481  if(foutRef)
1482  {
1483  fprintf(foutRef, "WordID SignaturesID...\n");
1484  }
1485  if(foutDesc)
1486  {
1487  if(_visualWords.begin() == _visualWords.end())
1488  {
1489  fprintf(foutDesc, "WordID Descriptors...\n");
1490  }
1491  else
1492  {
1493  UDEBUG("");
1494  fprintf(foutDesc, "WordID Descriptors...%d\n", (*_visualWords.begin()).second->getDescriptor().cols);
1495  }
1496  }
1497 
1498  UDEBUG("Export %d words...", _visualWords.size());
1499  for(std::map<int, VisualWord *>::const_iterator iter=_visualWords.begin(); iter!=_visualWords.end(); ++iter)
1500  {
1501  // References
1502  if(foutRef)
1503  {
1504  fprintf(foutRef, "%d ", (*iter).first);
1505  const std::map<int, int> ref = (*iter).second->getReferences();
1506  for(std::map<int, int>::const_iterator jter=ref.begin(); jter!=ref.end(); ++jter)
1507  {
1508  for(int i=0; i<(*jter).second; ++i)
1509  {
1510  fprintf(foutRef, "%d ", (*jter).first);
1511  }
1512  }
1513  fprintf(foutRef, "\n");
1514  }
1515 
1516  //Descriptors
1517  if(foutDesc)
1518  {
1519  fprintf(foutDesc, "%d ", (*iter).first);
1520  const float * desc = (const float *)(*iter).second->getDescriptor().data;
1521  int dim = (*iter).second->getDescriptor().cols;
1522 
1523  for(int i=0; i<dim; i++)
1524  {
1525  fprintf(foutDesc, "%f ", desc[i]);
1526  }
1527  fprintf(foutDesc, "\n");
1528  }
1529  }
1530 
1531  if(foutRef)
1532  fclose(foutRef);
1533  if(foutDesc)
1534  fclose(foutDesc);
1535 }
1536 
1537 } // namespace rtabmap
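
Illustrative usage sketch (not part of the original file): quantizing descriptors into visual words with the class above, under default parameters; the descriptor sizes and signature ids below are made up for the example.

#include <list>
#include <opencv2/core.hpp>
#include <rtabmap/core/VWDictionary.h>

int main()
{
	rtabmap::VWDictionary dict; // incremental dictionary with default parameters

	// Fake CV_32F descriptors for two signatures (images); one row per feature.
	cv::Mat desc1(100, 64, CV_32FC1), desc2(100, 64, CV_32FC1);
	cv::randu(desc1, 0.f, 1.f);
	cv::randu(desc2, 0.f, 1.f);

	// Each descriptor either matches an existing word (NNDR test in addNewWords())
	// or creates a new one; one word id is returned per input row.
	std::list<int> ids1 = dict.addNewWords(desc1, 1);
	dict.update(); // index the words created for signature 1

	std::list<int> ids2 = dict.addNewWords(desc2, 2);
	dict.update();

	return 0;
}
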