Settings.cpp
1 /*
2 Copyright (c) 2011-2014, Mathieu Labbe - IntRoLab - Universite de Sherbrooke
3 All rights reserved.
4 
5 Redistribution and use in source and binary forms, with or without
6 modification, are permitted provided that the following conditions are met:
7  * Redistributions of source code must retain the above copyright
8  notice, this list of conditions and the following disclaimer.
9  * Redistributions in binary form must reproduce the above copyright
10  notice, this list of conditions and the following disclaimer in the
11  documentation and/or other materials provided with the distribution.
12  * Neither the name of the Universite de Sherbrooke nor the
13  names of its contributors may be used to endorse or promote products
14  derived from this software without specific prior written permission.
15 
16 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
17 ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
18 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
19 DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
20 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
21 (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
22 LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
23 ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
24 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
25 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
26 */
27 
28 #include "find_object/Camera.h"
29 #include "find_object/Settings.h"
30 #include "find_object/utilite/ULogger.h"
31 
32 #include <QtCore/QSettings>
33 #include <QtCore/QStringList>
34 #include <QtCore/QDir>
35 #include <stdio.h>
36 #include <opencv2/calib3d/calib3d.hpp>
37 #include <opencv2/opencv_modules.hpp>
38 
39 #if CV_MAJOR_VERSION < 3
40 #include <opencv2/gpu/gpu.hpp>
41 #define CVCUDA cv::gpu
42 #else
43 #include <opencv2/core/cuda.hpp>
44 #define CVCUDA cv::cuda
45 #ifdef HAVE_OPENCV_CUDAFEATURES2D
46 #include <opencv2/cudafeatures2d.hpp>
47 #endif
48 #endif
49 
50 #ifdef HAVE_OPENCV_NONFREE
51  #if CV_MAJOR_VERSION == 2 && CV_MINOR_VERSION >=4
52  #include <opencv2/nonfree/gpu.hpp>
53  #include <opencv2/nonfree/features2d.hpp>
54  #endif
55 #endif
56 #ifdef HAVE_OPENCV_XFEATURES2D
57  #include <opencv2/xfeatures2d.hpp>
58  #include <opencv2/xfeatures2d/cuda.hpp>
59 #endif
60 
61 namespace find_object {
62 
63 ParametersMap Settings::defaultParameters_;
64 ParametersMap Settings::parameters_;
65 ParametersType Settings::parametersType_;
66 DescriptionsMap Settings::descriptions_;
67 Settings Settings::dummyInit_;
68 QString Settings::iniPath_;
69 
70 QString Settings::workingDirectory()
71 {
72 #ifdef WIN32
73  return QString("%1/Documents/%2").arg(QDir::homePath()).arg(PROJECT_NAME);
74 #else
75  return QString("%1").arg(QDir::homePath());
76 #endif
77 }
78 
79 QString Settings::iniDefaultPath()
80 {
81 #ifdef WIN32
82  return QString("%1/Documents/%2/%3").arg(QDir::homePath()).arg(PROJECT_NAME).arg(Settings::iniDefaultFileName());
83 #else
84  return QString("%1/.%2/%3").arg(QDir::homePath()).arg(PROJECT_PREFIX).arg(Settings::iniDefaultFileName());
85 #endif
86 }
87 
88 QString Settings::iniPath()
89 {
90  if(!iniPath_.isNull())
91  {
92  return iniPath_;
93  }
94  return iniDefaultPath();
95 }
96 
97 void Settings::init(const QString & fileName)
98 {
99  iniPath_ = fileName;
100  loadSettings(iniPath_);
101 }
102 
103 void Settings::loadSettings(const QString & fileName)
104 {
105  QString path = fileName;
106  if(fileName.isEmpty())
107  {
108  path = iniPath();
109  }
110  if(!path.isEmpty())
111  {
112  QSettings ini(path, QSettings::IniFormat);
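 // Note: combo-box parameters are stored as "<index>:<Item0>;<Item1>;...";
 // the loop below keeps only the selected index (the first character) from the
 // INI file and takes the item list itself from the compiled-in defaults.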
113  for(ParametersMap::const_iterator iter = defaultParameters_.begin(); iter!=defaultParameters_.end(); ++iter)
114  {
115  const QString & key = iter.key();
116  QVariant value = ini.value(key, QVariant());
117  if(value.isValid())
118  {
119  QString str = value.toString();
120  if(str.contains(";"))
121  {
122  if(str.size() != getParameter(key).toString().size())
123  {
124  // If a string list is modified, update the value
125  // (assuming that index < 10... one character for index)
126  QChar index = str.at(0);
127  str = getParameter(key).toString();
128  str[0] = index.toLatin1();
129  value = QVariant(str);
130  UINFO("Updated list of parameter \"%s\"", key.toStdString().c_str());
131  }
132 #if FINDOBJECT_NONFREE == 0
133  QChar index = str.at(0);
134  if(key.compare(Settings::kFeature2D_1Detector()) == 0)
135  {
136  if(index == '5' || index == '7')
137  {
138  index = Settings::defaultFeature2D_1Detector().at(0);
139  int indexInt = Settings::defaultFeature2D_1Detector().split(':').first().toInt();
140  UWARN("Trying to set \"%s\" to SIFT/SURF but Find-Object isn't built "
141  "with the nonfree module from OpenCV. Keeping default combo value: %s.",
142  Settings::kFeature2D_1Detector().toStdString().c_str(),
143  Settings::defaultFeature2D_1Detector().split(':').last().split(";").at(indexInt).toStdString().c_str());
144  }
145  }
146  else if(key.compare(Settings::kFeature2D_2Descriptor()) == 0)
147  {
148  if(index == '2' || index == '3')
149  {
150  index = Settings::defaultFeature2D_2Descriptor().at(0);
151  int indexInt = Settings::defaultFeature2D_2Descriptor().split(':').first().toInt();
152  UWARN("Trying to set \"%s\" to SIFT/SURF but Find-Object isn't built "
153  "with the nonfree module from OpenCV. Keeping default combo value: %s.",
154  Settings::kFeature2D_2Descriptor().toStdString().c_str(),
155  Settings::defaultFeature2D_2Descriptor().split(':').last().split(";").at(indexInt).toStdString().c_str());
156  }
157  }
158  else if(key.compare(Settings::kNearestNeighbor_1Strategy()) == 0)
159  {
160  if(index <= '4')
161  {
162  index = Settings::defaultNearestNeighbor_1Strategy().at(0);
163  int indexInt = Settings::defaultNearestNeighbor_1Strategy().split(':').first().toInt();
164  UWARN("Trying to set \"%s\" to one FLANN approach but Find-Object isn't built "
165  "with the nonfree module from OpenCV and FLANN cannot be used "
166  "with binary descriptors. Keeping default combo value: %s.",
167  Settings::kNearestNeighbor_1Strategy().toStdString().c_str(),
168  Settings::defaultNearestNeighbor_1Strategy().split(':').last().split(";").at(indexInt).toStdString().c_str());
169  }
170  }
171  str = getParameter(key).toString();
172  str[0] = index.toLatin1();
173  value = QVariant(str);
174 #endif
175  }
176  setParameter(key, value);
177  }
178  }
179  UINFO("Settings loaded from %s.", path.toStdString().c_str());
180  }
181  else
182  {
184  UINFO("Settings set to defaults.");
185  }
186 
187  if(CVCUDA::getCudaEnabledDeviceCount() == 0)
188  {
189 #if FINDOBJECT_NONFREE == 1
190  Settings::setFeature2D_SURF_gpu(false);
191 #endif
192  Settings::setFeature2D_Fast_gpu(false);
193  Settings::setFeature2D_ORB_gpu(false);
194  Settings::setNearestNeighbor_BruteForce_gpu(false);
195  }
196 }
197 
198 void Settings::loadWindowSettings(QByteArray & windowGeometry, QByteArray & windowState, const QString & fileName)
199 {
200  QString path = fileName;
201  if(fileName.isEmpty())
202  {
203  path = iniPath();
204  }
205 
206  if(!path.isEmpty())
207  {
208  QSettings ini(path, QSettings::IniFormat);
209 
210  QVariant value = ini.value("windowGeometry", QVariant());
211  if(value.isValid())
212  {
213  windowGeometry = value.toByteArray();
214  }
215 
216  value = ini.value("windowState", QVariant());
217  if(value.isValid())
218  {
219  windowState = value.toByteArray();
220  }
221 
222  UINFO("Window settings loaded from %s", path.toStdString().c_str());
223  }
224 }
225 
226 void Settings::saveSettings(const QString & fileName)
227 {
228  QString path = fileName;
229  if(fileName.isEmpty())
230  {
231  path = iniPath();
232  }
233  if(!path.isEmpty())
234  {
235  QSettings ini(path, QSettings::IniFormat);
236  for(ParametersMap::const_iterator iter = parameters_.begin(); iter!=parameters_.end(); ++iter)
237  {
238  QString type = Settings::getParametersType().value(iter.key());
239  if(type.compare("float") == 0)
240  {
241  ini.setValue(iter.key(), QString::number(iter.value().toFloat(),'g',6));
242  }
243  else
244  {
245  ini.setValue(iter.key(), iter.value());
246  }
247  }
248  UINFO("Settings saved to %s", path.toStdString().c_str());
249  }
250 }
251 
252 void Settings::saveWindowSettings(const QByteArray & windowGeometry, const QByteArray & windowState, const QString & fileName)
253 {
254  QString path = fileName;
255  if(fileName.isEmpty())
256  {
257  path = iniPath();
258  }
259  if(!path.isEmpty())
260  {
261  QSettings ini(path, QSettings::IniFormat);
262  if(!windowGeometry.isEmpty())
263  {
264  ini.setValue("windowGeometry", windowGeometry);
265  }
266  if(!windowState.isEmpty())
267  {
268  ini.setValue("windowState", windowState);
269  }
270  UINFO("Window settings saved to %s", path.toStdString().c_str());
271  }
272 }
273 
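 // The GPUSURF, GPUFAST and GPUORB classes below wrap the CUDA implementations
 // (cv::gpu in OpenCV 2, cv::cuda in OpenCV 3+) behind the same Feature2D
 // interface used by the CPU detectors and extractors.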
274 #if FINDOBJECT_NONFREE == 1
275 class GPUSURF : public Feature2D
276 {
277 public:
278  GPUSURF(double hessianThreshold,
279  int nOctaves,
280  int nOctaveLayers,
281  bool extended,
282  float keypointsRatio,
283  bool upright) :
284  surf_(hessianThreshold,
285  nOctaves,
286  nOctaveLayers,
287  extended,
288  keypointsRatio,
289  upright)
290  {
291  }
292  virtual ~GPUSURF() {}
293 
294  virtual void detect(const cv::Mat & image,
295  std::vector<cv::KeyPoint> & keypoints,
296  const cv::Mat & mask = cv::Mat())
297  {
298  CVCUDA::GpuMat imgGpu(image);
299  CVCUDA::GpuMat maskGpu(mask);
300  try
301  {
302  surf_(imgGpu, maskGpu, keypoints);
303  }
304  catch(cv::Exception &e)
305  {
306  UERROR("GPUSURF error: %s \n(If something about layer_rows, parameter nOctaves=%d of SURF "
307  "is too high for the size of the image (%d,%d).)",
308  e.msg.c_str(),
309  surf_.nOctaves,
310  image.cols,
311  image.rows);
312  }
313  }
314 
315  virtual void compute( const cv::Mat& image,
316  std::vector<cv::KeyPoint>& keypoints,
317  cv::Mat& descriptors)
318  {
319  std::vector<float> d;
320  CVCUDA::GpuMat imgGpu(image);
321  CVCUDA::GpuMat descriptorsGPU;
322  try
323  {
324  surf_(imgGpu, CVCUDA::GpuMat(), keypoints, descriptorsGPU, true);
325  }
326  catch(cv::Exception &e)
327  {
328  UERROR("GPUSURF error: %s \n(If something about layer_rows, parameter nOctaves=%d of SURF "
329  "is too high for the size of the image (%d,%d).)",
330  e.msg.c_str(),
331  surf_.nOctaves,
332  image.cols,
333  image.rows);
334  }
335 
336  // Download descriptors
337  if (descriptorsGPU.empty())
338  descriptors = cv::Mat();
339  else
340  {
341  UASSERT(descriptorsGPU.type() == CV_32F);
342  descriptors = cv::Mat(descriptorsGPU.size(), CV_32F);
343  descriptorsGPU.download(descriptors);
344  }
345  }
346 
347  virtual void detectAndCompute( const cv::Mat& image,
348  std::vector<cv::KeyPoint>& keypoints,
349  cv::Mat& descriptors,
350  const cv::Mat & mask = cv::Mat())
351  {
352  std::vector<float> d;
353  CVCUDA::GpuMat imgGpu(image);
354  CVCUDA::GpuMat descriptorsGPU;
355  CVCUDA::GpuMat maskGpu(mask);
356  try
357  {
358  surf_(imgGpu, maskGpu, keypoints, descriptorsGPU, false);
359  }
360  catch(cv::Exception &e)
361  {
362  UERROR("GPUSURF error: %s \n(If something about layer_rows, parameter nOctaves=%d of SURF "
363  "is too high for the size of the image (%d,%d).)",
364  e.msg.c_str(),
365  surf_.nOctaves,
366  image.cols,
367  image.rows);
368  }
369 
370  // Download descriptors
371  if (descriptorsGPU.empty())
372  descriptors = cv::Mat();
373  else
374  {
375  UASSERT(descriptorsGPU.type() == CV_32F);
376  descriptors = cv::Mat(descriptorsGPU.size(), CV_32F);
377  descriptorsGPU.download(descriptors);
378  }
379  }
380 
381 private:
382 #if CV_MAJOR_VERSION < 3
383  CVCUDA::SURF_GPU surf_;
384 #else
385  CVCUDA::SURF_CUDA surf_;
386 #endif
387 };
388 #endif
389 
390 class GPUFAST : public Feature2D
391 {
392 public:
393  GPUFAST(int threshold=Settings::defaultFeature2D_Fast_threshold(),
394  bool nonmaxSuppression=Settings::defaultFeature2D_Fast_nonmaxSuppression(),
395 #if CV_MAJOR_VERSION < 3
396  double keypointsRatio=Settings::defaultFeature2D_Fast_keypointsRatio())
397  : fast_(threshold,
398  nonmaxSuppression,
399  keypointsRatio)
400 #else
401  int max_npoints=Settings::defaultFeature2D_Fast_maxNpoints())
402 #ifdef HAVE_OPENCV_CUDAFEATURES2D
403  : fast_(CVCUDA::FastFeatureDetector::create(
404  threshold,
405  nonmaxSuppression,
406  CVCUDA::FastFeatureDetector::TYPE_9_16,
407  max_npoints))
408 #endif
409 #endif
410  {
411  }
412  virtual ~GPUFAST() {}
413 
414  virtual void detect(const cv::Mat & image,
415  std::vector<cv::KeyPoint> & keypoints,
416  const cv::Mat & mask = cv::Mat())
417  {
418  CVCUDA::GpuMat imgGpu(image);
419  CVCUDA::GpuMat maskGpu(mask);
420 #if CV_MAJOR_VERSION < 3
421  fast_(imgGpu, maskGpu, keypoints);
422 #else
423 #ifdef HAVE_OPENCV_CUDAFEATURES2D
424  CVCUDA::GpuMat keypointsGpu(keypoints);
425  fast_->detectAsync(imgGpu, keypointsGpu, maskGpu);
426  fast_->convert(keypointsGpu, keypoints);
427 #endif
428 #endif
429  }
430  virtual void compute( const cv::Mat& image,
431  std::vector<cv::KeyPoint>& keypoints,
432  cv::Mat& descriptors)
433  {
434  UERROR("GPUFAST:computeDescriptors() Should not be used!");
435  }
436  virtual void detectAndCompute( const cv::Mat& image,
437  std::vector<cv::KeyPoint>& keypoints,
438  cv::Mat& descriptors,
439  const cv::Mat & mask = cv::Mat())
440  {
441  UERROR("GPUFAST:detectAndCompute() Should not be used!");
442  }
443 
444 private:
445 #if CV_MAJOR_VERSION < 3
446  CVCUDA::FAST_GPU fast_;
447 #else
448 #ifdef HAVE_OPENCV_CUDAFEATURES2D
449  cv::Ptr<CVCUDA::FastFeatureDetector> fast_;
450 #endif
451 #endif
452 };
453 
454 class GPUORB : public Feature2D
455 {
456 public:
457  GPUORB(int nFeatures = Settings::defaultFeature2D_ORB_nFeatures(),
458  float scaleFactor = Settings::defaultFeature2D_ORB_scaleFactor(),
459  int nLevels = Settings::defaultFeature2D_ORB_nLevels(),
460  int edgeThreshold = Settings::defaultFeature2D_ORB_edgeThreshold(),
461  int firstLevel = Settings::defaultFeature2D_ORB_firstLevel(),
462  int WTA_K = Settings::defaultFeature2D_ORB_WTA_K(),
463  int scoreType = Settings::defaultFeature2D_ORB_scoreType(),
464  int patchSize = Settings::defaultFeature2D_ORB_patchSize(),
465  int fastThreshold = Settings::defaultFeature2D_Fast_threshold(),
466 #if CV_MAJOR_VERSION < 3
467  bool fastNonmaxSuppression = Settings::defaultFeature2D_Fast_nonmaxSuppression())
468  : orb_(nFeatures,
469  scaleFactor,
470  nLevels,
471  edgeThreshold ,
472  firstLevel,
473  WTA_K,
474  scoreType,
475  patchSize)
476 #else
477  bool blurForDescriptor = Settings::defaultFeature2D_ORB_blurForDescriptor())
478 #ifdef HAVE_OPENCV_CUDAFEATURES2D
479  : orb_(CVCUDA::ORB::create(nFeatures,
480  scaleFactor,
481  nLevels,
482  edgeThreshold ,
483  firstLevel,
484  WTA_K,
485  scoreType,
486  patchSize,
487  fastThreshold,
488  blurForDescriptor))
489 #endif
490 #endif
491  {
492 #if CV_MAJOR_VERSION < 3
493  orb_.setFastParams(fastThreshold, fastNonmaxSuppression);
494 #endif
495  }
496  virtual ~GPUORB() {}
497 
498  virtual void detect(const cv::Mat & image,
499  std::vector<cv::KeyPoint> & keypoints,
500  const cv::Mat & mask = cv::Mat())
501  {
502 
503  CVCUDA::GpuMat imgGpu(image);
504  CVCUDA::GpuMat maskGpu(mask);
505 
506  try
507  {
508 #if CV_MAJOR_VERSION < 3
509  orb_(imgGpu, maskGpu, keypoints);
510 #else
511 #ifdef HAVE_OPENCV_CUDAFEATURES2D
512  CVCUDA::GpuMat keypointsGpu;
513  orb_->detectAsync(imgGpu, keypointsGpu, maskGpu);
514  orb_->convert(keypointsGpu, keypoints);
515 #endif
516 #endif
517  }
518  catch(cv::Exception &e)
519  {
520  UERROR("GPUORB error: %s \n(If something about matrix size, the image/object may be too small (%d,%d).)",
521  e.msg.c_str(),
522  image.cols,
523  image.rows);
524  }
525  }
526 
527  virtual void compute( const cv::Mat& image,
528  std::vector<cv::KeyPoint>& keypoints,
529  cv::Mat& descriptors)
530  {
531  std::vector<float> d;
532 
533  CVCUDA::GpuMat imgGpu(image);
534  CVCUDA::GpuMat descriptorsGPU;
535 
536  try
537  {
538 #if CV_MAJOR_VERSION < 3
539  orb_(imgGpu, CVCUDA::GpuMat(), keypoints, descriptorsGPU); // No option to use provided keypoints!?
540 #else
541 #ifdef HAVE_OPENCV_CUDAFEATURES2D
542  UERROR("OpenCV 3 ORB-GPU doesn't support extracting ORB descriptors from already extracted keypoints. "
543  "Use ORB as feature detector too or desactivate ORB-GPU.");
544  //orb_->computeAsync(imgGpu, keypoints, descriptorsGPU, true);
545 #endif
546 #endif
547  }
548  catch(cv::Exception &e)
549  {
550  UERROR("GPUORB error: %s \n(If something about matrix size, the image/object may be too small (%d,%d).)",
551  e.msg.c_str(),
552  image.cols,
553  image.rows);
554  }
555  // Download descriptors
556  if (descriptorsGPU.empty())
557  descriptors = cv::Mat();
558  else
559  {
560  UASSERT(descriptorsGPU.type() == CV_8U);
561  descriptors = cv::Mat(descriptorsGPU.size(), CV_8U);
562  descriptorsGPU.download(descriptors);
563  }
564  }
565 
566  virtual void detectAndCompute( const cv::Mat& image,
567  std::vector<cv::KeyPoint>& keypoints,
568  cv::Mat& descriptors,
569  const cv::Mat & mask = cv::Mat())
570  {
571  std::vector<float> d;
572 
573  CVCUDA::GpuMat imgGpu(image);
574  CVCUDA::GpuMat descriptorsGPU;
575  CVCUDA::GpuMat maskGpu(mask);
576 
577  try
578  {
579 #if CV_MAJOR_VERSION < 3
580  orb_(imgGpu, CVCUDA::GpuMat(), keypoints, descriptorsGPU); // No option to use provided keypoints!?
581 #else
582 #ifdef HAVE_OPENCV_CUDAFEATURES2D
583  CVCUDA::GpuMat keypointsGpu;
584  orb_->detectAndComputeAsync(imgGpu, maskGpu, keypointsGpu, descriptorsGPU, false);
585  orb_->convert(keypointsGpu, keypoints);
586 #endif
587 #endif
588  }
589  catch(cv::Exception &e)
590  {
591  UERROR("GPUORB error: %s \n(If something about matrix size, the image/object may be too small (%d,%d).)",
592  e.msg.c_str(),
593  image.cols,
594  image.rows);
595  }
596  // Download descriptors
597  if (descriptorsGPU.empty())
598  descriptors = cv::Mat();
599  else
600  {
601  UASSERT(descriptorsGPU.type() == CV_8U);
602  descriptors = cv::Mat(descriptorsGPU.size(), CV_8U);
603  descriptorsGPU.download(descriptors);
604  }
605  }
606 
607 private:
608 #if CV_MAJOR_VERSION < 3
609  CVCUDA::ORB_GPU orb_;
610 #else
611 #ifdef HAVE_OPENCV_CUDAFEATURES2D
612  cv::Ptr<CVCUDA::ORB> orb_;
613 #endif
614 #endif
615 };
616 
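 // Builds the keypoint detector selected by the Feature2D/1Detector parameter,
 // falling back to the default strategy when the chosen one is not available
 // in this build (nonfree, xfeatures2d or the OpenCV major version).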
617 Feature2D * Settings::createKeypointDetector()
618 {
619  Feature2D * feature2D = 0;
620  QString str = getFeature2D_1Detector();
621  QStringList split = str.split(':');
622  if(split.size()==2)
623  {
624  bool ok = false;
625  int index = split.first().toInt(&ok);
626  if(ok)
627  {
628  QStringList strategies = split.last().split(';');
629 
630  if(index>=0 && index<strategies.size())
631  {
632 
633 #if FINDOBJECT_NONFREE == 0
634  //check for nonfree stuff
635  if(strategies.at(index).compare("SIFT") == 0 ||
636  strategies.at(index).compare("SURF") == 0)
637  {
638  index = Settings::defaultFeature2D_1Detector().split(':').first().toInt();
639  UERROR("Find-Object is not built with OpenCV nonfree module so "
640  "SIFT/SURF cannot be used! Using default \"%s\" instead.",
641  strategies.at(index).toStdString().c_str());
642 
643  }
644 #endif
645 
646 #if CV_MAJOR_VERSION < 3
647  if(strategies.at(index).compare("AGAST") == 0 ||
648  strategies.at(index).compare("KAZE") == 0 ||
649  strategies.at(index).compare("AKAZE") == 0)
650  {
651  index = Settings::defaultFeature2D_1Detector().split(':').first().toInt();
652  UERROR("Find-Object is built with OpenCV 2 so "
653  "AGAST/KAZE/AKAZE cannot be used! Using default \"%s\" instead.",
654  strategies.at(index).toStdString().c_str());
655 
656  }
657 #else
658  if(strategies.at(index).compare("Dense") == 0)
659  {
660  index = Settings::defaultFeature2D_1Detector().split(':').first().toInt();
661  UERROR("Find-Object is built with OpenCV 3 so "
662  "Dense cannot be used! Using default \"%s\" instead.",
663  strategies.at(index).toStdString().c_str());
664 
665  }
666 #ifndef HAVE_OPENCV_XFEATURES2D
667  if(strategies.at(index).compare("Star") == 0)
668  {
669  index = Settings::defaultFeature2D_1Detector().split(':').first().toInt();
670  UERROR("Find-Object is not built with OpenCV xfeatures2d module so "
671  "Star cannot be used! Using default \"%s\" instead.",
672  strategies.at(index).toStdString().c_str());
673 
674  }
675 #endif
676 #endif
677 
678  if(strategies.at(index).compare("Dense") == 0)
679  {
680 #if CV_MAJOR_VERSION < 3
681  feature2D = new Feature2D(cv::Ptr<cv::FeatureDetector>(new cv::DenseFeatureDetector(
682  getFeature2D_Dense_initFeatureScale(),
683  getFeature2D_Dense_featureScaleLevels(),
684  getFeature2D_Dense_featureScaleMul(),
685  getFeature2D_Dense_initXyStep(),
686  getFeature2D_Dense_initImgBound(),
687  getFeature2D_Dense_varyXyStepWithScale(),
688  getFeature2D_Dense_varyImgBoundWithScale())));
689 #else
690  UWARN("Find-Object is not built with OpenCV 2 so Dense cannot be used!");
691 #endif
692  UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
693  }
694  else if(strategies.at(index).compare("Fast") == 0)
695  {
696  if(getFeature2D_Fast_gpu() && CVCUDA::getCudaEnabledDeviceCount())
697  {
698  feature2D = new GPUFAST(
699  getFeature2D_Fast_threshold(),
700  getFeature2D_Fast_nonmaxSuppression());
701  UDEBUG("type=%s GPU", strategies.at(index).toStdString().c_str());
702  }
703  else
704  {
705 #if CV_MAJOR_VERSION < 3
706  feature2D = new Feature2D(cv::Ptr<cv::FeatureDetector>(new cv::FastFeatureDetector(
707  getFeature2D_Fast_threshold(),
708  getFeature2D_Fast_nonmaxSuppression())));
709 #else
710  feature2D = new Feature2D(cv::FastFeatureDetector::create(
711  getFeature2D_Fast_threshold(),
712  getFeature2D_Fast_nonmaxSuppression()));
713 #endif
714  UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
715  }
716  }
717  else if(strategies.at(index).compare("AGAST") == 0)
718  {
719 #if CV_MAJOR_VERSION < 3
720  UWARN("Find-Object is not built with OpenCV 3 so AGAST cannot be used!");
721 #else
722  feature2D = new Feature2D(cv::AgastFeatureDetector::create(
723  getFeature2D_AGAST_threshold(),
724  getFeature2D_AGAST_nonmaxSuppression()));
725 #endif
726  UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
727  }
728  else if(strategies.at(index).compare("GFTT") == 0)
729  {
730 #if CV_MAJOR_VERSION < 3
731  feature2D = new Feature2D(cv::Ptr<cv::FeatureDetector>(new cv::GFTTDetector(
732  getFeature2D_GFTT_maxCorners(),
733  getFeature2D_GFTT_qualityLevel(),
734  getFeature2D_GFTT_minDistance(),
735  getFeature2D_GFTT_blockSize(),
736  getFeature2D_GFTT_useHarrisDetector(),
737  getFeature2D_GFTT_k())));
738 #else
739  feature2D = new Feature2D(cv::GFTTDetector::create(
740  getFeature2D_GFTT_maxCorners(),
741  getFeature2D_GFTT_qualityLevel(),
742  getFeature2D_GFTT_minDistance(),
743  getFeature2D_GFTT_blockSize(),
744  getFeature2D_GFTT_useHarrisDetector(),
745  getFeature2D_GFTT_k()));
746 #endif
747  UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
748  }
749  else if(strategies.at(index).compare("MSER") == 0)
750  {
751 #if CV_MAJOR_VERSION < 3
752  feature2D = new Feature2D(cv::Ptr<cv::FeatureDetector>(new cv::MSER(
753  getFeature2D_MSER_delta(),
754  getFeature2D_MSER_minArea(),
755  getFeature2D_MSER_maxArea(),
756  getFeature2D_MSER_maxVariation(),
757  getFeature2D_MSER_minDiversity(),
758  getFeature2D_MSER_maxEvolution(),
759  getFeature2D_MSER_areaThreshold(),
760  getFeature2D_MSER_minMargin(),
761  getFeature2D_MSER_edgeBlurSize())));
762 #else
763  feature2D = new Feature2D(cv::MSER::create(
764  getFeature2D_MSER_delta(),
765  getFeature2D_MSER_minArea(),
766  getFeature2D_MSER_maxArea(),
767  getFeature2D_MSER_maxVariation(),
768  getFeature2D_MSER_minDiversity(),
769  getFeature2D_MSER_maxEvolution(),
770  getFeature2D_MSER_areaThreshold(),
771  getFeature2D_MSER_minMargin(),
772  getFeature2D_MSER_edgeBlurSize()));
773 #endif
774  UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
775  }
776  else if(strategies.at(index).compare("ORB") == 0)
777  {
778  if(getFeature2D_ORB_gpu() && CVCUDA::getCudaEnabledDeviceCount())
779  {
780  feature2D = new GPUORB(
781  getFeature2D_ORB_nFeatures(),
782  getFeature2D_ORB_scaleFactor(),
783  getFeature2D_ORB_nLevels(),
784  getFeature2D_ORB_edgeThreshold(),
785  getFeature2D_ORB_firstLevel(),
786  getFeature2D_ORB_WTA_K(),
787  getFeature2D_ORB_scoreType(),
788  getFeature2D_ORB_patchSize(),
789  getFeature2D_Fast_threshold(),
790 #if CV_MAJOR_VERSION < 3
791  getFeature2D_Fast_nonmaxSuppression());
792 #else
793  getFeature2D_ORB_blurForDescriptor());
794 #endif
795  UDEBUG("type=%s (GPU)", strategies.at(index).toStdString().c_str());
796  }
797  else
798  {
799 #if CV_MAJOR_VERSION < 3
800  feature2D = new Feature2D(cv::Ptr<cv::Feature2D>(new cv::ORB(
801  getFeature2D_ORB_nFeatures(),
802  getFeature2D_ORB_scaleFactor(),
803  getFeature2D_ORB_nLevels(),
804  getFeature2D_ORB_edgeThreshold(),
805  getFeature2D_ORB_firstLevel(),
806  getFeature2D_ORB_WTA_K(),
807  getFeature2D_ORB_scoreType(),
808  getFeature2D_ORB_patchSize())));
809 #else
810  feature2D = new Feature2D(cv::ORB::create(
811  getFeature2D_ORB_nFeatures(),
812  getFeature2D_ORB_scaleFactor(),
813  getFeature2D_ORB_nLevels(),
814  getFeature2D_ORB_edgeThreshold(),
815  getFeature2D_ORB_firstLevel(),
816  getFeature2D_ORB_WTA_K(),
817  getFeature2D_ORB_scoreType(),
818  getFeature2D_ORB_patchSize(),
819  getFeature2D_Fast_threshold()));
820 #endif
821  UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
822  }
823  }
824  else if(strategies.at(index).compare("Star") == 0)
825  {
826 #if CV_MAJOR_VERSION < 3
827  feature2D = new Feature2D(cv::Ptr<cv::FeatureDetector>(new cv::StarFeatureDetector(
828  getFeature2D_Star_maxSize(),
829  getFeature2D_Star_responseThreshold(),
830  getFeature2D_Star_lineThresholdProjected(),
831  getFeature2D_Star_lineThresholdBinarized(),
832  getFeature2D_Star_suppressNonmaxSize())));
833 #else
834 #ifdef HAVE_OPENCV_XFEATURES2D
835  feature2D = new Feature2D(cv::xfeatures2d::StarDetector::create(
836  getFeature2D_Star_maxSize(),
837  getFeature2D_Star_responseThreshold(),
838  getFeature2D_Star_lineThresholdProjected(),
839  getFeature2D_Star_lineThresholdBinarized(),
840  getFeature2D_Star_suppressNonmaxSize()));
841 #else
842  UWARN("Find-Object is not built with OpenCV xfeatures2d module so Star cannot be used!");
843 #endif
844 #endif
845  UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
846  }
847  else if(strategies.at(index).compare("BRISK") == 0)
848  {
849 #if CV_MAJOR_VERSION < 3
850  feature2D = new Feature2D(cv::Ptr<cv::Feature2D>(new cv::BRISK(
851  getFeature2D_BRISK_thresh(),
852  getFeature2D_BRISK_octaves(),
853  getFeature2D_BRISK_patternScale())));
854 #else
855  feature2D = new Feature2D(cv::BRISK::create(
856  getFeature2D_BRISK_thresh(),
857  getFeature2D_BRISK_octaves(),
858  getFeature2D_BRISK_patternScale()));
859 #endif
860  UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
861  }
862  else if(strategies.at(index).compare("KAZE") == 0)
863  {
864 #if CV_MAJOR_VERSION < 3
865  UWARN("Find-Object is not built with OpenCV 3 so KAZE cannot be used!");
866 #else
867  feature2D = new Feature2D(cv::KAZE::create(
868  getFeature2D_KAZE_extended(),
869  getFeature2D_KAZE_upright(),
870  getFeature2D_KAZE_threshold(),
871  getFeature2D_KAZE_nOctaves(),
872  getFeature2D_KAZE_nOctaveLayers(),
873  cv::KAZE::DIFF_PM_G2)); // FIXME: make a parameter
874 #endif
875  UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
876  }
877  else if(strategies.at(index).compare("AKAZE") == 0)
878  {
879 #if CV_MAJOR_VERSION < 3
880  UWARN("Find-Object is not built with OpenCV 3 so AKAZE cannot be used!");
881 #else
882  feature2D = new Feature2D(cv::AKAZE::create(
883  cv::AKAZE::DESCRIPTOR_MLDB, // FIXME: make a parameter
884  getFeature2D_AKAZE_descriptorSize(),
885  getFeature2D_AKAZE_descriptorChannels(),
886  getFeature2D_AKAZE_threshold(),
887  getFeature2D_AKAZE_nOctaves(),
888  getFeature2D_AKAZE_nOctaveLayers(),
889  cv::KAZE::DIFF_PM_G2)); // FIXME: make a parameter
890 #endif
891  UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
892  }
893 #if FINDOBJECT_NONFREE == 1
894  else if(strategies.at(index).compare("SIFT") == 0)
895  {
896 #if CV_MAJOR_VERSION < 3
897  feature2D = new Feature2D(cv::Ptr<cv::Feature2D>(new cv::SIFT(
898  getFeature2D_SIFT_nfeatures(),
899  getFeature2D_SIFT_nOctaveLayers(),
900  getFeature2D_SIFT_contrastThreshold(),
901  getFeature2D_SIFT_edgeThreshold(),
902  getFeature2D_SIFT_sigma())));
903 #else
904  feature2D = new Feature2D(cv::xfeatures2d::SIFT::create(
905  getFeature2D_SIFT_nfeatures(),
906  getFeature2D_SIFT_nOctaveLayers(),
907  getFeature2D_SIFT_contrastThreshold(),
908  getFeature2D_SIFT_edgeThreshold(),
909  getFeature2D_SIFT_sigma()));
910 #endif
911  UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
912  }
913  else if(strategies.at(index).compare("SURF") == 0)
914  {
915  if(getFeature2D_SURF_gpu() && CVCUDA::getCudaEnabledDeviceCount())
916  {
917  feature2D = new GPUSURF(
918  getFeature2D_SURF_hessianThreshold(),
919  getFeature2D_SURF_nOctaves(),
920  getFeature2D_SURF_nOctaveLayers(),
921  getFeature2D_SURF_extended(),
922  getFeature2D_SURF_keypointsRatio(),
923  getFeature2D_SURF_upright());
924  UDEBUG("type=%s (GPU)", strategies.at(index).toStdString().c_str());
925  }
926  else
927  {
928 #if CV_MAJOR_VERSION < 3
929  feature2D = new Feature2D(cv::Ptr<cv::Feature2D>(new cv::SURF(
930  getFeature2D_SURF_hessianThreshold(),
931  getFeature2D_SURF_nOctaves(),
932  getFeature2D_SURF_nOctaveLayers(),
933  getFeature2D_SURF_extended(),
934  getFeature2D_SURF_upright())));
935 #else
936  feature2D = new Feature2D(cv::xfeatures2d::SURF::create(
937  getFeature2D_SURF_hessianThreshold(),
938  getFeature2D_SURF_nOctaves(),
939  getFeature2D_SURF_nOctaveLayers(),
940  getFeature2D_SURF_extended(),
941  getFeature2D_SURF_upright()));
942 #endif
943  UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
944  }
945  }
946 #endif
947  }
948  }
949  }
950 
951  return feature2D;
952 }
953 
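 // Same pattern as createKeypointDetector(), but for the Feature2D/2Descriptor parameter.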
954 Feature2D * Settings::createDescriptorExtractor()
955 {
956  Feature2D * feature2D = 0;
957  QString str = getFeature2D_2Descriptor();
958  QStringList split = str.split(':');
959  if(split.size()==2)
960  {
961  bool ok = false;
962  int index = split.first().toInt(&ok);
963  if(ok)
964  {
965  QStringList strategies = split.last().split(';');
966  if(index>=0 && index<strategies.size())
967  {
968 
969 #if FINDOBJECT_NONFREE == 0
970  //check for nonfree stuff
971  if(strategies.at(index).compare("SIFT") == 0 ||
972  strategies.at(index).compare("SURF") == 0)
973  {
974  index = Settings::defaultFeature2D_2Descriptor().split(':').first().toInt();
975  UERROR("Find-Object is not built with OpenCV nonfree module so "
976  "SIFT/SURF cannot be used! Using default \"%s\" instead.",
977  strategies.at(index).toStdString().c_str());
978 
979  }
980 #endif
981 
982 #if CV_MAJOR_VERSION < 3
983  if(strategies.at(index).compare("KAZE") == 0 ||
984  strategies.at(index).compare("AKAZE") == 0)
985  {
986  index = Settings::defaultFeature2D_2Descriptor().split(':').first().toInt();
987  UERROR("Find-Object is built with OpenCV 2 so "
988  "KAZE/AKAZE cannot be used! Using default \"%s\" instead.",
989  strategies.at(index).toStdString().c_str());
990 
991  }
992 #else
993 #ifndef HAVE_OPENCV_XFEATURES2D
994  if(strategies.at(index).compare("Brief") == 0 ||
995  strategies.at(index).compare("FREAK") == 0 ||
996  strategies.at(index).compare("LUCID") == 0 ||
997  strategies.at(index).compare("LATCH") == 0 ||
998  strategies.at(index).compare("DAISY") == 0)
999  {
1000  index = Settings::defaultFeature2D_2Descriptor().split(':').first().toInt();
1001  UERROR("Find-Object is not built with OpenCV xfeatures2d module so "
1002  "Brief/FREAK/LUCID/LATCH/DAISY cannot be used! Using default \"%s\" instead.",
1003  strategies.at(index).toStdString().c_str());
1004 
1005  }
1006 #endif
1007 #endif
1008 
1009  if(strategies.at(index).compare("Brief") == 0)
1010  {
1011 #if CV_MAJOR_VERSION < 3
1012  feature2D = new Feature2D(cv::Ptr<cv::DescriptorExtractor>(new cv::BriefDescriptorExtractor(
1013  getFeature2D_Brief_bytes())));
1014 #else
1015 #ifdef HAVE_OPENCV_XFEATURES2D
1016  feature2D = new Feature2D(cv::xfeatures2d::BriefDescriptorExtractor::create(
1017  getFeature2D_Brief_bytes()));
1018 #else
1019  UWARN("Find-Object is not built with OpenCV xfeatures2d module so Brief cannot be used!");
1020 #endif
1021 #endif
1022  UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
1023  }
1024  else if(strategies.at(index).compare("ORB") == 0)
1025  {
1026  if(getFeature2D_ORB_gpu() && CVCUDA::getCudaEnabledDeviceCount())
1027  {
1028  feature2D = new GPUORB(
1029  getFeature2D_ORB_nFeatures(),
1030  getFeature2D_ORB_scaleFactor(),
1031  getFeature2D_ORB_nLevels(),
1032  getFeature2D_ORB_edgeThreshold(),
1033  getFeature2D_ORB_firstLevel(),
1034  getFeature2D_ORB_WTA_K(),
1035  getFeature2D_ORB_scoreType(),
1036  getFeature2D_ORB_patchSize(),
1037  getFeature2D_Fast_threshold(),
1038  getFeature2D_Fast_nonmaxSuppression());
1039  UDEBUG("type=%s (GPU)", strategies.at(index).toStdString().c_str());
1040  }
1041  else
1042  {
1043 #if CV_MAJOR_VERSION < 3
1044  feature2D = new Feature2D(cv::Ptr<cv::Feature2D>(new cv::ORB(
1045  getFeature2D_ORB_nFeatures(),
1046  getFeature2D_ORB_scaleFactor(),
1047  getFeature2D_ORB_nLevels(),
1048  getFeature2D_ORB_edgeThreshold(),
1049  getFeature2D_ORB_firstLevel(),
1050  getFeature2D_ORB_WTA_K(),
1051  getFeature2D_ORB_scoreType(),
1052  getFeature2D_ORB_patchSize())));
1053 #else
1054  feature2D = new Feature2D(cv::ORB::create(
1055  getFeature2D_ORB_nFeatures(),
1056  getFeature2D_ORB_scaleFactor(),
1057  getFeature2D_ORB_nLevels(),
1058  getFeature2D_ORB_edgeThreshold(),
1059  getFeature2D_ORB_firstLevel(),
1060  getFeature2D_ORB_WTA_K(),
1061  getFeature2D_ORB_scoreType(),
1062  getFeature2D_ORB_patchSize(),
1063  getFeature2D_Fast_threshold()));
1064 #endif
1065  UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
1066  }
1067  }
1068  else if(strategies.at(index).compare("BRISK") == 0)
1069  {
1070 #if CV_MAJOR_VERSION < 3
1071  feature2D = new Feature2D(cv::Ptr<cv::Feature2D>(new cv::BRISK(
1072  getFeature2D_BRISK_thresh(),
1073  getFeature2D_BRISK_octaves(),
1074  getFeature2D_BRISK_patternScale())));
1075 #else
1076  feature2D = new Feature2D(cv::BRISK::create(
1077  getFeature2D_BRISK_thresh(),
1078  getFeature2D_BRISK_octaves(),
1079  getFeature2D_BRISK_patternScale()));
1080 #endif
1081  UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
1082  }
1083  else if(strategies.at(index).compare("KAZE") == 0)
1084  {
1085 #if CV_MAJOR_VERSION < 3
1086  UWARN("Find-Object is not built with OpenCV 3 so KAZE cannot be used!");
1087 #else
1088  feature2D = new Feature2D(cv::KAZE::create(
1089  getFeature2D_KAZE_extended(),
1090  getFeature2D_KAZE_upright(),
1091  getFeature2D_KAZE_threshold(),
1092  getFeature2D_KAZE_nOctaves(),
1093  getFeature2D_KAZE_nOctaveLayers(),
1094  cv::KAZE::DIFF_PM_G2)); // FIXME: make a parameter
1095 #endif
1096  UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
1097  }
1098  else if(strategies.at(index).compare("AKAZE") == 0)
1099  {
1100 #if CV_MAJOR_VERSION < 3
1101  UWARN("Find-Object is not built with OpenCV 3 so AKAZE cannot be used!");
1102 #else
1103  feature2D = new Feature2D(cv::AKAZE::create(
1104  cv::AKAZE::DESCRIPTOR_MLDB, // FIXME: make a parameter
1105  getFeature2D_AKAZE_descriptorSize(),
1106  getFeature2D_AKAZE_descriptorChannels(),
1107  getFeature2D_AKAZE_threshold(),
1108  getFeature2D_AKAZE_nOctaves(),
1109  getFeature2D_AKAZE_nOctaveLayers(),
1110  cv::KAZE::DIFF_PM_G2)); // FIXME: make a parameter
1111 #endif
1112  UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
1113  }
1114  else if(strategies.at(index).compare("FREAK") == 0)
1115  {
1116 #if CV_MAJOR_VERSION < 3
1117  feature2D = new Feature2D(cv::Ptr<cv::DescriptorExtractor>(new cv::FREAK(
1118  getFeature2D_FREAK_orientationNormalized(),
1119  getFeature2D_FREAK_scaleNormalized(),
1120  getFeature2D_FREAK_patternScale(),
1121  getFeature2D_FREAK_nOctaves())));
1122 #else
1123 #ifdef HAVE_OPENCV_XFEATURES2D
1124  feature2D = new Feature2D(cv::xfeatures2d::FREAK::create(
1125  getFeature2D_FREAK_orientationNormalized(),
1126  getFeature2D_FREAK_scaleNormalized(),
1127  getFeature2D_FREAK_patternScale(),
1128  getFeature2D_FREAK_nOctaves()));
1129 #else
1130  UWARN("Find-Object is not built with OpenCV xfeatures2d module so FREAK cannot be used!");
1131 #endif
1132 #endif
1133 
1134  UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
1135  }
1136 #ifdef HAVE_OPENCV_XFEATURES2D
1137  else if(strategies.at(index).compare("LUCID") == 0)
1138  {
1139  feature2D = new Feature2D(cv::xfeatures2d::LUCID::create(
1140  getFeature2D_LUCID_kernel(),
1141  getFeature2D_LUCID_blur_kernel()));
1142 
1143  UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
1144  }
1145  else if(strategies.at(index).compare("LATCH") == 0)
1146  {
1147  feature2D = new Feature2D(cv::xfeatures2d::LATCH::create(
1148  getFeature2D_LATCH_bytes(),
1149  getFeature2D_LATCH_rotationInvariance(),
1150  getFeature2D_LATCH_half_ssd_size()));
1151 
1152  UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
1153  }
1154  else if(strategies.at(index).compare("DAISY") == 0)
1155  {
1156  feature2D = new Feature2D(cv::xfeatures2d::DAISY::create(
1157  getFeature2D_DAISY_radius(),
1158  getFeature2D_DAISY_q_radius(),
1159  getFeature2D_DAISY_q_theta(),
1160  getFeature2D_DAISY_q_hist(),
1161  cv::xfeatures2d::DAISY::NRM_NONE,
1162  cv::noArray(),
1163  getFeature2D_DAISY_interpolation(),
1164  getFeature2D_DAISY_use_orientation()));
1165 
1166  UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
1167  }
1168 #endif
1169 #if FINDOBJECT_NONFREE == 1
1170  else if(strategies.at(index).compare("SIFT") == 0)
1171  {
1172 #if CV_MAJOR_VERSION < 3
1173  feature2D = new Feature2D(cv::Ptr<cv::Feature2D>(new cv::SIFT(
1174  getFeature2D_SIFT_nfeatures(),
1175  getFeature2D_SIFT_nOctaveLayers(),
1176  getFeature2D_SIFT_contrastThreshold(),
1177  getFeature2D_SIFT_edgeThreshold(),
1178  getFeature2D_SIFT_sigma())));
1179 #else
1180  feature2D = new Feature2D(cv::xfeatures2d::SIFT::create(
1181  getFeature2D_SIFT_nfeatures(),
1182  getFeature2D_SIFT_nOctaveLayers(),
1183  getFeature2D_SIFT_contrastThreshold(),
1184  getFeature2D_SIFT_edgeThreshold(),
1185  getFeature2D_SIFT_sigma()));
1186 #endif
1187  UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
1188  }
1189  else if(strategies.at(index).compare("SURF") == 0)
1190  {
1191  if(getFeature2D_SURF_gpu() && CVCUDA::getCudaEnabledDeviceCount())
1192  {
1193  feature2D = new GPUSURF(
1194  getFeature2D_SURF_hessianThreshold(),
1195  getFeature2D_SURF_nOctaves(),
1196  getFeature2D_SURF_nOctaveLayers(),
1197  getFeature2D_SURF_extended(),
1198  getFeature2D_SURF_keypointsRatio(),
1199  getFeature2D_SURF_upright());
1200  UDEBUG("type=%s (GPU)", strategies.at(index).toStdString().c_str());
1201  }
1202  else
1203  {
1204 #if CV_MAJOR_VERSION < 3
1205  feature2D = new Feature2D(cv::Ptr<cv::Feature2D>(new cv::SURF(
1206  getFeature2D_SURF_hessianThreshold(),
1207  getFeature2D_SURF_nOctaves(),
1208  getFeature2D_SURF_nOctaveLayers(),
1209  getFeature2D_SURF_extended(),
1210  getFeature2D_SURF_upright())));
1211 #else
1212  feature2D = new Feature2D(cv::xfeatures2d::SURF::create(
1213  getFeature2D_SURF_hessianThreshold(),
1214  getFeature2D_SURF_nOctaves(),
1215  getFeature2D_SURF_nOctaveLayers(),
1216  getFeature2D_SURF_extended(),
1217  getFeature2D_SURF_upright()));
1218 #endif
1219  UDEBUG("type=%s", strategies.at(index).toStdString().c_str());
1220  }
1221  }
1222 #endif
1223  }
1224  }
1225  }
1226 
1227  return feature2D;
1228 }
1229 
1230 QString Settings::currentDetectorType()
1231 {
1232  int index = getFeature2D_1Detector().split(':').first().toInt();
1233  return getFeature2D_1Detector().split(':').last().split(';').at(index);
1234 }
1235 
1236 QString Settings::currentDescriptorType()
1237 {
1238  int index = getFeature2D_2Descriptor().split(':').first().toInt();
1239  return getFeature2D_2Descriptor().split(':').last().split(';').at(index);
1240 }
1241 
1242 QString Settings::currentNearestNeighborType()
1243 {
1244  int index = getNearestNeighbor_1Strategy().split(':').first().toInt();
1245  return getNearestNeighbor_1Strategy().split(':').last().split(';').at(index);
1246 }
1247 
1248 bool Settings::isBruteForceNearestNeighbor()
1249 {
1250  bool bruteForce = false;
1251  QString str = getNearestNeighbor_1Strategy();
1252  QStringList split = str.split(':');
1253  if(split.size()==2)
1254  {
1255  bool ok = false;
1256  int index = split.first().toInt(&ok);
1257  if(ok)
1258  {
1259  QStringList strategies = split.last().split(';');
1260  if(strategies.size() >= 7 && index == 6)
1261  {
1262  bruteForce = true;
1263  }
1264  }
1265  }
1266  return bruteForce;
1267 }
1268 
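 // Maps the NearestNeighbor/1Strategy combo index to the matching
 // cv::flann::IndexParams implementation; defaults to KDTree if parsing fails.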
1269 cv::flann::IndexParams * Settings::createFlannIndexParams()
1270 {
1271  cv::flann::IndexParams * params = 0;
1272  QString str = getNearestNeighbor_1Strategy();
1273  QStringList split = str.split(':');
1274  if(split.size()==2)
1275  {
1276  bool ok = false;
1277  int index = split.first().toInt(&ok);
1278  if(ok)
1279  {
1280  QStringList strategies = split.last().split(';');
1281  if(strategies.size() >= 6 && index>=0 && index<6)
1282  {
1283  switch(index)
1284  {
1285  case 0:
1286  if(strategies.at(index).compare("Linear") == 0)
1287  {
1288  UDEBUG("type=%s", "Linear");
1289  params = new cv::flann::LinearIndexParams();
1290  }
1291  break;
1292  case 1:
1293  if(strategies.at(index).compare("KDTree") == 0)
1294  {
1295  UDEBUG("type=%s", "KDTree");
1296  params = new cv::flann::KDTreeIndexParams(
1297  getNearestNeighbor_KDTree_trees());
1298  }
1299  break;
1300  case 2:
1301  if(strategies.at(index).compare("KMeans") == 0)
1302  {
1303  cvflann::flann_centers_init_t centers_init = cvflann::FLANN_CENTERS_RANDOM;
1304  QString str = getNearestNeighbor_KMeans_centers_init();
1305  QStringList split = str.split(':');
1306  if(split.size()==2)
1307  {
1308  bool ok = false;
1309  int index = split.first().toInt(&ok);
1310  if(ok)
1311  {
1312  centers_init = (cvflann::flann_centers_init_t)index;
1313  }
1314  }
1315  UDEBUG("type=%s", "KMeans");
1316  params = new cv::flann::KMeansIndexParams(
1317  getNearestNeighbor_KMeans_branching(),
1318  getNearestNeighbor_KMeans_iterations(),
1319  centers_init,
1320  getNearestNeighbor_KMeans_cb_index());
1321  }
1322  break;
1323  case 3:
1324  if(strategies.at(index).compare("Composite") == 0)
1325  {
1326  cvflann::flann_centers_init_t centers_init = cvflann::FLANN_CENTERS_RANDOM;
1327  QString str = getNearestNeighbor_Composite_centers_init();
1328  QStringList split = str.split(':');
1329  if(split.size()==2)
1330  {
1331  bool ok = false;
1332  int index = split.first().toInt(&ok);
1333  if(ok)
1334  {
1335  centers_init = (cvflann::flann_centers_init_t)index;
1336  }
1337  }
1338  UDEBUG("type=%s", "Composite");
1339  params = new cv::flann::CompositeIndexParams(
1340  getNearestNeighbor_Composite_trees(),
1341  getNearestNeighbor_Composite_branching(),
1342  getNearestNeighbor_Composite_iterations(),
1343  centers_init,
1344  getNearestNeighbor_Composite_cb_index());
1345  }
1346  break;
1347  case 4:
1348  if(strategies.at(index).compare("Autotuned") == 0)
1349  {
1350  UDEBUG("type=%s", "Autotuned");
1351  params = new cv::flann::AutotunedIndexParams(
1352  getNearestNeighbor_Autotuned_target_precision(),
1353  getNearestNeighbor_Autotuned_build_weight(),
1354  getNearestNeighbor_Autotuned_memory_weight(),
1355  getNearestNeighbor_Autotuned_sample_fraction());
1356  }
1357  break;
1358  case 5:
1359  if(strategies.at(index).compare("Lsh") == 0)
1360  {
1361  UDEBUG("type=%s", "Lsh");
1362  params = new cv::flann::LshIndexParams(
1363  getNearestNeighbor_Lsh_table_number(),
1364  getNearestNeighbor_Lsh_key_size(),
1365  getNearestNeighbor_Lsh_multi_probe_level());
1366 
1367  }
1368  break;
1369  default:
1370  break;
1371  }
1372  }
1373  }
1374  }
1375  if(!params)
1376  {
1377  UERROR("NN strategy not found!? Using default KDTree...");
1378  params = new cv::flann::KDTreeIndexParams();
1379  }
1380  return params ;
1381 }
1382 
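 // The NearestNeighbor/2Distance_type combo index i maps directly to
 // cvflann::flann_distance_t value i+1 (FLANN_DIST_EUCLIDEAN == 1, etc.).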
1383 cvflann::flann_distance_t Settings::getFlannDistanceType()
1384 {
1385  cvflann::flann_distance_t distance = cvflann::FLANN_DIST_L2;
1386  QString str = getNearestNeighbor_2Distance_type();
1387  QStringList split = str.split(':');
1388  if(split.size()==2)
1389  {
1390  bool ok = false;
1391  int index = split.first().toInt(&ok);
1392  if(ok)
1393  {
1394  QStringList strategies = split.last().split(';');
1395  if(strategies.size() == 9 && index>=0 && index<=8)
1396  {
1397  distance = (cvflann::flann_distance_t)(index+1);
1398  }
1399  }
1400  }
1401  return distance;
1402 }
1403 
1404 int Settings::getHomographyMethod()
1405 {
1406  int method = cv::RANSAC;
1407  QString str = getHomography_method();
1408  QStringList split = str.split(':');
1409  if(split.size()==2)
1410  {
1411  bool ok = false;
1412  int index = split.first().toInt(&ok);
1413  if(ok)
1414  {
1415  QStringList strategies = split.last().split(';');
1416  if(strategies.size() == 2 && index>=0 && index<2)
1417  {
1418  switch(index)
1419  {
1420 #if CV_MAJOR_VERSION >= 3
1421  case 2:
1422  method = cv::RHO;
1423  break;
1424 #endif
1425  case 0:
1426  method = cv::LMEDS;
1427  break;
1428  default:
1429  method = cv::RANSAC;
1430  break;
1431  }
1432  }
1433  }
1434  }
1435  UDEBUG("method=%d", method);
1436  return method;
1437 }
1438 
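 // Feature2D is a thin wrapper that holds either a cv::FeatureDetector /
 // cv::DescriptorExtractor (OpenCV 2 only) or a cv::Feature2D, and forwards
 // detect()/compute()/detectAndCompute() to whichever member is set.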
1439 #if CV_MAJOR_VERSION < 3
1440 Feature2D::Feature2D(cv::Ptr<cv::FeatureDetector> featureDetector) :
1441  featureDetector_(featureDetector)
1442 {
1443  UASSERT(!featureDetector_.empty());
1444 }
1445 Feature2D::Feature2D(cv::Ptr<cv::DescriptorExtractor> descriptorExtractor) :
1446  descriptorExtractor_(descriptorExtractor)
1447 {
1448  UASSERT(!descriptorExtractor_.empty());
1449 }
1450 #endif
1451 Feature2D::Feature2D(cv::Ptr<cv::Feature2D> feature2D) :
1452  feature2D_(feature2D)
1453 {
1454  UASSERT(!feature2D_.empty());
1455 }
1456 
1457 void Feature2D::detect(const cv::Mat & image,
1458  std::vector<cv::KeyPoint> & keypoints,
1459  const cv::Mat & mask)
1460 {
1461 #if CV_MAJOR_VERSION < 3
1462  if(!featureDetector_.empty())
1463  {
1464  featureDetector_->detect(image, keypoints, mask);
1465  }
1466  else
1467 #endif
1468  if(!feature2D_.empty())
1469  {
1470  feature2D_->detect(image, keypoints, mask);
1471  }
1472  else
1473  {
1474  UERROR("Feature2D not set!?!?");
1475  }
1476 }
1477 
1478 void Feature2D::compute(const cv::Mat & image,
1479  std::vector<cv::KeyPoint> & keypoints,
1480  cv::Mat & descriptors)
1481 {
1482 #if CV_MAJOR_VERSION < 3
1483  if(!descriptorExtractor_.empty())
1484  {
1485  descriptorExtractor_->compute(image, keypoints, descriptors);
1486  }
1487  else
1488 #endif
1489  if(!feature2D_.empty())
1490  {
1491  feature2D_->compute(image, keypoints, descriptors);
1492  }
1493  else
1494  {
1495  UERROR("Feature2D not set!?!?");
1496  }
1497 }
1498 
1499 void Feature2D::detectAndCompute(const cv::Mat & image,
1500  std::vector<cv::KeyPoint> & keypoints,
1501  cv::Mat & descriptors,
1502  const cv::Mat & mask)
1503 {
1504  if(!feature2D_.empty())
1505  {
1506 #if CV_MAJOR_VERSION < 3
1507  (*feature2D_)(image, mask, keypoints, descriptors);
1508 #else
1509  feature2D_->detectAndCompute(image, mask, keypoints, descriptors);
1510 #endif
1511  }
1512  else
1513  {
1514  UERROR("Cannot use Feature2D::detectAndCompute() if feature2D member is not set.");
1515  }
1516 }
1517 
1518 } // namespace find_object
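
A minimal usage sketch (not part of the original file), assuming the static Settings API declared in Settings.h; error handling and image loading are omitted:

// Hypothetical example of how an application might drive the Settings API.
#include "find_object/Settings.h"

using namespace find_object;

int main()
{
    // Read parameters from the default INI location (or pass an explicit path).
    Settings::init(Settings::iniDefaultPath());

    // Build the detector/extractor pair selected by the current parameters.
    Feature2D * detector = Settings::createKeypointDetector();
    Feature2D * extractor = Settings::createDescriptorExtractor();

    // ... call detector->detect() and extractor->compute() on cv::Mat images ...

    delete detector;
    delete extractor;

    // Persist any parameter changes back to the INI file.
    Settings::saveSettings();
    return 0;
}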