FindObject.cpp
00001 /*
00002 Copyright (c) 2011-2014, Mathieu Labbe - IntRoLab - Universite de Sherbrooke
00003 All rights reserved.
00004 
00005 Redistribution and use in source and binary forms, with or without
00006 modification, are permitted provided that the following conditions are met:
00007     * Redistributions of source code must retain the above copyright
00008       notice, this list of conditions and the following disclaimer.
00009     * Redistributions in binary form must reproduce the above copyright
00010       notice, this list of conditions and the following disclaimer in the
00011       documentation and/or other materials provided with the distribution.
00012     * Neither the name of the Universite de Sherbrooke nor the
00013       names of its contributors may be used to endorse or promote products
00014       derived from this software without specific prior written permission.
00015 
00016 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
00017 ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
00018 WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
00019 DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
00020 DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
00021 (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
00022 LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
00023 ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
00024 (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
00025 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
00026 */
00027 
00028 #include "find_object/FindObject.h"
00029 #include "find_object/Settings.h"
00030 #include "find_object/utilite/ULogger.h"
00031 
00032 #include "ObjSignature.h"
00033 #include "utilite/UDirectory.h"
00034 #include "Vocabulary.h"
00035 
00036 #include <QtCore/QThread>
00037 #include <QtCore/QFileInfo>
00038 #include <QtCore/QStringList>
00039 #include <QtCore/QTime>
00040 #include <QtGui/QGraphicsRectItem>
00041 #include <stdio.h>
00042 
00043 namespace find_object {
00044 
00045 FindObject::FindObject(QObject * parent) :
00046         QObject(parent),
00047         vocabulary_(new Vocabulary()),
00048         detector_(Settings::createKeypointDetector()),
00049         extractor_(Settings::createDescriptorExtractor()),
00050         sessionModified_(false)
00051 {
00052         qRegisterMetaType<find_object::DetectionInfo>("find_object::DetectionInfo");
00053         UASSERT(detector_ != 0 && extractor_ != 0);
00054 }
00055 
00056 FindObject::~FindObject() {
00057         delete detector_;
00058         delete extractor_;
00059         delete vocabulary_;
00060         objectsDescriptors_.clear();
00061 }
00062 
00063 bool FindObject::loadSession(const QString & path)
00064 {
00065         if(QFile::exists(path) && !path.isEmpty() && QFileInfo(path).suffix().compare("bin") == 0)
00066         {
00067                 QFile file(path);
00068                 file.open(QIODevice::ReadOnly);
00069                 QDataStream in(&file);
00070 
00071                 ParametersMap parameters;
00072 
00073                 // load parameters
00074                 in >> parameters;
00075                 for(QMap<QString, QVariant>::iterator iter=parameters.begin(); iter!=parameters.end(); ++iter)
00076                 {
00077                         Settings::setParameter(iter.key(), iter.value());
00078                 }
00079 
00080                 // load vocabulary
00081                 vocabulary_->load(in);
00082 
00083                 // load objects
00084                 while(!in.atEnd())
00085                 {
00086                         ObjSignature * obj = new ObjSignature();
00087                         obj->load(in);
00088                         if(obj->id() >= 0)
00089                         {
00090                                 objects_.insert(obj->id(), obj);
00091                         }
00092                         else
00093                         {
00094                                 UERROR("Failed to load an object!");
00095                                 delete obj;
00096                         }
00097                 }
00098                 file.close();
00099 
00100                 if(!Settings::getGeneral_invertedSearch())
00101                 {
00102                         // this will fill objectsDescriptors_ matrix
00103                         updateVocabulary();
00104                 }
00105                 sessionModified_ = false;
00106                 return true;
00107         }
00108         else
00109         {
00110                 UERROR("Invalid session file (should be *.bin): \"%s\"", path.toStdString().c_str());
00111         }
00112         return false;
00113 }
00114 
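// Illustrative usage sketch (editor's addition, not part of the original library):
// restoring a previously saved session. The "objects.bin" path is hypothetical;
// loadSession() only accepts files with a ".bin" suffix.
static bool exampleReloadSession(FindObject & finder)
{
        // Parameters, the vocabulary and all objects are restored from the file.
        return finder.loadSession("objects.bin");
}
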
00115 bool FindObject::saveSession(const QString & path)
00116 {
00117         if(!path.isEmpty() && QFileInfo(path).suffix().compare("bin") == 0)
00118         {
00119                 QFile file(path);
00120                 file.open(QIODevice::WriteOnly);
00121                 QDataStream out(&file);
00122 
00123                 // save parameters
00124                 out << Settings::getParameters();
00125 
00126                 // save vocabulary
00127                 vocabulary_->save(out);
00128 
00129                 // save objects
00130                 for(QMultiMap<int, ObjSignature*>::const_iterator iter=objects_.constBegin(); iter!=objects_.constEnd(); ++iter)
00131                 {
00132                         iter.value()->save(out);
00133                 }
00134 
00135                 file.close();
00136                 sessionModified_ = false;
00137                 return true;
00138         }
00139         UERROR("Path \"%s\" not valid (should be *.bin)", path.toStdString().c_str());
00140         return false;
00141 }
00142 
00143 int FindObject::loadObjects(const QString & dirPath)
00144 {
00145         int loadedObjects = 0;
00146         QString formats = Settings::getGeneral_imageFormats().remove('*').remove('.');
00147         UDirectory dir(dirPath.toStdString(), formats.toStdString());
00148         if(dir.isValid())
00149         {
00150                 const std::list<std::string> & names = dir.getFileNames(); // sorted in natural order
00151                 for(std::list<std::string>::const_iterator iter=names.begin(); iter!=names.end(); ++iter)
00152                 {
00153                         this->addObject((dirPath.toStdString()+dir.separator()+*iter).c_str());
00154                 }
00155                 if(names.size())
00156                 {
00157                         this->updateObjects();
00158                         this->updateVocabulary();
00159                 }
00160                 loadedObjects = (int)names.size();
00161         }
00162         return loadedObjects;
00163 }
00164 
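// Illustrative usage sketch (editor's addition, not part of the original library):
// loading every image of a directory as an object. The "objects" directory is
// hypothetical; accepted extensions come from the General/imageFormats setting.
static int exampleLoadObjectDirectory(FindObject & finder)
{
        int count = finder.loadObjects("objects");
        // loadObjects() already calls updateObjects() and updateVocabulary() when at
        // least one file was found, so the detector is ready to use afterwards.
        return count;
}
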
00165 const ObjSignature * FindObject::addObject(const QString & filePath)
00166 {
00167         UINFO("Load file %s", filePath.toStdString().c_str());
00168         if(!filePath.isNull())
00169         {
00170                 cv::Mat img = cv::imread(filePath.toStdString().c_str(), cv::IMREAD_GRAYSCALE);
00171                 if(!img.empty())
00172                 {
00173                         int id = 0;
00174                         QFileInfo file(filePath);
00175                         QStringList list = file.fileName().split('.');
00176                         if(list.size())
00177                         {
00178                                 bool ok = false;
00179                                 id = list.front().toInt(&ok);
00180                                 if(ok && id>0)
00181                                 {
00182                                         if(objects_.contains(id))
00183                                         {
00184                                                 UWARN("Object %d already added, a new ID will be generated (new id=%d).", id, Settings::getGeneral_nextObjID());
00185                                                 id = 0;
00186                                         }
00187                                 }
00188                                 else
00189                                 {
00190                                         id = 0;
00191                                 }
00192                         }
00193                         return this->addObject(img, id, file.fileName());
00194                 }
00195         }
00196         return 0;
00197 }
00198 
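// Illustrative usage sketch (editor's addition, not part of the original library):
// the file-name convention handled by addObject(const QString &). Both paths are
// hypothetical.
static void exampleAddObjectsByFileName(FindObject & finder)
{
        finder.addObject("7.png");    // numeric prefix: object id 7 (unless already taken)
        finder.addObject("logo.png"); // non-numeric name: next auto-generated id is used
}
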
00199 const ObjSignature * FindObject::addObject(const cv::Mat & image, int id, const QString & filename)
00200 {
00201         UASSERT(id >= 0);
00202         ObjSignature * s = new ObjSignature(id, image, filename);
00203         if(!this->addObject(s))
00204         {
00205                 delete s;
00206                 return 0;
00207         }
00208         return s;
00209 }
00210 
00211 bool FindObject::addObject(ObjSignature * obj)
00212 {
00213         UASSERT(obj != 0 && obj->id() >= 0);
00214         if(obj->id() && objects_.contains(obj->id()))
00215         {
00216                 UERROR("object with id %d already added!", obj->id());
00217                 return false;
00218         }
00219         else if(obj->id() == 0)
00220         {
00221                 obj->setId(Settings::getGeneral_nextObjID());
00222         }
00223 
00224         Settings::setGeneral_nextObjID(obj->id()+1);
00225 
00226         objects_.insert(obj->id(), obj);
00227 
00228         return true;
00229 }
00230 
00231 void FindObject::removeObject(int id)
00232 {
00233         if(objects_.contains(id))
00234         {
00235                 delete objects_.value(id);
00236                 objects_.remove(id);
00237                 clearVocabulary();
00238         }
00239 }
00240 
00241 void FindObject::removeAllObjects()
00242 {
00243         qDeleteAll(objects_);
00244         objects_.clear();
00245         clearVocabulary();
00246 }
00247 
00248 void FindObject::addObjectAndUpdate(const cv::Mat & image, int id, const QString & filename)
00249 {
00250         const ObjSignature * s = this->addObject(image, id, filename);
00251         if(s)
00252         {
00253                 QList<int> ids;
00254                 ids.push_back(s->id());
00255                 updateObjects(ids);
00256                 updateVocabulary();
00257         }
00258 }
00259 
00260 void FindObject::removeObjectAndUpdate(int id)
00261 {
00262         if(objects_.contains(id))
00263         {
00264                 delete objects_.value(id);
00265                 objects_.remove(id);
00266         }
00267         updateVocabulary();
00268 }
00269 
00270 void FindObject::updateDetectorExtractor()
00271 {
00272         delete detector_;
00273         delete extractor_;
00274         detector_ = Settings::createKeypointDetector();
00275         extractor_ = Settings::createDescriptorExtractor();
00276         UASSERT(detector_ != 0 && extractor_ != 0);
00277 }
00278 
00279 std::vector<cv::KeyPoint> limitKeypoints(const std::vector<cv::KeyPoint> & keypoints, int maxKeypoints)
00280 {
00281         std::vector<cv::KeyPoint> kptsKept;
00282         if(maxKeypoints > 0 && (int)keypoints.size() > maxKeypoints)
00283         {
00284                 // Sort keypoints by response
00285                 std::multimap<float, int> responseMap; // <response, keypoint index>
00286                 for(unsigned int i = 0; i < keypoints.size(); ++i)
00287                 {
00288                         // Map the absolute response to the keypoint index for the next step
00289                         responseMap.insert(std::pair<float, int>(fabs(keypoints[i].response), i));
00290                 }
00291 
00292                 // Keep only the maxKeypoints keypoints with the strongest response
00293                 std::multimap<float, int>::reverse_iterator iter = responseMap.rbegin();
00294                 kptsKept.resize(maxKeypoints);
00295                 for(unsigned int k=0; k < kptsKept.size() && iter!=responseMap.rend(); ++k, ++iter)
00296                 {
00297                         kptsKept[k] = keypoints[iter->second];
00298                 }
00299         }
00300         else
00301         {
00302                 kptsKept = keypoints;
00303         }
00304         return kptsKept;
00305 }
00306 
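// Illustrative usage sketch (editor's addition, not part of the original library):
// detecting keypoints with the current settings and keeping only the strongest ones,
// as the extraction threads below do.
static std::vector<cv::KeyPoint> exampleDetectAndLimit(const cv::Mat & grayImage)
{
        KeypointDetector * detector = Settings::createKeypointDetector(); // caller owns it
        std::vector<cv::KeyPoint> keypoints;
        detector->detect(grayImage, keypoints);
        // limitKeypoints() returns the input unchanged when the limit is <= 0 or not exceeded
        keypoints = limitKeypoints(keypoints, Settings::getFeature2D_3MaxFeatures());
        delete detector;
        return keypoints;
}
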
00307 
00308 // Adapted from the ASIFT example https://github.com/Itseez/opencv/blob/master/samples/python2/asift.py
00309 // Ai is the affine transform matrix mapping points from the skewed image back to the original image
00310 void FindObject::affineSkew(
00311                 float tilt,
00312                 float phi,
00313                 const cv::Mat & image,
00314                 cv::Mat & skewImage,
00315                 cv::Mat & skewMask,
00316                 cv::Mat & Ai)
00317 {
00318     float h = image.rows;
00319     float w = image.cols;
00320     cv::Mat A = cv::Mat::zeros(2,3,CV_32FC1);
00321     A.at<float>(0,0) = A.at<float>(1,1) = 1;
00322     skewMask = cv::Mat::ones(h, w, CV_8U) * 255;
00323     if(phi != 0.0)
00324     {
00325         phi = phi*CV_PI/180.0f; // deg2rad
00326         float s = std::sin(phi);
00327         float c = std::cos(phi);
00328         cv::Mat A22 = (cv::Mat_<float>(2, 2) <<
00329                         c, -s,
00330                         s, c);
00331         cv::Mat cornersIn = (cv::Mat_<float>(4, 2) <<
00332                         0,0,
00333                         w,0,
00334                         w,h,
00335                         0,h);
00336         cv::Mat cornersOut = cornersIn * A22.t();
00337         cv::Rect rect = cv::boundingRect(cornersOut.reshape(2,4));
00338         A = (cv::Mat_<float>(2, 3) <<
00339                                 c, -s, -rect.x,
00340                                 s, c, -rect.y);
00341         cv::warpAffine(image, skewImage, A, cv::Size(rect.width, rect.height), cv::INTER_LINEAR, cv::BORDER_REPLICATE);
00342     }
00343     else
00344     {
00345         skewImage = image;
00346     }
00347     if(tilt != 1.0)
00348     {
00349         float s = 0.8*std::sqrt(tilt*tilt-1);
00350         cv::Mat out, out2;
00351         cv::GaussianBlur(skewImage, out, cv::Size(0, 0), s, 0.01);
00352         cv::resize(out, out2, cv::Size(0, 0), 1.0/tilt, 1.0, cv::INTER_NEAREST);
00353         skewImage = out2;
00354         A.row(0) /= tilt;
00355     }
00356     if(phi != 0.0 || tilt != 1.0)
00357     {
00358         cv::Mat mask = skewMask;
00359         cv::warpAffine(mask, skewMask, A, skewImage.size(), cv::INTER_NEAREST);
00360     }
00361     cv::invertAffineTransform(A, Ai);
00362 }
00363 
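// Illustrative usage sketch (editor's addition, not part of the original library):
// generating one simulated view (tilt=2, rotation=30 degrees) and mapping a point
// found in that view back to the original image with Ai, exactly as
// AffineExtractionThread does below for detected keypoints.
static cv::Point2f exampleMapSkewedPointBack(const cv::Mat & image)
{
        cv::Mat skewImage, skewMask, Ai;
        FindObject::affineSkew(2.0f, 30.0f, image, skewImage, skewMask, Ai);
        cv::Mat p = (cv::Mat_<float>(3, 1) << 10.0f, 20.0f, 1.0f); // a point in skewImage
        cv::Mat pa = Ai * p; // 2x3 * 3x1 -> point in original image coordinates
        return cv::Point2f(pa.at<float>(0,0), pa.at<float>(1,0));
}
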
00364 class AffineExtractionThread : public QThread
00365 {
00366 public:
00367         AffineExtractionThread(
00368                         KeypointDetector * detector,
00369                         DescriptorExtractor * extractor,
00370                         const cv::Mat & image,
00371                         float tilt,
00372                         float phi) :
00373                 detector_(detector),
00374                 extractor_(extractor),
00375                 image_(image),
00376                 tilt_(tilt),
00377                 phi_(phi),
00378                 timeSkewAffine_(0),
00379                 timeDetection_(0),
00380                 timeExtraction_(0)
00381         {
00382                 UASSERT(detector && extractor);
00383         }
00384         const cv::Mat & image() const {return image_;}
00385         const std::vector<cv::KeyPoint> & keypoints() const {return keypoints_;}
00386         const cv::Mat & descriptors() const {return descriptors_;}
00387 
00388         int timeSkewAffine() const {return timeSkewAffine_;}
00389         int timeDetection() const {return timeDetection_;}
00390         int timeExtraction() const {return timeExtraction_;}
00391 
00392 protected:
00393         virtual void run()
00394         {
00395                 QTime timeStep;
00396                 timeStep.start();
00397                 cv::Mat skewImage, skewMask, Ai;
00398                 FindObject::affineSkew(tilt_, phi_, image_, skewImage, skewMask, Ai);
00399                 timeSkewAffine_=timeStep.restart();
00400 
00401                 //Detect features
00402                 detector_->detect(skewImage, keypoints_, skewMask);
00403 
00404                 if(keypoints_.size())
00405                 {
00406                         int maxFeatures = Settings::getFeature2D_3MaxFeatures();
00407                         if(maxFeatures > 0 && (int)keypoints_.size() > maxFeatures)
00408                         {
00409                                 keypoints_ = limitKeypoints(keypoints_, maxFeatures);
00410                         }
00411                         timeDetection_=timeStep.restart();
00412 
00413                         //Extract descriptors
00414                         extractor_->compute(skewImage, keypoints_, descriptors_);
00415                         timeExtraction_=timeStep.restart();
00416 
00417                         // Transform points to original image coordinates
00418                         for(unsigned int i=0; i<keypoints_.size(); ++i)
00419                         {
00420                                 cv::Mat p = (cv::Mat_<float>(3, 1) << keypoints_[i].pt.x, keypoints_[i].pt.y, 1);
00421                                 cv::Mat pa = Ai * p;
00422                                 keypoints_[i].pt.x = pa.at<float>(0,0);
00423                                 keypoints_[i].pt.y = pa.at<float>(1,0);
00424                         }
00425                 }
00426                 else
00427                 {
00428                         timeDetection_=timeStep.restart();
00429                 }
00430         }
00431 private:
00432         KeypointDetector * detector_;
00433         DescriptorExtractor * extractor_;
00434         cv::Mat image_;
00435         float tilt_;
00436         float phi_;
00437         std::vector<cv::KeyPoint> keypoints_;
00438         cv::Mat descriptors_;
00439 
00440         int timeSkewAffine_;
00441         int timeDetection_;
00442         int timeExtraction_;
00443 };
00444 
00445 class ExtractFeaturesThread : public QThread
00446 {
00447 public:
00448         ExtractFeaturesThread(
00449                         KeypointDetector * detector,
00450                         DescriptorExtractor * extractor,
00451                         int objectId,
00452                         const cv::Mat & image) :
00453                 detector_(detector),
00454                 extractor_(extractor),
00455                 objectId_(objectId),
00456                 image_(image),
00457                 timeSkewAffine_(0),
00458                 timeDetection_(0),
00459                 timeExtraction_(0)
00460         {
00461                 UASSERT(detector && extractor);
00462         }
00463         int objectId() const {return objectId_;}
00464         const cv::Mat & image() const {return image_;}
00465         const std::vector<cv::KeyPoint> & keypoints() const {return keypoints_;}
00466         const cv::Mat & descriptors() const {return descriptors_;}
00467 
00468         int timeSkewAffine() const {return timeSkewAffine_;}
00469         int timeDetection() const {return timeDetection_;}
00470         int timeExtraction() const {return timeExtraction_;}
00471 
00472 protected:
00473         virtual void run()
00474         {
00475                 QTime time;
00476                 time.start();
00477                 UINFO("Extracting descriptors from object %d...", objectId_);
00478 
00479                 QTime timeStep;
00480                 timeStep.start();
00481 
00482                 if(!Settings::getFeature2D_4Affine())
00483                 {
00484                         keypoints_.clear();
00485                         descriptors_ = cv::Mat();
00486                         detector_->detect(image_, keypoints_);
00487 
00488                         if(keypoints_.size())
00489                         {
00490                                 int maxFeatures = Settings::getFeature2D_3MaxFeatures();
00491                                 if(maxFeatures > 0 && (int)keypoints_.size() > maxFeatures)
00492                                 {
00493                                         int previousCount = (int)keypoints_.size();
00494                                         keypoints_ = limitKeypoints(keypoints_, maxFeatures);
00495                                         UDEBUG("obj=%d, %d keypoints removed, (kept %d), min/max response=%f/%f", objectId_, previousCount-(int)keypoints_.size(), (int)keypoints_.size(), keypoints_.size()?keypoints_.back().response:0.0f, keypoints_.size()?keypoints_.front().response:0.0f);
00496                                 }
00497                                 timeDetection_+=timeStep.restart();
00498 
00499                                 try
00500                                 {
00501                                         extractor_->compute(image_, keypoints_, descriptors_);
00502                                 }
00503                                 catch(cv::Exception & e)
00504                                 {
00505                                         UERROR("Descriptor exception: %s. Maybe some keypoints are invalid "
00506                                                         "for the selected descriptor extractor.", e.what());
00507                                         descriptors_ = cv::Mat();
00508                                         keypoints_.clear();
00509                                 }
00510                                 timeExtraction_+=timeStep.restart();
00511 
00512                                 if((int)keypoints_.size() != descriptors_.rows)
00513                                 {
00514                                         UERROR("obj=%d kpt=%d != descriptors=%d", objectId_, (int)keypoints_.size(), descriptors_.rows);
00515                                 }
00516                         }
00517                         else
00518                         {
00519                                 timeDetection_+=timeStep.restart();
00520                                 UWARN("No features detected in object %d!", objectId_);
00521                         }
00522                 }
00523                 else
00524                 {
00525                         //ASIFT
00526                         std::vector<float> tilts;
00527                         std::vector<float> phis;
00528                         tilts.push_back(1.0f);
00529                         phis.push_back(0.0f);
00530                         int nTilt = Settings::getFeature2D_5AffineCount();
00531                         for(int t=1; t<nTilt; ++t)
00532                         {
00533                                 float tilt = std::pow(2.0f, 0.5f*float(t));
00534                                 float inc = 72.0f / float(tilt);
00535                                 for(float phi=0.0f; phi<180.0f; phi+=inc)
00536                                 {
00537                                         tilts.push_back(tilt);
00538                                         phis.push_back(phi);
00539                                 }
00540                         }
00541 
00542                         //multi-threaded
00543                         unsigned int threadCounts = Settings::getGeneral_threads();
00544                         if(threadCounts == 0)
00545                         {
00546                                 threadCounts = (unsigned int)tilts.size();
00547                         }
00548 
00549                         for(unsigned int i=0; i<tilts.size(); i+=threadCounts)
00550                         {
00551                                 QVector<AffineExtractionThread*> threads;
00552 
00553                                 for(unsigned int k=i; k<i+threadCounts && k<tilts.size(); ++k)
00554                                 {
00555                                         threads.push_back(new AffineExtractionThread(detector_, extractor_, image_, tilts[k], phis[k]));
00556                                         threads.back()->start();
00557                                 }
00558 
00559                                 for(int k=0; k<threads.size(); ++k)
00560                                 {
00561                                         threads[k]->wait();
00562 
00563                                         keypoints_.insert(keypoints_.end(), threads[k]->keypoints().begin(), threads[k]->keypoints().end());
00564                                         descriptors_.push_back(threads[k]->descriptors());
00565 
00566                                         timeSkewAffine_ += threads[k]->timeSkewAffine();
00567                                         timeDetection_ += threads[k]->timeDetection();
00568                                         timeExtraction_ += threads[k]->timeExtraction();
00569                                 }
00570                         }
00571                 }
00572 
00573                 UINFO("%d descriptors extracted from object %d (in %d ms)", descriptors_.rows, objectId_, time.elapsed());
00574         }
00575 private:
00576         KeypointDetector * detector_;
00577         DescriptorExtractor * extractor_;
00578         int objectId_;
00579         cv::Mat image_;
00580         std::vector<cv::KeyPoint> keypoints_;
00581         cv::Mat descriptors_;
00582 
00583         int timeSkewAffine_;
00584         int timeDetection_;
00585         int timeExtraction_;
00586 };
00587 
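// Worked example (editor's addition, not part of the original library): the ASIFT
// view sampling used in ExtractFeaturesThread::run() above. Tilts follow 2^(t/2) and
// rotations are spaced by 72/tilt degrees over [0, 180). With Feature2D/5AffineCount = 3,
// for example, this gives tilts {1, 1.41, 2} and 1 + 4 + 5 = 10 simulated views.
static void exampleAsiftViewSampling(std::vector<float> & tilts, std::vector<float> & phis)
{
        tilts.push_back(1.0f); // the original, unwarped view
        phis.push_back(0.0f);
        int nTilt = Settings::getFeature2D_5AffineCount();
        for(int t=1; t<nTilt; ++t)
        {
                float tilt = std::pow(2.0f, 0.5f*float(t));
                for(float phi=0.0f; phi<180.0f; phi+=72.0f/tilt)
                {
                        tilts.push_back(tilt);
                        phis.push_back(phi);
                }
        }
}
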
00588 void FindObject::updateObjects(const QList<int> & ids)
00589 {
00590         QList<ObjSignature*> objectsList;
00591         if(ids.size())
00592         {
00593                 for(int i=0; i<ids.size(); ++i)
00594                 {
00595                         if(objects_.contains(ids[i]))
00596                         {
00597                                 objectsList.push_back(objects_[ids[i]]);
00598                         }
00599                         else
00600                         {
00601                                 UERROR("Object %d not found!", ids[i]);
00602                         }
00603                 }
00604         }
00605         else
00606         {
00607                 objectsList = objects_.values();
00608         }
00609 
00610         if(objectsList.size())
00611         {
00612                 sessionModified_ = true;
00613                 int threadCounts = Settings::getGeneral_threads();
00614                 if(threadCounts == 0)
00615                 {
00616                         threadCounts = objectsList.size();
00617                 }
00618 
00619                 QTime time;
00620                 time.start();
00621 
00622                 if(objectsList.size())
00623                 {
00624                         UINFO("Feature extraction from %d objects...", objectsList.size());
00625                         for(int i=0; i<objectsList.size(); i+=threadCounts)
00626                         {
00627                                 QVector<ExtractFeaturesThread*> threads;
00628                                 for(int k=i; k<i+threadCounts && k<objectsList.size(); ++k)
00629                                 {
00630                                         threads.push_back(new ExtractFeaturesThread(detector_, extractor_, objectsList.at(k)->id(), objectsList.at(k)->image()));
00631                                         threads.back()->start();
00632                                 }
00633 
00634                                 for(int j=0; j<threads.size(); ++j)
00635                                 {
00636                                         threads[j]->wait();
00637 
00638                                         int id = threads[j]->objectId();
00639 
00640                                         objects_.value(id)->setData(threads[j]->keypoints(), threads[j]->descriptors());
00641                                 }
00642                         }
00643                         UINFO("Feature extraction from %d objects... done! (%d ms)", objectsList.size(), time.elapsed());
00644                 }
00645         }
00646         else
00647         {
00648                 UINFO("No objects to update...");
00649         }
00650 }
00651 
00652 void FindObject::clearVocabulary()
00653 {
00654         objectsDescriptors_.clear();
00655         dataRange_.clear();
00656         vocabulary_->clear();
00657 }
00658 
00659 void FindObject::updateVocabulary()
00660 {
00661         clearVocabulary();
00662         int count = 0;
00663         int dim = -1;
00664         int type = -1;
00665         // Get the total size and verify descriptors
00666         QList<ObjSignature*> objectsList = objects_.values();
00667         for(int i=0; i<objectsList.size(); ++i)
00668         {
00669                 if(!objectsList.at(i)->descriptors().empty())
00670                 {
00671                         if(dim >= 0 && objectsList.at(i)->descriptors().cols != dim)
00672                         {
00673                                 UERROR("Object descriptors do not all have the same size! All loaded objects "
00674                                                 "must have descriptors of the same size (from the same descriptor extractor).");
00675                                 return;
00676                         }
00677                         dim = objectsList.at(i)->descriptors().cols;
00678                         if(type >= 0 && objectsList.at(i)->descriptors().type() != type)
00679                         {
00680                                 UERROR("Object descriptors do not all have the same type! All loaded objects "
00681                                                 "must have been processed by the same descriptor extractor.");
00682                                 return;
00683                         }
00684                         type = objectsList.at(i)->descriptors().type();
00685                         count += objectsList.at(i)->descriptors().rows;
00686                 }
00687         }
00688 
00689         // Copy data
00690         if(count)
00691         {
00692                 UINFO("Updating global descriptors matrix: Objects=%d, total descriptors=%d, dim=%d, type=%d",
00693                                 (int)objects_.size(), count, dim, type);
00694                 if(Settings::getGeneral_invertedSearch() || Settings::getGeneral_threads() == 1)
00695                 {
00696                         // With inverted search or a single thread, put all descriptors in the same cv::Mat
00697                         objectsDescriptors_.insert(0, cv::Mat(count, dim, type));
00698                         int row = 0;
00699                         for(int i=0; i<objectsList.size(); ++i)
00700                         {
00701                                 if(objectsList.at(i)->descriptors().rows)
00702                                 {
00703                                         cv::Mat dest(objectsDescriptors_.begin().value(), cv::Range(row, row+objectsList.at(i)->descriptors().rows));
00704                                         objectsList.at(i)->descriptors().copyTo(dest);
00705                                         row += objectsList.at(i)->descriptors().rows;
00706                                         // dataRange_ contains the upper bound for each
00707                                         // object (the row index of its last descriptor in the
00708                                         // global object descriptors matrix)
00709                                         if(objectsList.at(i)->descriptors().rows)
00710                                         {
00711                                                 dataRange_.insert(row-1, objectsList.at(i)->id());
00712                                         }
00713                                 }
00714                         }
00715 
00716                         if(Settings::getGeneral_invertedSearch())
00717                         {
00718                                 sessionModified_ = true;
00719                                 QTime time;
00720                                 time.start();
00721                                 bool incremental = Settings::getGeneral_vocabularyIncremental();
00722                                 if(incremental)
00723                                 {
00724                                         UINFO("Creating incremental vocabulary...");
00725                                 }
00726                                 else
00727                                 {
00728                                         UINFO("Creating vocabulary...");
00729                                 }
00730                                 QTime localTime;
00731                                 localTime.start();
00732                                 int updateVocabularyMinWords = Settings::getGeneral_vocabularyUpdateMinWords();
00733                                 int addedWords = 0;
00734                                 for(int i=0; i<objectsList.size(); ++i)
00735                                 {
00736                                         QMultiMap<int, int> words = vocabulary_->addWords(objectsList[i]->descriptors(), objectsList.at(i)->id(), incremental);
00737                                         objectsList[i]->setWords(words);
00738                                         addedWords += words.uniqueKeys().size();
00739                                         bool updated = false;
00740                                         if(incremental && addedWords && addedWords >= updateVocabularyMinWords)
00741                                         {
00742                                                 vocabulary_->update();
00743                                                 addedWords = 0;
00744                                                 updated = true;
00745                                         }
00746                                         UINFO("Object %d, %d words from %d descriptors (%d words, %d ms) %s",
00747                                                         objectsList[i]->id(),
00748                                                         words.uniqueKeys().size(),
00749                                                         objectsList[i]->descriptors().rows,
00750                                                         vocabulary_->size(),
00751                                                         localTime.restart(),
00752                                                         updated?"updated":"");
00753                                 }
00754                                 if(addedWords)
00755                                 {
00756                                         vocabulary_->update();
00757                                 }
00758 
00759                                 if(incremental)
00760                                 {
00761                                         UINFO("Creating incremental vocabulary... done! size=%d (%d ms)", vocabulary_->size(), time.elapsed());
00762                                 }
00763                                 else
00764                                 {
00765                                         UINFO("Creating vocabulary... done! size=%d (%d ms)", vocabulary_->size(), time.elapsed());
00766                                 }
00767                         }
00768                 }
00769                 else
00770                 {
00771                         for(int i=0; i<objectsList.size(); ++i)
00772                         {
00773                                 objectsDescriptors_.insert(objectsList.at(i)->id(), objectsList.at(i)->descriptors());
00774                         }
00775                 }
00776         }
00777 }
00778 
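// Illustrative sketch (editor's addition, not part of the original library): how the
// dataRange_ map filled above is meant to be queried. Each entry maps the row index of
// an object's last descriptor in the global matrix to the object id, so lowerBound(row)
// returns the object owning that row. FindObject::detect() below does the same lookup
// when inverted search is disabled.
static int exampleObjectIdFromGlobalRow(const QMap<int, int> & dataRange, int globalRow)
{
        QMap<int, int>::const_iterator iter = dataRange.lowerBound(globalRow);
        return iter == dataRange.constEnd() ? -1 : iter.value();
}
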
00779 class SearchThread: public QThread
00780 {
00781 public:
00782         SearchThread(Vocabulary * vocabulary, int objectId, const cv::Mat * descriptors, const QMultiMap<int, int> * sceneWords) :
00783                 vocabulary_(vocabulary),
00784                 objectId_(objectId),
00785                 descriptors_(descriptors),
00786                 sceneWords_(sceneWords),
00787                 minMatchedDistance_(-1.0f),
00788                 maxMatchedDistance_(-1.0f)
00789         {
00790                 UASSERT(descriptors);
00791         }
00792         virtual ~SearchThread() {}
00793 
00794         int getObjectId() const {return objectId_;}
00795         float getMinMatchedDistance() const {return minMatchedDistance_;}
00796         float getMaxMatchedDistance() const {return maxMatchedDistance_;}
00797         const QMultiMap<int, int> & getMatches() const {return matches_;}
00798 
00799 protected:
00800         virtual void run()
00801         {
00802                 //QTime time;
00803                 //time.start();
00804 
00805                 cv::Mat results;
00806                 cv::Mat dists;
00807 
00808                 //match objects to scene
00809                 int k = Settings::getNearestNeighbor_3nndrRatioUsed()?2:1;
00810                 results = cv::Mat(descriptors_->rows, k, CV_32SC1); // results index
00811                 dists = cv::Mat(descriptors_->rows, k, CV_32FC1); // Distance results are CV_32FC1
00812                 vocabulary_->search(*descriptors_, results, dists, k);
00813 
00814                 // PROCESS RESULTS
00815                 // Get all matches for each object
00816                 for(int i=0; i<dists.rows; ++i)
00817                 {
00818                         // Check if this descriptor matches with those of the objects
00819                         bool matched = false;
00820 
00821                         if(Settings::getNearestNeighbor_3nndrRatioUsed() &&
00822                            dists.at<float>(i,0) <= Settings::getNearestNeighbor_4nndrRatio() * dists.at<float>(i,1))
00823                         {
00824                                 matched = true;
00825                         }
00826                         if((matched || !Settings::getNearestNeighbor_3nndrRatioUsed()) &&
00827                            Settings::getNearestNeighbor_5minDistanceUsed())
00828                         {
00829                                 if(dists.at<float>(i,0) <= Settings::getNearestNeighbor_6minDistance())
00830                                 {
00831                                         matched = true;
00832                                 }
00833                                 else
00834                                 {
00835                                         matched = false;
00836                                 }
00837                         }
00838                         if(!matched && !Settings::getNearestNeighbor_3nndrRatioUsed() && !Settings::getNearestNeighbor_5minDistanceUsed())
00839                         {
00840                                 matched = true; // no criterion, match to the nearest descriptor
00841                         }
00842                         if(minMatchedDistance_ == -1 || minMatchedDistance_ > dists.at<float>(i,0))
00843                         {
00844                                 minMatchedDistance_ = dists.at<float>(i,0);
00845                         }
00846                         if(maxMatchedDistance_ == -1 || maxMatchedDistance_ < dists.at<float>(i,0))
00847                         {
00848                                 maxMatchedDistance_ = dists.at<float>(i,0);
00849                         }
00850 
00851                         int wordId = results.at<int>(i,0);
00852                         if(matched && sceneWords_->count(wordId) == 1)
00853                         {
00854                                 matches_.insert(i, sceneWords_->value(wordId));
00855                                 matches_.insert(i, results.at<int>(i,0));
00856                         }
00857                 }
00858 
00859                 //UINFO("Search Object %d time=%d ms", objectIndex_, time.elapsed());
00860         }
00861 private:
00862         Vocabulary * vocabulary_; // would be const but flann search() method is not const!?
00863         int objectId_;
00864         const cv::Mat * descriptors_;
00865         const QMultiMap<int, int> * sceneWords_; // <word id, keypoint indexes>
00866 
00867         float minMatchedDistance_;
00868         float maxMatchedDistance_;
00869         QMultiMap<int, int> matches_;
00870 };
00871 
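// Illustrative sketch (editor's addition, not part of the original library): the
// nearest-neighbour distance ratio (NNDR) criterion applied in SearchThread::run()
// above and in FindObject::detect() below. dist1 is the distance to the best match,
// dist2 the distance to the second-best match.
static bool exampleNndrAccepted(float dist1, float dist2)
{
        return dist1 <= Settings::getNearestNeighbor_4nndrRatio() * dist2;
}
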
00872 class HomographyThread: public QThread
00873 {
00874 public:
00875         HomographyThread(
00876                         const QMultiMap<int, int> * matches, // <object, scene>
00877                         int objectId,
00878                         const std::vector<cv::KeyPoint> * kptsA,
00879                         const std::vector<cv::KeyPoint> * kptsB) :
00880                                 matches_(matches),
00881                                 objectId_(objectId),
00882                                 kptsA_(kptsA),
00883                                 kptsB_(kptsB),
00884                                 code_(DetectionInfo::kRejectedUndef)
00885         {
00886                 UASSERT(matches && kptsA && kptsB);
00887         }
00888         virtual ~HomographyThread() {}
00889 
00890         int getObjectId() const {return objectId_;}
00891         const std::vector<int> & getIndexesA() const {return indexesA_;}
00892         const std::vector<int> & getIndexesB() const {return indexesB_;}
00893         const std::vector<uchar> & getOutlierMask() const {return outlierMask_;}
00894         QMultiMap<int, int> getInliers() const {return inliers_;}
00895         QMultiMap<int, int> getOutliers() const {return outliers_;}
00896         const cv::Mat & getHomography() const {return h_;}
00897         DetectionInfo::RejectedCode rejectedCode() const {return code_;}
00898 
00899 protected:
00900         virtual void run()
00901         {
00902                 //QTime time;
00903                 //time.start();
00904 
00905                 std::vector<cv::Point2f> mpts_1(matches_->size());
00906                 std::vector<cv::Point2f> mpts_2(matches_->size());
00907                 indexesA_.resize(matches_->size());
00908                 indexesB_.resize(matches_->size());
00909 
00910                 int j=0;
00911                 for(QMultiMap<int, int>::const_iterator iter = matches_->begin(); iter!=matches_->end(); ++iter)
00912                 {
00913                         mpts_1[j] = kptsA_->at(iter.key()).pt;
00914                         indexesA_[j] = iter.key();
00915                         mpts_2[j] = kptsB_->at(iter.value()).pt;
00916                         indexesB_[j] = iter.value();
00917                         ++j;
00918                 }
00919 
00920                 if((int)mpts_1.size() >= Settings::getHomography_minimumInliers())
00921                 {
00922                         h_ = findHomography(mpts_1,
00923                                         mpts_2,
00924                                         Settings::getHomographyMethod(),
00925                                         Settings::getHomography_ransacReprojThr(),
00926                                         outlierMask_);
00927 
00928                         for(unsigned int k=0; k<mpts_1.size();++k)
00929                         {
00930                                 if(outlierMask_.at(k))
00931                                 {
00932                                         inliers_.insert(indexesA_[k], indexesB_[k]);
00933                                 }
00934                                 else
00935                                 {
00936                                         outliers_.insert(indexesA_[k], indexesB_[k]);
00937                                 }
00938                         }
00939 
00940                         if(inliers_.size() == (int)outlierMask_.size() && !h_.empty())
00941                         {
00942                                 if(Settings::getHomography_ignoreWhenAllInliers() || cv::countNonZero(h_) < 1)
00943                                 {
00944                                         // ignore homography when all features are inliers
00945                                         h_ = cv::Mat();
00946                                         code_ = DetectionInfo::kRejectedAllInliers;
00947                                 }
00948                         }
00949                 }
00950                 else
00951                 {
00952                         code_ = DetectionInfo::kRejectedLowMatches;
00953                 }
00954 
00955                 //UINFO("Homography Object %d time=%d ms", objectIndex_, time.elapsed());
00956         }
00957 private:
00958         const QMultiMap<int, int> * matches_;
00959         int objectId_;
00960         const std::vector<cv::KeyPoint> * kptsA_;
00961         const std::vector<cv::KeyPoint> * kptsB_;
00962         DetectionInfo::RejectedCode code_;
00963 
00964         std::vector<int> indexesA_;
00965         std::vector<int> indexesB_;
00966         std::vector<uchar> outlierMask_;
00967         QMultiMap<int, int> inliers_;
00968         QMultiMap<int, int> outliers_;
00969         cv::Mat h_;
00970 };
00971 
00972 void FindObject::detect(const cv::Mat & image)
00973 {
00974         QTime time;
00975         time.start();
00976         DetectionInfo info;
00977         this->detect(image, info);
00978         if(info.objDetected_.size() > 0 || Settings::getGeneral_sendNoObjDetectedEvents())
00979         {
00980                 Q_EMIT objectsFound(info);
00981         }
00982 
00983         if(info.objDetected_.size() > 1)
00984         {
00985                 UINFO("(%s) %d objects detected! (%d ms)",
00986                                 QTime::currentTime().toString("HH:mm:ss.zzz").toStdString().c_str(),
00987                                 (int)info.objDetected_.size(),
00988                                 time.elapsed());
00989         }
00990         else if(info.objDetected_.size() == 1)
00991         {
00992                 UINFO("(%s) Object %d detected! (%d ms)",
00993                                 QTime::currentTime().toString("HH:mm:ss.zzz").toStdString().c_str(),
00994                                 (int)info.objDetected_.begin().key(),
00995                                 time.elapsed());
00996         }
00997         else if(Settings::getGeneral_sendNoObjDetectedEvents())
00998         {
00999                 UINFO("(%s) No objects detected. (%d ms)",
01000                                 QTime::currentTime().toString("HH:mm:ss.zzz").toStdString().c_str(),
01001                                 time.elapsed());
01002         }
01003 }
01004 
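// Illustrative end-to-end sketch (editor's addition, not part of the original
// library): add one object, rebuild features and vocabulary, then run the
// synchronous overload of detect() and check whether the object was found.
// The images are assumed to be already loaded (grayscale works best).
static bool exampleDetectOnce(const cv::Mat & objectImage, const cv::Mat & sceneImage)
{
        FindObject finder(0);
        finder.addObjectAndUpdate(objectImage, 0, "object"); // id 0 -> auto-generated id
        DetectionInfo info;
        finder.detect(sceneImage, info);
        return info.objDetected_.size() > 0;
}
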
01005 bool FindObject::detect(const cv::Mat & image, find_object::DetectionInfo & info)
01006 {
01007         QTime totalTime;
01008         totalTime.start();
01009 
01010         // reset statistics
01011         info = DetectionInfo();
01012 
01013         bool success = false;
01014         if(!image.empty())
01015         {
01016                 //Convert to grayscale
01017                 cv::Mat grayscaleImg;
01018                 if(image.channels() != 1 || image.depth() != CV_8U)
01019                 {
01020                         cv::cvtColor(image, grayscaleImg, cv::COLOR_BGR2GRAY);
01021                 }
01022                 else
01023                 {
01024                         grayscaleImg =  image;
01025                 }
01026 
01027                 // DETECT FEATURES AND EXTRACT DESCRIPTORS
01028                 UDEBUG("DETECT FEATURES AND EXTRACT DESCRIPTORS FROM THE SCENE");
01029                 ExtractFeaturesThread extractThread(detector_, extractor_, -1, grayscaleImg);
01030                 extractThread.start();
01031                 extractThread.wait();
01032                 info.sceneKeypoints_ = extractThread.keypoints();
01033                 info.sceneDescriptors_ = extractThread.descriptors();
01034                 info.timeStamps_.insert(DetectionInfo::kTimeKeypointDetection, extractThread.timeDetection());
01035                 info.timeStamps_.insert(DetectionInfo::kTimeDescriptorExtraction, extractThread.timeExtraction());
01036                 info.timeStamps_.insert(DetectionInfo::kTimeSkewAffine, extractThread.timeSkewAffine());
01037 
01038                 bool consistentNNData = (vocabulary_->size()!=0 && vocabulary_->wordToObjects().begin().value()!=-1 && Settings::getGeneral_invertedSearch()) ||
01039                                                                 ((vocabulary_->size()==0 || vocabulary_->wordToObjects().begin().value()==-1) && !Settings::getGeneral_invertedSearch());
01040 
01041                 bool descriptorsValid = !Settings::getGeneral_invertedSearch() &&
01042                                                                 !objectsDescriptors_.empty() &&
01043                                                                 objectsDescriptors_.begin().value().cols == info.sceneDescriptors_.cols &&
01044                                                                 objectsDescriptors_.begin().value().type() == info.sceneDescriptors_.type();
01045 
01046                 bool vocabularyValid = Settings::getGeneral_invertedSearch() &&
01047                                                                 vocabulary_->size() &&
01048                                                                 !vocabulary_->indexedDescriptors().empty() &&
01049                                                                 vocabulary_->indexedDescriptors().cols == info.sceneDescriptors_.cols &&
01050                                                                 vocabulary_->indexedDescriptors().type() == info.sceneDescriptors_.type();
01051 
01052                 // COMPARE
01053                 UDEBUG("COMPARE");
01054                 if((descriptorsValid || vocabularyValid) &&
01055                         info.sceneKeypoints_.size() &&
01056                     consistentNNData)
01057                 {
01058                         success = true;
01059                         QTime time;
01060                         time.start();
01061 
01062                         QMultiMap<int, int> words;
01063 
01064                         if(!Settings::getGeneral_invertedSearch())
01065                         {
01066                                 // CREATE INDEX for the scene
01067                                 UDEBUG("CREATE INDEX FOR THE SCENE");
01068                                 vocabulary_->clear();
01069                                 words = vocabulary_->addWords(info.sceneDescriptors_, -1, Settings::getGeneral_vocabularyIncremental());
01070                                 if(!Settings::getGeneral_vocabularyIncremental())
01071                                 {
01072                                         vocabulary_->update();
01073                                 }
01074                                 info.timeStamps_.insert(DetectionInfo::kTimeIndexing, time.restart());
01075                         }
01076 
01077                         for(QMap<int, ObjSignature*>::iterator iter=objects_.begin(); iter!=objects_.end(); ++iter)
01078                         {
01079                                 info.matches_.insert(iter.key(), QMultiMap<int, int>());
01080                         }
01081 
01082                         if(Settings::getGeneral_invertedSearch() || Settings::getGeneral_threads() == 1)
01083                         {
01084                                 cv::Mat results;
01085                                 cv::Mat dists;
01086                                 // DO NEAREST NEIGHBOR
01087                                 UDEBUG("DO NEAREST NEIGHBOR");
01088                                 int k = Settings::getNearestNeighbor_3nndrRatioUsed()?2:1;
01089                                 if(!Settings::getGeneral_invertedSearch())
01090                                 {
01091                                         //match objects to scene
01092                                         results = cv::Mat(objectsDescriptors_.begin().value().rows, k, CV_32SC1); // results index
01093                                         dists = cv::Mat(objectsDescriptors_.begin().value().rows, k, CV_32FC1); // Distance results are CV_32FC1
01094                                         vocabulary_->search(objectsDescriptors_.begin().value(), results, dists, k);
01095                                 }
01096                                 else
01097                                 {
01098                                         //match scene to objects
01099                                         results = cv::Mat(info.sceneDescriptors_.rows, k, CV_32SC1); // results index
01100                                         dists = cv::Mat(info.sceneDescriptors_.rows, k, CV_32FC1); // Distance results are CV_32FC1
01101                                         vocabulary_->search(info.sceneDescriptors_, results, dists, k);
01102                                 }
01103 
01104                                 // PROCESS RESULTS
01105                                 UDEBUG("PROCESS RESULTS");
01106                                 // Get all matches for each object
01107                                 for(int i=0; i<dists.rows; ++i)
01108                                 {
01109                                         // Check if this descriptor matches with those of the objects
01110                                         bool matched = false;
01111 
01112                                         if(Settings::getNearestNeighbor_3nndrRatioUsed() &&
01113                                            dists.at<float>(i,0) <= Settings::getNearestNeighbor_4nndrRatio() * dists.at<float>(i,1))
01114                                         {
01115                                                 matched = true;
01116                                         }
01117                                         if((matched || !Settings::getNearestNeighbor_3nndrRatioUsed()) &&
01118                                            Settings::getNearestNeighbor_5minDistanceUsed())
01119                                         {
01120                                                 if(dists.at<float>(i,0) <= Settings::getNearestNeighbor_6minDistance())
01121                                                 {
01122                                                         matched = true;
01123                                                 }
01124                                                 else
01125                                                 {
01126                                                         matched = false;
01127                                                 }
01128                                         }
01129                                         if(!matched &&
01130                                            !Settings::getNearestNeighbor_3nndrRatioUsed() &&
01131                                            !Settings::getNearestNeighbor_5minDistanceUsed() &&
01132                                            dists.at<float>(i,0) >= 0.0f)
01133                                         {
01134                                                 matched = true; // no criterion, match to the nearest descriptor
01135                                         }
01136                                         if(info.minMatchedDistance_ == -1 || info.minMatchedDistance_ > dists.at<float>(i,0))
01137                                         {
01138                                                 info.minMatchedDistance_ = dists.at<float>(i,0);
01139                                         }
01140                                         if(info.maxMatchedDistance_ == -1 || info.maxMatchedDistance_ < dists.at<float>(i,0))
01141                                         {
01142                                                 info.maxMatchedDistance_ = dists.at<float>(i,0);
01143                                         }
01144 
01145                                         if(matched)
01146                                         {
01147                                                 if(Settings::getGeneral_invertedSearch())
01148                                                 {
01149                                                         int wordId = results.at<int>(i,0);
01150                                                         QList<int> objIds = vocabulary_->wordToObjects().values(wordId);
01151                                                         for(int j=0; j<objIds.size(); ++j)
01152                                                         {
01153                                                                 // just add unique matches
01154                                                                 if(vocabulary_->wordToObjects().count(wordId, objIds[j]) == 1)
01155                                                                 {
01156                                                                         info.matches_.find(objIds[j]).value().insert(objects_.value(objIds[j])->words().value(wordId), i);
01157                                                                 }
01158                                                         }
01159                                                 }
01160                                                 else
01161                                                 {
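                                                              // Direct search: the descriptors of all objects are stacked in one
                                                              // matrix, so dataRange_ (last descriptor row of each object -> object id)
                                                              // maps the global row i back to its object id and local descriptor index.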
01162                                                         QMap<int, int>::iterator iter = dataRange_.lowerBound(i);
01163                                                         int objectId = iter.value();
01164                                                         int firstObjectDescriptorIndex = (iter == dataRange_.begin())?0:(--iter).key()+1;
01165                                                         int objectDescriptorIndex = i - firstObjectDescriptorIndex;
01166 
01167                                                         int wordId = results.at<int>(i,0);
01168                                                         if(words.count(wordId) == 1)
01169                                                         {
01170                                                                 info.matches_.find(objectId).value().insert(objectDescriptorIndex, words.value(wordId));
01171                                                         }
01172                                                 }
01173                                         }
01174                                 }
01175                         }
01176                         else
01177                         {
01178                                 // Multi-threaded, match objects to scene
01179                                 UDEBUG("MULTI-THREADED, MATCH OBJECTS TO SCENE");
01180                                 int threadCounts = Settings::getGeneral_threads();
01181                                 if(threadCounts == 0)
01182                                 {
01183                                         threadCounts = (int)objectsDescriptors_.size();
01184                                 }
01185 
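                                      // Objects are matched in batches of threadCounts SearchThread instances
                                      // running in parallel; with the threads setting at 0, one thread per
                                      // object is launched.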
01186                                 QList<int> objectsDescriptorsId = objectsDescriptors_.keys();
01187                                 QList<cv::Mat> objectsDescriptorsMat = objectsDescriptors_.values();
01188                                 for(int j=0; j<objectsDescriptorsMat.size(); j+=threadCounts)
01189                                 {
01190                                         QVector<SearchThread*> threads;
01191 
01192                                         for(int k=j; k<j+threadCounts && k<objectsDescriptorsMat.size(); ++k)
01193                                         {
01194                                                 threads.push_back(new SearchThread(vocabulary_, objectsDescriptorsId[k], &objectsDescriptorsMat[k], &words));
01195                                                 threads.back()->start();
01196                                         }
01197 
01198                                         for(int k=0; k<threads.size(); ++k)
01199                                         {
01200                                                 threads[k]->wait();
01201                                                 info.matches_[threads[k]->getObjectId()] = threads[k]->getMatches();
01202 
01203                                                 if(info.minMatchedDistance_ == -1 || info.minMatchedDistance_ > threads[k]->getMinMatchedDistance())
01204                                                 {
01205                                                         info.minMatchedDistance_ = threads[k]->getMinMatchedDistance();
01206                                                 }
01207                                                 if(info.maxMatchedDistance_ == -1 || info.maxMatchedDistance_ < threads[k]->getMaxMatchedDistance())
01208                                                 {
01209                                                         info.maxMatchedDistance_ = threads[k]->getMaxMatchedDistance();
01210                                                 }
01211                                                 delete threads[k];
01212                                         }
01213 
01214                                 }
01215                         }
01216 
01217                         info.timeStamps_.insert(DetectionInfo::kTimeMatching, time.restart());
01218 
01219                         // Homographies
01220                         if(Settings::getHomography_homographyComputed())
01221                         {
01222                                 // HOMOGRAPHY
01223                                 UDEBUG("COMPUTE HOMOGRAPHY");
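                                      // For each object with matches, a HomographyThread estimates the
                                      // object-to-scene transform; the result is then validated below
                                      // (minimum inliers, projected-corner bounds, corner angles,
                                      // multi-detection superposition and corner visibility).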
01224                                 int threadCounts = Settings::getGeneral_threads();
01225                                 if(threadCounts == 0)
01226                                 {
01227                                         threadCounts = info.matches_.size();
01228                                 }
01229                                 QList<int> matchesId = info.matches_.keys();
01230                                 QList<QMultiMap<int, int> > matchesList = info.matches_.values();
01231                                 for(int i=0; i<matchesList.size(); i+=threadCounts)
01232                                 {
01233                                         QVector<HomographyThread*> threads;
01234 
01235                                         for(int k=i; k<i+threadCounts && k<matchesList.size(); ++k)
01236                                         {
01237                                                 int objectId = matchesId[k];
01238                                                 threads.push_back(new HomographyThread(&matchesList[k], objectId, &objects_.value(objectId)->keypoints(), &info.sceneKeypoints_));
01239                                                 threads.back()->start();
01240                                         }
01241 
01242                                         for(int j=0; j<threads.size(); ++j)
01243                                         {
01244                                                 threads[j]->wait();
01245 
01246                                                 int id = threads[j]->getObjectId();
01247                                                 QTransform hTransform;
01248                                                 DetectionInfo::RejectedCode code = DetectionInfo::kRejectedUndef;
01249                                                 if(threads[j]->getHomography().empty())
01250                                                 {
01251                                                         code = threads[j]->rejectedCode();
01252                                                 }
01253                                                 if(code == DetectionInfo::kRejectedUndef &&
01254                                                    threads[j]->getInliers().size() < Settings::getHomography_minimumInliers()   )
01255                                                 {
01256                                                         code = DetectionInfo::kRejectedLowInliers;
01257                                                 }
01258                                                 if(code == DetectionInfo::kRejectedUndef)
01259                                                 {
01260                                                         const cv::Mat & H = threads[j]->getHomography();
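                                                              // QTransform maps row vectors (x y 1) * M while OpenCV's H maps column
                                                              // vectors, so H is transposed when filling the QTransform.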
01261                                                         hTransform = QTransform(
01262                                                                 H.at<double>(0,0), H.at<double>(1,0), H.at<double>(2,0),
01263                                                                 H.at<double>(0,1), H.at<double>(1,1), H.at<double>(2,1),
01264                                                                 H.at<double>(0,2), H.at<double>(1,2), H.at<double>(2,2));
01265 
01266                                                         // Is the homography valid?
01267                                                         // QGraphicsItem::mapToScene() is used here instead of QTransform::map()
01268                                                         // because an invalid homography makes QGraphicsItem produce huge
01269                                                         // coordinates, which makes the bad transform easy to detect.
01270                                                         QRectF objectRect = objects_.value(id)->rect();
01271                                                         QGraphicsRectItem item(objectRect);
01272                                                         item.setTransform(hTransform);
01273                                                         QPolygonF rectH = item.mapToScene(item.rect());
01274 
01275                                                         // If any corner is projected outside roughly twice the scene (and object) extent, the homography is invalid.
01276                                                         for(int p=0; p<rectH.size(); ++p)
01277                                                         {
01278                                                                 if((rectH.at(p).x() < -image.cols && rectH.at(p).x() < -objectRect.width()) ||
01279                                                                    (rectH.at(p).x() > image.cols*2  && rectH.at(p).x() > objectRect.width()*2) ||
01280                                                                    (rectH.at(p).y() < -image.rows  && rectH.at(p).y() < -objectRect.height()) ||
01281                                                                    (rectH.at(p).y() > image.rows*2  && rectH.at(p).y() > objectRect.height()*2))
01282                                                                 {
01283                                                                         code = DetectionInfo::kRejectedNotValid;
01284                                                                         break;
01285                                                                 }
01286                                                         }
01287 
01288                                                         // angle
01289                                                         if(code == DetectionInfo::kRejectedUndef &&
01290                                                            Settings::getHomography_minAngle() > 0)
01291                                                         {
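                                                                      // Reject near-degenerate projections: any corner angle below the
                                                                      // minimum angle, or above 180 degrees minus it, invalidates the detection.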
01292                                                                 for(int a=0; a<rectH.size(); ++a)
01293                                                                 {
01294                                                                         // Angle between the two edges meeting at this corner
01295                                                                         QLineF ab(rectH.at(a).x(), rectH.at(a).y(), rectH.at((a+1)%4).x(), rectH.at((a+1)%4).y());
01296                                                                         QLineF cb(rectH.at((a+1)%4).x(), rectH.at((a+1)%4).y(), rectH.at((a+2)%4).x(), rectH.at((a+2)%4).y());
01297                                                                         float angle = ab.angle(cb);
01298                                                                         float minAngle = (float)Settings::getHomography_minAngle();
01299                                                                         if(angle < minAngle ||
01300                                                                            angle > 180.0-minAngle)
01301                                                                         {
01302                                                                                 code = DetectionInfo::kRejectedByAngle;
01303                                                                                 break;
01304                                                                         }
01305                                                                 }
01306                                                         }
01307 
01308                                                         // multi detection
01309                                                         if(code == DetectionInfo::kRejectedUndef &&
01310                                                            Settings::getGeneral_multiDetection())
01311                                                         {
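                                                                      // Multi-detection: the outliers are re-queued to look for another
                                                                      // instance of the same object, and a detection closer than the
                                                                      // multi-detection radius to a previously accepted one is rejected
                                                                      // as superposed.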
01312                                                                 int distance = Settings::getGeneral_multiDetectionRadius(); // in pixels
01313                                                                 // Get the outliers and recompute homography with them
01314                                                                 matchesList.push_back(threads[j]->getOutliers());
01315                                                                 matchesId.push_back(id);
01316 
01317                                                                 // Compute the distance to previously detected instances of the same object
01318                                                                 QMultiMap<int, QTransform>::iterator objIter = info.objDetected_.find(id);
01319                                                                 for(;objIter!=info.objDetected_.end() && objIter.key() == id; ++objIter)
01320                                                                 {
01321                                                                         qreal dx = objIter.value().m31() - hTransform.m31();
01322                                                                         qreal dy = objIter.value().m32() - hTransform.m32();
01323                                                                         int d = (int)sqrt(dx*dx + dy*dy);
01324                                                                         if(d < distance)
01325                                                                         {
01326                                                                                 distance = d;
01327                                                                         }
01328                                                                 }
01329 
01330                                                                 if(distance < Settings::getGeneral_multiDetectionRadius())
01331                                                                 {
01332                                                                         code = DetectionInfo::kRejectedSuperposed;
01333                                                                 }
01334                                                         }
01335 
01336                                                         // Corners visible
01337                                                         if(code == DetectionInfo::kRejectedUndef &&
01338                                                            Settings::getHomography_allCornersVisible())
01339                                                         {
01340                                                                 // Verify that all corners are inside the scene
01341                                                                 QRectF sceneRect(0,0,image.cols, image.rows);
01342                                                                 for(int p=0; p<rectH.size(); ++p)
01343                                                                 {
01344                                                                         if(!sceneRect.contains(QPointF(rectH.at(p).x(), rectH.at(p).y())))
01345                                                                         {
01346                                                                                 code = DetectionInfo::kRejectedCornersOutside;
01347                                                                                 break;
01348                                                                         }
01349                                                                 }
01350                                                         }
01351                                                 }
01352 
01353                                                 if(code == DetectionInfo::kRejectedUndef)
01354                                                 {
01355                                                         // Accepted!
01356                                                         //std::cout << "H= " << threads[j]->getHomography() << std::endl;
01357 
01358                                                         info.objDetected_.insert(id, hTransform);
01359                                                         info.objDetectedSizes_.insert(id, objects_.value(id)->rect().size());
01360                                                         info.objDetectedInliers_.insert(id, threads[j]->getInliers());
01361                                                         info.objDetectedOutliers_.insert(id, threads[j]->getOutliers());
01362                                                         info.objDetectedInliersCount_.insert(id, threads[j]->getInliers().size());
01363                                                         info.objDetectedOutliersCount_.insert(id, threads[j]->getOutliers().size());
01364                                                         info.objDetectedFilenames_.insert(id, objects_.value(id)->filename());
01365                                                 }
01366                                                 else
01367                                                 {
01368                                                         // Rejected!
01369                                                         info.rejectedInliers_.insert(id, threads[j]->getInliers());
01370                                                         info.rejectedOutliers_.insert(id, threads[j]->getOutliers());
01371                                                         info.rejectedCodes_.insert(id, code);
01372                                                 }
01373                                         }
01374                                 }
01375                                 info.timeStamps_.insert(DetectionInfo::kTimeHomography, time.restart());
01376                         }
01377                 }
01378                 else if((descriptorsValid || vocabularyValid) && info.sceneKeypoints_.size())
01379                 {
01380                         UWARN("Cannot search, objects must be updated");
01381                 }
01382                 else if(info.sceneKeypoints_.size() == 0)
01383                 {
01384                         // Accept but warn the user
01385                         UWARN("No features detected in the scene!?!");
01386                         success = true;
01387                 }
01388         }
01389 
01390         info.timeStamps_.insert(DetectionInfo::kTimeTotal, totalTime.elapsed());
01391 
01392         return success;
01393 }
01394 
01395 } // namespace find_object

