markerdetector.cpp
00001 /*****************************
00002 Copyright 2011 Rafael Muñoz Salinas. All rights reserved.
00003 
00004 Redistribution and use in source and binary forms, with or without modification, are
00005 permitted provided that the following conditions are met:
00006 
00007    1. Redistributions of source code must retain the above copyright notice, this list of
00008       conditions and the following disclaimer.
00009 
00010    2. Redistributions in binary form must reproduce the above copyright notice, this list
00011       of conditions and the following disclaimer in the documentation and/or other materials
00012       provided with the distribution.
00013 
00014 THIS SOFTWARE IS PROVIDED BY Rafael Muñoz Salinas ''AS IS'' AND ANY EXPRESS OR IMPLIED
00015 WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
00016 FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Rafael Muñoz Salinas OR
00017 CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
00018 CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
00019 SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
00020 ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
00021 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
00022 ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
00023 
00024 The views and conclusions contained in the software and documentation are those of the
00025 authors and should not be interpreted as representing official policies, either expressed
00026 or implied, of Rafael Muñoz Salinas.
00027 ********************************/
00028 #include "markerdetector.h"
00029 #include "subpixelcorner.h"
00030 #include <opencv2/core/core.hpp>
00031 #include <opencv2/features2d/features2d.hpp>
00032 #include <opencv2/imgproc/imgproc.hpp>
00033 #include <opencv2/calib3d/calib3d.hpp>
00034 #include <opencv2/highgui/highgui.hpp>
00035 #include <iostream>
00036 #include <fstream>
00037 #include "arucofidmarkers.h"
00038 #include <valarray>
00039 #include "ar_omp.h"
00040 using namespace std;
00041 using namespace cv;
00042 
00043 namespace aruco {
00044 /************************************
00045  *
00046  *
00047  *
00048  *
00049  ************************************/
00050 MarkerDetector::MarkerDetector() {
00051     _thresMethod = ADPT_THRES;
00052     _thresParam1 = _thresParam2 = 7;
00053     _cornerMethod = LINES;
00054     _useLockedCorners = false;
00055     //         _cornerMethod=SUBPIX;
00056     _thresParam1_range = 0;
00057     _markerWarpSize = 56;
00058     _speed = 0;
00059     markerIdDetector_ptrfunc = aruco::FiducidalMarkers::detect;
00060     _minSize = 0.04;
00061     _maxSize = 0.5;
00062 
00063     _borderDistThres = 0.025; // corners within a border of 2.5% of the image are ignored
00064 }
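
// Illustrative usage sketch (not part of the detector itself): how the defaults set in the
// constructor are typically combined with detect(). setThresholdParams(), CameraParameters::
// readFromXMLFile() and Marker::draw() are taken from the aruco 1.x public headers and should
// be checked against your version; the file name and the 0.04 m marker side are placeholders.
static void exampleDetectUsage(cv::Mat &frame) {
    MarkerDetector detector;
    detector.setThresholdParams(7, 7);     // adaptive threshold window size and constant
    detector.setMinMaxSize(0.04f, 0.5f);   // accepted marker size as a fraction of the image
    CameraParameters camParams;
    camParams.readFromXMLFile("camera.yml"); // placeholder calibration file
    std::vector< Marker > markers;
    detector.detect(frame, markers, camParams, 0.04f /*marker side in meters*/, false);
    for (size_t i = 0; i < markers.size(); i++)
        markers[i].draw(frame, cv::Scalar(0, 0, 255), 2);
}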
00065 /************************************
00066  *
00067  *
00068  *
00069  *
00070  ************************************/
00071 
00072 MarkerDetector::~MarkerDetector() {}
00073 
00074 /************************************
00075  *
00076  *
00077  *
00078  *
00079  ************************************/
00080 void MarkerDetector::setDesiredSpeed(int val) {
00081     if (val < 0)
00082         val = 0;
00083     else if (val > 3)
00084         val = 2;
00085 
00086     _speed = val;
00087     switch (_speed) {
00088 
00089     case 0:
00090         _markerWarpSize = 56;
00091         _cornerMethod = SUBPIX;
00092         break;
00093 
00094     case 1:
00095     case 2:
00096         _markerWarpSize = 28;
00097         _cornerMethod = NONE;
00098         break;
00099     };
00100 }
00101 
00102 /************************************
00103  *
00104  *
00105  *
00106  *
00107  ************************************/
00108 void MarkerDetector::detect(const cv::Mat &input, std::vector< Marker > &detectedMarkers, CameraParameters camParams, float markerSizeMeters,
00109                             bool setYPerpendicular) throw(cv::Exception) {
00110     detect(input, detectedMarkers, camParams.CameraMatrix, camParams.Distorsion, markerSizeMeters, setYPerpendicular);
00111 }
00112 
00113 /***
00114  *
00115  *
00116  **/
00117 void MarkerDetector::enableLockedCornersMethod(bool enable) {
00118     _useLockedCorners = enable;
00119     if (enable)
00120         _cornerMethod = SUBPIX;
00121 }
00122 /************************************
00123  *
00124  * Main detection function. Performs all steps
00125  *
00126  *
00127  ************************************/
00128 void MarkerDetector::detect(const cv::Mat &input, vector< Marker > &detectedMarkers, Mat camMatrix, Mat distCoeff, float markerSizeMeters,
00129                             bool setYPerpendicular) throw(cv::Exception) {
00130 
00131     // convert to grayscale if the input is a 3-channel image
00132     if (input.type() == CV_8UC3)
00133         cv::cvtColor(input, grey, CV_BGR2GRAY);
00134     else
00135         grey = input;
00136     double t1 = cv::getTickCount();
00137     //     cv::cvtColor(grey,_ssImC ,CV_GRAY2BGR); //DELETE
00138 
00139     // clear input data
00140     detectedMarkers.clear();
00141 
00142 
00143     cv::Mat imgToBeThresHolded = grey;
00144     double ThresParam1 = _thresParam1, ThresParam2 = _thresParam2;
00145 
00147     // work simultaneously on a range of values of the first threshold parameter
00148     int n_param1 = 2 * _thresParam1_range + 1;
00149     vector< cv::Mat > thres_images(n_param1);
00150 #pragma omp parallel for
00151     for (int i = 0; i < n_param1; i++) {
00152         double t1 = ThresParam1 - _thresParam1_range + _thresParam1_range * i;
00153         thresHold(_thresMethod, imgToBeThresHolded, thres_images[i], t1, ThresParam2);
00154     }
00155     thres = thres_images[n_param1 / 2];
00156     //
00157 
00158     double t2 = cv::getTickCount();
00159     // find all rectangles in the thresholded image
00160     vector< MarkerCandidate > MarkerCanditates;
00161     detectRectangles(thres_images, MarkerCanditates);
00162 
00163     double t3 = cv::getTickCount();
00164 
00166     vector< vector< Marker > > markers_omp(omp_get_max_threads());
00167     vector< vector< std::vector< cv::Point2f > > > candidates_omp(omp_get_max_threads());
00168 //#pragma omp parallel for
00169     for (int i = 0; i < MarkerCanditates.size(); i++) {
00170         // Find projective homography
00171         Mat canonicalMarker;
00172         bool resW = false;
00173         resW = warp(grey, canonicalMarker, Size(_markerWarpSize, _markerWarpSize), MarkerCanditates[i]);
00174         if (resW) {
00175             int nRotations;
00176             int id = (*markerIdDetector_ptrfunc)(canonicalMarker, nRotations);
00177             if (id != -1) {
00178                 if (_cornerMethod == LINES) // do the LINES refinement before the contour points are lost
00179                     refineCandidateLines(MarkerCanditates[i], camMatrix, distCoeff);
00180                 markers_omp[omp_get_thread_num()].push_back(MarkerCanditates[i]);
00181                 markers_omp[omp_get_thread_num()].back().id = id;
00182                 // sort the points so that they are always in the same order no matter the camera orientation
00183                 std::rotate(markers_omp[omp_get_thread_num()].back().begin(), markers_omp[omp_get_thread_num()].back().begin() + 4 - nRotations,
00184                             markers_omp[omp_get_thread_num()].back().end());
00185             } else
00186                 candidates_omp[omp_get_thread_num()].push_back(MarkerCanditates[i]);
00187         }
00188     }
00189     // unify parallel data
00190     joinVectors(markers_omp, detectedMarkers, true);
00191     joinVectors(candidates_omp, _candidates, true);
00192 
00193 
00194     double t4 = cv::getTickCount();
00195 
00197     if (detectedMarkers.size() > 0 && _cornerMethod != NONE && _cornerMethod != LINES) {
00198 
00199         vector< Point2f > Corners;
00200         for (unsigned int i = 0; i < detectedMarkers.size(); i++)
00201             for (int c = 0; c < 4; c++)
00202                 Corners.push_back(detectedMarkers[i][c]);
00203 
00204         // in case of "locked corners", it is sometimes necessary to
00205         // find the corner in the surroundings of the initially estimated location
00206         if (_useLockedCorners)
00207             findCornerMaxima(Corners, grey, _thresParam1);
00208 
00209         if (_cornerMethod == HARRIS)
00210             findBestCornerInRegion_harris(grey, Corners, 7);
00211         else if (_cornerMethod == SUBPIX) {
00212             cornerSubPix(grey, Corners, cvSize(_thresParam1, _thresParam1), cvSize(-1, -1), cvTermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 8, 0.005));
00213         }
00214         // copy back
00215         for (unsigned int i = 0; i < detectedMarkers.size(); i++)
00216             for (int c = 0; c < 4; c++)
00217                 detectedMarkers[i][c] = Corners[i * 4 + c];
00218     }
00219 
00220     double t5 = cv::getTickCount();
00221 
00222     // sort by id
00223     std::sort(detectedMarkers.begin(), detectedMarkers.end());
00224     // a marker might still be detected twice because of its double border;
00225     // detect and remove these cases
00226     vector< bool > toRemove(detectedMarkers.size(), false);
00227     for (int i = 0; i < int(detectedMarkers.size()) - 1; i++) {
00228         if (detectedMarkers[i].id == detectedMarkers[i + 1].id && !toRemove[i + 1]) {
00229             // deletes the one with smaller perimeter
00230             if (perimeter(detectedMarkers[i]) > perimeter(detectedMarkers[i + 1]))
00231                 toRemove[i + 1] = true;
00232             else
00233                 toRemove[i] = true;
00234         }
00235     }
00236 
00237     // remove the markers marked for deletion
00238     removeElements(detectedMarkers, toRemove);
00239 
00241     if (camMatrix.rows != 0 && markerSizeMeters > 0) {
00242         for (unsigned int i = 0; i < detectedMarkers.size(); i++)
00243             detectedMarkers[i].calculateExtrinsics(markerSizeMeters, camMatrix, distCoeff, setYPerpendicular);
00244     }
00245     double t6 = cv::getTickCount();
00246 
00247 //    cerr << "Threshold: " << (t2 - t1) / double(cv::getTickFrequency()) << endl;
00248 //    cerr << "Rectangles: " << (t3 - t2) / double(cv::getTickFrequency()) << endl;
00249 //    cerr << "Identify: " << (t4 - t3) / double(cv::getTickFrequency()) << endl;
00250 //    cerr << "Subpixel: " << (t5 - t4) / double(cv::getTickFrequency()) << endl;
00251 //    cerr << "Filtering: " << (t6 - t5) / double(cv::getTickFrequency()) << endl;
00252 }
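
// joinVectors() is declared in markerdetector.h and not shown in this file. A plausible
// sketch of the merge it performs (illustration only, under that assumption): each OpenMP
// thread fills its own bucket, indexed by omp_get_thread_num(), and the buckets are
// concatenated afterwards, so the parallel loops above need no locking.
template < typename T >
static void joinVectorsSketch(const std::vector< std::vector< T > > &perThread, std::vector< T > &out, bool clearOut = false) {
    if (clearOut)
        out.clear();
    for (size_t t = 0; t < perThread.size(); t++)
        out.insert(out.end(), perThread[t].begin(), perThread[t].end());
}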
00253 
00254 
00255 /************************************
00256  *
00257  * Crucial step. Detects the rectangular regions of the thresholded image
00258  *
00259  *
00260  ************************************/
00261 void MarkerDetector::detectRectangles(const cv::Mat &thres, vector< std::vector< cv::Point2f > > &MarkerCanditates) {
00262     vector< MarkerCandidate > candidates;
00263     vector< cv::Mat > thres_v;
00264     thres_v.push_back(thres);
00265     detectRectangles(thres_v, candidates);
00266     // create the output
00267     MarkerCanditates.resize(candidates.size());
00268     for (size_t i = 0; i < MarkerCanditates.size(); i++)
00269         MarkerCanditates[i] = candidates[i];
00270 }
00271 
00272 void MarkerDetector::detectRectangles(vector< cv::Mat > &thresImgv, vector< MarkerCandidate > &OutMarkerCanditates) {
00273     //         omp_set_num_threads ( 1 );
00274     vector< vector< MarkerCandidate > > MarkerCanditatesV(omp_get_max_threads());
00275     // calculate the min/max contour sizes
00276     int minSize = _minSize * std::max(thresImgv[0].cols, thresImgv[0].rows) * 4;
00277     int maxSize = _maxSize * std::max(thresImgv[0].cols, thresImgv[0].rows) * 4;
00278 
00279 //         cv::Mat input;
00280 //         cv::cvtColor ( thresImgv[0],input,CV_GRAY2BGR );
00281 
00282 #pragma omp parallel for
00283     for (size_t i = 0; i < thresImgv.size(); i++) {
00284         std::vector< cv::Vec4i > hierarchy2;
00285         std::vector< std::vector< cv::Point > > contours2;
00286         cv::Mat thres2;
00287         thresImgv[i].copyTo(thres2);
00288         cv::findContours(thres2, contours2, hierarchy2, CV_RETR_LIST, CV_CHAIN_APPROX_NONE);
00289 
00290 
00291         vector< Point > approxCurve;
00293         for (unsigned int i = 0; i < contours2.size(); i++) {
00294 
00295             // check whether it is a possible candidate by first checking that it has enough points
00296             if (minSize < contours2[i].size() && contours2[i].size() < maxSize) {
00297                 // approximate to a polygon
00298                 approxPolyDP(contours2[i], approxCurve, double(contours2[i].size()) * 0.05, true);
00299                 //                              drawApproxCurve(copy,approxCurve,Scalar(0,0,255));
00300                 // check that the polygon has 4 points
00301                 if (approxCurve.size() == 4) {
00302                     /*
00303                                             drawContour ( input,contours2[i],Scalar ( 255,0,225 ) );
00304                                             namedWindow ( "input" );
00305                                             imshow ( "input",input );*/
00306                     //                  waitKey(0);
00307                     // and is convex
00308                     if (isContourConvex(Mat(approxCurve))) {
00309                         //                                            drawApproxCurve(input,approxCurve,Scalar(255,0,255));
00310                         // ensure that the distance between consecutive points is large enough
00311                         float minDist = 1e10;
00312                         for (int j = 0; j < 4; j++) {
00313                             float d = std::sqrt((float)(approxCurve[j].x - approxCurve[(j + 1) % 4].x) * (approxCurve[j].x - approxCurve[(j + 1) % 4].x) +
00314                                                 (approxCurve[j].y - approxCurve[(j + 1) % 4].y) * (approxCurve[j].y - approxCurve[(j + 1) % 4].y));
00315                             //          norm(Mat(approxCurve[i]),Mat(approxCurve[(i+1)%4]));
00316                             if (d < minDist)
00317                                 minDist = d;
00318                         }
00319                         // check that distance is not very small
00320                         if (minDist > 10) {
00321                             // add the points
00322                             //        cout<<"ADDED"<<endl;
00323                             MarkerCanditatesV[omp_get_thread_num()].push_back(MarkerCandidate());
00324                             MarkerCanditatesV[omp_get_thread_num()].back().idx = i;
00325                             for (int j = 0; j < 4; j++) {
00326                                 MarkerCanditatesV[omp_get_thread_num()].back().push_back(Point2f(approxCurve[j].x, approxCurve[j].y));
00327                                 MarkerCanditatesV[omp_get_thread_num()].back().contour = contours2[i];
00328                             }
00329                         }
00330                     }
00331                 }
00332             }
00333         }
00334     }
00335     // join all candidates
00336     vector< MarkerCandidate > MarkerCanditates;
00337 
00338     for (size_t i = 0; i < MarkerCanditatesV.size(); i++)
00339         for (size_t j = 0; j < MarkerCanditatesV[i].size(); j++) {
00340             MarkerCanditates.push_back(MarkerCanditatesV[i][j]);
00341             //                 MarkerCanditates.back().draw ( input,cv::Scalar ( 0,0,255 ) );
00342         }
00343 
00345     valarray< bool > swapped(false, MarkerCanditates.size()); // used later
00346     for (unsigned int i = 0; i < MarkerCanditates.size(); i++) {
00347 
00348         // trace a line between the first and second point;
00349         // if the third point is on the right side, then the points are anti-clockwise (see the sketch after this function)
00350         double dx1 = MarkerCanditates[i][1].x - MarkerCanditates[i][0].x;
00351         double dy1 = MarkerCanditates[i][1].y - MarkerCanditates[i][0].y;
00352         double dx2 = MarkerCanditates[i][2].x - MarkerCanditates[i][0].x;
00353         double dy2 = MarkerCanditates[i][2].y - MarkerCanditates[i][0].y;
00354         double o = (dx1 * dy2) - (dy1 * dx2);
00355 
00356         if (o < 0.0) { // if the third point is in the left side, then sort in anti-clockwise order
00357             swap(MarkerCanditates[i][1], MarkerCanditates[i][3]);
00358             swapped[i] = true;
00359             // sort the contour points
00360             //              reverse(MarkerCanditates[i].contour.begin(),MarkerCanditates[i].contour.end());//????
00361         }
00362     }
00363 
00365     // first detect candidates to be removed
00366 
00367     vector< vector< pair< int, int > > > TooNearCandidates_omp(omp_get_max_threads());
00368 #pragma omp parallel for
00369     for (unsigned int i = 0; i < MarkerCanditates.size(); i++) {
00370         //      cout<<"Marker i="<<i<<MarkerCanditates[i]<<endl;
00371         // calculate the average distance of each corner to the nearest corner of the other marker candidate
00372         for (unsigned int j = i + 1; j < MarkerCanditates.size(); j++) {
00373             valarray< float > vdist(4);
00374             for (int c = 0; c < 4; c++)
00375                 vdist[c] = sqrt((MarkerCanditates[i][c].x - MarkerCanditates[j][c].x) * (MarkerCanditates[i][c].x - MarkerCanditates[j][c].x) +
00376                                 (MarkerCanditates[i][c].y - MarkerCanditates[j][c].y) * (MarkerCanditates[i][c].y - MarkerCanditates[j][c].y));
00377             //                 dist/=4;
00378             // if distance is too small
00379             if (vdist[0] < 6 && vdist[1] < 6 && vdist[2] < 6 && vdist[3] < 6) {
00380                 TooNearCandidates_omp[omp_get_thread_num()].push_back(pair< int, int >(i, j));
00381             }
00382         }
00383     }
00384     // join
00385     vector< pair< int, int > > TooNearCandidates;
00386     joinVectors(TooNearCandidates_omp, TooNearCandidates);
00387     // mark for removal the element of the pair with the smaller perimeter
00388     valarray< bool > toRemove(false, MarkerCanditates.size());
00389     for (unsigned int i = 0; i < TooNearCandidates.size(); i++) {
00390         if (perimeter(MarkerCanditates[TooNearCandidates[i].first]) > perimeter(MarkerCanditates[TooNearCandidates[i].second]))
00391             toRemove[TooNearCandidates[i].second] = true;
00392         else
00393             toRemove[TooNearCandidates[i].first] = true;
00394     }
00395 
00396     // remove the invalid ones
00397     // finally, assign to the remaining candidates the contour
00398     OutMarkerCanditates.reserve(MarkerCanditates.size());
00399     for (size_t i = 0; i < MarkerCanditates.size(); i++) {
00400         if (!toRemove[i]) {
00401             OutMarkerCanditates.push_back(MarkerCanditates[i]);
00402             //                 OutMarkerCanditates.back().contour=contours2[ MarkerCanditates[i].idx];
00403             if (swapped[i]) // if the corners were swapped, the contour points must be reversed here so that they keep the same order
00404                 reverse(OutMarkerCanditates.back().contour.begin(), OutMarkerCanditates.back().contour.end()); //????
00405         }
00406     }
00407     /*
00408             for ( size_t i=0; i<OutMarkerCanditates.size(); i++ )
00409                     OutMarkerCanditates[i].draw ( input,cv::Scalar ( 124,  255,125 ) );
00410 
00411 
00412             namedWindow ( "input" );
00413             imshow ( "input",input );*/
00414 }
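
// Illustrative sketch of the corner-ordering test used above (not used by the class): the
// sign of the 2D cross product (p1-p0) x (p2-p0) tells on which side of the segment p0->p1
// the third corner lies. For p0=(0,0), p1=(1,0), p2=(1,1) it is 1*1 - 0*1 = 1; when the
// value is negative, detectRectangles() swaps corners 1 and 3 so that every candidate ends
// up with the same winding.
static double cornerOrderingSignSketch(const cv::Point2f &p0, const cv::Point2f &p1, const cv::Point2f &p2) {
    double dx1 = p1.x - p0.x, dy1 = p1.y - p0.y;
    double dx2 = p2.x - p0.x, dy2 = p2.y - p0.y;
    return dx1 * dy2 - dy1 * dx2; // same expression as the variable 'o' above
}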
00415 
00416 /************************************
00417  *
00418  *
00419  *
00420  *
00421  ************************************/
00422 void MarkerDetector::thresHold(int method, const Mat &grey, Mat &out, double param1, double param2) throw(cv::Exception) {
00423 
00424     if (param1 == -1)
00425         param1 = _thresParam1;
00426     if (param2 == -1)
00427         param2 = _thresParam2;
00428 
00429     if (grey.type() != CV_8UC1)
00430         throw cv::Exception(9001, "grey.type()!=CV_8UC1", "MarkerDetector::thresHold", __FILE__, __LINE__);
00431     switch (method) {
00432     case FIXED_THRES:
00433         cv::threshold(grey, out, param1, 255, CV_THRESH_BINARY_INV);
00434         break;
00435     case ADPT_THRES: // currently, this is the best method
00436         // ensure that _thresParam1%2==1
00437         if (param1 < 3)
00438             param1 = 3;
00439         else if (((int)param1) % 2 != 1)
00440             param1 = (int)(param1 + 1);
00441 
00442         cv::adaptiveThreshold(grey, out, 255, ADAPTIVE_THRESH_MEAN_C, THRESH_BINARY_INV, param1, param2);
00443         break;
00444     case CANNY: {
00445         // this should be the best method, and generally it is.
00446         // However, sometimes there are small holes in the marker contour that prevent
00447         // the contour detector from finding it properly
00448         // when a pixel is missing
00449         cv::Canny(grey, out, 10, 220);
00450         // I have tried a morphological closing, but it adds many more points, which
00451         // sometimes makes this even worse
00452         //                        Mat aux;
00453         //                        cv::morphologyEx(thres,aux,MORPH_CLOSE,Mat());
00454         //                        out=aux;
00455     } break;
00456     }
00457 }
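
// cv::adaptiveThreshold requires an odd block size >= 3, which is why the ADPT_THRES branch
// above rounds param1 before the call. The same rule, isolated as a stand-alone sketch for
// illustration only:
static int adaptiveBlockSizeSketch(double param1) {
    int blockSize = (int)param1;
    if (blockSize < 3)
        blockSize = 3;
    else if (blockSize % 2 != 1)
        blockSize += 1; // make it odd, as the switch above does
    return blockSize;
}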
00458 /************************************
00459  *
00460  *
00461  *
00462  *
00463  ************************************/
00464 bool MarkerDetector::warp(Mat &in, Mat &out, Size size, vector< Point2f > points) throw(cv::Exception) {
00465 
00466     if (points.size() != 4)
00467         throw cv::Exception(9001, "point.size()!=4", "MarkerDetector::warp", __FILE__, __LINE__);
00468     // obtain the perspective transform
00469     Point2f pointsRes[4], pointsIn[4];
00470     for (int i = 0; i < 4; i++)
00471         pointsIn[i] = points[i];
00472     pointsRes[0] = (Point2f(0, 0));
00473     pointsRes[1] = Point2f(size.width - 1, 0);
00474     pointsRes[2] = Point2f(size.width - 1, size.height - 1);
00475     pointsRes[3] = Point2f(0, size.height - 1);
00476     Mat M = getPerspectiveTransform(pointsIn, pointsRes);
00477     cv::warpPerspective(in, out, M, size, cv::INTER_NEAREST);
00478     return true;
00479 }
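
// The canonical image produced above is what FiducidalMarkers::detect() (arucofidmarkers.cpp,
// not shown here) analyses. The warp sizes used in this file, 56 and 28, are multiples of 7,
// matching the 7x7 cell grid (5x5 data bits plus a one-cell black border) of the classic aruco
// markers. A sketch of how one cell could be cut out of the canonical image, for illustration
// only:
static cv::Mat markerCellSketch(const cv::Mat &canonical, int row, int col, int nCells = 7) {
    int cellSize = canonical.cols / nCells; // e.g. 56 / 7 = 8 pixels per cell
    return canonical(cv::Rect(col * cellSize, row * cellSize, cellSize, cellSize));
    // a bit could then be decided with cv::countNonZero() on the thresholded cell
}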
00480 
00481 void findCornerPointsInContour(const vector< cv::Point2f > &points, const vector< cv::Point > &contour, vector< int > &idxs) {
00482     assert(points.size() == 4);
00483     int idxSegments[4] = {-1, -1, -1, -1};
00484     // convert the corner points to integer coordinates so they can be matched against the contour points
00485     cv::Point points2i[4];
00486     for (int i = 0; i < 4; i++) {
00487         points2i[i].x = points[i].x;
00488         points2i[i].y = points[i].y;
00489     }
00490 
00491     for (size_t i = 0; i < contour.size(); i++) {
00492         if (idxSegments[0] == -1)
00493             if (contour[i] == points2i[0])
00494                 idxSegments[0] = i;
00495         if (idxSegments[1] == -1)
00496             if (contour[i] == points2i[1])
00497                 idxSegments[1] = i;
00498         if (idxSegments[2] == -1)
00499             if (contour[i] == points2i[2])
00500                 idxSegments[2] = i;
00501         if (idxSegments[3] == -1)
00502             if (contour[i] == points2i[3])
00503                 idxSegments[3] = i;
00504     }
00505     idxs.resize(4);
00506     for (int i = 0; i < 4; i++)
00507         idxs[i] = idxSegments[i];
00508 }
00509 
00510 int findDeformedSidesIdx(const vector< cv::Point > &contour, const vector< int > &idxSegments) {
00511     float distSum[4] = {0, 0, 0, 0};
00512     cv::Scalar colors[4] = {cv::Scalar(0, 0, 255), cv::Scalar(255, 0, 0), cv::Scalar(0, 255, 0), cv::Scalar(111, 111, 0)};
00513 
00514     for (int i = 0; i < 3; i++) {
00515         cv::Point p1 = contour[idxSegments[i]];
00516         cv::Point p2 = contour[idxSegments[i + 1]];
00517         float inv_den = 1. / sqrt(float((p2.x - p1.x) * (p2.x - p1.x) + (p2.y - p1.y) * (p2.y - p1.y)));
00518         //   d=|v^^·r|=(|(x_2-x_1)(y_1-y_0)-(x_1-x_0)(y_2-y_1)|)/(sqrt((x_2-x_1)^2+(y_2-y_1)^2)).
00519         //         cerr<<"POSS="<<idxSegments[i]<<" "<<idxSegments[i+1]<<endl;
00520         for (size_t j = idxSegments[i]; j < idxSegments[i + 1]; j++) {
00521             float dist = std::fabs(float((p2.x - p1.x) * (p1.y - contour[j].y) - (p1.x - contour[j].x) * (p2.y - p1.y))) * inv_den;
00522             distSum[i] += dist;
00523             //             cerr<< dist<<" ";
00524             //             cv::rectangle(_ssImC,contour[j],contour[j],colors[i],-1);
00525         }
00526         distSum[i] /= float(idxSegments[i + 1] - idxSegments[i]);
00527         //         cout<<endl<<endl;
00528     }
00529 
00530 
00531     // for the last one
00532     cv::Point p1 = contour[idxSegments[0]];
00533     cv::Point p2 = contour[idxSegments[3]];
00534     float inv_den = 1. / std::sqrt(float((p2.x - p1.x) * (p2.x - p1.x) + (p2.y - p1.y) * (p2.y - p1.y)));
00535     //   d=|v^^·r|=(|(x_2-x_1)(y_1-y_0)-(x_1-x_0)(y_2-y_1)|)/(sqrt((x_2-x_1)^2+(y_2-y_1)^2)).
00536     for (size_t j = 0; j < idxSegments[0]; j++)
00537         distSum[3] += std::fabs(float((p2.x - p1.x) * (p1.y - contour[j].y) - (p1.x - contour[j].x) * (p2.y - p1.y))) * inv_den;
00538     for (size_t j = idxSegments[3]; j < contour.size(); j++)
00539         distSum[3] += std::fabs(float((p2.x - p1.x) * (p1.y - contour[j].y) - (p1.x - contour[j].x) * (p2.y - p1.y))) * inv_den;
00540 
00541     distSum[3] /= float(idxSegments[0] + (contour.size() - idxSegments[3]));
00542     // now, get the maximum
00543     /*    for (int i=0;i<4;i++)
00544             cout<<"DD="<<distSum[i]<<endl;*/
00545     // check the two combinations to see the one with higher error
00546     if (distSum[0] + distSum[2] > distSum[1] + distSum[3])
00547         return 0;
00548     else
00549         return 1;
00550 }
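
// The distance accumulated above is the point-to-line distance for the line through p1 and p2,
// d = |(x2-x1)(y1-y0) - (x1-x0)(y2-y1)| / sqrt((x2-x1)^2 + (y2-y1)^2),
// with (x0,y0) the contour point. Written as a stand-alone helper for clarity (illustration
// only, not used by the class):
static float pointToLineDistanceSketch(const cv::Point &p1, const cv::Point &p2, const cv::Point &q) {
    float num = std::fabs(float((p2.x - p1.x) * (p1.y - q.y) - (p1.x - q.x) * (p2.y - p1.y)));
    float den = std::sqrt(float((p2.x - p1.x) * (p2.x - p1.x) + (p2.y - p1.y) * (p2.y - p1.y)));
    return num / den;
}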
00551 
00552 void setPointIntoImage(cv::Point2f &p, cv::Size s) {
00553     if (p.x < 0)
00554         p.x = 0;
00555     else if (p.x >= s.width)
00556         p.x = s.width - 1;
00557     if (p.y < 0)
00558         p.y = 0;
00559     else if (p.y >= s.height)
00560         p.y = s.height - 1;
00561 }
00562 
00563 void setPointIntoImage(cv::Point &p, cv::Size s) {
00564     if (p.x < 0)
00565         p.x = 0;
00566     else if (p.x >= s.width)
00567         p.x = s.width - 1;
00568     if (p.y < 0)
00569         p.y = 0;
00570     else if (p.y >= s.height)
00571         p.y = s.height - 1;
00572 }
00573 /************************************
00574  *
00575  *
00576  *
00577  *
00578  ************************************/
00579 bool MarkerDetector::warp_cylinder(Mat &in, Mat &out, Size size, MarkerCandidate &mcand) throw(cv::Exception) {
00580 
00581     if (mcand.size() != 4)
00582         throw cv::Exception(9001, "point.size()!=4", "MarkerDetector::warp", __FILE__, __LINE__);
00583 
00584     // check first the real need for cylinder warping
00585     //     cout<<"im="<<mcand.contour.size()<<endl;
00586 
00587     //     for (size_t i=0;i<mcand.contour.size();i++) {
00588     //         cv::rectangle(_ssImC ,mcand.contour[i],mcand.contour[i],cv::Scalar(111,111,111),-1 );
00589     //     }
00590     //     mcand.draw(imC,cv::Scalar(0,255,0));
00591     // find the 4 different segments of the contour
00592     vector< int > idxSegments;
00593     findCornerPointsInContour(mcand, mcand.contour, idxSegments);
00594     // rearrange the points so that the first corner is the one with the smallest idx
00595     int minIdx = 0;
00596     for (int i = 1; i < 4; i++)
00597         if (idxSegments[i] < idxSegments[minIdx])
00598             minIdx = i;
00599     // now, rotate the points to be in this order
00600     std::rotate(idxSegments.begin(), idxSegments.begin() + minIdx, idxSegments.end());
00601     std::rotate(mcand.begin(), mcand.begin() + minIdx, mcand.end());
00602 
00603     //     cout<<"idxSegments="<<idxSegments[0]<< " "<<idxSegments[1]<< " "<<idxSegments[2]<<" "<<idxSegments[3]<<endl;
00604     // now, determine the sides that are deformed by the cylinder perspective
00605     int defrmdSide = findDeformedSidesIdx(mcand.contour, idxSegments);
00606     //     cout<<"Def="<<defrmdSide<<endl;
00607 
00608     // instead of removing the perspective distortion of the rectangular region
00609     // given by the rectangle, we enlarge it a bit to include the deformed parts
00610     cv::Point2f center = mcand.getCenter();
00611     Point2f enlargedRegion[4];
00612     for (int i = 0; i < 4; i++)
00613         enlargedRegion[i] = mcand[i];
00614     if (defrmdSide == 0) {
00615         enlargedRegion[0] = mcand[0] + (mcand[3] - mcand[0]) * 1.2;
00616         enlargedRegion[1] = mcand[1] + (mcand[2] - mcand[1]) * 1.2;
00617         enlargedRegion[2] = mcand[2] + (mcand[1] - mcand[2]) * 1.2;
00618         enlargedRegion[3] = mcand[3] + (mcand[0] - mcand[3]) * 1.2;
00619     } else {
00620         enlargedRegion[0] = mcand[0] + (mcand[1] - mcand[0]) * 1.2;
00621         enlargedRegion[1] = mcand[1] + (mcand[0] - mcand[1]) * 1.2;
00622         enlargedRegion[2] = mcand[2] + (mcand[3] - mcand[2]) * 1.2;
00623         enlargedRegion[3] = mcand[3] + (mcand[2] - mcand[3]) * 1.2;
00624     }
00625     for (size_t i = 0; i < 4; i++)
00626         setPointIntoImage(enlargedRegion[i], in.size());
00627 
00628     /*
00629         cv::Scalar colors[4]={cv::Scalar(0,0,255),cv::Scalar(255,0,0),cv::Scalar(0,255,0),cv::Scalar(111,111,0)};
00630         for (int i=0;i<4;i++) {
00631             cv::rectangle(_ssImC,mcand.contour[idxSegments[i]]-cv::Point(2,2),mcand.contour[idxSegments[i]]+cv::Point(2,2),colors[i],-1 );
00632             cv::rectangle(_ssImC,enlargedRegion[i]-cv::Point2f(2,2),enlargedRegion[i]+cv::Point2f(2,2),colors[i],-1 );
00633 
00634         }*/
00635     //     cv::imshow("imC",_ssImC);
00636 
00637 
00638     // calculate the max distance from each contour point to the line of the segment it belongs to
00639     //     calculate
00640     //      cv::waitKey(0);
00641     // check that the region is into image limits
00642     // obtain the perspective transform
00643     Point2f pointsRes[4], pointsIn[4];
00644     for (int i = 0; i < 4; i++)
00645         pointsIn[i] = mcand[i];
00646 
00647     cv::Size enlargedSize = size;
00648     enlargedSize.width += 2 * enlargedSize.width * 0.2;
00649     pointsRes[0] = (Point2f(0, 0));
00650     pointsRes[1] = Point2f(enlargedSize.width - 1, 0);
00651     pointsRes[2] = Point2f(enlargedSize.width - 1, enlargedSize.height - 1);
00652     pointsRes[3] = Point2f(0, enlargedSize.height - 1);
00653     // rotate to ensure that deformed sides are in the horizontal axis when warping
00654     if (defrmdSide == 0)
00655         rotate(pointsRes, pointsRes + 1, pointsRes + 4);
00656     cv::Mat imAux, imAux2(enlargedSize, CV_8UC1);
00657     Mat M = cv::getPerspectiveTransform(enlargedRegion, pointsRes);
00658     cv::warpPerspective(in, imAux, M, enlargedSize, cv::INTER_NEAREST);
00659 
00660     // now, transform all points to the new image
00661     vector< cv::Point > pointsCO(mcand.contour.size());
00662     assert(M.type() == CV_64F);
00663     assert(M.cols == 3 && M.rows == 3);
00664     //     cout<<M<<endl;
00665     double *mptr = M.ptr< double >(0);
00666     imAux2.setTo(cv::Scalar::all(0));
00667 
00668 
00669     for (size_t i = 0; i < mcand.contour.size(); i++) {
00670         float inX = mcand.contour[i].x;
00671         float inY = mcand.contour[i].y;
00672         float w = inX * mptr[6] + inY * mptr[7] + mptr[8];
00673         cv::Point2f pres;
00674         pointsCO[i].x = ((inX * mptr[0] + inY * mptr[1] + mptr[2]) / w) + 0.5;
00675         pointsCO[i].y = ((inX * mptr[3] + inY * mptr[4] + mptr[5]) / w) + 0.5;
00676         // make integers
00677         setPointIntoImage(pointsCO[i], imAux.size()); // ensure points are into image limits
00678         //      cout<<"p="<<pointsCO[i]<<" "<<imAux.size().width<<" "<<imAux.size().height<<endl;
00679         imAux2.at< uchar >(pointsCO[i].y, pointsCO[i].x) = 255;
00680         if (pointsCO[i].y > 0)
00681             imAux2.at< uchar >(pointsCO[i].y - 1, pointsCO[i].x) = 255;
00682         if (pointsCO[i].y < imAux2.rows - 1)
00683             imAux2.at< uchar >(pointsCO[i].y + 1, pointsCO[i].x) = 255;
00684     }
00685 
00686     cv::Mat outIm(enlargedSize, CV_8UC1);
00687     outIm.setTo(cv::Scalar::all(0));
00688     // now, scan in lines to determine the required displacement
00689     for (int y = 0; y < imAux2.rows; y++) {
00690         uchar *_offInfo = imAux2.ptr< uchar >(y);
00691         int start = -1, end = -1;
00692         // determine the start and end of the marked region
00693         for (int x = 0; x < imAux.cols; x++) {
00694             if (_offInfo[x]) {
00695                 if (start == -1)
00696                     start = x;
00697                 else
00698                     end = x;
00699             }
00700         }
00701         //       cout<<"S="<<start<<" "<<end<<" "<<end-start<<" "<<(size.width>>1)<<endl;
00702         // check that the marked region is wide enough
00703         assert(start != -1 && end != -1 && (end - start) > size.width >> 1);
00704         uchar *In_image = imAux.ptr< uchar >(y);
00705         uchar *Out_image = outIm.ptr< uchar >(y);
00706         memcpy(Out_image, In_image + start, imAux.cols - start);
00707     }
00708 
00709 
00710     //     cout<<"SS="<<mcand.contour.size()<<" "<<pointsCO.size()<<endl;
00711     // get the central region with the size specified
00712     cv::Mat centerReg = outIm(cv::Range::all(), cv::Range(0, size.width));
00713     out = centerReg.clone();
00714     //     cv::perspectiveTransform(mcand.contour,pointsCO,M);
00715     // draw them
00716     //     cv::imshow("out2",out);
00717     //     cv::imshow("imm",imAux2);
00718     //     cv::waitKey(0);
00719     return true;
00720 }
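
// The contour-transformation loop above applies the 3x3 homography M to each point by hand:
// x' = (m00*x + m01*y + m02) / w,  y' = (m10*x + m11*y + m12) / w,  w = m20*x + m21*y + m22.
// The same mapping isolated as a helper (a sketch for illustration; M is assumed CV_64F, as
// asserted above):
static cv::Point2f applyHomographySketch(const cv::Mat &M, const cv::Point2f &p) {
    const double *m = M.ptr< double >(0);
    double w = p.x * m[6] + p.y * m[7] + m[8];
    return cv::Point2f((p.x * m[0] + p.y * m[1] + m[2]) / w, (p.x * m[3] + p.y * m[4] + m[5]) / w);
}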
00721 /************************************
00722  *
00723  *
00724  *
00725  *
00726  ************************************/
00727 bool MarkerDetector::isInto(Mat &contour, vector< Point2f > &b) {
00728 
00729     for (unsigned int i = 0; i < b.size(); i++)
00730         if (pointPolygonTest(contour, b[i], false) > 0)
00731             return true;
00732     return false;
00733 }
00734 /************************************
00735  *
00736  *
00737  *
00738  *
00739  ************************************/
00740 int MarkerDetector::perimeter(vector< Point2f > &a) {
00741     int sum = 0;
00742     for (unsigned int i = 0; i < a.size(); i++) {
00743         int i2 = (i + 1) % a.size();
00744         sum += sqrt((a[i].x - a[i2].x) * (a[i].x - a[i2].x) + (a[i].y - a[i2].y) * (a[i].y - a[i2].y));
00745     }
00746     return sum;
00747 }
00748 
00749 
00754 void MarkerDetector::findBestCornerInRegion_harris(const cv::Mat &grey, vector< cv::Point2f > &Corners, int blockSize) {
00755     SubPixelCorner Subp;
00756     Subp.RefineCorner(grey, Corners);
00757 }
00758 
00759 
00764 void MarkerDetector::refineCandidateLines(MarkerDetector::MarkerCandidate &candidate, const cv::Mat &camMatrix, const cv::Mat &distCoeff) {
00765     // search corners on the contour vector
00766     vector< unsigned int > cornerIndex;
00767     cornerIndex.resize(4);
00768     for (unsigned int j = 0; j < candidate.contour.size(); j++) {
00769         for (unsigned int k = 0; k < 4; k++) {
00770             if (candidate.contour[j].x == candidate[k].x && candidate.contour[j].y == candidate[k].y) {
00771                 cornerIndex[k] = j;
00772             }
00773         }
00774     }
00775 
00776     // are the contour pixels in inverse order or not?
00777     bool inverse;
00778     if ((cornerIndex[1] > cornerIndex[0]) && (cornerIndex[2] > cornerIndex[1] || cornerIndex[2] < cornerIndex[0]))
00779         inverse = false;
00780     else if (cornerIndex[2] > cornerIndex[1] && cornerIndex[2] < cornerIndex[0])
00781         inverse = false;
00782     else
00783         inverse = true;
00784 
00785 
00786     // get pixel vector for each line of the marker
00787     int inc = 1;
00788     if (inverse)
00789         inc = -1;
00790 
00791     // undistort contour
00792     vector< Point2f > contour2f;
00793     for (unsigned int i = 0; i < candidate.contour.size(); i++)
00794         contour2f.push_back(cv::Point2f(candidate.contour[i].x, candidate.contour[i].y));
00795     if (!camMatrix.empty() && !distCoeff.empty())
00796         cv::undistortPoints(contour2f, contour2f, camMatrix, distCoeff, cv::Mat(), camMatrix);
00797 
00798 
00799     vector< std::vector< cv::Point2f > > contourLines;
00800     contourLines.resize(4);
00801     for (unsigned int l = 0; l < 4; l++) {
00802         for (int j = (int)cornerIndex[l]; j != (int)cornerIndex[(l + 1) % 4]; j += inc) {
00803             if (j == (int)candidate.contour.size() && !inverse)
00804                 j = 0;
00805             else if (j == 0 && inverse)
00806                 j = candidate.contour.size() - 1;
00807             contourLines[l].push_back(contour2f[j]);
00808             if (j == (int)cornerIndex[(l + 1) % 4])
00809                 break; // this has to be added because of the previous ifs
00810         }
00811     }
00812 
00813     // interpolate marker lines
00814     vector< Point3f > lines;
00815     lines.resize(4);
00816     for (unsigned int j = 0; j < lines.size(); j++)
00817         interpolate2Dline(contourLines[j], lines[j]);
00818 
00819     // get cross points of lines
00820     vector< Point2f > crossPoints;
00821     crossPoints.resize(4);
00822     for (unsigned int i = 0; i < 4; i++)
00823         crossPoints[i] = getCrossPoint(lines[(i - 1) % 4], lines[i]); // for i == 0, (i - 1) % 4 wraps around to 3 since i is unsigned
00824 
00825     // distort corners again if undistortion was performed
00826     if (!camMatrix.empty() && !distCoeff.empty())
00827         distortPoints(crossPoints, crossPoints, camMatrix, distCoeff);
00828 
00829     // reassign the corner points
00830     for (unsigned int j = 0; j < 4; j++)
00831         candidate[j] = crossPoints[j];
00832 }
00833 
00834 
00837 void MarkerDetector::interpolate2Dline(const std::vector< Point2f > &inPoints, Point3f &outLine) {
00838 
00839     float minX, maxX, minY, maxY;
00840     minX = maxX = inPoints[0].x;
00841     minY = maxY = inPoints[0].y;
00842     for (unsigned int i = 1; i < inPoints.size(); i++) {
00843         if (inPoints[i].x < minX)
00844             minX = inPoints[i].x;
00845         if (inPoints[i].x > maxX)
00846             maxX = inPoints[i].x;
00847         if (inPoints[i].y < minY)
00848             minY = inPoints[i].y;
00849         if (inPoints[i].y > maxY)
00850             maxY = inPoints[i].y;
00851     }
00852 
00853     // create matrices of equation system
00854     Mat A(inPoints.size(), 2, CV_32FC1, Scalar(0));
00855     Mat B(inPoints.size(), 1, CV_32FC1, Scalar(0));
00856     Mat X;
00857 
00858 
00859 
00860     if (maxX - minX > maxY - minY) {
00861         // Ax + C = y
00862         for (int i = 0; i < inPoints.size(); i++) {
00863 
00864             A.at< float >(i, 0) = inPoints[i].x;
00865             A.at< float >(i, 1) = 1.;
00866             B.at< float >(i, 0) = inPoints[i].y;
00867         }
00868 
00869         // solve system
00870         solve(A, B, X, DECOMP_SVD);
00871         // return Ax + By + C
00872         outLine = Point3f(X.at< float >(0, 0), -1., X.at< float >(1, 0));
00873     } else {
00874         // By + C = x
00875         for (int i = 0; i < inPoints.size(); i++) {
00876 
00877             A.at< float >(i, 0) = inPoints[i].y;
00878             A.at< float >(i, 1) = 1.;
00879             B.at< float >(i, 0) = inPoints[i].x;
00880         }
00881 
00882         // solve system
00883         solve(A, B, X, DECOMP_SVD);
00884         // return Ax + By + C
00885         outLine = Point3f(-1., X.at< float >(0, 0), X.at< float >(1, 0));
00886     }
00887 }
00888 
00891 Point2f MarkerDetector::getCrossPoint(const cv::Point3f &line1, const cv::Point3f &line2) {
00892 
00893     // create matrices of equation system
00894     Mat A(2, 2, CV_32FC1, Scalar(0));
00895     Mat B(2, 1, CV_32FC1, Scalar(0));
00896     Mat X;
00897 
00898     A.at< float >(0, 0) = line1.x;
00899     A.at< float >(0, 1) = line1.y;
00900     B.at< float >(0, 0) = -line1.z;
00901 
00902     A.at< float >(1, 0) = line2.x;
00903     A.at< float >(1, 1) = line2.y;
00904     B.at< float >(1, 0) = -line2.z;
00905 
00906     // solve system
00907     solve(A, B, X, DECOMP_SVD);
00908     return Point2f(X.at< float >(0, 0), X.at< float >(1, 0));
00909 }
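
// interpolate2Dline() returns a line as (A,B,C) with A*x + B*y + C = 0, and getCrossPoint()
// solves the 2x2 system formed by two such lines. A worked example, for illustration only:
// the vertical line x = 2 is (1, 0, -2) and the horizontal line y = 3 is (0, 1, -3); solving
// the same system as above yields the intersection (2, 3).
static cv::Point2f crossPointExampleSketch() {
    cv::Point3f line1(1.f, 0.f, -2.f), line2(0.f, 1.f, -3.f);
    cv::Mat A(2, 2, CV_32FC1), B(2, 1, CV_32FC1), X;
    A.at< float >(0, 0) = line1.x; A.at< float >(0, 1) = line1.y; B.at< float >(0, 0) = -line1.z;
    A.at< float >(1, 0) = line2.x; A.at< float >(1, 1) = line2.y; B.at< float >(1, 0) = -line2.z;
    cv::solve(A, B, X, cv::DECOMP_SVD);
    return cv::Point2f(X.at< float >(0, 0), X.at< float >(1, 0)); // (2, 3)
}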
00910 
00911 
00914 void MarkerDetector::distortPoints(vector< cv::Point2f > in, vector< cv::Point2f > &out, const Mat &camMatrix, const Mat &distCoeff) {
00915     // trivial extrinsics
00916     cv::Mat Rvec = cv::Mat(3, 1, CV_32FC1, cv::Scalar::all(0));
00917     cv::Mat Tvec = Rvec.clone();
00918     // compute 3D points on the z=1 plane and reproject them, so OpenCV applies the distortion internally
00919     vector< cv::Point3f > cornersPoints3d;
00920     for (unsigned int i = 0; i < in.size(); i++)
00921         cornersPoints3d.push_back(cv::Point3f((in[i].x - camMatrix.at< float >(0, 2)) / camMatrix.at< float >(0, 0), // x
00922                                               (in[i].y - camMatrix.at< float >(1, 2)) / camMatrix.at< float >(1, 1), // y
00923                                               1)); // z
00924     cv::projectPoints(cornersPoints3d, Rvec, Tvec, camMatrix, distCoeff, out);
00925 }
00926 
00927 
00928 
00929 /************************************
00930  *
00931  *
00932  *
00933  *
00934  ************************************/
00935 void MarkerDetector::drawAllContours(Mat input, std::vector< std::vector< cv::Point > > &contours) { drawContours(input, contours, -1, Scalar(255, 0, 255)); }
00936 
00937 /************************************
00938  *
00939  *
00940  *
00941  *
00942  ************************************/
00943 void MarkerDetector::drawContour(Mat &in, vector< Point > &contour, Scalar color) {
00944     for (unsigned int i = 0; i < contour.size(); i++) {
00945         cv::rectangle(in, contour[i], contour[i], color);
00946     }
00947 }
00948 
00949 void MarkerDetector::drawApproxCurve(Mat &in, vector< Point > &contour, Scalar color) {
00950     for (unsigned int i = 0; i < contour.size(); i++) {
00951         cv::line(in, contour[i], contour[(i + 1) % contour.size()], color);
00952     }
00953 }
00954 /************************************
00955  *
00956  *
00957  *
00958  *
00959  ************************************/
00960 
00961 void MarkerDetector::draw(Mat out, const vector< Marker > &markers) {
00962     for (unsigned int i = 0; i < markers.size(); i++) {
00963         cv::line(out, markers[i][0], markers[i][1], cvScalar(255, 0, 0), 2, CV_AA);
00964         cv::line(out, markers[i][1], markers[i][2], cvScalar(255, 0, 0), 2, CV_AA);
00965         cv::line(out, markers[i][2], markers[i][3], cvScalar(255, 0, 0), 2, CV_AA);
00966         cv::line(out, markers[i][3], markers[i][0], cvScalar(255, 0, 0), 2, CV_AA);
00967     }
00968 }
00969 /* Attempt to make it faster than in opencv. I could not :( Maybe trying with SSE3...
00970 void MarkerDetector::warpPerspective(const cv::Mat &in,cv::Mat & out, const cv::Mat & M,cv::Size size)
00971 {
00972    //inverse the matrix
00973    out.create(size,in.type());
00974    //convert to float to speed up operations
00975    const double *m=M.ptr<double>(0);
00976    float mf[9];
00977    mf[0]=m[0];mf[1]=m[1];mf[2]=m[2];
00978    mf[3]=m[3];mf[4]=m[4];mf[5]=m[5];
00979    mf[6]=m[6];mf[7]=m[7];mf[8]=m[8];
00980 
00981    for(int y=0;y<out.rows;y++){
00982      uchar *_ptrout=out.ptr<uchar>(y);
00983      for(int x=0;x<out.cols;x++){
00984    //get the x,y position
00985    float den=1./(x*mf[6]+y*mf[7]+mf[8]);
00986    float ox= (x*mf[0]+y*mf[1]+mf[2])*den;
00987    float oy= (x*mf[3]+y*mf[4]+mf[5])*den;
00988    _ptrout[x]=in.at<uchar>(oy,ox);
00989      }
00990    }
00991 }
00992 */
00993 
00994 /************************************
00995  *
00996  *
00997  *
00998  *
00999  ************************************/
01000 
01001 void MarkerDetector::glGetProjectionMatrix(CameraParameters &CamMatrix, cv::Size orgImgSize, cv::Size size, double proj_matrix[16], double gnear, double gfar,
01002                                            bool invert) throw(cv::Exception) {
01003     cerr << "MarkerDetector::glGetProjectionMatrix . This a deprecated function. Use CameraParameters::glGetProjectionMatrix instead. " << __FILE__ << " "
01004          << __LINE__ << endl;
01005     CamMatrix.glGetProjectionMatrix(orgImgSize, size, proj_matrix, gnear, gfar, invert);
01006 }
01007 
01008 /************************************
01009 *
01010 *
01011 *
01012 *
01013 ************************************/
01014 
01015 void MarkerDetector::setMinMaxSize(float min, float max) throw(cv::Exception) {
01016     if (min <= 0 || min > 1)
01017         throw cv::Exception(1, " min parameter out of range", "MarkerDetector::setMinMaxSize", __FILE__, __LINE__);
01018     if (max <= 0 || max > 1)
01019         throw cv::Exception(1, " max parameter out of range", "MarkerDetector::setMinMaxSize", __FILE__, __LINE__);
01020     if (min > max)
01021         throw cv::Exception(1, " min>max", "MarkerDetector::setMinMaxSize", __FILE__, __LINE__);
01022     _minSize = min;
01023     _maxSize = max;
01024 }
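
// A worked example of how these fractions are used in detectRectangles(), where the limits are
// expressed in contour points: for a 640x480 image and the constructor defaults, the minimum is
// 0.04 * max(640, 480) * 4 = 102 points and the maximum is 0.5 * 640 * 4 = 1280 points; contours
// outside that range are discarded before polygon approximation. A stand-alone sketch of that
// conversion, for illustration only:
static int contourSizeLimitSketch(float sizeFraction, const cv::Size &imageSize) {
    return int(sizeFraction * std::max(imageSize.width, imageSize.height) * 4);
}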
01025 
01026 /************************************
01027 *
01028 *
01029 *
01030 *
01031 ************************************/
01032 
01033 void MarkerDetector::setWarpSize(int val) throw(cv::Exception) {
01034     if (val < 10)
01035         throw cv::Exception(1, " invalid canonical image size", "MarkerDetector::setWarpSize", __FILE__, __LINE__);
01036     _markerWarpSize = val;
01037 }
01038 
01039 
01040 void MarkerDetector::findCornerMaxima(vector< cv::Point2f > &Corners, const cv::Mat &grey, int wsize) {
01041 
01042 // for each corner, search in a region around it
01043 #pragma omp parallel for
01044 
01045     for (size_t i = 0; i < Corners.size(); i++) {
01046         cv::Point2f minLimit(std::max(0, int(Corners[i].x - wsize)), std::max(0, int(Corners[i].y - wsize)));
01047         cv::Point2f maxLimit(std::min(grey.cols, int(Corners[i].x + wsize)), std::min(grey.rows, int(Corners[i].y + wsize)));
01048 
01049         cv::Mat reg = grey(cv::Range(minLimit.y, maxLimit.y), cv::Range(minLimit.x, maxLimit.x));
01050         cv::Mat harr, harrint;
01051         cv::cornerHarris(reg, harr, 3, 3, 0.04);
01052 
01053         // now, do a sum block operation
01054         cv::integral(harr, harrint);
01055         int bls_a = 4;
01056         for (int y = bls_a; y < harr.rows - bls_a; y++) {
01057             float *h = harr.ptr< float >(y);
01058             for (int x = bls_a; x < harr.cols - bls_a; x++)
01059                 h[x] = harrint.at< double >(y + bls_a, x + bls_a) - harrint.at< double >(y + bls_a, x) - harrint.at< double >(y, x + bls_a) +
01060                        harrint.at< double >(y, x);
01061         }
01062 
01063 
01064 
01065         cv::Point2f best(-1, -1);
01066         cv::Point2f center(reg.cols / 2, reg.rows / 2);
01068         double maxv = 0;
01069         for (size_t i = 0; i < harr.rows; i++) {
01070             // L1 dist to center
01071             float *har = harr.ptr< float >(i);
01072             for (size_t x = 0; x < harr.cols; x++) {
01073                 float d = float(fabs(center.x - x) + fabs(center.y - i)) / float(reg.cols / 2 + reg.rows / 2);
01074                 float w = 1. - d;
01075                 if (w * har[x] > maxv) {
01076                     maxv = w * har[x];
01077                     best = cv::Point2f(x, i);
01078                 }
01079             }
01080         }
01081         Corners[i] = best + minLimit;
01082     }
01083 }
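
// The block sum above relies on the integral-image identity: the sum over the window
// [x, x+b) x [y, y+b) equals I(y+b, x+b) - I(y+b, x) - I(y, x+b) + I(y, x), where I is the
// (rows+1)x(cols+1) image returned by cv::integral (CV_64F for the CV_32F Harris response).
// The same computation as a stand-alone sketch, for illustration only:
static double boxSumSketch(const cv::Mat &integralImg, int x, int y, int b) {
    return integralImg.at< double >(y + b, x + b) - integralImg.at< double >(y + b, x) -
           integralImg.at< double >(y, x + b) + integralImg.at< double >(y, x);
}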
01084 } // namespace aruco

