markerdetector.cpp
1 /*****************************
2 Copyright 2011 Rafael Muñoz Salinas. All rights reserved.
3 
4 Redistribution and use in source and binary forms, with or without modification, are
5 permitted provided that the following conditions are met:
6 
7  1. Redistributions of source code must retain the above copyright notice, this list of
8  conditions and the following disclaimer.
9 
10  2. Redistributions in binary form must reproduce the above copyright notice, this list
11  of conditions and the following disclaimer in the documentation and/or other materials
12  provided with the distribution.
13 
14 THIS SOFTWARE IS PROVIDED BY Rafael Muñoz Salinas ''AS IS'' AND ANY EXPRESS OR IMPLIED
15 WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
16 FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL Rafael Muñoz Salinas OR
17 CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
18 CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
19 SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
20 ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
21 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
22 ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
23 
24 The views and conclusions contained in the software and documentation are those of the
25 authors and should not be interpreted as representing official policies, either expressed
26 or implied, of Rafael Muñoz Salinas.
27 ********************************/
28 #include "markerdetector.h"
29 #include "subpixelcorner.h"
30 #include <opencv2/core/core.hpp>
31 #include <opencv2/features2d/features2d.hpp>
32 #include <opencv2/imgproc/imgproc.hpp>
33 #include <opencv2/calib3d/calib3d.hpp>
34 #include <opencv2/highgui/highgui.hpp>
35 #include <iostream>
36 #include <fstream>
37 #include "arucofidmarkers.h"
38 #include <valarray>
39 #include "ar_omp.h"
40 using namespace std;
41 using namespace cv;
42 
43 namespace aruco {
44 /************************************
45  *
46  *
47  *
48  *
49  ************************************/
50 MarkerDetector::MarkerDetector() {
51  _thresMethod = ADPT_THRES;
52  _thresParam1 = _thresParam2 = 7;
53  _cornerMethod = LINES;
54  _useLockedCorners = false;
55  // _cornerMethod=SUBPIX;
56  _thresParam1_range = 0;
57  _markerWarpSize = 56;
58  _speed = 0;
59  markerIdDetector_ptrfunc = aruco::FiducidalMarkers::detect;
60  _minSize = 0.04;
61  _maxSize = 0.5;
62 
63  _borderDistThres = 0.025; // corners in a border of 2.5% of image are ignored
64 }
65 /************************************
66  *
67  *
68  *
69  *
70  ************************************/
71 
72 MarkerDetector::~MarkerDetector() {}
73 
74 /************************************
75  *
76  *
77  *
78  *
79  ************************************/
80 void MarkerDetector::setDesiredSpeed(int val) {
81  if (val < 0)
82  val = 0;
83  else if (val > 3)
84  val = 2;
85 
86  _speed = val;
87  switch (_speed) {
88 
89  case 0:
90  _markerWarpSize = 56;
91  _cornerMethod = SUBPIX;
92  break;
93 
94  case 1:
95  case 2:
96  _markerWarpSize = 28;
97  _cornerMethod = NONE;
98  break;
99  };
100 }
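// Note on the mapping above: speed 0 selects the large canonical warp (56 px) plus SUBPIX corner
// refinement, while speeds 1 and 2 use the small warp (28 px) with no corner refinement. A value of 3
// is accepted but matches no case, so the previous warp/corner settings are kept, and values above 3
// are clamped down to 2.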
101 
102 /************************************
103  *
104  *
105  *
106  *
107  ************************************/
108 void MarkerDetector::detect(const cv::Mat &input, std::vector< Marker > &detectedMarkers, CameraParameters camParams, float markerSizeMeters,
109  bool setYPerpendicular) throw(cv::Exception) {
110  detect(input, detectedMarkers, camParams.CameraMatrix, camParams.Distorsion, markerSizeMeters, setYPerpendicular);
111 }
112 
113 /***
114  *
115  *
116  **/
117 void MarkerDetector::enableLockedCornersMethod(bool enable) {
118  _useLockedCorners = enable;
119  if (enable)
120  _cornerMethod = SUBPIX;
121 }
122 /************************************
123  *
124  * Main detection function. Performs all steps
125  *
126  *
127  ************************************/
128 void MarkerDetector::detect(const cv::Mat &input, vector< Marker > &detectedMarkers, Mat camMatrix, Mat distCoeff, float markerSizeMeters,
129  bool setYPerpendicular) throw(cv::Exception) {
130 
131  // convert to grey if the input is a 3-channel (BGR) image
132  if (input.type() == CV_8UC3)
133  cv::cvtColor(input, grey, CV_BGR2GRAY);
134  else
135  grey = input;
136  double t1 = cv::getTickCount();
137  // cv::cvtColor(grey,_ssImC ,CV_GRAY2BGR); //DELETE
138 
139  // clear input data
140  detectedMarkers.clear();
141 
142 
143  cv::Mat imgToBeThresHolded = grey;
144  double ThresParam1 = _thresParam1, ThresParam2 = _thresParam2;
145 
147  // work simultaneously on a range of values of the first threshold
148  int n_param1 = 2 * _thresParam1_range + 1;
149  vector< cv::Mat > thres_images(n_param1);
150 #pragma omp parallel for
151  for (int i = 0; i < n_param1; i++) {
152  double t1 = ThresParam1 - _thresParam1_range + _thresParam1_range * i;
153  thresHold(_thresMethod, imgToBeThresHolded, thres_images[i], t1, ThresParam2);
154  }
155  thres = thres_images[n_param1 / 2];
156  //
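 // The block above computes n_param1 = 2*_thresParam1_range + 1 thresholded images around the
 // configured _thresParam1 (only one image with the default range of 0), in parallel when OpenMP
 // is available, and keeps the middle one in 'thres' as the representative thresholded image.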
157 
158  double t2 = cv::getTickCount();
159  // find all rectangles in the thresholded image
160  vector< MarkerCandidate > MarkerCanditates;
161  detectRectangles(thres_images, MarkerCanditates);
162 
163  double t3 = cv::getTickCount();
164 
166  vector< vector< Marker > > markers_omp(omp_get_max_threads());
167  vector< vector< std::vector< cv::Point2f > > > candidates_omp(omp_get_max_threads());
168 //#pragma omp parallel for
169  for (int i = 0; i < MarkerCanditates.size(); i++) {
170  // Find projective homography
171  Mat canonicalMarker;
172  bool resW = false;
173  resW = warp(grey, canonicalMarker, Size(_markerWarpSize, _markerWarpSize), MarkerCanditates[i]);
174  if (resW) {
175  int nRotations;
176  int id = (*markerIdDetector_ptrfunc)(canonicalMarker, nRotations);
177  if (id != -1) {
178  if (_cornerMethod == LINES) // make LINES refinement before the contour points are lost
179  refineCandidateLines(MarkerCanditates[i], camMatrix, distCoeff);
180  markers_omp[omp_get_thread_num()].push_back(MarkerCanditates[i]);
181  markers_omp[omp_get_thread_num()].back().id = id;
182  // sort the points so that they are always in the same order no matter the camera orientation
183  std::rotate(markers_omp[omp_get_thread_num()].back().begin(), markers_omp[omp_get_thread_num()].back().begin() + 4 - nRotations,
184  markers_omp[omp_get_thread_num()].back().end());
185  } else
186  candidates_omp[omp_get_thread_num()].push_back(MarkerCanditates[i]);
187  }
188  }
189  // unify parallel data
190  joinVectors(markers_omp, detectedMarkers, true);
191  joinVectors(candidates_omp, _candidates, true);
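 // Identification step above: each rectangle candidate is warped to the canonical square and passed
 // to markerIdDetector_ptrfunc, which returns the marker id (or -1) plus the number of 90-degree
 // rotations needed to normalize the corner order; accepted markers get their corners rotated
 // accordingly, rejected candidates are kept in _candidates. Results are collected in per-thread
 // vectors and merged here so the (optional) OpenMP loop never writes to shared vectors concurrently.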
192 
193 
194  double t4 = cv::getTickCount();
195 
197  if (detectedMarkers.size() > 0 && _cornerMethod != NONE && _cornerMethod != LINES) {
198 
199  vector< Point2f > Corners;
200  for (unsigned int i = 0; i < detectedMarkers.size(); i++)
201  for (int c = 0; c < 4; c++)
202  Corners.push_back(detectedMarkers[i][c]);
203 
204  // in case of "locked corners", it is sometimes necessary to
205  // find the corner in the surroundings of the initially estimated location
206  if (_useLockedCorners)
207  findCornerMaxima(Corners, grey, _thresParam1);
208 
209  if (_cornerMethod == HARRIS)
210  findBestCornerInRegion_harris(grey, Corners, 7);
211  else if (_cornerMethod == SUBPIX) {
212  cornerSubPix(grey, Corners, cvSize(_thresParam1, _thresParam1), cvSize(-1, -1), cvTermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 8, 0.005));
213  }
214  // copy back
215  for (unsigned int i = 0; i < detectedMarkers.size(); i++)
216  for (int c = 0; c < 4; c++)
217  detectedMarkers[i][c] = Corners[i * 4 + c];
218  }
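 // Corner refinement above (only when the method is neither NONE nor LINES): the four corners of every
 // detected marker are gathered into one vector, optionally snapped to the nearby Harris maximum when
 // locked-corners mode is on, then refined either with the Harris-based refiner or with cv::cornerSubPix
 // over a window of _thresParam1 x _thresParam1 pixels, and finally copied back in the same order.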
219 
220  double t5 = cv::getTickCount();
221 
222  // sort by id
223  std::sort(detectedMarkers.begin(), detectedMarkers.end());
224  // a marker might still be detected twice because of the double border indicated earlier;
225  // detect and remove these cases
226  vector< bool > toRemove(detectedMarkers.size(), false);
227  for (int i = 0; i < int(detectedMarkers.size()) - 1; i++) {
228  if (detectedMarkers[i].id == detectedMarkers[i + 1].id && !toRemove[i + 1]) {
229  // deletes the one with smaller perimeter
230  if (perimeter(detectedMarkers[i]) > perimeter(detectedMarkers[i + 1]))
231  toRemove[i + 1] = true;
232  else
233  toRemove[i] = true;
234  }
235  }
236 
237  // remove the marked (duplicated) markers
238  removeElements(detectedMarkers, toRemove);
239 
241  if (camMatrix.rows != 0 && markerSizeMeters > 0) {
242  for (unsigned int i = 0; i < detectedMarkers.size(); i++)
243  detectedMarkers[i].calculateExtrinsics(markerSizeMeters, camMatrix, distCoeff, setYPerpendicular);
244  }
245  double t6 = cv::getTickCount();
246 
247 // cerr << "Threshold: " << (t2 - t1) / double(cv::getTickFrequency()) << endl;
248 // cerr << "Rectangles: " << (t3 - t2) / double(cv::getTickFrequency()) << endl;
249 // cerr << "Identify: " << (t4 - t3) / double(cv::getTickFrequency()) << endl;
250 // cerr << "Subpixel: " << (t5 - t4) / double(cv::getTickFrequency()) << endl;
251 // cerr << "Filtering: " << (t6 - t5) / double(cv::getTickFrequency()) << endl;
252 }
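// A minimal usage sketch for the detector above (illustrative only, not part of the original file).
// It assumes the public ArUco declarations alongside this source (CameraParameters, Marker, the
// default arguments of MarkerDetector::detect in markerdetector.h), an example calibration file
// "camera.yml" and a 5 cm marker; adjust names to your setup and add error handling as needed.
//
//   #include <opencv2/highgui/highgui.hpp>
//   #include "markerdetector.h"
//   #include "cameraparameters.h"
//
//   int main() {
//       cv::Mat image = cv::imread("frame.png");
//       aruco::CameraParameters camParams;
//       camParams.readFromXMLFile("camera.yml");            // intrinsics + distortion
//       camParams.resize(image.size());                     // match the capture resolution
//
//       aruco::MarkerDetector detector;
//       std::vector<aruco::Marker> markers;
//       detector.detect(image, markers, camParams, 0.05f);  // marker side length in meters
//
//       for (size_t i = 0; i < markers.size(); i++)
//           markers[i].draw(image, cv::Scalar(0, 0, 255), 2); // outline each detected marker
//       cv::imshow("markers", image);
//       cv::waitKey(0);
//       return 0;
//   }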
253 
254 
255 /************************************
256  *
257  * Crucial step. Detects the rectangular regions of the thresholded image
258  *
259  *
260  ************************************/
261 void MarkerDetector::detectRectangles(const cv::Mat &thres, vector< std::vector< cv::Point2f > > &MarkerCanditates) {
262  vector< MarkerCandidate > candidates;
263  vector< cv::Mat > thres_v;
264  thres_v.push_back(thres);
265  detectRectangles(thres_v, candidates);
266  // create the output
267  MarkerCanditates.resize(candidates.size());
268  for (size_t i = 0; i < MarkerCanditates.size(); i++)
269  MarkerCanditates[i] = candidates[i];
270 }
271 
272 void MarkerDetector::detectRectangles(vector< cv::Mat > &thresImgv, vector< MarkerCandidate > &OutMarkerCanditates) {
273  // omp_set_num_threads ( 1 );
274  vector< vector< MarkerCandidate > > MarkerCanditatesV(omp_get_max_threads());
275  // calculate the min/max contour sizes
276  int minSize = _minSize * std::max(thresImgv[0].cols, thresImgv[0].rows) * 4;
277  int maxSize = _maxSize * std::max(thresImgv[0].cols, thresImgv[0].rows) * 4;
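 // _minSize and _maxSize are fractions of the larger image dimension; multiplying by 4 turns a square
 // side length into an approximate contour length in pixels, which is what contours2[i].size() measures
 // below when findContours runs with CV_CHAIN_APPROX_NONE (every contour pixel is kept).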
278 
279 // cv::Mat input;
280 // cv::cvtColor ( thresImgv[0],input,CV_GRAY2BGR );
281 
282 #pragma omp parallel for
283  for (size_t i = 0; i < thresImgv.size(); i++) {
284  std::vector< cv::Vec4i > hierarchy2;
285  std::vector< std::vector< cv::Point > > contours2;
286  cv::Mat thres2;
287  thresImgv[i].copyTo(thres2);
288  cv::findContours(thres2, contours2, hierarchy2, CV_RETR_LIST, CV_CHAIN_APPROX_NONE);
289 
290 
291  vector< Point > approxCurve;
293  for (unsigned int i = 0; i < contours2.size(); i++) {
294 
295  // check it is a possible element by first checking it has enough points
296  if (minSize < contours2[i].size() && contours2[i].size() < maxSize) {
297  // approximate to a polygon
298  approxPolyDP(contours2[i], approxCurve, double(contours2[i].size()) * 0.05, true);
299  // drawApproxCurve(copy,approxCurve,Scalar(0,0,255));
300  // check that the polygon has 4 points
301  if (approxCurve.size() == 4) {
302  /*
303  drawContour ( input,contours2[i],Scalar ( 255,0,225 ) );
304  namedWindow ( "input" );
305  imshow ( "input",input );*/
306  // waitKey(0);
307  // and is convex
308  if (isContourConvex(Mat(approxCurve))) {
309  // drawApproxCurve(input,approxCurve,Scalar(255,0,255));
310  // ensure that the distance between consecutive points is large enough
311  float minDist = 1e10;
312  for (int j = 0; j < 4; j++) {
313  float d = std::sqrt((float)(approxCurve[j].x - approxCurve[(j + 1) % 4].x) * (approxCurve[j].x - approxCurve[(j + 1) % 4].x) +
314  (approxCurve[j].y - approxCurve[(j + 1) % 4].y) * (approxCurve[j].y - approxCurve[(j + 1) % 4].y));
315  // norm(Mat(approxCurve[i]),Mat(approxCurve[(i+1)%4]));
316  if (d < minDist)
317  minDist = d;
318  }
319  // check that distance is not very small
320  if (minDist > 10) {
321  // add the points
322  // cout<<"ADDED"<<endl;
323  MarkerCanditatesV[omp_get_thread_num()].push_back(MarkerCandidate());
324  MarkerCanditatesV[omp_get_thread_num()].back().idx = i;
325  for (int j = 0; j < 4; j++) {
326  MarkerCanditatesV[omp_get_thread_num()].back().push_back(Point2f(approxCurve[j].x, approxCurve[j].y));
327  MarkerCanditatesV[omp_get_thread_num()].back().contour = contours2[i];
328  }
329  }
330  }
331  }
332  }
333  }
334  }
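 // To summarize the filter above, a contour becomes a marker candidate only if it (1) has between
 // minSize and maxSize points, (2) reduces to exactly 4 vertices under approxPolyDP with an epsilon
 // of 5% of its length, (3) is convex, and (4) has every side longer than 10 pixels; accepted
 // candidates store their 4 corners plus the full contour for later refinement.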
335  // join all candidates
336  vector< MarkerCandidate > MarkerCanditates;
337 
338  for (size_t i = 0; i < MarkerCanditatesV.size(); i++)
339  for (size_t j = 0; j < MarkerCanditatesV[i].size(); j++) {
340  MarkerCanditates.push_back(MarkerCanditatesV[i][j]);
341  // MarkerCanditates.back().draw ( input,cv::Scalar ( 0,0,255 ) );
342  }
343 
345  valarray< bool > swapped(false, MarkerCanditates.size()); // used later
346  for (unsigned int i = 0; i < MarkerCanditates.size(); i++) {
347 
348  // trace a line between the first and second point.
349  // if the third point is on the right side, then the points are anti-clockwise
350  double dx1 = MarkerCanditates[i][1].x - MarkerCanditates[i][0].x;
351  double dy1 = MarkerCanditates[i][1].y - MarkerCanditates[i][0].y;
352  double dx2 = MarkerCanditates[i][2].x - MarkerCanditates[i][0].x;
353  double dy2 = MarkerCanditates[i][2].y - MarkerCanditates[i][0].y;
354  double o = (dx1 * dy2) - (dy1 * dx2);
355 
356  if (o < 0.0) { // if the third point is on the left side, then sort in anti-clockwise order
357  swap(MarkerCanditates[i][1], MarkerCanditates[i][3]);
358  swapped[i] = true;
359  // sort the contour points
360  // reverse(MarkerCanditates[i].contour.begin(),MarkerCanditates[i].contour.end());//????
361  }
362  }
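 // The value o above is the z-component of the cross product of (p1 - p0) and (p2 - p0), so its sign
 // encodes the winding order of the candidate. For example, corners (0,0), (0,3), (4,3) give
 // dx1 = 0, dy1 = 3, dx2 = 4, dy2 = 3 and o = 0*3 - 3*4 = -12 < 0, so corners 1 and 3 are swapped;
 // after this loop all candidates share the same corner ordering.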
363 
365  // first detect candidates to be removed
366 
367  vector< vector< pair< int, int > > > TooNearCandidates_omp(omp_get_max_threads());
368 #pragma omp parallel for
369  for (unsigned int i = 0; i < MarkerCanditates.size(); i++) {
370  // cout<<"Marker i="<<i<<MarkerCanditates[i]<<endl;
371  // calculate the average distance of each corner to the nearest corner of the other marker candidate
372  for (unsigned int j = i + 1; j < MarkerCanditates.size(); j++) {
373  valarray< float > vdist(4);
374  for (int c = 0; c < 4; c++)
375  vdist[c] = sqrt((MarkerCanditates[i][c].x - MarkerCanditates[j][c].x) * (MarkerCanditates[i][c].x - MarkerCanditates[j][c].x) +
376  (MarkerCanditates[i][c].y - MarkerCanditates[j][c].y) * (MarkerCanditates[i][c].y - MarkerCanditates[j][c].y));
377  // dist/=4;
378  // if distance is too small
379  if (vdist[0] < 6 && vdist[1] < 6 && vdist[2] < 6 && vdist[3] < 6) {
380  TooNearCandidates_omp[omp_get_thread_num()].push_back(pair< int, int >(i, j));
381  }
382  }
383  }
384  // join
385  vector< pair< int, int > > TooNearCandidates;
386  joinVectors(TooNearCandidates_omp, TooNearCandidates);
387  // mark for removal the element of the pair with smaller perimeter
388  valarray< bool > toRemove(false, MarkerCanditates.size());
389  for (unsigned int i = 0; i < TooNearCandidates.size(); i++) {
390  if (perimeter(MarkerCanditates[TooNearCandidates[i].first]) > perimeter(MarkerCanditates[TooNearCandidates[i].second]))
391  toRemove[TooNearCandidates[i].second] = true;
392  else
393  toRemove[TooNearCandidates[i].first] = true;
394  }
395 
396  // remove the invalid ones
397  // finally, assign to the remaining candidates the contour
398  OutMarkerCanditates.reserve(MarkerCanditates.size());
399  for (size_t i = 0; i < MarkerCanditates.size(); i++) {
400  if (!toRemove[i]) {
401  OutMarkerCanditates.push_back(MarkerCanditates[i]);
402  // OutMarkerCanditates.back().contour=contours2[ MarkerCanditates[i].idx];
403  if (swapped[i]) // if the corners were swapped, the contour points must be reversed here so that they stay in the same order
404  reverse(OutMarkerCanditates.back().contour.begin(), OutMarkerCanditates.back().contour.end()); //????
405  }
406  }
407  /*
408  for ( size_t i=0; i<OutMarkerCanditates.size(); i++ )
409  OutMarkerCanditates[i].draw ( input,cv::Scalar ( 124, 255,125 ) );
410 
411 
412  namedWindow ( "input" );
413  imshow ( "input",input );*/
414 }
415 
416 /************************************
417  *
418  *
419  *
420  *
421  ************************************/
422 void MarkerDetector::thresHold(int method, const Mat &grey, Mat &out, double param1, double param2) throw(cv::Exception) {
423 
424  if (param1 == -1)
425  param1 = _thresParam1;
426  if (param2 == -1)
427  param2 = _thresParam2;
428 
429  if (grey.type() != CV_8UC1)
430  throw cv::Exception(9001, "grey.type()!=CV_8UC1", "MarkerDetector::thresHold", __FILE__, __LINE__);
431  switch (method) {
432  case FIXED_THRES:
433  cv::threshold(grey, out, param1, 255, CV_THRESH_BINARY_INV);
434  break;
435  case ADPT_THRES: // currently, this is the best method
436  // ensure that _thresParam1%2==1
437  if (param1 < 3)
438  param1 = 3;
439  else if (((int)param1) % 2 != 1)
440  param1 = (int)(param1 + 1);
441 
442  cv::adaptiveThreshold(grey, out, 255, ADAPTIVE_THRESH_MEAN_C, THRESH_BINARY_INV, param1, param2);
443  break;
444  case CANNY: {
445  // this should be the best method, and generally it is.
446  // However, sometimes there are small holes in the marker contour that make
447  // the contour detector fail to find it properly
448  // if there is a missing pixel
449  cv::Canny(grey, out, 10, 220);
450  // I've tried a closing operation, but it adds many more points that
451  // sometimes make this even worse
452  // Mat aux;
453  // cv::morphologyEx(thres,aux,MORPH_CLOSE,Mat());
454  // out=aux;
455  } break;
456  }
457 }
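// For ADPT_THRES, param1 is used as the adaptiveThreshold block size (forced to an odd value >= 3)
// and param2 as the constant subtracted from the local mean; THRESH_BINARY_INV makes the dark marker
// border come out white, which is the foreground that findContours looks for later.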
458 /************************************
459  *
460  *
461  *
462  *
463  ************************************/
464 bool MarkerDetector::warp(Mat &in, Mat &out, Size size, vector< Point2f > points) throw(cv::Exception) {
465 
466  if (points.size() != 4)
467  throw cv::Exception(9001, "point.size()!=4", "MarkerDetector::warp", __FILE__, __LINE__);
468  // obtain the perspective transform
469  Point2f pointsRes[4], pointsIn[4];
470  for (int i = 0; i < 4; i++)
471  pointsIn[i] = points[i];
472  pointsRes[0] = (Point2f(0, 0));
473  pointsRes[1] = Point2f(size.width - 1, 0);
474  pointsRes[2] = Point2f(size.width - 1, size.height - 1);
475  pointsRes[3] = Point2f(0, size.height - 1);
476  Mat M = getPerspectiveTransform(pointsIn, pointsRes);
477  cv::warpPerspective(in, out, M, size, cv::INTER_NEAREST);
478  return true;
479 }
480 
481 void findCornerPointsInContour(const vector< cv::Point2f > &points, const vector< cv::Point > &contour, vector< int > &idxs) {
482  assert(points.size() == 4);
483  int idxSegments[4] = {-1, -1, -1, -1};
484  // find, for each of the four corners, the contour point it coincides with
485  cv::Point points2i[4];
486  for (int i = 0; i < 4; i++) {
487  points2i[i].x = points[i].x;
488  points2i[i].y = points[i].y;
489  }
490 
491  for (size_t i = 0; i < contour.size(); i++) {
492  if (idxSegments[0] == -1)
493  if (contour[i] == points2i[0])
494  idxSegments[0] = i;
495  if (idxSegments[1] == -1)
496  if (contour[i] == points2i[1])
497  idxSegments[1] = i;
498  if (idxSegments[2] == -1)
499  if (contour[i] == points2i[2])
500  idxSegments[2] = i;
501  if (idxSegments[3] == -1)
502  if (contour[i] == points2i[3])
503  idxSegments[3] = i;
504  }
505  idxs.resize(4);
506  for (int i = 0; i < 4; i++)
507  idxs[i] = idxSegments[i];
508 }
509 
510 int findDeformedSidesIdx(const vector< cv::Point > &contour, const vector< int > &idxSegments) {
511  float distSum[4] = {0, 0, 0, 0};
512  cv::Scalar colors[4] = {cv::Scalar(0, 0, 255), cv::Scalar(255, 0, 0), cv::Scalar(0, 255, 0), cv::Scalar(111, 111, 0)};
513 
514  for (int i = 0; i < 3; i++) {
515  cv::Point p1 = contour[idxSegments[i]];
516  cv::Point p2 = contour[idxSegments[i + 1]];
517  float inv_den = 1. / sqrt(float((p2.x - p1.x) * (p2.x - p1.x) + (p2.y - p1.y) * (p2.y - p1.y)));
518  // point-to-line distance: d = |(x_2-x_1)(y_1-y_0) - (x_1-x_0)(y_2-y_1)| / sqrt((x_2-x_1)^2 + (y_2-y_1)^2)
519  // cerr<<"POSS="<<idxSegments[i]<<" "<<idxSegments[i+1]<<endl;
520  for (size_t j = idxSegments[i]; j < idxSegments[i + 1]; j++) {
521  float dist = std::fabs(float((p2.x - p1.x) * (p1.y - contour[j].y) - (p1.x - contour[j].x) * (p2.y - p1.y))) * inv_den;
522  distSum[i] += dist;
523  // cerr<< dist<<" ";
524  // cv::rectangle(_ssImC,contour[j],contour[j],colors[i],-1);
525  }
526  distSum[i] /= float(idxSegments[i + 1] - idxSegments[i]);
527  // cout<<endl<<endl;
528  }
529 
530 
531  // for the last one
532  cv::Point p1 = contour[idxSegments[0]];
533  cv::Point p2 = contour[idxSegments[3]];
534  float inv_den = 1. / std::sqrt(float((p2.x - p1.x) * (p2.x - p1.x) + (p2.y - p1.y) * (p2.y - p1.y)));
535  // point-to-line distance: d = |(x_2-x_1)(y_1-y_0) - (x_1-x_0)(y_2-y_1)| / sqrt((x_2-x_1)^2 + (y_2-y_1)^2)
536  for (size_t j = 0; j < idxSegments[0]; j++)
537  distSum[3] += std::fabs(float((p2.x - p1.x) * (p1.y - contour[j].y) - (p1.x - contour[j].x) * (p2.y - p1.y))) * inv_den;
538  for (size_t j = idxSegments[3]; j < contour.size(); j++)
539  distSum[3] += std::fabs(float((p2.x - p1.x) * (p1.y - contour[j].y) - (p1.x - contour[j].x) * (p2.y - p1.y))) * inv_den;
540 
541  distSum[3] /= float(idxSegments[0] + (contour.size() - idxSegments[3]));
542  // now, get the maximum
543  /* for (int i=0;i<4;i++)
544  cout<<"DD="<<distSum[i]<<endl;*/
545  // check the two combinations to see the one with higher error
546  if (distSum[0] + distSum[2] > distSum[1] + distSum[3])
547  return 0;
548  else
549  return 1;
550 }
551 
552 void setPointIntoImage(cv::Point2f &p, cv::Size s) {
553  if (p.x < 0)
554  p.x = 0;
555  else if (p.x >= s.width)
556  p.x = s.width - 1;
557  if (p.y < 0)
558  p.y = 0;
559  else if (p.y >= s.height)
560  p.y = s.height - 1;
561 }
562 
563 void setPointIntoImage(cv::Point &p, cv::Size s) {
564  if (p.x < 0)
565  p.x = 0;
566  else if (p.x >= s.width)
567  p.x = s.width - 1;
568  if (p.y < 0)
569  p.y = 0;
570  else if (p.y >= s.height)
571  p.y = s.height - 1;
572 }
573 /************************************
574  *
575  *
576  *
577  *
578  ************************************/
579 bool MarkerDetector::warp_cylinder(Mat &in, Mat &out, Size size, MarkerCandidate &mcand) throw(cv::Exception) {
580 
581  if (mcand.size() != 4)
582  throw cv::Exception(9001, "point.size()!=4", "MarkerDetector::warp", __FILE__, __LINE__);
583 
584  // check first the real need for cylinder warping
585  // cout<<"im="<<mcand.contour.size()<<endl;
586 
587  // for (size_t i=0;i<mcand.contour.size();i++) {
588  // cv::rectangle(_ssImC ,mcand.contour[i],mcand.contour[i],cv::Scalar(111,111,111),-1 );
589  // }
590  // mcand.draw(imC,cv::Scalar(0,255,0));
591  // find the 4 different segments of the contour
592  vector< int > idxSegments;
593  findCornerPointsInContour(mcand, mcand.contour, idxSegments);
594  // let us rearrange the points so that the first corner is the one with the smallest idx
595  int minIdx = 0;
596  for (int i = 1; i < 4; i++)
597  if (idxSegments[i] < idxSegments[minIdx])
598  minIdx = i;
599  // now, rotate the points to be in this order
600  std::rotate(idxSegments.begin(), idxSegments.begin() + minIdx, idxSegments.end());
601  std::rotate(mcand.begin(), mcand.begin() + minIdx, mcand.end());
602 
603  // cout<<"idxSegments="<<idxSegments[0]<< " "<<idxSegments[1]<< " "<<idxSegments[2]<<" "<<idxSegments[3]<<endl;
604  // now, determine the sides that are deformed by the cylinder perspective
605  int defrmdSide = findDeformedSidesIdx(mcand.contour, idxSegments);
606  // cout<<"Def="<<defrmdSide<<endl;
607 
608  // instead of removing perspective distortion of the rectangular region
609  // given by the rectangle, we enlarge it a bit to include the deformed parts
610  cv::Point2f center = mcand.getCenter();
611  Point2f enlargedRegion[4];
612  for (int i = 0; i < 4; i++)
613  enlargedRegion[i] = mcand[i];
614  if (defrmdSide == 0) {
615  enlargedRegion[0] = mcand[0] + (mcand[3] - mcand[0]) * 1.2;
616  enlargedRegion[1] = mcand[1] + (mcand[2] - mcand[1]) * 1.2;
617  enlargedRegion[2] = mcand[2] + (mcand[1] - mcand[2]) * 1.2;
618  enlargedRegion[3] = mcand[3] + (mcand[0] - mcand[3]) * 1.2;
619  } else {
620  enlargedRegion[0] = mcand[0] + (mcand[1] - mcand[0]) * 1.2;
621  enlargedRegion[1] = mcand[1] + (mcand[0] - mcand[1]) * 1.2;
622  enlargedRegion[2] = mcand[2] + (mcand[3] - mcand[2]) * 1.2;
623  enlargedRegion[3] = mcand[3] + (mcand[2] - mcand[3]) * 1.2;
624  }
625  for (size_t i = 0; i < 4; i++)
626  setPointIntoImage(enlargedRegion[i], in.size());
627 
628  /*
629  cv::Scalar colors[4]={cv::Scalar(0,0,255),cv::Scalar(255,0,0),cv::Scalar(0,255,0),cv::Scalar(111,111,0)};
630  for (int i=0;i<4;i++) {
631  cv::rectangle(_ssImC,mcand.contour[idxSegments[i]]-cv::Point(2,2),mcand.contour[idxSegments[i]]+cv::Point(2,2),colors[i],-1 );
632  cv::rectangle(_ssImC,enlargedRegion[i]-cv::Point2f(2,2),enlargedRegion[i]+cv::Point2f(2,2),colors[i],-1 );
633 
634  }*/
635  // cv::imshow("imC",_ssImC);
636 
637 
638  // calculate the max distance from each contour point to the line of the corresponding segment it belongs to
639  // calculate
640  // cv::waitKey(0);
641  // check that the region is within image limits
642  // obtain the perspective transform
643  Point2f pointsRes[4], pointsIn[4];
644  for (int i = 0; i < 4; i++)
645  pointsIn[i] = mcand[i];
646 
647  cv::Size enlargedSize = size;
648  enlargedSize.width += 2 * enlargedSize.width * 0.2;
649  pointsRes[0] = (Point2f(0, 0));
650  pointsRes[1] = Point2f(enlargedSize.width - 1, 0);
651  pointsRes[2] = Point2f(enlargedSize.width - 1, enlargedSize.height - 1);
652  pointsRes[3] = Point2f(0, enlargedSize.height - 1);
653  // rotate to ensure that the deformed sides lie on the horizontal axis when warping
654  if (defrmdSide == 0)
655  rotate(pointsRes, pointsRes + 1, pointsRes + 4);
656  cv::Mat imAux, imAux2(enlargedSize, CV_8UC1);
657  Mat M = cv::getPerspectiveTransform(enlargedRegion, pointsRes);
658  cv::warpPerspective(in, imAux, M, enlargedSize, cv::INTER_NEAREST);
659 
660  // now, transform all points to the new image
661  vector< cv::Point > pointsCO(mcand.contour.size());
662  assert(M.type() == CV_64F);
663  assert(M.cols == 3 && M.rows == 3);
664  // cout<<M<<endl;
665  double *mptr = M.ptr< double >(0);
666  imAux2.setTo(cv::Scalar::all(0));
667 
668 
669  for (size_t i = 0; i < mcand.contour.size(); i++) {
670  float inX = mcand.contour[i].x;
671  float inY = mcand.contour[i].y;
672  float w = inX * mptr[6] + inY * mptr[7] + mptr[8];
673  cv::Point2f pres;
674  pointsCO[i].x = ((inX * mptr[0] + inY * mptr[1] + mptr[2]) / w) + 0.5;
675  pointsCO[i].y = ((inX * mptr[3] + inY * mptr[4] + mptr[5]) / w) + 0.5;
676  // make integers
677  setPointIntoImage(pointsCO[i], imAux.size()); // ensure points are within image limits
678  // cout<<"p="<<pointsCO[i]<<" "<<imAux.size().width<<" "<<imAux.size().height<<endl;
679  imAux2.at< uchar >(pointsCO[i].y, pointsCO[i].x) = 255;
680  if (pointsCO[i].y > 0)
681  imAux2.at< uchar >(pointsCO[i].y - 1, pointsCO[i].x) = 255;
682  if (pointsCO[i].y < imAux2.rows - 1)
683  imAux2.at< uchar >(pointsCO[i].y + 1, pointsCO[i].x) = 255;
684  }
685 
686  cv::Mat outIm(enlargedSize, CV_8UC1);
687  outIm.setTo(cv::Scalar::all(0));
688  // now, scan line by line to determine the required displacement
689  for (int y = 0; y < imAux2.rows; y++) {
690  uchar *_offInfo = imAux2.ptr< uchar >(y);
691  int start = -1, end = -1;
692  // determine the start and end of marked regions
693  for (int x = 0; x < imAux.cols; x++) {
694  if (_offInfo[x]) {
695  if (start == -1)
696  start = x;
697  else
698  end = x;
699  }
700  }
701  // cout<<"S="<<start<<" "<<end<<" "<<end-start<<" "<<(size.width>>1)<<endl;
702  // check that the size is big enough
703  assert(start != -1 && end != -1 && (end - start) > size.width >> 1);
704  uchar *In_image = imAux.ptr< uchar >(y);
705  uchar *Out_image = outIm.ptr< uchar >(y);
706  memcpy(Out_image, In_image + start, imAux.cols - start);
707  }
708 
709 
710  // cout<<"SS="<<mcand.contour.size()<<" "<<pointsCO.size()<<endl;
711  // get the central region with the size specified
712  cv::Mat centerReg = outIm(cv::Range::all(), cv::Range(0, size.width));
713  out = centerReg.clone();
714  // cv::perspectiveTransform(mcand.contour,pointsCO,M);
715  // draw them
716  // cv::imshow("out2",out);
717  // cv::imshow("imm",imAux2);
718  // cv::waitKey(0);
719  return true;
720 }
721 /************************************
722  *
723  *
724  *
725  *
726  ************************************/
727 bool MarkerDetector::isInto(Mat &contour, vector< Point2f > &b) {
728 
729  for (unsigned int i = 0; i < b.size(); i++)
730  if (pointPolygonTest(contour, b[i], false) > 0)
731  return true;
732  return false;
733 }
734 /************************************
735  *
736  *
737  *
738  *
739  ************************************/
740 int MarkerDetector::perimeter(vector< Point2f > &a) {
741  int sum = 0;
742  for (unsigned int i = 0; i < a.size(); i++) {
743  int i2 = (i + 1) % a.size();
744  sum += sqrt((a[i].x - a[i2].x) * (a[i].x - a[i2].x) + (a[i].y - a[i2].y) * (a[i].y - a[i2].y));
745  }
746  return sum;
747 }
748 
749 
754 void MarkerDetector::findBestCornerInRegion_harris(const cv::Mat &grey, vector< cv::Point2f > &Corners, int blockSize) {
755  SubPixelCorner Subp;
756  Subp.RefineCorner(grey, Corners);
757 }
758 
759 
764 void MarkerDetector::refineCandidateLines(MarkerDetector::MarkerCandidate &candidate, const cv::Mat &camMatrix, const cv::Mat &distCoeff) {
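 // Overview of this refinement: locate the four corner indices on the contour, work out the traversal
 // direction, undistort the contour points when camera parameters are available, split them into the
 // four sides, fit a line to each side (interpolate2Dline), intersect adjacent lines (getCrossPoint),
 // re-apply distortion to the intersections and overwrite the candidate corners with them.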
765  // search corners on the contour vector
766  vector< unsigned int > cornerIndex;
767  cornerIndex.resize(4);
768  for (unsigned int j = 0; j < candidate.contour.size(); j++) {
769  for (unsigned int k = 0; k < 4; k++) {
770  if (candidate.contour[j].x == candidate[k].x && candidate.contour[j].y == candidate[k].y) {
771  cornerIndex[k] = j;
772  }
773  }
774  }
775 
776  // are the contour pixels in inverse order or not?
777  bool inverse;
778  if ((cornerIndex[1] > cornerIndex[0]) && (cornerIndex[2] > cornerIndex[1] || cornerIndex[2] < cornerIndex[0]))
779  inverse = false;
780  else if (cornerIndex[2] > cornerIndex[1] && cornerIndex[2] < cornerIndex[0])
781  inverse = false;
782  else
783  inverse = true;
784 
785 
786  // get pixel vector for each line of the marker
787  int inc = 1;
788  if (inverse)
789  inc = -1;
790 
791  // undistort contour
792  vector< Point2f > contour2f;
793  for (unsigned int i = 0; i < candidate.contour.size(); i++)
794  contour2f.push_back(cv::Point2f(candidate.contour[i].x, candidate.contour[i].y));
795  if (!camMatrix.empty() && !distCoeff.empty())
796  cv::undistortPoints(contour2f, contour2f, camMatrix, distCoeff, cv::Mat(), camMatrix);
797 
798 
799  vector< std::vector< cv::Point2f > > contourLines;
800  contourLines.resize(4);
801  for (unsigned int l = 0; l < 4; l++) {
802  for (int j = (int)cornerIndex[l]; j != (int)cornerIndex[(l + 1) % 4]; j += inc) {
803  if (j == (int)candidate.contour.size() && !inverse)
804  j = 0;
805  else if (j == 0 && inverse)
806  j = candidate.contour.size() - 1;
807  contourLines[l].push_back(contour2f[j]);
808  if (j == (int)cornerIndex[(l + 1) % 4])
809  break; // this has to be added because of the previous ifs
810  }
811  }
812 
813  // interpolate marker lines
814  vector< Point3f > lines;
815  lines.resize(4);
816  for (unsigned int j = 0; j < lines.size(); j++)
817  interpolate2Dline(contourLines[j], lines[j]);
818 
819  // get cross points of lines
820  vector< Point2f > crossPoints;
821  crossPoints.resize(4);
822  for (unsigned int i = 0; i < 4; i++)
823  crossPoints[i] = getCrossPoint(lines[(i - 1) % 4], lines[i]);
824 
825  // distort corners again if undistortion was performed
826  if (!camMatrix.empty() && !distCoeff.empty())
827  distortPoints(crossPoints, crossPoints, camMatrix, distCoeff);
828 
829  // reassign points
830  for (unsigned int j = 0; j < 4; j++)
831  candidate[j] = crossPoints[j];
832 }
833 
834 
837 void MarkerDetector::interpolate2Dline(const std::vector< Point2f > &inPoints, Point3f &outLine) {
838 
839  float minX, maxX, minY, maxY;
840  minX = maxX = inPoints[0].x;
841  minY = maxY = inPoints[0].y;
842  for (unsigned int i = 1; i < inPoints.size(); i++) {
843  if (inPoints[i].x < minX)
844  minX = inPoints[i].x;
845  if (inPoints[i].x > maxX)
846  maxX = inPoints[i].x;
847  if (inPoints[i].y < minY)
848  minY = inPoints[i].y;
849  if (inPoints[i].y > maxY)
850  maxY = inPoints[i].y;
851  }
852 
853  // create matrices of equation system
854  Mat A(inPoints.size(), 2, CV_32FC1, Scalar(0));
855  Mat B(inPoints.size(), 1, CV_32FC1, Scalar(0));
856  Mat X;
857 
858 
859 
860  if (maxX - minX > maxY - minY) {
861  // Ax + C = y
862  for (int i = 0; i < inPoints.size(); i++) {
863 
864  A.at< float >(i, 0) = inPoints[i].x;
865  A.at< float >(i, 1) = 1.;
866  B.at< float >(i, 0) = inPoints[i].y;
867  }
868 
869  // solve system
870  solve(A, B, X, DECOMP_SVD);
871  // return line coefficients (A, B, C) of Ax + By + C = 0
872  outLine = Point3f(X.at< float >(0, 0), -1., X.at< float >(1, 0));
873  } else {
874  // By + C = x
875  for (int i = 0; i < inPoints.size(); i++) {
876 
877  A.at< float >(i, 0) = inPoints[i].y;
878  A.at< float >(i, 1) = 1.;
879  B.at< float >(i, 0) = inPoints[i].x;
880  }
881 
882  // solve system
883  solve(A, B, X, DECOMP_SVD);
884  // return line coefficients (A, B, C) of Ax + By + C = 0
885  outLine = Point3f(-1., X.at< float >(0, 0), X.at< float >(1, 0));
886  }
887 }
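// interpolate2Dline() fits the dominant direction by least squares: if the x-span is larger it solves
// a*x + c = y, otherwise b*y + c = x, and returns the line as (A, B, C) with A*x + B*y + C = 0.
// For example, the points (0,0), (1,1), (2,2) fall into the second branch and yield (-1, 1, 0),
// i.e. -x + y = 0, the line y = x.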
888 
891 Point2f MarkerDetector::getCrossPoint(const cv::Point3f &line1, const cv::Point3f &line2) {
892 
893  // create matrices of equation system
894  Mat A(2, 2, CV_32FC1, Scalar(0));
895  Mat B(2, 1, CV_32FC1, Scalar(0));
896  Mat X;
897 
898  A.at< float >(0, 0) = line1.x;
899  A.at< float >(0, 1) = line1.y;
900  B.at< float >(0, 0) = -line1.z;
901 
902  A.at< float >(1, 0) = line2.x;
903  A.at< float >(1, 1) = line2.y;
904  B.at< float >(1, 0) = -line2.z;
905 
906  // solve system
907  solve(A, B, X, DECOMP_SVD);
908  return Point2f(X.at< float >(0, 0), X.at< float >(1, 0));
909 }
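// getCrossPoint() solves the 2x2 system formed by the two line equations A*x + B*y = -C.
// For example, line1 = (1, 0, -1) (the vertical line x = 1) and line2 = (0, 1, -2) (the horizontal
// line y = 2) give the intersection (1, 2).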
910 
911 
914 void MarkerDetector::distortPoints(vector< cv::Point2f > in, vector< cv::Point2f > &out, const Mat &camMatrix, const Mat &distCoeff) {
915  // trivial extrinsics
916  cv::Mat Rvec = cv::Mat(3, 1, CV_32FC1, cv::Scalar::all(0));
917  cv::Mat Tvec = Rvec.clone();
918  // calculate 3d points and then reproject them, so OpenCV applies the distortion internally
919  vector< cv::Point3f > cornersPoints3d;
920  for (unsigned int i = 0; i < in.size(); i++)
921  cornersPoints3d.push_back(cv::Point3f((in[i].x - camMatrix.at< float >(0, 2)) / camMatrix.at< float >(0, 0), // x
922  (in[i].y - camMatrix.at< float >(1, 2)) / camMatrix.at< float >(1, 1), // y
923  1)); // z
924  cv::projectPoints(cornersPoints3d, Rvec, Tvec, camMatrix, distCoeff, out);
925 }
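// distortPoints() back-projects each undistorted pixel (u, v) to the normalized camera ray
// ((u - cx)/fx, (v - cy)/fy, 1) and re-projects it with zero rotation and translation, so
// projectPoints applies only the intrinsics and the distortion model; note that the camera matrix
// is read with at<float>, so it is expected in CV_32F here.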
926 
927 
928 
929 /************************************
930  *
931  *
932  *
933  *
934  ************************************/
935 void MarkerDetector::drawAllContours(Mat input, std::vector< std::vector< cv::Point > > &contours) { drawContours(input, contours, -1, Scalar(255, 0, 255)); }
936 
937 /************************************
938  *
939  *
940  *
941  *
942  ************************************/
943 void MarkerDetector::drawContour(Mat &in, vector< Point > &contour, Scalar color) {
944  for (unsigned int i = 0; i < contour.size(); i++) {
945  cv::rectangle(in, contour[i], contour[i], color);
946  }
947 }
948 
949 void MarkerDetector::drawApproxCurve(Mat &in, vector< Point > &contour, Scalar color) {
950  for (unsigned int i = 0; i < contour.size(); i++) {
951  cv::line(in, contour[i], contour[(i + 1) % contour.size()], color);
952  }
953 }
954 /************************************
955  *
956  *
957  *
958  *
959  ************************************/
960 
961 void MarkerDetector::draw(Mat out, const vector< Marker > &markers) {
962  for (unsigned int i = 0; i < markers.size(); i++) {
963  cv::line(out, markers[i][0], markers[i][1], cvScalar(255, 0, 0), 2, CV_AA);
964  cv::line(out, markers[i][1], markers[i][2], cvScalar(255, 0, 0), 2, CV_AA);
965  cv::line(out, markers[i][2], markers[i][3], cvScalar(255, 0, 0), 2, CV_AA);
966  cv::line(out, markers[i][3], markers[i][0], cvScalar(255, 0, 0), 2, CV_AA);
967  }
968 }
969 /* Attempt to make it faster than in opencv. I could not :( Maybe trying with SSE3...
970 void MarkerDetector::warpPerspective(const cv::Mat &in,cv::Mat & out, const cv::Mat & M,cv::Size size)
971 {
972  //inverse the matrix
973  out.create(size,in.type());
974  //convert to float to speed up operations
975  const double *m=M.ptr<double>(0);
976  float mf[9];
977  mf[0]=m[0];mf[1]=m[1];mf[2]=m[2];
978  mf[3]=m[3];mf[4]=m[4];mf[5]=m[5];
979  mf[6]=m[6];mf[7]=m[7];mf[8]=m[8];
980 
981  for(int y=0;y<out.rows;y++){
982  uchar *_ptrout=out.ptr<uchar>(y);
983  for(int x=0;x<out.cols;x++){
984  //get the x,y position
985  float den=1./(x*mf[6]+y*mf[7]+mf[8]);
986  float ox= (x*mf[0]+y*mf[1]+mf[2])*den;
987  float oy= (x*mf[3]+y*mf[4]+mf[5])*den;
988  _ptrout[x]=in.at<uchar>(oy,ox);
989  }
990  }
991 }
992 */
993 
994 /************************************
995  *
996  *
997  *
998  *
999  ************************************/
1000 
1001 void MarkerDetector::glGetProjectionMatrix(CameraParameters &CamMatrix, cv::Size orgImgSize, cv::Size size, double proj_matrix[16], double gnear, double gfar,
1002  bool invert) throw(cv::Exception) {
1003  cerr << "MarkerDetector::glGetProjectionMatrix . This a deprecated function. Use CameraParameters::glGetProjectionMatrix instead. " << __FILE__ << " "
1004  << __LINE__ << endl;
1005  CamMatrix.glGetProjectionMatrix(orgImgSize, size, proj_matrix, gnear, gfar, invert);
1006 }
1007 
1008 /************************************
1009 *
1010 *
1011 *
1012 *
1013 ************************************/
1014 
1015 void MarkerDetector::setMinMaxSize(float min, float max) throw(cv::Exception) {
1016  if (min <= 0 || min > 1)
1017  throw cv::Exception(1, " min parameter out of range", "MarkerDetector::setMinMaxSize", __FILE__, __LINE__);
1018  if (max <= 0 || max > 1)
1019  throw cv::Exception(1, " max parameter out of range", "MarkerDetector::setMinMaxSize", __FILE__, __LINE__);
1020  if (min > max)
1021  throw cv::Exception(1, " min>max", "MarkerDetector::setMinMaxSize", __FILE__, __LINE__);
1022  _minSize = min;
1023  _maxSize = max;
1024 }
1025 
1026 /************************************
1027 *
1028 *
1029 *
1030 *
1031 ************************************/
1032 
1033 void MarkerDetector::setWarpSize(int val) throw(cv::Exception) {
1034  if (val < 10)
1035  throw cv::Exception(1, " invalid canonical image size", "MarkerDetector::setWarpSize", __FILE__, __LINE__);
1036  _markerWarpSize = val;
1037 }
1038 
1039 
1040 void MarkerDetector::findCornerMaxima(vector< cv::Point2f > &Corners, const cv::Mat &grey, int wsize) {
1041 
1042 // for each corner, search in a region around it
1043 #pragma omp parallel for
1044 
1045  for (size_t i = 0; i < Corners.size(); i++) {
1046  cv::Point2f minLimit(std::max(0, int(Corners[i].x - wsize)), std::max(0, int(Corners[i].y - wsize)));
1047  cv::Point2f maxLimit(std::min(grey.cols, int(Corners[i].x + wsize)), std::min(grey.rows, int(Corners[i].y + wsize)));
1048 
1049  cv::Mat reg = grey(cv::Range(minLimit.y, maxLimit.y), cv::Range(minLimit.x, maxLimit.x));
1050  cv::Mat harr, harrint;
1051  cv::cornerHarris(reg, harr, 3, 3, 0.04);
1052 
1053  // now, do a block-sum operation on the Harris response
1054  cv::integral(harr, harrint);
1055  int bls_a = 4;
1056  for (int y = bls_a; y < harr.rows - bls_a; y++) {
1057  float *h = harr.ptr< float >(y);
1058  for (int x = bls_a; x < harr.cols - bls_a; x++)
1059  h[x] = harrint.at< double >(y + bls_a, x + bls_a) - harrint.at< double >(y + bls_a, x) - harrint.at< double >(y, x + bls_a) +
1060  harrint.at< double >(y, x);
1061  }
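 // harrint is the integral image of the Harris response, so the four-term expression above replaces
 // each response value with the sum over the bls_a x bls_a block whose top-left corner is (x, y);
 // the loop below then picks the block with the strongest response, weighted by its closeness to the
 // original corner estimate.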
1062 
1063 
1064 
1065  cv::Point2f best(-1, -1);
1066  cv::Point2f center(reg.cols / 2, reg.rows / 2);
1067  ;
1068  double maxv = 0;
1069  for (size_t i = 0; i < harr.rows; i++) {
1070  // L1 dist to center
1071  float *har = harr.ptr< float >(i);
1072  for (size_t x = 0; x < harr.cols; x++) {
1073  float d = float(fabs(center.x - x) + fabs(center.y - i)) / float(reg.cols / 2 + reg.rows / 2);
1074  float w = 1. - d;
1075  if (w * har[x] > maxv) {
1076  maxv = w * har[x];
1077  best = cv::Point2f(x, i);
1078  }
1079  }
1080  }
1081  Corners[i] = best + minLimit;
1082  }
1083 }
1084 };