1// This file is part of OpenCV project.
2// It is subject to the license terms in the LICENSE file found in the top-level directory
3// of this distribution and at http://opencv.org/license.html
4
5#include "../precomp.hpp"
6#include <opencv2/calib3d.hpp>
7
8#include "opencv2/objdetect/aruco_detector.hpp"
9#include "opencv2/objdetect/aruco_board.hpp"
10#include "apriltag/apriltag_quad_thresh.hpp"
11#include "aruco_utils.hpp"
12#include <cmath>
13
14namespace cv {
15namespace aruco {
16
17using namespace std;
18
19static inline bool readWrite(DetectorParameters &params, const FileNode* readNode,
20 FileStorage* writeStorage = nullptr)
21{
22 CV_Assert(readNode || writeStorage);
23 bool check = false;
24
25 check |= readWriteParameter(name: "adaptiveThreshWinSizeMin", parameter&: params.adaptiveThreshWinSizeMin, readNode, writeStorage);
26 check |= readWriteParameter(name: "adaptiveThreshWinSizeMax", parameter&: params.adaptiveThreshWinSizeMax, readNode, writeStorage);
27 check |= readWriteParameter(name: "adaptiveThreshWinSizeStep", parameter&: params.adaptiveThreshWinSizeStep, readNode, writeStorage);
28 check |= readWriteParameter(name: "adaptiveThreshConstant", parameter&: params.adaptiveThreshConstant, readNode, writeStorage);
29 check |= readWriteParameter(name: "minMarkerPerimeterRate", parameter&: params.minMarkerPerimeterRate, readNode, writeStorage);
30 check |= readWriteParameter(name: "maxMarkerPerimeterRate", parameter&: params.maxMarkerPerimeterRate, readNode, writeStorage);
31 check |= readWriteParameter(name: "polygonalApproxAccuracyRate", parameter&: params.polygonalApproxAccuracyRate,
32 readNode, writeStorage);
33 check |= readWriteParameter(name: "minCornerDistanceRate", parameter&: params.minCornerDistanceRate, readNode, writeStorage);
34 check |= readWriteParameter(name: "minDistanceToBorder", parameter&: params.minDistanceToBorder, readNode, writeStorage);
35 check |= readWriteParameter(name: "minMarkerDistanceRate", parameter&: params.minMarkerDistanceRate, readNode, writeStorage);
36 check |= readWriteParameter(name: "cornerRefinementMethod", parameter&: params.cornerRefinementMethod, readNode, writeStorage);
37 check |= readWriteParameter(name: "cornerRefinementWinSize", parameter&: params.cornerRefinementWinSize, readNode, writeStorage);
38 check |= readWriteParameter(name: "relativeCornerRefinmentWinSize", parameter&: params.relativeCornerRefinmentWinSize, readNode,
39 writeStorage);
40 check |= readWriteParameter(name: "cornerRefinementMaxIterations", parameter&: params.cornerRefinementMaxIterations,
41 readNode, writeStorage);
42 check |= readWriteParameter(name: "cornerRefinementMinAccuracy", parameter&: params.cornerRefinementMinAccuracy,
43 readNode, writeStorage);
44 check |= readWriteParameter(name: "markerBorderBits", parameter&: params.markerBorderBits, readNode, writeStorage);
45 check |= readWriteParameter(name: "perspectiveRemovePixelPerCell", parameter&: params.perspectiveRemovePixelPerCell,
46 readNode, writeStorage);
47 check |= readWriteParameter(name: "perspectiveRemoveIgnoredMarginPerCell", parameter&: params.perspectiveRemoveIgnoredMarginPerCell,
48 readNode, writeStorage);
49 check |= readWriteParameter(name: "maxErroneousBitsInBorderRate", parameter&: params.maxErroneousBitsInBorderRate,
50 readNode, writeStorage);
51 check |= readWriteParameter(name: "minOtsuStdDev", parameter&: params.minOtsuStdDev, readNode, writeStorage);
52 check |= readWriteParameter(name: "errorCorrectionRate", parameter&: params.errorCorrectionRate, readNode, writeStorage);
53 check |= readWriteParameter(name: "minGroupDistance", parameter&: params.minGroupDistance, readNode, writeStorage);
54 // new aruco 3 functionality
55 check |= readWriteParameter(name: "useAruco3Detection", parameter&: params.useAruco3Detection, readNode, writeStorage);
56 check |= readWriteParameter(name: "minSideLengthCanonicalImg", parameter&: params.minSideLengthCanonicalImg, readNode, writeStorage);
57 check |= readWriteParameter(name: "minMarkerLengthRatioOriginalImg", parameter&: params.minMarkerLengthRatioOriginalImg,
58 readNode, writeStorage);
59 return check;
60}
61
62bool DetectorParameters::readDetectorParameters(const FileNode& fn)
63{
64 if (fn.empty())
65 return false;
66 return readWrite(params&: *this, readNode: &fn);
67}
68
69bool DetectorParameters::writeDetectorParameters(FileStorage& fs, const String& name)
70{
71 CV_Assert(fs.isOpened());
72 if (!name.empty())
73 fs << name << "{";
74 bool res = readWrite(params&: *this, readNode: nullptr, writeStorage: &fs);
75 if (!name.empty())
76 fs << "}";
77 return res;
78}
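
/* Usage sketch (illustrative only; it exercises the read/write helpers defined above and
 * assumes a writable file "detector_params.yml" -- the file name is hypothetical):
 * @code
 *     cv::aruco::DetectorParameters params;
 *     params.adaptiveThreshWinSizeMax = 33;                    // tweak any field directly
 *     {
 *         cv::FileStorage fs("detector_params.yml", cv::FileStorage::WRITE);
 *         params.writeDetectorParameters(fs, "aruco_params");  // stored under a named map
 *     }
 *     cv::FileStorage fs("detector_params.yml", cv::FileStorage::READ);
 *     cv::aruco::DetectorParameters loaded;
 *     loaded.readDetectorParameters(fs["aruco_params"]);       // returns false on an empty node
 * @endcode
 */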
79
80static inline bool readWrite(RefineParameters& refineParameters, const FileNode* readNode,
81 FileStorage* writeStorage = nullptr)
82{
83 CV_Assert(readNode || writeStorage);
84 bool check = false;
85
86 check |= readWriteParameter(name: "minRepDistance", parameter&: refineParameters.minRepDistance, readNode, writeStorage);
87 check |= readWriteParameter(name: "errorCorrectionRate", parameter&: refineParameters.errorCorrectionRate, readNode, writeStorage);
88 check |= readWriteParameter(name: "checkAllOrders", parameter&: refineParameters.checkAllOrders, readNode, writeStorage);
89 return check;
90}
91
92RefineParameters::RefineParameters(float _minRepDistance, float _errorCorrectionRate, bool _checkAllOrders):
93 minRepDistance(_minRepDistance), errorCorrectionRate(_errorCorrectionRate),
94 checkAllOrders(_checkAllOrders){}
95
96bool RefineParameters::readRefineParameters(const FileNode &fn)
97{
98 if (fn.empty())
99 return false;
100 return readWrite(refineParameters&: *this, readNode: &fn);
101}
102
103bool RefineParameters::writeRefineParameters(FileStorage& fs, const String& name)
104{
105 CV_Assert(fs.isOpened());
106 if (!name.empty())
107 fs << name << "{";
108 bool res = readWrite(refineParameters&: *this, readNode: nullptr, writeStorage: &fs);
109 if (!name.empty())
110 fs << "}";
111 return res;
112}
113
114/**
115 * @brief Threshold input image using adaptive thresholding
116 */
static void _threshold(InputArray _in, OutputArray _out, int winSize, double constant) {

    CV_Assert(winSize >= 3);
    if(winSize % 2 == 0) winSize++; // win size must be odd
    adaptiveThreshold(_in, _out, 255, ADAPTIVE_THRESH_MEAN_C, THRESH_BINARY_INV, winSize, constant);
}
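
/* The helper above is a thin wrapper over cv::adaptiveThreshold. A minimal stand-alone
 * sketch of the same operation (the window size, constant and file name are illustrative):
 * @code
 *     cv::Mat grey = cv::imread("scene.png", cv::IMREAD_GRAYSCALE);   // hypothetical input
 *     cv::Mat bw;
 *     int winSize = 23;        // must be odd and >= 3
 *     double constant = 7.0;   // corresponds to params.adaptiveThreshConstant
 *     cv::adaptiveThreshold(grey, bw, 255, cv::ADAPTIVE_THRESH_MEAN_C,
 *                           cv::THRESH_BINARY_INV, winSize, constant);
 * @endcode
 */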
123
124
/**
 * @brief Given a thresholded image, find the contours, calculate their polygonal approximation
 * and keep those that satisfy certain conditions
 */
129static void _findMarkerContours(const Mat &in, vector<vector<Point2f> > &candidates,
130 vector<vector<Point> > &contoursOut, double minPerimeterRate,
131 double maxPerimeterRate, double accuracyRate,
132 double minCornerDistanceRate, int minDistanceToBorder, int minSize) {
133
134 CV_Assert(minPerimeterRate > 0 && maxPerimeterRate > 0 && accuracyRate > 0 &&
135 minCornerDistanceRate >= 0 && minDistanceToBorder >= 0);
136
137 // calculate maximum and minimum sizes in pixels
138 unsigned int minPerimeterPixels =
139 (unsigned int)(minPerimeterRate * max(a: in.cols, b: in.rows));
140 unsigned int maxPerimeterPixels =
141 (unsigned int)(maxPerimeterRate * max(a: in.cols, b: in.rows));
142
143 // for aruco3 functionality
144 if (minSize != 0) {
145 minPerimeterPixels = 4*minSize;
146 }
147
148 vector<vector<Point> > contours;
149 findContours(image: in, contours, mode: RETR_LIST, method: CHAIN_APPROX_NONE);
150 // now filter list of contours
151 for(unsigned int i = 0; i < contours.size(); i++) {
152 // check perimeter
153 if(contours[i].size() < minPerimeterPixels || contours[i].size() > maxPerimeterPixels)
154 continue;
155
        // check that the approximation has 4 corners and is convex
157 vector<Point> approxCurve;
158 approxPolyDP(curve: contours[i], approxCurve, epsilon: double(contours[i].size()) * accuracyRate, closed: true);
159 if(approxCurve.size() != 4 || !isContourConvex(contour: approxCurve)) continue;
160
161 // check min distance between corners
162 double minDistSq = max(a: in.cols, b: in.rows) * max(a: in.cols, b: in.rows);
163 for(int j = 0; j < 4; j++) {
164 double d = (double)(approxCurve[j].x - approxCurve[(j + 1) % 4].x) *
165 (double)(approxCurve[j].x - approxCurve[(j + 1) % 4].x) +
166 (double)(approxCurve[j].y - approxCurve[(j + 1) % 4].y) *
167 (double)(approxCurve[j].y - approxCurve[(j + 1) % 4].y);
168 minDistSq = min(a: minDistSq, b: d);
169 }
170 double minCornerDistancePixels = double(contours[i].size()) * minCornerDistanceRate;
171 if(minDistSq < minCornerDistancePixels * minCornerDistancePixels) continue;
172
173 // check if it is too near to the image border
174 bool tooNearBorder = false;
175 for(int j = 0; j < 4; j++) {
176 if(approxCurve[j].x < minDistanceToBorder || approxCurve[j].y < minDistanceToBorder ||
177 approxCurve[j].x > in.cols - 1 - minDistanceToBorder ||
178 approxCurve[j].y > in.rows - 1 - minDistanceToBorder)
179 tooNearBorder = true;
180 }
181 if(tooNearBorder) continue;
182
        // if it passes all the tests, add to candidates vector
184 vector<Point2f> currentCandidate;
185 currentCandidate.resize(new_size: 4);
186 for(int j = 0; j < 4; j++) {
187 currentCandidate[j] = Point2f((float)approxCurve[j].x, (float)approxCurve[j].y);
188 }
189 candidates.push_back(x: currentCandidate);
190 contoursOut.push_back(x: contours[i]);
191 }
192}
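
/* Worked example of the perimeter bounds applied above (OpenCV's default rates; the image
 * size is illustrative):
 * @code
 *     // 1280x720 image, minMarkerPerimeterRate = 0.03, maxMarkerPerimeterRate = 4.0
 *     int maxDim = std::max(1280, 720);                                  // 1280
 *     unsigned int minPerimeterPixels = (unsigned int)(0.03 * maxDim);   // 38 px
 *     unsigned int maxPerimeterPixels = (unsigned int)(4.0  * maxDim);   // 5120 px
 *     // a contour survives only if its length in pixels lies in [38, 5120],
 *     // before the square/convexity, corner-distance and border-distance checks
 * @endcode
 */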
193
194
/**
 * @brief Ensure that the candidate corners are ordered in clockwise direction
 */
198static void _reorderCandidatesCorners(vector<vector<Point2f> > &candidates) {
199
200 for(unsigned int i = 0; i < candidates.size(); i++) {
201 double dx1 = candidates[i][1].x - candidates[i][0].x;
202 double dy1 = candidates[i][1].y - candidates[i][0].y;
203 double dx2 = candidates[i][2].x - candidates[i][0].x;
204 double dy2 = candidates[i][2].y - candidates[i][0].y;
205 double crossProduct = (dx1 * dy2) - (dy1 * dx2);
206
207 if(crossProduct < 0.0) { // not clockwise direction
208 swap(a&: candidates[i][1], b&: candidates[i][3]);
209 }
210 }
211}
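
/* The reordering above relies on the sign of the 2D cross product of the first two edges.
 * A minimal sketch with an invented quad given in counter-clockwise image order:
 * @code
 *     std::vector<cv::Point2f> quad = { {0.f, 0.f}, {0.f, 10.f}, {10.f, 10.f}, {10.f, 0.f} };
 *     double dx1 = quad[1].x - quad[0].x, dy1 = quad[1].y - quad[0].y;
 *     double dx2 = quad[2].x - quad[0].x, dy2 = quad[2].y - quad[0].y;
 *     double cross = dx1 * dy2 - dy1 * dx2;           // -100 here, i.e. not clockwise
 *     if (cross < 0.0) std::swap(quad[1], quad[3]);   // corners 1 and 3 get swapped
 * @endcode
 */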
212
static float getAverageModuleSize(const vector<Point2f>& markerCorners, int markerSize, int markerBorderBits) {
    float averageArucoModuleSize = 0.f;
    for (size_t i = 0ull; i < 4ull; i++) {
        averageArucoModuleSize += sqrt(normL2Sqr<float>(Point2f(markerCorners[i] - markerCorners[(i+1ull) % 4ull])));
    }
    int numModules = markerSize + markerBorderBits * 2;
    averageArucoModuleSize /= ((float)markerCorners.size()*numModules);
    return averageArucoModuleSize;
}
222
static bool checkMarker1InMarker2(const vector<Point2f>& marker1, const vector<Point2f>& marker2) {
    return pointPolygonTest(marker2, marker1[0], false) >= 0 && pointPolygonTest(marker2, marker1[1], false) >= 0 &&
           pointPolygonTest(marker2, marker1[2], false) >= 0 && pointPolygonTest(marker2, marker1[3], false) >= 0;
}
227
228struct MarkerCandidate {
229 vector<Point2f> corners;
230 vector<Point> contour;
231 float perimeter = 0.f;
232};
233
234struct MarkerCandidateTree : MarkerCandidate{
235 int parent = -1;
236 int depth = 0;
237 vector<MarkerCandidate> closeContours;
238
239 MarkerCandidateTree() {}
240
241 MarkerCandidateTree(vector<Point2f>&& corners_, vector<Point>&& contour_) {
242 corners = std::move(corners_);
243 contour = std::move(contour_);
244 perimeter = 0.f;
245 for (size_t i = 0ull; i < 4ull; i++) {
246 perimeter += sqrt(x: normL2Sqr<float>(pt: Point2f(corners[i] - corners[(i+1ull) % 4ull])));
247 }
248 }
249
250 bool operator<(const MarkerCandidateTree& m) const {
        // sort the contours in descending order of perimeter
252 return perimeter > m.perimeter;
253 }
254};
255
256
257// returns the average distance between the marker points
258float static inline getAverageDistance(const std::vector<Point2f>& marker1, const std::vector<Point2f>& marker2) {
259 float minDistSq = std::numeric_limits<float>::max();
260 // fc is the first corner considered on one of the markers, 4 combinations are possible
261 for(int fc = 0; fc < 4; fc++) {
262 float distSq = 0;
263 for(int c = 0; c < 4; c++) {
264 // modC is the corner considering first corner is fc
265 int modC = (c + fc) % 4;
266 distSq += normL2Sqr<float>(pt: marker1[modC] - marker2[c]);
267 }
268 distSq /= 4.f;
269 minDistSq = min(a: minDistSq, b: distSq);
270 }
271 return sqrt(x: minDistSq);
272}
273
274/**
275 * @brief Initial steps on finding square candidates
276 */
277static void _detectInitialCandidates(const Mat &grey, vector<vector<Point2f> > &candidates,
278 vector<vector<Point> > &contours,
279 const DetectorParameters &params) {
280
281 CV_Assert(params.adaptiveThreshWinSizeMin >= 3 && params.adaptiveThreshWinSizeMax >= 3);
282 CV_Assert(params.adaptiveThreshWinSizeMax >= params.adaptiveThreshWinSizeMin);
283 CV_Assert(params.adaptiveThreshWinSizeStep > 0);
284
285 // number of window sizes (scales) to apply adaptive thresholding
286 int nScales = (params.adaptiveThreshWinSizeMax - params.adaptiveThreshWinSizeMin) /
287 params.adaptiveThreshWinSizeStep + 1;
288
289 vector<vector<vector<Point2f> > > candidatesArrays((size_t) nScales);
290 vector<vector<vector<Point> > > contoursArrays((size_t) nScales);
291
292 ////for each value in the interval of thresholding window sizes
293 parallel_for_(range: Range(0, nScales), functor: [&](const Range& range) {
294 const int begin = range.start;
295 const int end = range.end;
296
297 for (int i = begin; i < end; i++) {
298 int currScale = params.adaptiveThreshWinSizeMin + i * params.adaptiveThreshWinSizeStep;
299 // threshold
300 Mat thresh;
301 _threshold(in: grey, out: thresh, winSize: currScale, constant: params.adaptiveThreshConstant);
302
303 // detect rectangles
304 _findMarkerContours(in: thresh, candidates&: candidatesArrays[i], contoursOut&: contoursArrays[i],
305 minPerimeterRate: params.minMarkerPerimeterRate, maxPerimeterRate: params.maxMarkerPerimeterRate,
306 accuracyRate: params.polygonalApproxAccuracyRate, minCornerDistanceRate: params.minCornerDistanceRate,
307 minDistanceToBorder: params.minDistanceToBorder, minSize: params.minSideLengthCanonicalImg);
308 }
309 });
310 // join candidates
311 for(int i = 0; i < nScales; i++) {
312 for(unsigned int j = 0; j < candidatesArrays[i].size(); j++) {
313 candidates.push_back(x: candidatesArrays[i][j]);
314 contours.push_back(x: contoursArrays[i][j]);
315 }
316 }
317}
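
/* Worked example of the number of thresholding scales used above (OpenCV's default window
 * parameters; the arithmetic is illustrative):
 * @code
 *     // defaults: adaptiveThreshWinSizeMin = 3, adaptiveThreshWinSizeMax = 23, step = 10
 *     int nScales = (23 - 3) / 10 + 1;   // 3 scales
 *     // the window sizes actually tried are therefore 3, 13 and 23 pixels
 * @endcode
 */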
318
319
320/**
321 * @brief Given an input image and candidate corners, extract the bits of the candidate, including
322 * the border bits
323 */
324static Mat _extractBits(InputArray _image, const vector<Point2f>& corners, int markerSize,
325 int markerBorderBits, int cellSize, double cellMarginRate, double minStdDevOtsu) {
326 CV_Assert(_image.getMat().channels() == 1);
327 CV_Assert(corners.size() == 4ull);
328 CV_Assert(markerBorderBits > 0 && cellSize > 0 && cellMarginRate >= 0 && cellMarginRate <= 1);
329 CV_Assert(minStdDevOtsu >= 0);
330
331 // number of bits in the marker
332 int markerSizeWithBorders = markerSize + 2 * markerBorderBits;
333 int cellMarginPixels = int(cellMarginRate * cellSize);
334
335 Mat resultImg; // marker image after removing perspective
336 int resultImgSize = markerSizeWithBorders * cellSize;
337 Mat resultImgCorners(4, 1, CV_32FC2);
338 resultImgCorners.ptr<Point2f>(y: 0)[0] = Point2f(0, 0);
339 resultImgCorners.ptr<Point2f>(y: 0)[1] = Point2f((float)resultImgSize - 1, 0);
340 resultImgCorners.ptr<Point2f>(y: 0)[2] =
341 Point2f((float)resultImgSize - 1, (float)resultImgSize - 1);
342 resultImgCorners.ptr<Point2f>(y: 0)[3] = Point2f(0, (float)resultImgSize - 1);
343
344 // remove perspective
345 Mat transformation = getPerspectiveTransform(src: corners, dst: resultImgCorners);
346 warpPerspective(src: _image, dst: resultImg, M: transformation, dsize: Size(resultImgSize, resultImgSize),
347 flags: INTER_NEAREST);
348
349 // output image containing the bits
350 Mat bits(markerSizeWithBorders, markerSizeWithBorders, CV_8UC1, Scalar::all(v0: 0));
351
352 // check if standard deviation is enough to apply Otsu
353 // if not enough, it probably means all bits are the same color (black or white)
354 Mat mean, stddev;
355 // Remove some border just to avoid border noise from perspective transformation
356 Mat innerRegion = resultImg.colRange(startcol: cellSize / 2, endcol: resultImg.cols - cellSize / 2)
357 .rowRange(startrow: cellSize / 2, endrow: resultImg.rows - cellSize / 2);
358 meanStdDev(src: innerRegion, mean, stddev);
359 if(stddev.ptr< double >(y: 0)[0] < minStdDevOtsu) {
360 // all black or all white, depending on mean value
361 if(mean.ptr< double >(y: 0)[0] > 127)
362 bits.setTo(value: 1);
363 else
364 bits.setTo(value: 0);
365 return bits;
366 }
367
368 // now extract code, first threshold using Otsu
369 threshold(src: resultImg, dst: resultImg, thresh: 125, maxval: 255, type: THRESH_BINARY | THRESH_OTSU);
370
371 // for each cell
372 for(int y = 0; y < markerSizeWithBorders; y++) {
373 for(int x = 0; x < markerSizeWithBorders; x++) {
374 int Xstart = x * (cellSize) + cellMarginPixels;
375 int Ystart = y * (cellSize) + cellMarginPixels;
376 Mat square = resultImg(Rect(Xstart, Ystart, cellSize - 2 * cellMarginPixels,
377 cellSize - 2 * cellMarginPixels));
378 // count white pixels on each cell to assign its value
379 size_t nZ = (size_t) countNonZero(src: square);
380 if(nZ > square.total() / 2) bits.at<unsigned char>(i0: y, i1: x) = 1;
381 }
382 }
383
384 return bits;
385}
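
/* Size bookkeeping for the canonical marker image built above (a sketch with typical values;
 * the numbers are illustrative, not mandated by the library):
 * @code
 *     int markerSize = 5, markerBorderBits = 1, cellSize = 4;          // e.g. a 5x5 dictionary
 *     int markerSizeWithBorders = markerSize + 2 * markerBorderBits;   // 7 cells per side
 *     int resultImgSize = markerSizeWithBorders * cellSize;            // 28x28 px after the warp
 *     // the returned "bits" Mat is markerSizeWithBorders x markerSizeWithBorders (CV_8UC1),
 *     // each entry 0/1 depending on the majority of pixels inside the corresponding cell
 * @endcode
 */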
386
387
388
389/**
390 * @brief Return number of erroneous bits in border, i.e. number of white bits in border.
391 */
392static int _getBorderErrors(const Mat &bits, int markerSize, int borderSize) {
393
394 int sizeWithBorders = markerSize + 2 * borderSize;
395
396 CV_Assert(markerSize > 0 && bits.cols == sizeWithBorders && bits.rows == sizeWithBorders);
397
398 int totalErrors = 0;
399 for(int y = 0; y < sizeWithBorders; y++) {
400 for(int k = 0; k < borderSize; k++) {
401 if(bits.ptr<unsigned char>(y)[k] != 0) totalErrors++;
402 if(bits.ptr<unsigned char>(y)[sizeWithBorders - 1 - k] != 0) totalErrors++;
403 }
404 }
405 for(int x = borderSize; x < sizeWithBorders - borderSize; x++) {
406 for(int k = 0; k < borderSize; k++) {
407 if(bits.ptr<unsigned char>(y: k)[x] != 0) totalErrors++;
408 if(bits.ptr<unsigned char>(y: sizeWithBorders - 1 - k)[x] != 0) totalErrors++;
409 }
410 }
411 return totalErrors;
412}
413
414
415/**
416 * @brief Tries to identify one candidate given the dictionary
 * @return candidate type: zero if the candidate is not valid,
418 * 1 if the candidate is a black candidate (default candidate)
419 * 2 if the candidate is a white candidate
420 */
421static uint8_t _identifyOneCandidate(const Dictionary& dictionary, const Mat& _image,
422 const vector<Point2f>& _corners, int& idx,
423 const DetectorParameters& params, int& rotation,
424 const float scale = 1.f) {
425 CV_DbgAssert(params.markerBorderBits > 0);
426 uint8_t typ=1;
427 // get bits
428 // scale corners to the correct size to search on the corresponding image pyramid
429 vector<Point2f> scaled_corners(4);
430 for (int i = 0; i < 4; ++i) {
431 scaled_corners[i].x = _corners[i].x * scale;
432 scaled_corners[i].y = _corners[i].y * scale;
433 }
434
435 Mat candidateBits =
436 _extractBits(_image, corners: scaled_corners, markerSize: dictionary.markerSize, markerBorderBits: params.markerBorderBits,
437 cellSize: params.perspectiveRemovePixelPerCell,
438 cellMarginRate: params.perspectiveRemoveIgnoredMarginPerCell, minStdDevOtsu: params.minOtsuStdDev);
439
440 // analyze border bits
441 int maximumErrorsInBorder =
442 int(dictionary.markerSize * dictionary.markerSize * params.maxErroneousBitsInBorderRate);
443 int borderErrors =
444 _getBorderErrors(bits: candidateBits, markerSize: dictionary.markerSize, borderSize: params.markerBorderBits);
445
446 // check if it is a white marker
447 if(params.detectInvertedMarker){
448 // to get from 255 to 1
449 Mat invertedImg = ~candidateBits-254;
450 int invBError = _getBorderErrors(bits: invertedImg, markerSize: dictionary.markerSize, borderSize: params.markerBorderBits);
451 // white marker
452 if(invBError<borderErrors){
453 borderErrors = invBError;
454 invertedImg.copyTo(m: candidateBits);
455 typ=2;
456 }
457 }
458 if(borderErrors > maximumErrorsInBorder) return 0; // border is wrong
459
460 // take only inner bits
461 Mat onlyBits =
462 candidateBits.rowRange(startrow: params.markerBorderBits,
463 endrow: candidateBits.rows - params.markerBorderBits)
464 .colRange(startcol: params.markerBorderBits, endcol: candidateBits.cols - params.markerBorderBits);
465
    // try to identify the marker
467 if(!dictionary.identify(onlyBits, idx, rotation, maxCorrectionRate: params.errorCorrectionRate))
468 return 0;
469
470 return typ;
471}
472
473/**
474 * @brief rotate the initial corner to get to the right position
475 */
static void correctCornerPosition(vector<Point2f>& _candidate, int rotate){
    std::rotate(_candidate.begin(), _candidate.begin() + 4 - rotate, _candidate.end());
}
479
480static size_t _findOptPyrImageForCanonicalImg(
481 const vector<Mat>& img_pyr,
482 const int scaled_width,
483 const int cur_perimeter,
484 const int min_perimeter) {
485 CV_Assert(scaled_width > 0);
486 size_t optLevel = 0;
487 float dist = std::numeric_limits<float>::max();
488 for (size_t i = 0; i < img_pyr.size(); ++i) {
489 const float scale = img_pyr[i].cols / static_cast<float>(scaled_width);
490 const float perimeter_scaled = cur_perimeter * scale;
        // instead of std::abs(), favor the larger pyramid level by checking that the distance is positive;
        // this will slow down the algorithm but find more corners in the end
493 const float new_dist = perimeter_scaled - min_perimeter;
494 if (new_dist < dist && new_dist > 0.f) {
495 dist = new_dist;
496 optLevel = i;
497 }
498 }
499 return optLevel;
500}
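
/* Sketch of the selection rule above with invented numbers, assuming the pyramid halves the
 * width at every level and the segmentation image has the same width as level 0:
 * @code
 *     // min_perimeter = 4 * 32 = 128 px, cur_perimeter = 1000 points
 *     // level 0: scale 1.0   -> scaled perimeter 1000, distance to 128 = 872
 *     // level 1: scale 0.5   -> scaled perimeter  500, distance = 372
 *     // level 2: scale 0.25  -> scaled perimeter  250, distance = 122   <- selected
 *     // level 3: scale 0.125 -> scaled perimeter  125, distance < 0 (rejected)
 *     // i.e. the most downsampled level whose scaled perimeter still exceeds the minimum wins
 * @endcode
 */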
501
502
503/**
 * Least-squares line fitting (solves A * C = B) :: Called from function refineCandidateLines
505 * @param nContours contour-container
506 */
507static Point3f _interpolate2Dline(const vector<Point2f>& nContours){
508 CV_Assert(nContours.size() >= 2);
509 float minX, minY, maxX, maxY;
510 minX = maxX = nContours[0].x;
511 minY = maxY = nContours[0].y;
512
513 for(unsigned int i = 0; i< nContours.size(); i++){
514 minX = nContours[i].x < minX ? nContours[i].x : minX;
515 minY = nContours[i].y < minY ? nContours[i].y : minY;
516 maxX = nContours[i].x > maxX ? nContours[i].x : maxX;
517 maxY = nContours[i].y > maxY ? nContours[i].y : maxY;
518 }
519
520 Mat A = Mat::ones(rows: (int)nContours.size(), cols: 2, CV_32F); // Coefficient Matrix (N x 2)
521 Mat B((int)nContours.size(), 1, CV_32F); // Variables Matrix (N x 1)
522 Mat C; // Constant
523
524 if(maxX - minX > maxY - minY){
525 for(unsigned int i =0; i < nContours.size(); i++){
526 A.at<float>(i0: i,i1: 0)= nContours[i].x;
527 B.at<float>(i0: i,i1: 0)= nContours[i].y;
528 }
529
530 solve(src1: A, src2: B, dst: C, flags: DECOMP_NORMAL);
531
532 return Point3f(C.at<float>(i0: 0, i1: 0), -1., C.at<float>(i0: 1, i1: 0));
533 }
534 else{
535 for(unsigned int i =0; i < nContours.size(); i++){
536 A.at<float>(i0: i,i1: 0)= nContours[i].y;
537 B.at<float>(i0: i,i1: 0)= nContours[i].x;
538 }
539
540 solve(src1: A, src2: B, dst: C, flags: DECOMP_NORMAL);
541
542 return Point3f(-1., C.at<float>(i0: 0, i1: 0), C.at<float>(i0: 1, i1: 0));
543 }
544
545}
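
/* The returned Point3f encodes an implicit line a*x + b*y + c = 0. A minimal sketch of the
 * same least-squares fit for a mostly-horizontal point set (the points are invented):
 * @code
 *     std::vector<cv::Point2f> pts = { {0.f, 1.f}, {1.f, 3.f}, {2.f, 5.f}, {3.f, 7.f} };  // y = 2x + 1
 *     cv::Mat A = cv::Mat::ones((int)pts.size(), 2, CV_32F);   // columns [x, 1]
 *     cv::Mat B((int)pts.size(), 1, CV_32F);                   // column  [y]
 *     for (int i = 0; i < (int)pts.size(); i++) {
 *         A.at<float>(i, 0) = pts[i].x;
 *         B.at<float>(i, 0) = pts[i].y;
 *     }
 *     cv::Mat C;
 *     cv::solve(A, B, C, cv::DECOMP_NORMAL);                          // least squares: y = C0*x + C1
 *     cv::Point3f line(C.at<float>(0, 0), -1.f, C.at<float>(1, 0));   // here: 2*x - y + 1 = 0
 * @endcode
 */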
546
547/**
 * Find the point where two lines cross :: Called from function refineCandidateLines
 * @param nLine1
 * @param nLine2
 * @return crossing point
552 */
static Point2f _getCrossPoint(Point3f nLine1, Point3f nLine2){
    Matx22f A(nLine1.x, nLine1.y, nLine2.x, nLine2.y);
    Vec2f B(-nLine1.z, -nLine2.z);
    return Vec2f(A.solve(B).val);
}
558
559/**
560 * Refine Corners using the contour vector :: Called from function detectMarkers
561 * @param nContours contour-container
562 * @param nCorners candidate Corners
563 */
564static void _refineCandidateLines(vector<Point>& nContours, vector<Point2f>& nCorners){
565 vector<Point2f> contour2f(nContours.begin(), nContours.end());
566 /* 5 groups :: to group the edges
567 * 4 - classified by its corner
568 * extra group - (temporary) if contours do not begin with a corner
569 */
570 vector<Point2f> cntPts[5];
571 int cornerIndex[4]={-1};
572 int group=4;
573
574 for ( unsigned int i =0; i < nContours.size(); i++ ) {
575 for(unsigned int j=0; j<4; j++){
576 if ( nCorners[j] == contour2f[i] ){
577 cornerIndex[j] = i;
578 group=j;
579 }
580 }
581 cntPts[group].push_back(x: contour2f[i]);
582 }
583 for (int i = 0; i < 4; i++)
584 {
585 CV_Assert(cornerIndex[i] != -1);
586 }
    // merge the extra group into the corresponding corner group
588 if( !cntPts[4].empty() ){
589 for( unsigned int i=0; i < cntPts[4].size() ; i++ )
590 cntPts[group].push_back(x: cntPts[4].at(n: i));
591 cntPts[4].clear();
592 }
593
594 //Evaluate contour direction :: using the position of the detected corners
595 int inc=1;
596
597 inc = ( (cornerIndex[0] > cornerIndex[1]) && (cornerIndex[3] > cornerIndex[0]) ) ? -1:inc;
598 inc = ( (cornerIndex[2] > cornerIndex[3]) && (cornerIndex[1] > cornerIndex[2]) ) ? -1:inc;
599
    // calculate the line that passes through each group of points
601 Point3f lines[4];
602 for(int i=0; i<4; i++){
603 lines[i]=_interpolate2Dline(nContours: cntPts[i]);
604 }
605
    /*
     * calculate the corners :: where the lines cross each other
     *   clockwise direction            counter-clockwise direction
     *       0                              1
     *       .---.  1                       .---.  2
     *       |   |                          |   |
     *    3  .___.                       0  .___.
     *             2                              3
     */
615 for(int i=0; i < 4; i++){
616 if(inc<0)
617 nCorners[i] = _getCrossPoint(nLine1: lines[ i ], nLine2: lines[ (i+1)%4 ]); // 01 12 23 30
618 else
619 nCorners[i] = _getCrossPoint(nLine1: lines[ i ], nLine2: lines[ (i+3)%4 ]); // 30 01 12 23
620 }
621}
622
623static inline void findCornerInPyrImage(const float scale_init, const int closest_pyr_image_idx,
624 const vector<Mat>& grey_pyramid, Mat corners,
625 const DetectorParameters& params) {
626 // scale them to the closest pyramid level
627 if (scale_init != 1.f)
628 corners *= scale_init; // scale_init * scale_pyr
629 for (int idx = closest_pyr_image_idx - 1; idx >= 0; --idx) {
630 // scale them to new pyramid level
631 corners *= 2.f; // *= scale_pyr;
632 // use larger win size for larger images
633 const int subpix_win_size = std::max(a: grey_pyramid[idx].cols, b: grey_pyramid[idx].rows) > 1080 ? 5 : 3;
634 cornerSubPix(image: grey_pyramid[idx], corners,
635 winSize: Size(subpix_win_size, subpix_win_size),
636 zeroZone: Size(-1, -1),
637 criteria: TermCriteria(TermCriteria::MAX_ITER | TermCriteria::EPS,
638 params.cornerRefinementMaxIterations,
639 params.cornerRefinementMinAccuracy));
640 }
641}
642
643struct ArucoDetector::ArucoDetectorImpl {
644 /// dictionary indicates the type of markers that will be searched
645 Dictionary dictionary;
646
647 /// marker detection parameters, check DetectorParameters docs to see available settings
648 DetectorParameters detectorParams;
649
650 /// marker refine parameters
651 RefineParameters refineParams;
652 ArucoDetectorImpl() {}
653
654 ArucoDetectorImpl(const Dictionary &_dictionary, const DetectorParameters &_detectorParams,
655 const RefineParameters& _refineParams): dictionary(_dictionary),
656 detectorParams(_detectorParams), refineParams(_refineParams) {}
657 /**
658 * @brief Detect square candidates in the input image
659 */
660 void detectCandidates(const Mat& grey, vector<vector<Point2f> >& candidates, vector<vector<Point> >& contours) {
661 /// 1. DETECT FIRST SET OF CANDIDATES
662 _detectInitialCandidates(grey, candidates, contours, params: detectorParams);
663 /// 2. SORT CORNERS
664 _reorderCandidatesCorners(candidates);
665 }
666
667 /**
668 * @brief FILTER OUT NEAR CANDIDATE PAIRS
669 *
     * save the outer/inner borders (i.e. potential candidates) to vector<MarkerCandidateTree>,
671 * clear candidates and contours
672 */
673 vector<MarkerCandidateTree>
674 filterTooCloseCandidates(vector<vector<Point2f> > &candidates, vector<vector<Point> > &contours) {
675 CV_Assert(detectorParams.minMarkerDistanceRate >= 0.);
676 vector<MarkerCandidateTree> candidateTree(candidates.size());
677 for(size_t i = 0ull; i < candidates.size(); i++) {
678 candidateTree[i] = MarkerCandidateTree(std::move(candidates[i]), std::move(contours[i]));
679 }
680 candidates.clear();
681 contours.clear();
682
683 // sort candidates from big to small
684 std::stable_sort(first: candidateTree.begin(), last: candidateTree.end());
685 // group index for each candidate
686 vector<int> groupId(candidateTree.size(), -1);
687 vector<vector<size_t> > groupedCandidates;
688 vector<bool> isSelectedContours(candidateTree.size(), true);
689
690 size_t countSelectedContours = 0ull;
691 for (size_t i = 0ull; i < candidateTree.size(); i++) {
692 for (size_t j = i + 1ull; j < candidateTree.size(); j++) {
693 float minDist = getAverageDistance(marker1: candidateTree[i].corners, marker2: candidateTree[j].corners);
694 // if mean distance is too low, group markers
695 // the distance between the points of two independent markers should be more than half the side of the marker
696 // half the side of the marker = (perimeter / 4) * 0.5 = perimeter * 0.125
697 if(minDist < candidateTree[j].perimeter*(float)detectorParams.minMarkerDistanceRate) {
698 isSelectedContours[i] = false;
699 isSelectedContours[j] = false;
700 // i and j are not related to a group
701 if(groupId[i] < 0 && groupId[j] < 0){
702 // mark candidates with their corresponding group number
703 groupId[i] = groupId[j] = (int)groupedCandidates.size();
704 // create group
705 groupedCandidates.push_back(x: {i, j});
706 }
707 // i is related to a group
708 else if(groupId[i] > -1 && groupId[j] == -1) {
709 int group = groupId[i];
710 groupId[j] = group;
711 // add to group
712 groupedCandidates[group].push_back(x: j);
713 }
714 // j is related to a group
715 else if(groupId[j] > -1 && groupId[i] == -1) {
716 int group = groupId[j];
717 groupId[i] = group;
718 // add to group
719 groupedCandidates[group].push_back(x: i);
720 }
721 }
722 }
723 countSelectedContours += isSelectedContours[i];
724 }
725
726 for (vector<size_t>& grouped : groupedCandidates) {
727 if (detectorParams.detectInvertedMarker) // if detectInvertedMarker choose smallest contours
728 std::stable_sort(first: grouped.begin(), last: grouped.end(), comp: [](const size_t &a, const size_t &b) {
729 return a > b;
730 });
731 else // if detectInvertedMarker==false choose largest contours
732 std::stable_sort(first: grouped.begin(), last: grouped.end());
733 size_t currId = grouped[0];
734 isSelectedContours[currId] = true;
735 for (size_t i = 1ull; i < grouped.size(); i++) {
736 size_t id = grouped[i];
737 float dist = getAverageDistance(marker1: candidateTree[id].corners, marker2: candidateTree[currId].corners);
738 float moduleSize = getAverageModuleSize(markerCorners: candidateTree[id].corners, markerSize: dictionary.markerSize, markerBorderBits: detectorParams.markerBorderBits);
739 if (dist > detectorParams.minGroupDistance*moduleSize) {
740 currId = id;
741 candidateTree[grouped[0]].closeContours.push_back(x: candidateTree[id]);
742 }
743 }
744 }
745
746 vector<MarkerCandidateTree> selectedCandidates(countSelectedContours + groupedCandidates.size());
747 countSelectedContours = 0ull;
748 for (size_t i = 0ull; i < candidateTree.size(); i++) {
749 if (isSelectedContours[i]) {
750 selectedCandidates[countSelectedContours] = std::move(candidateTree[i]);
751 countSelectedContours++;
752 }
753 }
754
755 // find hierarchy in the candidate tree
756 for (int i = (int)selectedCandidates.size()-1; i >= 0; i--) {
757 for (int j = i - 1; j >= 0; j--) {
758 if (checkMarker1InMarker2(marker1: selectedCandidates[i].corners, marker2: selectedCandidates[j].corners)) {
759 selectedCandidates[i].parent = j;
760 selectedCandidates[j].depth = max(a: selectedCandidates[j].depth, b: selectedCandidates[i].depth + 1);
761 break;
762 }
763 }
764 }
765 return selectedCandidates;
766 }
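
    /* Worked example of the grouping threshold used above (the numbers are illustrative and
     * 0.125 is the value suggested by the "half the side" comment, not necessarily the
     * configured minMarkerDistanceRate):
     * @code
     *     float perimeter = 400.f;                                // candidate j, ~100 px per side
     *     float minMarkerDistanceRate = 0.125f;
     *     float threshold = perimeter * minMarkerDistanceRate;    // 50 px
     *     // candidates whose rotation-minimised average corner distance is below 50 px are
     *     // treated as duplicates of the same physical marker and merged into one group
     * @endcode
     */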
767
768 /**
769 * @brief Identify square candidates according to a marker dictionary
770 */
771 void identifyCandidates(const Mat& grey, const vector<Mat>& image_pyr, vector<MarkerCandidateTree>& selectedContours,
772 vector<vector<Point2f> >& accepted, vector<vector<Point> >& contours,
773 vector<int>& ids, OutputArrayOfArrays _rejected = noArray()) {
774 size_t ncandidates = selectedContours.size();
775 vector<vector<Point2f> > rejected;
776
777 vector<int> idsTmp(ncandidates, -1);
778 vector<int> rotated(ncandidates, 0);
779 vector<uint8_t> validCandidates(ncandidates, 0);
780 vector<uint8_t> was(ncandidates, false);
781 bool checkCloseContours = true;
782
783 int maxDepth = 0;
784 for (size_t i = 0ull; i < selectedContours.size(); i++)
785 maxDepth = max(a: selectedContours[i].depth, b: maxDepth);
786 vector<vector<size_t>> depths(maxDepth+1);
787 for (size_t i = 0ull; i < selectedContours.size(); i++) {
788 depths[selectedContours[i].depth].push_back(x: i);
789 }
790
791 //// Analyze each of the candidates
792 int depth = 0;
793 size_t counter = 0;
794 while (counter < ncandidates) {
795 parallel_for_(range: Range(0, (int)depths[depth].size()), functor: [&](const Range& range) {
796 const int begin = range.start;
797 const int end = range.end;
798 for (int i = begin; i < end; i++) {
799 size_t v = depths[depth][i];
800 was[v] = true;
801 Mat img = grey;
802 // implements equation (4)
803 if (detectorParams.useAruco3Detection) {
804 const int minPerimeter = detectorParams.minSideLengthCanonicalImg * 4;
805 const size_t nearestImgId = _findOptPyrImageForCanonicalImg(img_pyr: image_pyr, scaled_width: grey.cols, cur_perimeter: static_cast<int>(selectedContours[v].contour.size()), min_perimeter: minPerimeter);
806 img = image_pyr[nearestImgId];
807 }
808 const float scale = detectorParams.useAruco3Detection ? img.cols / static_cast<float>(grey.cols) : 1.f;
809
810 validCandidates[v] = _identifyOneCandidate(dictionary, image: img, corners: selectedContours[v].corners, idx&: idsTmp[v], params: detectorParams, rotation&: rotated[v], scale);
811
812 if (validCandidates[v] == 0 && checkCloseContours) {
813 for (const MarkerCandidate& closeMarkerCandidate: selectedContours[v].closeContours) {
814 validCandidates[v] = _identifyOneCandidate(dictionary, image: img, corners: closeMarkerCandidate.corners, idx&: idsTmp[v], params: detectorParams, rotation&: rotated[v], scale);
815 if (validCandidates[v] > 0) {
816 selectedContours[v].corners = closeMarkerCandidate.corners;
817 selectedContours[v].contour = closeMarkerCandidate.contour;
818 break;
819 }
820 }
821 }
822 }
823 });
824
            // visit the parent vertices of the detected markers to skip identifying the parent contours
826 for(size_t v : depths[depth]) {
827 if(validCandidates[v] > 0) {
828 int parent = selectedContours[v].parent;
829 while (parent != -1) {
830 if (!was[parent]) {
831 was[parent] = true;
832 counter++;
833 }
834 parent = selectedContours[parent].parent;
835 }
836 }
837 counter++;
838 }
839 depth++;
840 }
841
842 for (size_t i = 0ull; i < selectedContours.size(); i++) {
843 if (validCandidates[i] > 0) {
844 // shift corner positions to the correct rotation
845 correctCornerPosition(candidate&: selectedContours[i].corners, rotate: rotated[i]);
846
847 accepted.push_back(x: selectedContours[i].corners);
848 contours.push_back(x: selectedContours[i].contour);
849 ids.push_back(x: idsTmp[i]);
850 }
851 else {
852 rejected.push_back(x: selectedContours[i].corners);
853 }
854 }
855
856 // parse output
857 if(_rejected.needed()) {
858 _copyVector2Output(vec&: rejected, out: _rejected);
859 }
860 }
861
862};
863
864ArucoDetector::ArucoDetector(const Dictionary &_dictionary,
865 const DetectorParameters &_detectorParams,
866 const RefineParameters& _refineParams) {
867 arucoDetectorImpl = makePtr<ArucoDetectorImpl>(a1: _dictionary, a1: _detectorParams, a1: _refineParams);
868}
869
870void ArucoDetector::detectMarkers(InputArray _image, OutputArrayOfArrays _corners, OutputArray _ids,
871 OutputArrayOfArrays _rejectedImgPoints) const {
872 CV_Assert(!_image.empty());
873 DetectorParameters& detectorParams = arucoDetectorImpl->detectorParams;
874 const Dictionary& dictionary = arucoDetectorImpl->dictionary;
875
876 CV_Assert(detectorParams.markerBorderBits > 0);
877 // check that the parameters are set correctly if Aruco3 is used
878 CV_Assert(!(detectorParams.useAruco3Detection == true &&
879 detectorParams.minSideLengthCanonicalImg == 0 &&
880 detectorParams.minMarkerLengthRatioOriginalImg == 0.0));
881
882 Mat grey;
883 _convertToGrey(in: _image, out&: grey);
884
    // Aruco3 functionality is an extension of ArUco.
    // The description can be found in:
    // [1] Speeded up detection of squared fiducial markers, 2018, F.J. Romero-Ramirez et al.
    // if Aruco3 functionality is not wanted,
    // change some parameters to make sure it is turned off
890 if (!detectorParams.useAruco3Detection) {
891 detectorParams.minMarkerLengthRatioOriginalImg = 0.0;
892 detectorParams.minSideLengthCanonicalImg = 0;
893 }
894 else {
895 // always turn on corner refinement in case of Aruco3, due to upsampling
896 detectorParams.cornerRefinementMethod = (int)CORNER_REFINE_SUBPIX;
        // only CORNER_REFINE_SUBPIX is implemented correctly for useAruco3Detection
898 // Todo: update other CORNER_REFINE methods
899 }
900
901 /// Step 0: equation (2) from paper [1]
902 const float fxfy = (!detectorParams.useAruco3Detection ? 1.f : detectorParams.minSideLengthCanonicalImg /
903 (detectorParams.minSideLengthCanonicalImg + std::max(a: grey.cols, b: grey.rows)*
904 detectorParams.minMarkerLengthRatioOriginalImg));
905
906 /// Step 1: create image pyramid. Section 3.4. in [1]
907 vector<Mat> grey_pyramid;
908 int closest_pyr_image_idx = 0, num_levels = 0;
909 //// Step 1.1: resize image with equation (1) from paper [1]
910 if (detectorParams.useAruco3Detection) {
911 const float scale_pyr = 2.f;
912 const float img_area = static_cast<float>(grey.rows*grey.cols);
913 const float min_area_marker = static_cast<float>(detectorParams.minSideLengthCanonicalImg*
914 detectorParams.minSideLengthCanonicalImg);
915 // find max level
916 num_levels = static_cast<int>(log2(x: img_area / min_area_marker)/scale_pyr);
917 // the closest pyramid image to the downsampled segmentation image
918 // will later be used as start index for corner upsampling
919 const float scale_img_area = img_area * fxfy * fxfy;
920 closest_pyr_image_idx = cvRound(value: log2(x: img_area / scale_img_area)/scale_pyr);
921 }
922 buildPyramid(src: grey, dst: grey_pyramid, maxlevel: num_levels);
923
    // resize to the segmentation image
    // the contours will be detected at this reduced size
926 if (fxfy != 1.f)
927 resize(src: grey, dst: grey, dsize: Size(cvRound(value: fxfy * grey.cols), cvRound(value: fxfy * grey.rows)));
928
929 /// STEP 2: Detect marker candidates
930 vector<vector<Point2f> > candidates;
931 vector<vector<Point> > contours;
932 vector<int> ids;
933
934 /// STEP 2.a Detect marker candidates :: using AprilTag
935 if(detectorParams.cornerRefinementMethod == (int)CORNER_REFINE_APRILTAG){
936 _apriltag(im_orig: grey, params: detectorParams, candidates, contours);
937 }
938 /// STEP 2.b Detect marker candidates :: traditional way
939 else {
940 arucoDetectorImpl->detectCandidates(grey, candidates, contours);
941 }
942
943 /// STEP 2.c FILTER OUT NEAR CANDIDATE PAIRS
944 auto selectedCandidates = arucoDetectorImpl->filterTooCloseCandidates(candidates, contours);
945
946 /// STEP 2: Check candidate codification (identify markers)
947 arucoDetectorImpl->identifyCandidates(grey, image_pyr: grey_pyramid, selectedContours&: selectedCandidates, accepted&: candidates, contours,
948 ids, rejected: _rejectedImgPoints);
949
950 /// STEP 3: Corner refinement :: use corner subpix
951 if (detectorParams.cornerRefinementMethod == (int)CORNER_REFINE_SUBPIX) {
952 CV_Assert(detectorParams.cornerRefinementWinSize > 0 && detectorParams.cornerRefinementMaxIterations > 0 &&
953 detectorParams.cornerRefinementMinAccuracy > 0);
954 // Do subpixel estimation. In Aruco3 start on the lowest pyramid level and upscale the corners
955 parallel_for_(range: Range(0, (int)candidates.size()), functor: [&](const Range& range) {
956 const int begin = range.start;
957 const int end = range.end;
958
959 for (int i = begin; i < end; i++) {
960 if (detectorParams.useAruco3Detection) {
961 const float scale_init = (float) grey_pyramid[closest_pyr_image_idx].cols / grey.cols;
962 findCornerInPyrImage(scale_init, closest_pyr_image_idx, grey_pyramid, corners: Mat(candidates[i]), params: detectorParams);
963 }
964 else {
965 int cornerRefinementWinSize = std::max(a: 1, b: cvRound(value: detectorParams.relativeCornerRefinmentWinSize*
966 getAverageModuleSize(markerCorners: candidates[i], markerSize: dictionary.markerSize, markerBorderBits: detectorParams.markerBorderBits)));
967 cornerRefinementWinSize = min(a: cornerRefinementWinSize, b: detectorParams.cornerRefinementWinSize);
968 cornerSubPix(image: grey, corners: Mat(candidates[i]), winSize: Size(cornerRefinementWinSize, cornerRefinementWinSize), zeroZone: Size(-1, -1),
969 criteria: TermCriteria(TermCriteria::MAX_ITER | TermCriteria::EPS,
970 detectorParams.cornerRefinementMaxIterations,
971 detectorParams.cornerRefinementMinAccuracy));
972 }
973 }
974 });
975 }
976
977 /// STEP 3, Optional : Corner refinement :: use contour container
978 if (detectorParams.cornerRefinementMethod == (int)CORNER_REFINE_CONTOUR){
979
980 if (!ids.empty()) {
981
982 // do corner refinement using the contours for each detected markers
983 parallel_for_(range: Range(0, (int)candidates.size()), functor: [&](const Range& range) {
984 for (int i = range.start; i < range.end; i++) {
985 _refineCandidateLines(nContours&: contours[i], nCorners&: candidates[i]);
986 }
987 });
988 }
989 }
990
991 if (detectorParams.cornerRefinementMethod != (int)CORNER_REFINE_SUBPIX && fxfy != 1.f) {
        // only CORNER_REFINE_SUBPIX is implemented correctly for useAruco3Detection
        // Todo: update other CORNER_REFINE methods

        // scale to original size; this, however, will lead to inaccurate detections!
996 for (auto &vecPoints : candidates)
997 for (auto &point : vecPoints)
998 point *= 1.f/fxfy;
999 }
1000
1001 // copy to output arrays
1002 _copyVector2Output(vec&: candidates, out: _corners);
1003 Mat(ids).copyTo(m: _ids);
1004}
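
/* End-to-end usage sketch of the detector implemented above (illustrative; the input file
 * name is hypothetical and imread requires linking against imgcodecs):
 * @code
 *     cv::Mat image = cv::imread("scene.png");
 *     cv::aruco::DetectorParameters params;
 *     cv::aruco::Dictionary dictionary = cv::aruco::getPredefinedDictionary(cv::aruco::DICT_6X6_250);
 *     cv::aruco::ArucoDetector detector(dictionary, params);
 *     std::vector<std::vector<cv::Point2f>> corners, rejected;
 *     std::vector<int> ids;
 *     detector.detectMarkers(image, corners, ids, rejected);
 *     cv::aruco::drawDetectedMarkers(image, corners, ids);   // visualize the result
 * @endcode
 */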
1005
1006/**
1007 * Project board markers that are not included in the list of detected markers
1008 */
1009static inline void _projectUndetectedMarkers(const Board &board, InputOutputArrayOfArrays detectedCorners,
1010 InputOutputArray detectedIds, InputArray cameraMatrix, InputArray distCoeffs,
1011 vector<vector<Point2f> >& undetectedMarkersProjectedCorners,
1012 OutputArray undetectedMarkersIds) {
    Mat rvec, tvec; // first estimate the board pose with the currently available markers
1014 Mat objPoints, imgPoints; // object and image points for the solvePnP function
    // To refine the corners of ArUco markers, refineDetectedMarkers() finds the board pose from 3D-2D point correspondences.
    // To find the 3D-2D point correspondences it uses matchImagePoints().
    // The method matchImagePoints() works with ArUco corners (in the Board/GridBoard cases) or with ChArUco corners (in the CharucoBoard case).
    // To refine the corners of ArUco markers we need to work with ArUco corners only, for all board types.
    // To call matchImagePoints() with ArUco corners for all boards we need to call matchImagePoints() from the base class Board.
    // The method matchImagePoints() is implemented in the Pimpl, so we create a temporary Board object to call the base-class method.
1021 Board(board.getObjPoints(), board.getDictionary(), board.getIds()).matchImagePoints(detectedCorners, detectedIds, objPoints, imgPoints);
1022 if (objPoints.total() < 4ull) // at least one marker from board so rvec and tvec are valid
1023 return;
1024 solvePnP(objectPoints: objPoints, imagePoints: imgPoints, cameraMatrix, distCoeffs, rvec, tvec);
1025
1026 // search undetected markers and project them using the previous pose
1027 vector<vector<Point2f> > undetectedCorners;
1028 const std::vector<int>& ids = board.getIds();
1029 vector<int> undetectedIds;
1030 for(unsigned int i = 0; i < ids.size(); i++) {
1031 int foundIdx = -1;
1032 for(unsigned int j = 0; j < detectedIds.total(); j++) {
1033 if(ids[i] == detectedIds.getMat().ptr<int>()[j]) {
1034 foundIdx = j;
1035 break;
1036 }
1037 }
1038
1039 // not detected
1040 if(foundIdx == -1) {
1041 undetectedCorners.push_back(x: vector<Point2f>());
1042 undetectedIds.push_back(x: ids[i]);
1043 projectPoints(objectPoints: board.getObjPoints()[i], rvec, tvec, cameraMatrix, distCoeffs,
1044 imagePoints: undetectedCorners.back());
1045 }
1046 }
1047 // parse output
1048 Mat(undetectedIds).copyTo(m: undetectedMarkersIds);
1049 undetectedMarkersProjectedCorners = undetectedCorners;
1050}
1051
1052/**
1053 * Interpolate board markers that are not included in the list of detected markers using
1054 * global homography
1055 */
1056static void _projectUndetectedMarkers(const Board &_board, InputOutputArrayOfArrays _detectedCorners,
1057 InputOutputArray _detectedIds,
1058 vector<vector<Point2f> >& _undetectedMarkersProjectedCorners,
1059 OutputArray _undetectedMarkersIds) {
1060 // check board points are in the same plane, if not, global homography cannot be applied
1061 CV_Assert(_board.getObjPoints().size() > 0);
1062 CV_Assert(_board.getObjPoints()[0].size() > 0);
1063 float boardZ = _board.getObjPoints()[0][0].z;
1064 for(unsigned int i = 0; i < _board.getObjPoints().size(); i++) {
1065 for(unsigned int j = 0; j < _board.getObjPoints()[i].size(); j++)
1066 CV_Assert(boardZ == _board.getObjPoints()[i][j].z);
1067 }
1068
1069 vector<Point2f> detectedMarkersObj2DAll; // Object coordinates (without Z) of all the detected
1070 // marker corners in a single vector
1071 vector<Point2f> imageCornersAll; // Image corners of all detected markers in a single vector
1072 vector<vector<Point2f> > undetectedMarkersObj2D; // Object coordinates (without Z) of all
1073 // missing markers in different vectors
1074 vector<int> undetectedMarkersIds; // ids of missing markers
1075 // find markers included in board, and missing markers from board. Fill the previous vectors
1076 for(unsigned int j = 0; j < _board.getIds().size(); j++) {
1077 bool found = false;
1078 for(unsigned int i = 0; i < _detectedIds.total(); i++) {
1079 if(_detectedIds.getMat().ptr<int>()[i] == _board.getIds()[j]) {
1080 for(int c = 0; c < 4; c++) {
1081 imageCornersAll.push_back(x: _detectedCorners.getMat(i).ptr<Point2f>()[c]);
1082 detectedMarkersObj2DAll.push_back(
1083 x: Point2f(_board.getObjPoints()[j][c].x, _board.getObjPoints()[j][c].y));
1084 }
1085 found = true;
1086 break;
1087 }
1088 }
1089 if(!found) {
1090 undetectedMarkersObj2D.push_back(x: vector<Point2f>());
1091 for(int c = 0; c < 4; c++) {
1092 undetectedMarkersObj2D.back().push_back(
1093 x: Point2f(_board.getObjPoints()[j][c].x, _board.getObjPoints()[j][c].y));
1094 }
1095 undetectedMarkersIds.push_back(x: _board.getIds()[j]);
1096 }
1097 }
1098 if(imageCornersAll.size() == 0) return;
1099
1100 // get homography from detected markers
1101 Mat transformation = findHomography(srcPoints: detectedMarkersObj2DAll, dstPoints: imageCornersAll);
1102
1103 _undetectedMarkersProjectedCorners.resize(new_size: undetectedMarkersIds.size());
1104
1105 // for each undetected marker, apply transformation
1106 for(unsigned int i = 0; i < undetectedMarkersObj2D.size(); i++) {
1107 perspectiveTransform(src: undetectedMarkersObj2D[i], dst: _undetectedMarkersProjectedCorners[i], m: transformation);
1108 }
1109 Mat(undetectedMarkersIds).copyTo(m: _undetectedMarkersIds);
1110}
1111
1112void ArucoDetector::refineDetectedMarkers(InputArray _image, const Board& _board,
1113 InputOutputArrayOfArrays _detectedCorners, InputOutputArray _detectedIds,
1114 InputOutputArrayOfArrays _rejectedCorners, InputArray _cameraMatrix,
1115 InputArray _distCoeffs, OutputArray _recoveredIdxs) const {
1116 DetectorParameters& detectorParams = arucoDetectorImpl->detectorParams;
1117 const Dictionary& dictionary = arucoDetectorImpl->dictionary;
1118 RefineParameters& refineParams = arucoDetectorImpl->refineParams;
1119 CV_Assert(refineParams.minRepDistance > 0);
1120
1121 if(_detectedIds.total() == 0 || _rejectedCorners.total() == 0) return;
1122
1123 // get projections of missing markers in the board
1124 vector<vector<Point2f> > undetectedMarkersCorners;
1125 vector<int> undetectedMarkersIds;
1126 if(_cameraMatrix.total() != 0) {
1127 // reproject based on camera projection model
1128 _projectUndetectedMarkers(board: _board, detectedCorners: _detectedCorners, detectedIds: _detectedIds, cameraMatrix: _cameraMatrix, distCoeffs: _distCoeffs,
1129 undetectedMarkersProjectedCorners&: undetectedMarkersCorners, undetectedMarkersIds);
1130
1131 } else {
1132 // reproject based on global homography
1133 _projectUndetectedMarkers(_board, _detectedCorners, _detectedIds, undetectedMarkersProjectedCorners&: undetectedMarkersCorners,
1134 undetectedMarkersIds: undetectedMarkersIds);
1135 }
1136
1137 // list of missing markers indicating if they have been assigned to a candidate
1138 vector<bool > alreadyIdentified(_rejectedCorners.total(), false);
1139
1140 // maximum bits that can be corrected
1141 int maxCorrectionRecalculated =
1142 int(double(dictionary.maxCorrectionBits) * refineParams.errorCorrectionRate);
1143
1144 Mat grey;
1145 _convertToGrey(in: _image, out&: grey);
1146
1147 // vector of final detected marker corners and ids
1148 vector<vector<Point2f> > finalAcceptedCorners;
1149 vector<int> finalAcceptedIds;
1150 // fill with the current markers
1151 finalAcceptedCorners.resize(new_size: _detectedCorners.total());
1152 finalAcceptedIds.resize(new_size: _detectedIds.total());
1153 for(unsigned int i = 0; i < _detectedIds.total(); i++) {
1154 finalAcceptedCorners[i] = _detectedCorners.getMat(i).clone();
1155 finalAcceptedIds[i] = _detectedIds.getMat().ptr<int>()[i];
1156 }
1157 vector<int> recoveredIdxs; // original indexes of accepted markers in _rejectedCorners
1158
1159 // for each missing marker, try to find a correspondence
1160 for(unsigned int i = 0; i < undetectedMarkersIds.size(); i++) {
1161
1162 // best match at the moment
1163 int closestCandidateIdx = -1;
1164 double closestCandidateDistance = refineParams.minRepDistance * refineParams.minRepDistance + 1;
1165 Mat closestRotatedMarker;
1166
1167 for(unsigned int j = 0; j < _rejectedCorners.total(); j++) {
1168 if(alreadyIdentified[j]) continue;
1169
1170 // check distance
1171 double minDistance = closestCandidateDistance + 1;
1172 bool valid = false;
1173 int validRot = 0;
1174 for(int c = 0; c < 4; c++) { // first corner in rejected candidate
1175 double currentMaxDistance = 0;
1176 for(int k = 0; k < 4; k++) {
1177 Point2f rejCorner = _rejectedCorners.getMat(i: j).ptr<Point2f>()[(c + k) % 4];
1178 Point2f distVector = undetectedMarkersCorners[i][k] - rejCorner;
1179 double cornerDist = distVector.x * distVector.x + distVector.y * distVector.y;
1180 currentMaxDistance = max(a: currentMaxDistance, b: cornerDist);
1181 }
1182 // if distance is better than current best distance
1183 if(currentMaxDistance < closestCandidateDistance) {
1184 valid = true;
1185 validRot = c;
1186 minDistance = currentMaxDistance;
1187 }
1188 if(!refineParams.checkAllOrders) break;
1189 }
1190
1191 if(!valid) continue;
1192
1193 // apply rotation
1194 Mat rotatedMarker;
1195 if(refineParams.checkAllOrders) {
1196 rotatedMarker = Mat(4, 1, CV_32FC2);
1197 for(int c = 0; c < 4; c++)
1198 rotatedMarker.ptr<Point2f>()[c] =
1199 _rejectedCorners.getMat(i: j).ptr<Point2f>()[(c + 4 + validRot) % 4];
1200 }
1201 else rotatedMarker = _rejectedCorners.getMat(i: j);
1202
1203 // last filter, check if inner code is close enough to the assigned marker code
1204 int codeDistance = 0;
            // if errorCorrectionRate is negative, don't check the code
1206 if(refineParams.errorCorrectionRate >= 0) {
1207
1208 // extract bits
1209 Mat bits = _extractBits(
1210 image: grey, corners: rotatedMarker, markerSize: dictionary.markerSize, markerBorderBits: detectorParams.markerBorderBits,
1211 cellSize: detectorParams.perspectiveRemovePixelPerCell,
1212 cellMarginRate: detectorParams.perspectiveRemoveIgnoredMarginPerCell, minStdDevOtsu: detectorParams.minOtsuStdDev);
1213
1214 Mat onlyBits =
1215 bits.rowRange(startrow: detectorParams.markerBorderBits, endrow: bits.rows - detectorParams.markerBorderBits)
1216 .colRange(startcol: detectorParams.markerBorderBits, endcol: bits.rows - detectorParams.markerBorderBits);
1217
1218 codeDistance =
1219 dictionary.getDistanceToId(bits: onlyBits, id: undetectedMarkersIds[i], allRotations: false);
1220 }
1221
            // if everything is OK, assign values to the current best match
1223 if(refineParams.errorCorrectionRate < 0 || codeDistance < maxCorrectionRecalculated) {
1224 closestCandidateIdx = j;
1225 closestCandidateDistance = minDistance;
1226 closestRotatedMarker = rotatedMarker;
1227 }
1228 }
1229
        // if there is at least one good match, we have rescued the missing marker
1231 if(closestCandidateIdx >= 0) {
1232
1233 // subpixel refinement
1234 if(detectorParams.cornerRefinementMethod == (int)CORNER_REFINE_SUBPIX) {
1235 CV_Assert(detectorParams.cornerRefinementWinSize > 0 &&
1236 detectorParams.cornerRefinementMaxIterations > 0 &&
1237 detectorParams.cornerRefinementMinAccuracy > 0);
1238
1239 std::vector<Point2f> marker(closestRotatedMarker.begin<Point2f>(), closestRotatedMarker.end<Point2f>());
1240 int cornerRefinementWinSize = std::max(a: 1, b: cvRound(value: detectorParams.relativeCornerRefinmentWinSize*
1241 getAverageModuleSize(markerCorners: marker, markerSize: dictionary.markerSize, markerBorderBits: detectorParams.markerBorderBits)));
1242 cornerRefinementWinSize = min(a: cornerRefinementWinSize, b: detectorParams.cornerRefinementWinSize);
1243 cornerSubPix(image: grey, corners: closestRotatedMarker,
1244 winSize: Size(cornerRefinementWinSize, cornerRefinementWinSize),
1245 zeroZone: Size(-1, -1), criteria: TermCriteria(TermCriteria::MAX_ITER | TermCriteria::EPS,
1246 detectorParams.cornerRefinementMaxIterations,
1247 detectorParams.cornerRefinementMinAccuracy));
1248 }
1249
1250 // remove from rejected
1251 alreadyIdentified[closestCandidateIdx] = true;
1252
1253 // add to detected
1254 finalAcceptedCorners.push_back(x: closestRotatedMarker);
1255 finalAcceptedIds.push_back(x: undetectedMarkersIds[i]);
1256
1257 // add the original index of the candidate
1258 recoveredIdxs.push_back(x: closestCandidateIdx);
1259 }
1260 }
1261
1262 // parse output
1263 if(finalAcceptedIds.size() != _detectedIds.total()) {
1264 // parse output
1265 Mat(finalAcceptedIds).copyTo(m: _detectedIds);
1266 _copyVector2Output(vec&: finalAcceptedCorners, out: _detectedCorners);
1267
1268 // recalculate _rejectedCorners based on alreadyIdentified
1269 vector<vector<Point2f> > finalRejected;
1270 for(unsigned int i = 0; i < alreadyIdentified.size(); i++) {
1271 if(!alreadyIdentified[i]) {
1272 finalRejected.push_back(x: _rejectedCorners.getMat(i).clone());
1273 }
1274 }
1275 _copyVector2Output(vec&: finalRejected, out: _rejectedCorners);
1276
1277 if(_recoveredIdxs.needed()) {
1278 Mat(recoveredIdxs).copyTo(m: _recoveredIdxs);
1279 }
1280 }
1281}
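
/* Usage sketch for the board-based refinement above (illustrative; the grid geometry,
 * image and calibration data are hypothetical placeholders):
 * @code
 *     cv::aruco::Dictionary dict = cv::aruco::getPredefinedDictionary(cv::aruco::DICT_4X4_50);
 *     cv::aruco::GridBoard board(cv::Size(5, 7), 0.04f, 0.01f, dict);   // 5x7 markers, 4 cm side
 *     cv::aruco::ArucoDetector detector(dict);
 *     cv::Mat image, cameraMatrix, distCoeffs;                          // filled elsewhere (hypothetical)
 *     std::vector<std::vector<cv::Point2f>> corners, rejected;
 *     std::vector<int> ids;
 *     detector.detectMarkers(image, corners, ids, rejected);
 *     // try to recover markers that were rejected but are predicted by the board layout
 *     detector.refineDetectedMarkers(image, board, corners, ids, rejected, cameraMatrix, distCoeffs);
 * @endcode
 */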
1282
1283void ArucoDetector::write(FileStorage &fs) const
1284{
1285 arucoDetectorImpl->dictionary.writeDictionary(fs);
1286 arucoDetectorImpl->detectorParams.writeDetectorParameters(fs);
1287 arucoDetectorImpl->refineParams.writeRefineParameters(fs);
1288}
1289
1290void ArucoDetector::read(const FileNode &fn) {
1291 arucoDetectorImpl->dictionary.readDictionary(fn);
1292 arucoDetectorImpl->detectorParams.readDetectorParameters(fn);
1293 arucoDetectorImpl->refineParams.readRefineParameters(fn);
1294}
1295
1296const Dictionary& ArucoDetector::getDictionary() const {
1297 return arucoDetectorImpl->dictionary;
1298}
1299
1300void ArucoDetector::setDictionary(const Dictionary& dictionary) {
1301 arucoDetectorImpl->dictionary = dictionary;
1302}
1303
1304const DetectorParameters& ArucoDetector::getDetectorParameters() const {
1305 return arucoDetectorImpl->detectorParams;
1306}
1307
1308void ArucoDetector::setDetectorParameters(const DetectorParameters& detectorParameters) {
1309 arucoDetectorImpl->detectorParams = detectorParameters;
1310}
1311
1312const RefineParameters& ArucoDetector::getRefineParameters() const {
1313 return arucoDetectorImpl->refineParams;
1314}
1315
1316void ArucoDetector::setRefineParameters(const RefineParameters& refineParameters) {
1317 arucoDetectorImpl->refineParams = refineParameters;
1318}
1319
1320void drawDetectedMarkers(InputOutputArray _image, InputArrayOfArrays _corners,
1321 InputArray _ids, Scalar borderColor) {
1322 CV_Assert(_image.getMat().total() != 0 &&
1323 (_image.getMat().channels() == 1 || _image.getMat().channels() == 3));
1324 CV_Assert((_corners.total() == _ids.total()) || _ids.total() == 0);
1325
1326 // calculate colors
1327 Scalar textColor, cornerColor;
1328 textColor = cornerColor = borderColor;
    swap(textColor.val[0], textColor.val[1]);     // text color: just swap G and R
    swap(cornerColor.val[1], cornerColor.val[2]); // corner color: just swap G and B
1331
1332 int nMarkers = (int)_corners.total();
1333 for(int i = 0; i < nMarkers; i++) {
1334 Mat currentMarker = _corners.getMat(i);
1335 CV_Assert(currentMarker.total() == 4 && currentMarker.channels() == 2);
1336 if (currentMarker.type() != CV_32SC2)
1337 currentMarker.convertTo(m: currentMarker, CV_32SC2);
1338
1339 // draw marker sides
1340 for(int j = 0; j < 4; j++) {
1341 Point p0, p1;
1342 p0 = currentMarker.ptr<Point>(y: 0)[j];
1343 p1 = currentMarker.ptr<Point>(y: 0)[(j + 1) % 4];
1344 line(img: _image, pt1: p0, pt2: p1, color: borderColor, thickness: 1);
1345 }
1346 // draw first corner mark
1347 rectangle(img: _image, pt1: currentMarker.ptr<Point>(y: 0)[0] - Point(3, 3),
1348 pt2: currentMarker.ptr<Point>(y: 0)[0] + Point(3, 3), color: cornerColor, thickness: 1, lineType: LINE_AA);
1349
1350 // draw ID
1351 if(_ids.total() != 0) {
1352 Point cent(0, 0);
1353 for(int p = 0; p < 4; p++)
1354 cent += currentMarker.ptr<Point>(y: 0)[p];
1355 cent = cent / 4.;
1356 stringstream s;
1357 s << "id=" << _ids.getMat().ptr<int>(y: 0)[i];
1358 putText(img: _image, text: s.str(), org: cent, fontFace: FONT_HERSHEY_SIMPLEX, fontScale: 0.5, color: textColor, thickness: 2);
1359 }
1360 }
1361}
1362
1363void generateImageMarker(const Dictionary &dictionary, int id, int sidePixels, OutputArray _img, int borderBits) {
1364 dictionary.generateImageMarker(id, sidePixels, _img, borderBits);
1365}
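
/* Usage sketch for the wrapper above (illustrative; the id, pixel size and output path are arbitrary):
 * @code
 *     cv::aruco::Dictionary dict = cv::aruco::getPredefinedDictionary(cv::aruco::DICT_6X6_250);
 *     cv::Mat markerImg;
 *     cv::aruco::generateImageMarker(dict, 23, 200, markerImg, 1);   // id 23, 200x200 px, 1-bit border
 *     // cv::imwrite("marker23.png", markerImg);                     // requires imgcodecs
 * @endcode
 */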
1366
1367}
1368}
1369
