// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html

#include "../precomp.hpp"
#include <opencv2/calib3d.hpp>

#include "opencv2/objdetect/aruco_detector.hpp"
#include "opencv2/objdetect/aruco_board.hpp"
#include "apriltag/apriltag_quad_thresh.hpp"
#include "aruco_utils.hpp"
#include <cmath>

namespace cv {
namespace aruco {

using namespace std;

static inline bool readWrite(DetectorParameters &params, const FileNode* readNode,
                             FileStorage* writeStorage = nullptr)
{
    CV_Assert(readNode || writeStorage);
    bool check = false;

    check |= readWriteParameter("adaptiveThreshWinSizeMin", params.adaptiveThreshWinSizeMin, readNode, writeStorage);
    check |= readWriteParameter("adaptiveThreshWinSizeMax", params.adaptiveThreshWinSizeMax, readNode, writeStorage);
    check |= readWriteParameter("adaptiveThreshWinSizeStep", params.adaptiveThreshWinSizeStep, readNode, writeStorage);
    check |= readWriteParameter("adaptiveThreshConstant", params.adaptiveThreshConstant, readNode, writeStorage);
    check |= readWriteParameter("minMarkerPerimeterRate", params.minMarkerPerimeterRate, readNode, writeStorage);
    check |= readWriteParameter("maxMarkerPerimeterRate", params.maxMarkerPerimeterRate, readNode, writeStorage);
    check |= readWriteParameter("polygonalApproxAccuracyRate", params.polygonalApproxAccuracyRate,
                                readNode, writeStorage);
    check |= readWriteParameter("minCornerDistanceRate", params.minCornerDistanceRate, readNode, writeStorage);
    check |= readWriteParameter("minDistanceToBorder", params.minDistanceToBorder, readNode, writeStorage);
    check |= readWriteParameter("minMarkerDistanceRate", params.minMarkerDistanceRate, readNode, writeStorage);
    check |= readWriteParameter("cornerRefinementMethod", params.cornerRefinementMethod, readNode, writeStorage);
    check |= readWriteParameter("cornerRefinementWinSize", params.cornerRefinementWinSize, readNode, writeStorage);
    check |= readWriteParameter("relativeCornerRefinmentWinSize", params.relativeCornerRefinmentWinSize, readNode,
                                writeStorage);
    check |= readWriteParameter("cornerRefinementMaxIterations", params.cornerRefinementMaxIterations,
                                readNode, writeStorage);
    check |= readWriteParameter("cornerRefinementMinAccuracy", params.cornerRefinementMinAccuracy,
                                readNode, writeStorage);
    check |= readWriteParameter("markerBorderBits", params.markerBorderBits, readNode, writeStorage);
    check |= readWriteParameter("perspectiveRemovePixelPerCell", params.perspectiveRemovePixelPerCell,
                                readNode, writeStorage);
    check |= readWriteParameter("perspectiveRemoveIgnoredMarginPerCell", params.perspectiveRemoveIgnoredMarginPerCell,
                                readNode, writeStorage);
    check |= readWriteParameter("maxErroneousBitsInBorderRate", params.maxErroneousBitsInBorderRate,
                                readNode, writeStorage);
    check |= readWriteParameter("minOtsuStdDev", params.minOtsuStdDev, readNode, writeStorage);
    check |= readWriteParameter("errorCorrectionRate", params.errorCorrectionRate, readNode, writeStorage);
    check |= readWriteParameter("minGroupDistance", params.minGroupDistance, readNode, writeStorage);
    // new aruco 3 functionality
    check |= readWriteParameter("useAruco3Detection", params.useAruco3Detection, readNode, writeStorage);
    check |= readWriteParameter("minSideLengthCanonicalImg", params.minSideLengthCanonicalImg, readNode, writeStorage);
    check |= readWriteParameter("minMarkerLengthRatioOriginalImg", params.minMarkerLengthRatioOriginalImg,
                                readNode, writeStorage);
    return check;
}

bool DetectorParameters::readDetectorParameters(const FileNode& fn)
{
    if (fn.empty())
        return false;
    return readWrite(*this, &fn);
}

bool DetectorParameters::writeDetectorParameters(FileStorage& fs, const String& name)
{
    CV_Assert(fs.isOpened());
    if (!name.empty())
        fs << name << "{";
    bool res = readWrite(*this, nullptr, &fs);
    if (!name.empty())
        fs << "}";
    return res;
}
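
// Illustrative usage sketch for the two methods above (not part of this translation
// unit): persisting and restoring DetectorParameters with cv::FileStorage. The file
// name "detector_params.yml" and the node name "aruco_params" are arbitrary examples.
//
//   cv::FileStorage fs("detector_params.yml", cv::FileStorage::WRITE);
//   cv::aruco::DetectorParameters params;
//   params.writeDetectorParameters(fs, "aruco_params");   // writes "aruco_params: { ... }"
//   fs.release();
//
//   cv::FileStorage fs2("detector_params.yml", cv::FileStorage::READ);
//   cv::aruco::DetectorParameters restored;
//   bool ok = restored.readDetectorParameters(fs2["aruco_params"]);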

static inline bool readWrite(RefineParameters& refineParameters, const FileNode* readNode,
                             FileStorage* writeStorage = nullptr)
{
    CV_Assert(readNode || writeStorage);
    bool check = false;

    check |= readWriteParameter("minRepDistance", refineParameters.minRepDistance, readNode, writeStorage);
    check |= readWriteParameter("errorCorrectionRate", refineParameters.errorCorrectionRate, readNode, writeStorage);
    check |= readWriteParameter("checkAllOrders", refineParameters.checkAllOrders, readNode, writeStorage);
    return check;
}

RefineParameters::RefineParameters(float _minRepDistance, float _errorCorrectionRate, bool _checkAllOrders):
    minRepDistance(_minRepDistance), errorCorrectionRate(_errorCorrectionRate),
    checkAllOrders(_checkAllOrders){}

bool RefineParameters::readRefineParameters(const FileNode &fn)
{
    if (fn.empty())
        return false;
    return readWrite(*this, &fn);
}

bool RefineParameters::writeRefineParameters(FileStorage& fs, const String& name)
{
    CV_Assert(fs.isOpened());
    if (!name.empty())
        fs << name << "{";
    bool res = readWrite(*this, nullptr, &fs);
    if (!name.empty())
        fs << "}";
    return res;
}

/**
 * @brief Threshold input image using adaptive thresholding
 */
static void _threshold(InputArray _in, OutputArray _out, int winSize, double constant) {

    CV_Assert(winSize >= 3);
    if(winSize % 2 == 0) winSize++; // win size must be odd
    adaptiveThreshold(_in, _out, 255, ADAPTIVE_THRESH_MEAN_C, THRESH_BINARY_INV, winSize, constant);
}
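
// Note on the call above: with ADAPTIVE_THRESH_MEAN_C and THRESH_BINARY_INV, each
// pixel is compared against the mean of its winSize x winSize neighbourhood minus
// `constant`; pixels darker than that local threshold become 255 and the rest become
// 0, so dark marker borders come out as white blobs for the contour search below.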


/**
 * @brief Given a thresholded image, find the contours, calculate their polygonal approximation
 * and keep those that satisfy some conditions
 */
static void _findMarkerContours(const Mat &in, vector<vector<Point2f> > &candidates,
                                vector<vector<Point> > &contoursOut, double minPerimeterRate,
                                double maxPerimeterRate, double accuracyRate,
                                double minCornerDistanceRate, int minDistanceToBorder, int minSize) {

    CV_Assert(minPerimeterRate > 0 && maxPerimeterRate > 0 && accuracyRate > 0 &&
              minCornerDistanceRate >= 0 && minDistanceToBorder >= 0);

    // calculate maximum and minimum sizes in pixels
    unsigned int minPerimeterPixels =
        (unsigned int)(minPerimeterRate * max(in.cols, in.rows));
    unsigned int maxPerimeterPixels =
        (unsigned int)(maxPerimeterRate * max(in.cols, in.rows));

    // for aruco3 functionality
    if (minSize != 0) {
        minPerimeterPixels = 4*minSize;
    }

    Mat contoursImg;
    in.copyTo(contoursImg);
    vector<vector<Point> > contours;
    findContours(contoursImg, contours, RETR_LIST, CHAIN_APPROX_NONE);
    // now filter list of contours
    for(unsigned int i = 0; i < contours.size(); i++) {
        // check perimeter
        if(contours[i].size() < minPerimeterPixels || contours[i].size() > maxPerimeterPixels)
            continue;

        // check that it is square and convex
        vector<Point> approxCurve;
        approxPolyDP(contours[i], approxCurve, double(contours[i].size()) * accuracyRate, true);
        if(approxCurve.size() != 4 || !isContourConvex(approxCurve)) continue;

        // check min distance between corners
        double minDistSq =
            max(contoursImg.cols, contoursImg.rows) * max(contoursImg.cols, contoursImg.rows);
        for(int j = 0; j < 4; j++) {
            double d = (double)(approxCurve[j].x - approxCurve[(j + 1) % 4].x) *
                           (double)(approxCurve[j].x - approxCurve[(j + 1) % 4].x) +
                       (double)(approxCurve[j].y - approxCurve[(j + 1) % 4].y) *
                           (double)(approxCurve[j].y - approxCurve[(j + 1) % 4].y);
            minDistSq = min(minDistSq, d);
        }
        double minCornerDistancePixels = double(contours[i].size()) * minCornerDistanceRate;
        if(minDistSq < minCornerDistancePixels * minCornerDistancePixels) continue;

        // check if it is too near to the image border
        bool tooNearBorder = false;
        for(int j = 0; j < 4; j++) {
            if(approxCurve[j].x < minDistanceToBorder || approxCurve[j].y < minDistanceToBorder ||
               approxCurve[j].x > contoursImg.cols - 1 - minDistanceToBorder ||
               approxCurve[j].y > contoursImg.rows - 1 - minDistanceToBorder)
                tooNearBorder = true;
        }
        if(tooNearBorder) continue;

        // if it passes all the tests, add to candidates vector
        vector<Point2f> currentCandidate;
        currentCandidate.resize(4);
        for(int j = 0; j < 4; j++) {
            currentCandidate[j] = Point2f((float)approxCurve[j].x, (float)approxCurve[j].y);
        }
        candidates.push_back(currentCandidate);
        contoursOut.push_back(contours[i]);
    }
}


/**
 * @brief Ensure that the candidate corners are ordered clockwise
 */
static void _reorderCandidatesCorners(vector<vector<Point2f> > &candidates) {

    for(unsigned int i = 0; i < candidates.size(); i++) {
        double dx1 = candidates[i][1].x - candidates[i][0].x;
        double dy1 = candidates[i][1].y - candidates[i][0].y;
        double dx2 = candidates[i][2].x - candidates[i][0].x;
        double dy2 = candidates[i][2].y - candidates[i][0].y;
        double crossProduct = (dx1 * dy2) - (dy1 * dx2);

        if(crossProduct < 0.0) { // not clockwise direction
            swap(candidates[i][1], candidates[i][3]);
        }
    }
}

static float getAverageModuleSize(const vector<Point2f>& markerCorners, int markerSize, int markerBorderBits) {
    float averageArucoModuleSize = 0.f;
    for (size_t i = 0ull; i < 4ull; i++) {
        averageArucoModuleSize += sqrt(normL2Sqr<float>(Point2f(markerCorners[i] - markerCorners[(i+1ull) % 4ull])));
    }
    int numModules = markerSize + markerBorderBits * 2;
    averageArucoModuleSize /= ((float)markerCorners.size()*numModules);
    return averageArucoModuleSize;
}
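
// Worked example for getAverageModuleSize(): a 4x4-bit dictionary marker with a
// single border bit spans numModules = 4 + 2*1 = 6 modules per side, so a candidate
// whose four sides average 60 px yields an estimated module size of 60 / 6 = 10 px.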

static bool checkMarker1InMarker2(const vector<Point2f>& marker1, const vector<Point2f>& marker2) {
    return pointPolygonTest(marker2, marker1[0], false) >= 0 && pointPolygonTest(marker2, marker1[1], false) >= 0 &&
           pointPolygonTest(marker2, marker1[2], false) >= 0 && pointPolygonTest(marker2, marker1[3], false) >= 0;
}

struct MarkerCandidate {
    vector<Point2f> corners;
    vector<Point> contour;
    float perimeter = 0.f;
};

struct MarkerCandidateTree : MarkerCandidate{
    int parent = -1;
    int depth = 0;
    vector<MarkerCandidate> closeContours;

    MarkerCandidateTree() {}

    MarkerCandidateTree(vector<Point2f>&& corners_, vector<Point>&& contour_) {
        corners = std::move(corners_);
        contour = std::move(contour_);
        perimeter = 0.f;
        for (size_t i = 0ull; i < 4ull; i++) {
            perimeter += sqrt(normL2Sqr<float>(Point2f(corners[i] - corners[(i+1ull) % 4ull])));
        }
    }

    bool operator<(const MarkerCandidateTree& m) const {
        // sort the contours in descending order of perimeter
        return perimeter > m.perimeter;
    }
};


// returns the average distance between the marker points
static inline float getAverageDistance(const std::vector<Point2f>& marker1, const std::vector<Point2f>& marker2) {
    float minDistSq = std::numeric_limits<float>::max();
    // fc is the first corner considered on one of the markers, 4 combinations are possible
    for(int fc = 0; fc < 4; fc++) {
        float distSq = 0;
        for(int c = 0; c < 4; c++) {
            // modC is the corner considering first corner is fc
            int modC = (c + fc) % 4;
            distSq += normL2Sqr<float>(marker1[modC] - marker2[c]);
        }
        distSq /= 4.f;
        minDistSq = min(minDistSq, distSq);
    }
    return sqrt(minDistSq);
}
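
// Note: getAverageDistance() returns the root of the mean squared corner-to-corner
// distance, minimised over the four cyclic corner orderings, so two detections of
// the same square compare as close even if their corner lists start at different
// corners.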

/**
 * @brief Initial steps on finding square candidates
 */
static void _detectInitialCandidates(const Mat &grey, vector<vector<Point2f> > &candidates,
                                     vector<vector<Point> > &contours,
                                     const DetectorParameters &params) {

    CV_Assert(params.adaptiveThreshWinSizeMin >= 3 && params.adaptiveThreshWinSizeMax >= 3);
    CV_Assert(params.adaptiveThreshWinSizeMax >= params.adaptiveThreshWinSizeMin);
    CV_Assert(params.adaptiveThreshWinSizeStep > 0);

    // number of window sizes (scales) to apply adaptive thresholding
    int nScales = (params.adaptiveThreshWinSizeMax - params.adaptiveThreshWinSizeMin) /
                      params.adaptiveThreshWinSizeStep + 1;

    vector<vector<vector<Point2f> > > candidatesArrays((size_t) nScales);
    vector<vector<vector<Point> > > contoursArrays((size_t) nScales);

    //// for each value in the interval of thresholding window sizes
    parallel_for_(Range(0, nScales), [&](const Range& range) {
        const int begin = range.start;
        const int end = range.end;

        for (int i = begin; i < end; i++) {
            int currScale = params.adaptiveThreshWinSizeMin + i * params.adaptiveThreshWinSizeStep;
            // threshold
            Mat thresh;
            _threshold(grey, thresh, currScale, params.adaptiveThreshConstant);

            // detect rectangles
            _findMarkerContours(thresh, candidatesArrays[i], contoursArrays[i],
                                params.minMarkerPerimeterRate, params.maxMarkerPerimeterRate,
                                params.polygonalApproxAccuracyRate, params.minCornerDistanceRate,
                                params.minDistanceToBorder, params.minSideLengthCanonicalImg);
        }
    });
    // join candidates
    for(int i = 0; i < nScales; i++) {
        for(unsigned int j = 0; j < candidatesArrays[i].size(); j++) {
            candidates.push_back(candidatesArrays[i][j]);
            contours.push_back(contoursArrays[i][j]);
        }
    }
}
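
// Example of the scale count above: with adaptiveThreshWinSizeMin = 3,
// adaptiveThreshWinSizeMax = 23 and adaptiveThreshWinSizeStep = 10 (the library
// defaults), nScales = (23 - 3) / 10 + 1 = 3, i.e. the image is thresholded with
// window sizes 3, 13 and 23 and the resulting candidate lists are merged.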


/**
 * @brief Given an input image and candidate corners, extract the bits of the candidate, including
 * the border bits
 */
static Mat _extractBits(InputArray _image, const vector<Point2f>& corners, int markerSize,
                        int markerBorderBits, int cellSize, double cellMarginRate, double minStdDevOtsu) {
    CV_Assert(_image.getMat().channels() == 1);
    CV_Assert(corners.size() == 4ull);
    CV_Assert(markerBorderBits > 0 && cellSize > 0 && cellMarginRate >= 0 && cellMarginRate <= 1);
    CV_Assert(minStdDevOtsu >= 0);

    // number of bits in the marker
    int markerSizeWithBorders = markerSize + 2 * markerBorderBits;
    int cellMarginPixels = int(cellMarginRate * cellSize);

    Mat resultImg; // marker image after removing perspective
    int resultImgSize = markerSizeWithBorders * cellSize;
    Mat resultImgCorners(4, 1, CV_32FC2);
    resultImgCorners.ptr<Point2f>(0)[0] = Point2f(0, 0);
    resultImgCorners.ptr<Point2f>(0)[1] = Point2f((float)resultImgSize - 1, 0);
    resultImgCorners.ptr<Point2f>(0)[2] =
        Point2f((float)resultImgSize - 1, (float)resultImgSize - 1);
    resultImgCorners.ptr<Point2f>(0)[3] = Point2f(0, (float)resultImgSize - 1);

    // remove perspective
    Mat transformation = getPerspectiveTransform(corners, resultImgCorners);
    warpPerspective(_image, resultImg, transformation, Size(resultImgSize, resultImgSize),
                    INTER_NEAREST);

    // output image containing the bits
    Mat bits(markerSizeWithBorders, markerSizeWithBorders, CV_8UC1, Scalar::all(0));

    // check if standard deviation is enough to apply Otsu
    // if not enough, it probably means all bits are the same color (black or white)
    Mat mean, stddev;
    // Remove some border just to avoid border noise from perspective transformation
    Mat innerRegion = resultImg.colRange(cellSize / 2, resultImg.cols - cellSize / 2)
                          .rowRange(cellSize / 2, resultImg.rows - cellSize / 2);
    meanStdDev(innerRegion, mean, stddev);
    if(stddev.ptr< double >(0)[0] < minStdDevOtsu) {
        // all black or all white, depending on mean value
        if(mean.ptr< double >(0)[0] > 127)
            bits.setTo(1);
        else
            bits.setTo(0);
        return bits;
    }

    // now extract code, first threshold using Otsu
    threshold(resultImg, resultImg, 125, 255, THRESH_BINARY | THRESH_OTSU);

    // for each cell
    for(int y = 0; y < markerSizeWithBorders; y++) {
        for(int x = 0; x < markerSizeWithBorders; x++) {
            int Xstart = x * (cellSize) + cellMarginPixels;
            int Ystart = y * (cellSize) + cellMarginPixels;
            Mat square = resultImg(Rect(Xstart, Ystart, cellSize - 2 * cellMarginPixels,
                                        cellSize - 2 * cellMarginPixels));
            // count white pixels on each cell to assign its value
            size_t nZ = (size_t) countNonZero(square);
            if(nZ > square.total() / 2) bits.at<unsigned char>(y, x) = 1;
        }
    }

    return bits;
}
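
// Worked example for _extractBits(): a 4x4-bit marker with markerBorderBits = 1 and
// a cellSize (perspectiveRemovePixelPerCell) of 4 gives markerSizeWithBorders = 6 and
// a rectified resultImg of 24x24 px; the returned `bits` matrix is then 6x6, one
// entry per module including the border.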


/**
 * @brief Return number of erroneous bits in border, i.e. number of white bits in border.
 */
static int _getBorderErrors(const Mat &bits, int markerSize, int borderSize) {

    int sizeWithBorders = markerSize + 2 * borderSize;

    CV_Assert(markerSize > 0 && bits.cols == sizeWithBorders && bits.rows == sizeWithBorders);

    int totalErrors = 0;
    for(int y = 0; y < sizeWithBorders; y++) {
        for(int k = 0; k < borderSize; k++) {
            if(bits.ptr<unsigned char>(y)[k] != 0) totalErrors++;
            if(bits.ptr<unsigned char>(y)[sizeWithBorders - 1 - k] != 0) totalErrors++;
        }
    }
    for(int x = borderSize; x < sizeWithBorders - borderSize; x++) {
        for(int k = 0; k < borderSize; k++) {
            if(bits.ptr<unsigned char>(k)[x] != 0) totalErrors++;
            if(bits.ptr<unsigned char>(sizeWithBorders - 1 - k)[x] != 0) totalErrors++;
        }
    }
    return totalErrors;
}


/**
 * @brief Tries to identify one candidate given the dictionary
 * @return candidate type: zero if the candidate is not valid,
 *         1 if the candidate is a black candidate (default candidate),
 *         2 if the candidate is a white candidate
 */
static uint8_t _identifyOneCandidate(const Dictionary& dictionary, const Mat& _image,
                                     const vector<Point2f>& _corners, int& idx,
                                     const DetectorParameters& params, int& rotation,
                                     const float scale = 1.f) {
    CV_DbgAssert(params.markerBorderBits > 0);
    uint8_t typ=1;
    // get bits
    // scale corners to the correct size to search on the corresponding image pyramid
    vector<Point2f> scaled_corners(4);
    for (int i = 0; i < 4; ++i) {
        scaled_corners[i].x = _corners[i].x * scale;
        scaled_corners[i].y = _corners[i].y * scale;
    }

    Mat candidateBits =
        _extractBits(_image, scaled_corners, dictionary.markerSize, params.markerBorderBits,
                     params.perspectiveRemovePixelPerCell,
                     params.perspectiveRemoveIgnoredMarginPerCell, params.minOtsuStdDev);

    // analyze border bits
    int maximumErrorsInBorder =
        int(dictionary.markerSize * dictionary.markerSize * params.maxErroneousBitsInBorderRate);
    int borderErrors =
        _getBorderErrors(candidateBits, dictionary.markerSize, params.markerBorderBits);

    // check if it is a white marker
    if(params.detectInvertedMarker){
        // to get from 255 to 1
        Mat invertedImg = ~candidateBits-254;
        int invBError = _getBorderErrors(invertedImg, dictionary.markerSize, params.markerBorderBits);
        // white marker
        if(invBError<borderErrors){
            borderErrors = invBError;
            invertedImg.copyTo(candidateBits);
            typ=2;
        }
    }
    if(borderErrors > maximumErrorsInBorder) return 0; // border is wrong

    // take only inner bits
    Mat onlyBits =
        candidateBits.rowRange(params.markerBorderBits,
                               candidateBits.rows - params.markerBorderBits)
            .colRange(params.markerBorderBits, candidateBits.cols - params.markerBorderBits);

    // try to identify the marker
    if(!dictionary.identify(onlyBits, idx, rotation, params.errorCorrectionRate))
        return 0;

    return typ;
}

/**
 * @brief rotate the initial corner to get to the right position
 */
static void correctCornerPosition(vector<Point2f>& _candidate, int rotate){
    std::rotate(_candidate.begin(), _candidate.begin() + 4 - rotate, _candidate.end());
}

static size_t _findOptPyrImageForCanonicalImg(
        const vector<Mat>& img_pyr,
        const int scaled_width,
        const int cur_perimeter,
        const int min_perimeter) {
    CV_Assert(scaled_width > 0);
    size_t optLevel = 0;
    float dist = std::numeric_limits<float>::max();
    for (size_t i = 0; i < img_pyr.size(); ++i) {
        const float scale = img_pyr[i].cols / static_cast<float>(scaled_width);
        const float perimeter_scaled = cur_perimeter * scale;
        // instead of std::abs(), favor the larger pyramid level by checking that the distance is positive
        // this will slow down the algorithm but find more corners in the end
        const float new_dist = perimeter_scaled - min_perimeter;
        if (new_dist < dist && new_dist > 0.f) {
            dist = new_dist;
            optLevel = i;
        }
    }
    return optLevel;
}
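
// Note: _findOptPyrImageForCanonicalImg() picks the pyramid level whose rescaled
// candidate perimeter stays just above min_perimeter, i.e. the smallest image in
// which the candidate is still expected to be decodable; decoding on that level
// keeps _extractBits() cheap without dropping the candidate.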


/**
 * Least-squares 2D line fitting (solves A * C = B) :: Called from function refineCandidateLines
 * @param nContours contour-container
 */
static Point3f _interpolate2Dline(const vector<Point2f>& nContours){
    CV_Assert(nContours.size() >= 2);
    float minX, minY, maxX, maxY;
    minX = maxX = nContours[0].x;
    minY = maxY = nContours[0].y;

    for(unsigned int i = 0; i< nContours.size(); i++){
        minX = nContours[i].x < minX ? nContours[i].x : minX;
        minY = nContours[i].y < minY ? nContours[i].y : minY;
        maxX = nContours[i].x > maxX ? nContours[i].x : maxX;
        maxY = nContours[i].y > maxY ? nContours[i].y : maxY;
    }

    Mat A = Mat::ones((int)nContours.size(), 2, CV_32F); // Coefficient Matrix (N x 2)
    Mat B((int)nContours.size(), 1, CV_32F);             // Variables Matrix (N x 1)
    Mat C;                                               // Constant

    if(maxX - minX > maxY - minY){
        for(unsigned int i =0; i < nContours.size(); i++){
            A.at<float>(i,0)= nContours[i].x;
            B.at<float>(i,0)= nContours[i].y;
        }

        solve(A, B, C, DECOMP_NORMAL);

        return Point3f(C.at<float>(0, 0), -1., C.at<float>(1, 0));
    }
    else{
        for(unsigned int i =0; i < nContours.size(); i++){
            A.at<float>(i,0)= nContours[i].y;
            B.at<float>(i,0)= nContours[i].x;
        }

        solve(A, B, C, DECOMP_NORMAL);

        return Point3f(-1., C.at<float>(0, 0), C.at<float>(1, 0));
    }

}

/**
 * Find the point where two lines cross :: Called from function refineCandidateLines
 * @param nLine1
 * @param nLine2
 * @return Crossing point
 */
static Point2f _getCrossPoint(Point3f nLine1, Point3f nLine2){
    Matx22f A(nLine1.x, nLine1.y, nLine2.x, nLine2.y);
    Vec2f B(-nLine1.z, -nLine2.z);
    return Vec2f(A.solve(B).val);
}
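
// Note on the two helpers above: each fitted line is returned as Point3f(a, b, c)
// encoding a*x + b*y + c = 0, so _getCrossPoint() only has to solve the 2x2 system
// a1*x + b1*y = -c1, a2*x + b2*y = -c2 to obtain the refined corner.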

/**
 * Refine Corners using the contour vector :: Called from function detectMarkers
 * @param nContours contour-container
 * @param nCorners candidate Corners
 */
static void _refineCandidateLines(vector<Point>& nContours, vector<Point2f>& nCorners){
    vector<Point2f> contour2f(nContours.begin(), nContours.end());
    /* 5 groups :: to group the edges
     * 4 - classified by its corner
     * extra group - (temporary) if contours do not begin with a corner
     */
    vector<Point2f> cntPts[5];
    int cornerIndex[4]={-1};
    int group=4;

    for ( unsigned int i =0; i < nContours.size(); i++ ) {
        for(unsigned int j=0; j<4; j++){
            if ( nCorners[j] == contour2f[i] ){
                cornerIndex[j] = i;
                group=j;
            }
        }
        cntPts[group].push_back(contour2f[i]);
    }
    for (int i = 0; i < 4; i++)
    {
        CV_Assert(cornerIndex[i] != -1);
    }
    // merge the extra group into the corresponding edge group
    if( !cntPts[4].empty() ){
        for( unsigned int i=0; i < cntPts[4].size() ; i++ )
            cntPts[group].push_back(cntPts[4].at(i));
        cntPts[4].clear();
    }

    //Evaluate contour direction :: using the position of the detected corners
    int inc=1;

    inc = ( (cornerIndex[0] > cornerIndex[1]) && (cornerIndex[3] > cornerIndex[0]) ) ? -1:inc;
    inc = ( (cornerIndex[2] > cornerIndex[3]) && (cornerIndex[1] > cornerIndex[2]) ) ? -1:inc;

    // calculate the line that passes through each group of points
    Point3f lines[4];
    for(int i=0; i<4; i++){
        lines[i]=_interpolate2Dline(cntPts[i]);
    }

    /*
     * calculate the corners :: where the lines cross each other
     *
     *   clockwise direction        counter-clockwise direction
     *     0                          1
     *     .---. 1                    .---. 2
     *     |   |                      |   |
     *   3 .___.                    0 .___.
     *         2                          3
     */
    for(int i=0; i < 4; i++){
        if(inc<0)
            nCorners[i] = _getCrossPoint(lines[ i ], lines[ (i+1)%4 ]);    // 01 12 23 30
        else
            nCorners[i] = _getCrossPoint(lines[ i ], lines[ (i+3)%4 ]);    // 30 01 12 23
    }
}

static inline void findCornerInPyrImage(const float scale_init, const int closest_pyr_image_idx,
                                        const vector<Mat>& grey_pyramid, Mat corners,
                                        const DetectorParameters& params) {
    // scale them to the closest pyramid level
    if (scale_init != 1.f)
        corners *= scale_init; // scale_init * scale_pyr
    for (int idx = closest_pyr_image_idx - 1; idx >= 0; --idx) {
        // scale them to new pyramid level
        corners *= 2.f; // *= scale_pyr;
        // use larger win size for larger images
        const int subpix_win_size = std::max(grey_pyramid[idx].cols, grey_pyramid[idx].rows) > 1080 ? 5 : 3;
        cornerSubPix(grey_pyramid[idx], corners,
                     Size(subpix_win_size, subpix_win_size),
                     Size(-1, -1),
                     TermCriteria(TermCriteria::MAX_ITER | TermCriteria::EPS,
                                  params.cornerRefinementMaxIterations,
                                  params.cornerRefinementMinAccuracy));
    }
}

struct ArucoDetector::ArucoDetectorImpl {
    /// dictionary indicates the type of markers that will be searched
    Dictionary dictionary;

    /// marker detection parameters, check DetectorParameters docs to see available settings
    DetectorParameters detectorParams;

    /// marker refine parameters
    RefineParameters refineParams;
    ArucoDetectorImpl() {}

    ArucoDetectorImpl(const Dictionary &_dictionary, const DetectorParameters &_detectorParams,
                      const RefineParameters& _refineParams): dictionary(_dictionary),
                      detectorParams(_detectorParams), refineParams(_refineParams) {}
    /**
     * @brief Detect square candidates in the input image
     */
    void detectCandidates(const Mat& grey, vector<vector<Point2f> >& candidates, vector<vector<Point> >& contours) {
        /// 1. DETECT FIRST SET OF CANDIDATES
        _detectInitialCandidates(grey, candidates, contours, detectorParams);
        /// 2. SORT CORNERS
        _reorderCandidatesCorners(candidates);
    }

    /**
     * @brief FILTER OUT NEAR CANDIDATE PAIRS
     *
     * save the outer/inner border (i.e. potential candidates) to vector<MarkerCandidateTree>,
     * clear candidates and contours
     */
    vector<MarkerCandidateTree>
    filterTooCloseCandidates(vector<vector<Point2f> > &candidates, vector<vector<Point> > &contours) {
        CV_Assert(detectorParams.minMarkerDistanceRate >= 0.);
        vector<MarkerCandidateTree> candidateTree(candidates.size());
        for(size_t i = 0ull; i < candidates.size(); i++) {
            candidateTree[i] = MarkerCandidateTree(std::move(candidates[i]), std::move(contours[i]));
        }
        candidates.clear();
        contours.clear();

        // sort candidates from big to small
        std::sort(candidateTree.begin(), candidateTree.end());
        // group index for each candidate
        vector<int> groupId(candidateTree.size(), -1);
        vector<vector<size_t> > groupedCandidates;
        vector<bool> isSelectedContours(candidateTree.size(), true);

        size_t countSelectedContours = 0ull;
        for (size_t i = 0ull; i < candidateTree.size(); i++) {
            for (size_t j = i + 1ull; j < candidateTree.size(); j++) {
                float minDist = getAverageDistance(candidateTree[i].corners, candidateTree[j].corners);
                // if mean distance is too low, group markers
                // the distance between the points of two independent markers should be more than half the side of the marker
                // half the side of the marker = (perimeter / 4) * 0.5 = perimeter * 0.125
                if(minDist < candidateTree[j].perimeter*(float)detectorParams.minMarkerDistanceRate) {
                    isSelectedContours[i] = false;
                    isSelectedContours[j] = false;
                    // i and j are not related to a group
                    if(groupId[i] < 0 && groupId[j] < 0){
                        // mark candidates with their corresponding group number
                        groupId[i] = groupId[j] = (int)groupedCandidates.size();
                        // create group
                        groupedCandidates.push_back({i, j});
                    }
                    // i is related to a group
                    else if(groupId[i] > -1 && groupId[j] == -1) {
                        int group = groupId[i];
                        groupId[j] = group;
                        // add to group
                        groupedCandidates[group].push_back(j);
                    }
                    // j is related to a group
                    else if(groupId[j] > -1 && groupId[i] == -1) {
                        int group = groupId[j];
                        groupId[i] = group;
                        // add to group
                        groupedCandidates[group].push_back(i);
                    }
                }
            }
            countSelectedContours += isSelectedContours[i];
        }

        for (vector<size_t>& grouped : groupedCandidates) {
            if (detectorParams.detectInvertedMarker) // if detectInvertedMarker choose smallest contours
                std::sort(grouped.begin(), grouped.end(), [](const size_t &a, const size_t &b) {
                    return a > b;
                });
            else // if detectInvertedMarker==false choose largest contours
                std::sort(grouped.begin(), grouped.end());
            size_t currId = grouped[0];
            isSelectedContours[currId] = true;
            for (size_t i = 1ull; i < grouped.size(); i++) {
                size_t id = grouped[i];
                float dist = getAverageDistance(candidateTree[id].corners, candidateTree[currId].corners);
                float moduleSize = getAverageModuleSize(candidateTree[id].corners, dictionary.markerSize, detectorParams.markerBorderBits);
                if (dist > detectorParams.minGroupDistance*moduleSize) {
                    currId = id;
                    candidateTree[grouped[0]].closeContours.push_back(candidateTree[id]);
                }
            }
        }

        vector<MarkerCandidateTree> selectedCandidates(countSelectedContours + groupedCandidates.size());
        countSelectedContours = 0ull;
        for (size_t i = 0ull; i < candidateTree.size(); i++) {
            if (isSelectedContours[i]) {
                selectedCandidates[countSelectedContours] = std::move(candidateTree[i]);
                countSelectedContours++;
            }
        }

        // find hierarchy in the candidate tree
        for (int i = (int)selectedCandidates.size()-1; i >= 0; i--) {
            for (int j = i - 1; j >= 0; j--) {
                if (checkMarker1InMarker2(selectedCandidates[i].corners, selectedCandidates[j].corners)) {
                    selectedCandidates[i].parent = j;
                    selectedCandidates[j].depth = max(selectedCandidates[j].depth, selectedCandidates[i].depth + 1);
                    break;
                }
            }
        }
        return selectedCandidates;
    }

    /**
     * @brief Identify square candidates according to a marker dictionary
     */
    void identifyCandidates(const Mat& grey, const vector<Mat>& image_pyr, vector<MarkerCandidateTree>& selectedContours,
                            vector<vector<Point2f> >& accepted, vector<vector<Point> >& contours,
                            vector<int>& ids, OutputArrayOfArrays _rejected = noArray()) {
        size_t ncandidates = selectedContours.size();
        vector<vector<Point2f> > rejected;

        vector<int> idsTmp(ncandidates, -1);
        vector<int> rotated(ncandidates, 0);
        vector<uint8_t> validCandidates(ncandidates, 0);
        vector<bool> was(ncandidates, false);
        bool checkCloseContours = true;

        int maxDepth = 0;
        for (size_t i = 0ull; i < selectedContours.size(); i++)
            maxDepth = max(selectedContours[i].depth, maxDepth);
        vector<vector<size_t>> depths(maxDepth+1);
        for (size_t i = 0ull; i < selectedContours.size(); i++) {
            depths[selectedContours[i].depth].push_back(i);
        }

        //// Analyze each of the candidates
        int depth = 0;
        size_t counter = 0;
        while (counter < ncandidates) {
            parallel_for_(Range(0, (int)depths[depth].size()), [&](const Range& range) {
                const int begin = range.start;
                const int end = range.end;
                for (int i = begin; i < end; i++) {
                    size_t v = depths[depth][i];
                    was[v] = true;
                    Mat img = grey;
                    // implements equation (4)
                    if (detectorParams.useAruco3Detection) {
                        const int minPerimeter = detectorParams.minSideLengthCanonicalImg * 4;
                        const size_t nearestImgId = _findOptPyrImageForCanonicalImg(image_pyr, grey.cols, static_cast<int>(selectedContours[v].contour.size()), minPerimeter);
                        img = image_pyr[nearestImgId];
                    }
                    const float scale = detectorParams.useAruco3Detection ? img.cols / static_cast<float>(grey.cols) : 1.f;

                    validCandidates[v] = _identifyOneCandidate(dictionary, img, selectedContours[v].corners, idsTmp[v], detectorParams, rotated[v], scale);

                    if (validCandidates[v] == 0 && checkCloseContours) {
                        for (const MarkerCandidate& closeMarkerCandidate: selectedContours[v].closeContours) {
                            validCandidates[v] = _identifyOneCandidate(dictionary, img, closeMarkerCandidate.corners, idsTmp[v], detectorParams, rotated[v], scale);
                            if (validCandidates[v] > 0) {
                                selectedContours[v].corners = closeMarkerCandidate.corners;
                                selectedContours[v].contour = closeMarkerCandidate.contour;
                                break;
                            }
                        }
                    }
                }
            });

            // visit the parent vertices of the detected markers to skip identifying parent contours
            for(size_t v : depths[depth]) {
                if(validCandidates[v] > 0) {
                    int parent = selectedContours[v].parent;
                    while (parent != -1) {
                        if (!was[parent]) {
                            was[parent] = true;
                            counter++;
                        }
                        parent = selectedContours[parent].parent;
                    }
                }
                counter++;
            }
            depth++;
        }

        for (size_t i = 0ull; i < selectedContours.size(); i++) {
            if (validCandidates[i] > 0) {
                // shift corner positions to the correct rotation
                correctCornerPosition(selectedContours[i].corners, rotated[i]);

                accepted.push_back(selectedContours[i].corners);
                contours.push_back(selectedContours[i].contour);
                ids.push_back(idsTmp[i]);
            }
            else {
                rejected.push_back(selectedContours[i].corners);
            }
        }

        // parse output
        if(_rejected.needed()) {
            _copyVector2Output(rejected, _rejected);
        }
    }

};

ArucoDetector::ArucoDetector(const Dictionary &_dictionary,
                             const DetectorParameters &_detectorParams,
                             const RefineParameters& _refineParams) {
    arucoDetectorImpl = makePtr<ArucoDetectorImpl>(_dictionary, _detectorParams, _refineParams);
}

void ArucoDetector::detectMarkers(InputArray _image, OutputArrayOfArrays _corners, OutputArray _ids,
                                  OutputArrayOfArrays _rejectedImgPoints) const {
    CV_Assert(!_image.empty());
    DetectorParameters& detectorParams = arucoDetectorImpl->detectorParams;
    const Dictionary& dictionary = arucoDetectorImpl->dictionary;

    CV_Assert(detectorParams.markerBorderBits > 0);
    // check that the parameters are set correctly if Aruco3 is used
    CV_Assert(!(detectorParams.useAruco3Detection == true &&
                detectorParams.minSideLengthCanonicalImg == 0 &&
                detectorParams.minMarkerLengthRatioOriginalImg == 0.0));

    Mat grey;
    _convertToGrey(_image.getMat(), grey);

    // Aruco3 functionality is the extension of Aruco.
    // The description can be found in:
    // [1] Speeded up detection of squared fiducial markers, 2018, F.J. Romero-Ramirez et al.
    // if Aruco3 functionality is not wanted,
    // change some parameters to be sure to turn it off
    if (!detectorParams.useAruco3Detection) {
        detectorParams.minMarkerLengthRatioOriginalImg = 0.0;
        detectorParams.minSideLengthCanonicalImg = 0;
    }
    else {
        // always turn on corner refinement in case of Aruco3, due to upsampling
        detectorParams.cornerRefinementMethod = (int)CORNER_REFINE_SUBPIX;
        // only CORNER_REFINE_SUBPIX is implemented correctly for useAruco3Detection
        // Todo: update other CORNER_REFINE methods
    }

    /// Step 0: equation (2) from paper [1]
    const float fxfy = (!detectorParams.useAruco3Detection ? 1.f : detectorParams.minSideLengthCanonicalImg /
                        (detectorParams.minSideLengthCanonicalImg + std::max(grey.cols, grey.rows)*
                        detectorParams.minMarkerLengthRatioOriginalImg));

    /// Step 1: create image pyramid. Section 3.4. in [1]
    vector<Mat> grey_pyramid;
    int closest_pyr_image_idx = 0, num_levels = 0;
    //// Step 1.1: resize image with equation (1) from paper [1]
    if (detectorParams.useAruco3Detection) {
        const float scale_pyr = 2.f;
        const float img_area = static_cast<float>(grey.rows*grey.cols);
        const float min_area_marker = static_cast<float>(detectorParams.minSideLengthCanonicalImg*
                                                         detectorParams.minSideLengthCanonicalImg);
        // find max level
        num_levels = static_cast<int>(log2(img_area / min_area_marker)/scale_pyr);
        // the closest pyramid image to the downsampled segmentation image
        // will later be used as start index for corner upsampling
        const float scale_img_area = img_area * fxfy * fxfy;
        closest_pyr_image_idx = cvRound(log2(img_area / scale_img_area)/scale_pyr);
    }
    buildPyramid(grey, grey_pyramid, num_levels);

    // resize to segmentation image
    // the contours will be detected in this reduced-size image
    if (fxfy != 1.f)
        resize(grey, grey, Size(cvRound(fxfy * grey.cols), cvRound(fxfy * grey.rows)));

    /// STEP 2: Detect marker candidates
    vector<vector<Point2f> > candidates;
    vector<vector<Point> > contours;
    vector<int> ids;

    /// STEP 2.a Detect marker candidates :: using AprilTag
    if(detectorParams.cornerRefinementMethod == (int)CORNER_REFINE_APRILTAG){
        _apriltag(grey, detectorParams, candidates, contours);
    }
    /// STEP 2.b Detect marker candidates :: traditional way
    else {
        arucoDetectorImpl->detectCandidates(grey, candidates, contours);
    }

    /// STEP 2.c FILTER OUT NEAR CANDIDATE PAIRS
    auto selectedCandidates = arucoDetectorImpl->filterTooCloseCandidates(candidates, contours);

    /// STEP 2.d Check candidate codification (identify markers)
    arucoDetectorImpl->identifyCandidates(grey, grey_pyramid, selectedCandidates, candidates, contours,
                                          ids, _rejectedImgPoints);

    /// STEP 3: Corner refinement :: use corner subpix
    if (detectorParams.cornerRefinementMethod == (int)CORNER_REFINE_SUBPIX) {
        CV_Assert(detectorParams.cornerRefinementWinSize > 0 && detectorParams.cornerRefinementMaxIterations > 0 &&
                  detectorParams.cornerRefinementMinAccuracy > 0);
        // Do subpixel estimation. In Aruco3 start on the lowest pyramid level and upscale the corners
        parallel_for_(Range(0, (int)candidates.size()), [&](const Range& range) {
            const int begin = range.start;
            const int end = range.end;

            for (int i = begin; i < end; i++) {
                if (detectorParams.useAruco3Detection) {
                    const float scale_init = (float) grey_pyramid[closest_pyr_image_idx].cols / grey.cols;
                    findCornerInPyrImage(scale_init, closest_pyr_image_idx, grey_pyramid, Mat(candidates[i]), detectorParams);
                }
                else {
                    int cornerRefinementWinSize = std::max(1, cvRound(detectorParams.relativeCornerRefinmentWinSize*
                        getAverageModuleSize(candidates[i], dictionary.markerSize, detectorParams.markerBorderBits)));
                    cornerRefinementWinSize = min(cornerRefinementWinSize, detectorParams.cornerRefinementWinSize);
                    cornerSubPix(grey, Mat(candidates[i]), Size(cornerRefinementWinSize, cornerRefinementWinSize), Size(-1, -1),
                                 TermCriteria(TermCriteria::MAX_ITER | TermCriteria::EPS,
                                              detectorParams.cornerRefinementMaxIterations,
                                              detectorParams.cornerRefinementMinAccuracy));
                }
            }
        });
    }

    /// STEP 3, Optional : Corner refinement :: use contour container
    if (detectorParams.cornerRefinementMethod == (int)CORNER_REFINE_CONTOUR){

        if (!ids.empty()) {

            // do corner refinement using the contours of each detected marker
            parallel_for_(Range(0, (int)candidates.size()), [&](const Range& range) {
                for (int i = range.start; i < range.end; i++) {
                    _refineCandidateLines(contours[i], candidates[i]);
                }
            });
        }
    }

    if (detectorParams.cornerRefinementMethod != (int)CORNER_REFINE_SUBPIX && fxfy != 1.f) {
        // only CORNER_REFINE_SUBPIX is implemented correctly for useAruco3Detection
        // Todo: update other CORNER_REFINE methods

        // scale to original size; this however will lead to inaccurate detections!
        for (auto &vecPoints : candidates)
            for (auto &point : vecPoints)
                point *= 1.f/fxfy;
    }

    // copy to output arrays
    _copyVector2Output(candidates, _corners);
    Mat(ids).copyTo(_ids);
}
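
// Illustrative usage sketch for the detector defined above (not part of this
// translation unit). The image file name is an arbitrary example; the dictionary and
// parameters shown are just the predefined defaults.
//
//   cv::aruco::ArucoDetector detector(
//       cv::aruco::getPredefinedDictionary(cv::aruco::DICT_6X6_250),
//       cv::aruco::DetectorParameters());
//   cv::Mat image = cv::imread("scene.png");
//   std::vector<std::vector<cv::Point2f>> corners, rejected;
//   std::vector<int> ids;
//   detector.detectMarkers(image, corners, ids, rejected);
//   // corners[k] holds the four clockwise corners of the marker with id ids[k]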
1008 | |
1009 | /** |
1010 | * Project board markers that are not included in the list of detected markers |
1011 | */ |
1012 | static inline void _projectUndetectedMarkers(const Board &board, InputOutputArrayOfArrays detectedCorners, |
1013 | InputOutputArray detectedIds, InputArray cameraMatrix, InputArray distCoeffs, |
1014 | vector<vector<Point2f> >& undetectedMarkersProjectedCorners, |
1015 | OutputArray undetectedMarkersIds) { |
1016 | Mat rvec, tvec; // first estimate board pose with the current avaible markers |
1017 | Mat objPoints, imgPoints; // object and image points for the solvePnP function |
1018 | // To refine corners of ArUco markers the function refineDetectedMarkers() find an aruco markers pose from 3D-2D point correspondences. |
1019 | // To find 3D-2D point correspondences uses matchImagePoints(). |
1020 | // The method matchImagePoints() works with ArUco corners (in Board/GridBoard cases) or with ChArUco corners (in CharucoBoard case). |
1021 | // To refine corners of ArUco markers we need work with ArUco corners only in all boards. |
1022 | // To call matchImagePoints() with ArUco corners for all boards we need to call matchImagePoints() from base class Board. |
1023 | // The method matchImagePoints() implemented in Pimpl and we need to create temp Board object to call the base method. |
1024 | Board(board.getObjPoints(), board.getDictionary(), board.getIds()).matchImagePoints(detectedCorners, detectedIds, objPoints, imgPoints); |
1025 | if (objPoints.total() < 4ull) // at least one marker from board so rvec and tvec are valid |
1026 | return; |
1027 | solvePnP(objectPoints: objPoints, imagePoints: imgPoints, cameraMatrix, distCoeffs, rvec, tvec); |
1028 | |
1029 | // search undetected markers and project them using the previous pose |
1030 | vector<vector<Point2f> > undetectedCorners; |
1031 | const std::vector<int>& ids = board.getIds(); |
1032 | vector<int> undetectedIds; |
1033 | for(unsigned int i = 0; i < ids.size(); i++) { |
1034 | int foundIdx = -1; |
1035 | for(unsigned int j = 0; j < detectedIds.total(); j++) { |
1036 | if(ids[i] == detectedIds.getMat().ptr<int>()[j]) { |
1037 | foundIdx = j; |
1038 | break; |
1039 | } |
1040 | } |
1041 | |
1042 | // not detected |
1043 | if(foundIdx == -1) { |
1044 | undetectedCorners.push_back(x: vector<Point2f>()); |
1045 | undetectedIds.push_back(x: ids[i]); |
1046 | projectPoints(objectPoints: board.getObjPoints()[i], rvec, tvec, cameraMatrix, distCoeffs, |
1047 | imagePoints: undetectedCorners.back()); |
1048 | } |
1049 | } |
1050 | // parse output |
1051 | Mat(undetectedIds).copyTo(m: undetectedMarkersIds); |
1052 | undetectedMarkersProjectedCorners = undetectedCorners; |
1053 | } |
1054 | |
1055 | /** |
1056 | * Interpolate board markers that are not included in the list of detected markers using |
1057 | * global homography |
1058 | */ |
1059 | static void _projectUndetectedMarkers(const Board &_board, InputOutputArrayOfArrays _detectedCorners, |
1060 | InputOutputArray _detectedIds, |
1061 | vector<vector<Point2f> >& _undetectedMarkersProjectedCorners, |
1062 | OutputArray _undetectedMarkersIds) { |
1063 | // check board points are in the same plane, if not, global homography cannot be applied |
1064 | CV_Assert(_board.getObjPoints().size() > 0); |
1065 | CV_Assert(_board.getObjPoints()[0].size() > 0); |
1066 | float boardZ = _board.getObjPoints()[0][0].z; |
1067 | for(unsigned int i = 0; i < _board.getObjPoints().size(); i++) { |
1068 | for(unsigned int j = 0; j < _board.getObjPoints()[i].size(); j++) |
1069 | CV_Assert(boardZ == _board.getObjPoints()[i][j].z); |
1070 | } |
1071 | |
1072 | vector<Point2f> detectedMarkersObj2DAll; // Object coordinates (without Z) of all the detected |
1073 | // marker corners in a single vector |
1074 | vector<Point2f> imageCornersAll; // Image corners of all detected markers in a single vector |
1075 | vector<vector<Point2f> > undetectedMarkersObj2D; // Object coordinates (without Z) of all |
1076 | // missing markers in different vectors |
1077 | vector<int> undetectedMarkersIds; // ids of missing markers |
1078 | // find markers included in board, and missing markers from board. Fill the previous vectors |
1079 | for(unsigned int j = 0; j < _board.getIds().size(); j++) { |
1080 | bool found = false; |
1081 | for(unsigned int i = 0; i < _detectedIds.total(); i++) { |
1082 | if(_detectedIds.getMat().ptr<int>()[i] == _board.getIds()[j]) { |
1083 | for(int c = 0; c < 4; c++) { |
1084 | imageCornersAll.push_back(x: _detectedCorners.getMat(i).ptr<Point2f>()[c]); |
1085 | detectedMarkersObj2DAll.push_back( |
1086 | x: Point2f(_board.getObjPoints()[j][c].x, _board.getObjPoints()[j][c].y)); |
1087 | } |
1088 | found = true; |
1089 | break; |
1090 | } |
1091 | } |
1092 | if(!found) { |
1093 | undetectedMarkersObj2D.push_back(x: vector<Point2f>()); |
1094 | for(int c = 0; c < 4; c++) { |
1095 | undetectedMarkersObj2D.back().push_back( |
1096 | x: Point2f(_board.getObjPoints()[j][c].x, _board.getObjPoints()[j][c].y)); |
1097 | } |
1098 | undetectedMarkersIds.push_back(x: _board.getIds()[j]); |
1099 | } |
1100 | } |
1101 | if(imageCornersAll.size() == 0) return; |
1102 | |
1103 | // get homography from detected markers |
1104 | Mat transformation = findHomography(srcPoints: detectedMarkersObj2DAll, dstPoints: imageCornersAll); |
1105 | |
1106 | _undetectedMarkersProjectedCorners.resize(new_size: undetectedMarkersIds.size()); |
1107 | |
1108 | // for each undetected marker, apply transformation |
1109 | for(unsigned int i = 0; i < undetectedMarkersObj2D.size(); i++) { |
1110 | perspectiveTransform(src: undetectedMarkersObj2D[i], dst: _undetectedMarkersProjectedCorners[i], m: transformation); |
1111 | } |
1112 | Mat(undetectedMarkersIds).copyTo(m: _undetectedMarkersIds); |
1113 | } |
1114 | |
1115 | void ArucoDetector::refineDetectedMarkers(InputArray _image, const Board& _board, |
1116 | InputOutputArrayOfArrays _detectedCorners, InputOutputArray _detectedIds, |
1117 | InputOutputArrayOfArrays _rejectedCorners, InputArray _cameraMatrix, |
1118 | InputArray _distCoeffs, OutputArray _recoveredIdxs) const { |
1119 | DetectorParameters& detectorParams = arucoDetectorImpl->detectorParams; |
1120 | const Dictionary& dictionary = arucoDetectorImpl->dictionary; |
1121 | RefineParameters& refineParams = arucoDetectorImpl->refineParams; |
1122 | CV_Assert(refineParams.minRepDistance > 0); |
1123 | |
1124 | if(_detectedIds.total() == 0 || _rejectedCorners.total() == 0) return; |
1125 | |
1126 | // get projections of missing markers in the board |
1127 | vector<vector<Point2f> > undetectedMarkersCorners; |
1128 | vector<int> undetectedMarkersIds; |
1129 | if(_cameraMatrix.total() != 0) { |
1130 | // reproject based on camera projection model |
1131 | _projectUndetectedMarkers(board: _board, detectedCorners: _detectedCorners, detectedIds: _detectedIds, cameraMatrix: _cameraMatrix, distCoeffs: _distCoeffs, |
1132 | undetectedMarkersProjectedCorners&: undetectedMarkersCorners, undetectedMarkersIds); |
1133 | |
1134 | } else { |
1135 | // reproject based on global homography |
1136 | _projectUndetectedMarkers(_board, _detectedCorners, _detectedIds, undetectedMarkersProjectedCorners&: undetectedMarkersCorners, |
1137 | undetectedMarkersIds: undetectedMarkersIds); |
1138 | } |
1139 | |
1140 | // list of missing markers indicating if they have been assigned to a candidate |
1141 | vector<bool > alreadyIdentified(_rejectedCorners.total(), false); |
1142 | |
1143 | // maximum bits that can be corrected |
1144 | int maxCorrectionRecalculated = |
1145 | int(double(dictionary.maxCorrectionBits) * refineParams.errorCorrectionRate); |
1146 | |
1147 | Mat grey; |
1148 | _convertToGrey(in: _image, out: grey); |
1149 | |
1150 | // vector of final detected marker corners and ids |
1151 | vector<vector<Point2f> > finalAcceptedCorners; |
1152 | vector<int> finalAcceptedIds; |
1153 | // fill with the current markers |
1154 | finalAcceptedCorners.resize(new_size: _detectedCorners.total()); |
1155 | finalAcceptedIds.resize(new_size: _detectedIds.total()); |
1156 | for(unsigned int i = 0; i < _detectedIds.total(); i++) { |
1157 | finalAcceptedCorners[i] = _detectedCorners.getMat(i).clone(); |
1158 | finalAcceptedIds[i] = _detectedIds.getMat().ptr<int>()[i]; |
1159 | } |
1160 | vector<int> recoveredIdxs; // original indexes of accepted markers in _rejectedCorners |
1161 | |
1162 | // for each missing marker, try to find a correspondence |
1163 | for(unsigned int i = 0; i < undetectedMarkersIds.size(); i++) { |
1164 | |
1165 | // best match at the moment |
1166 | int closestCandidateIdx = -1; |
1167 | double closestCandidateDistance = refineParams.minRepDistance * refineParams.minRepDistance + 1; |
1168 | Mat closestRotatedMarker; |
1169 | |
1170 | for(unsigned int j = 0; j < _rejectedCorners.total(); j++) { |
1171 | if(alreadyIdentified[j]) continue; |
1172 | |
1173 | // check distance |
1174 | double minDistance = closestCandidateDistance + 1; |
1175 | bool valid = false; |
1176 | int validRot = 0; |
            for(int c = 0; c < 4; c++) { // first corner in rejected candidate
                double currentMaxDistance = 0;
                for(int k = 0; k < 4; k++) {
                    Point2f rejCorner = _rejectedCorners.getMat(j).ptr<Point2f>()[(c + k) % 4];
                    Point2f distVector = undetectedMarkersCorners[i][k] - rejCorner;
                    double cornerDist = distVector.x * distVector.x + distVector.y * distVector.y;
                    currentMaxDistance = max(currentMaxDistance, cornerDist);
                }
                // if distance is better than current best distance
                if(currentMaxDistance < closestCandidateDistance) {
                    valid = true;
                    validRot = c;
                    minDistance = currentMaxDistance;
                }
                if(!refineParams.checkAllOrders) break;
            }

            if(!valid) continue;

            // apply rotation
            Mat rotatedMarker;
            if(refineParams.checkAllOrders) {
                rotatedMarker = Mat(4, 1, CV_32FC2);
                for(int c = 0; c < 4; c++)
                    rotatedMarker.ptr<Point2f>()[c] =
                        _rejectedCorners.getMat(j).ptr<Point2f>()[(c + 4 + validRot) % 4];
            }
            else rotatedMarker = _rejectedCorners.getMat(j);

            // last filter: check if the inner code is close enough to the assigned marker code
            int codeDistance = 0;
            // if errorCorrectionRate is negative, don't check the code
            if(refineParams.errorCorrectionRate >= 0) {

                // extract bits
                Mat bits = _extractBits(
                    grey, rotatedMarker, dictionary.markerSize, detectorParams.markerBorderBits,
                    detectorParams.perspectiveRemovePixelPerCell,
                    detectorParams.perspectiveRemoveIgnoredMarginPerCell, detectorParams.minOtsuStdDev);

                Mat onlyBits =
                    bits.rowRange(detectorParams.markerBorderBits, bits.rows - detectorParams.markerBorderBits)
                        .colRange(detectorParams.markerBorderBits, bits.rows - detectorParams.markerBorderBits);

                codeDistance =
                    dictionary.getDistanceToId(onlyBits, undetectedMarkersIds[i], false);
            }

            // if everything is OK, assign values to the current best match
            if(refineParams.errorCorrectionRate < 0 || codeDistance < maxCorrectionRecalculated) {
                closestCandidateIdx = j;
                closestCandidateDistance = minDistance;
                closestRotatedMarker = rotatedMarker;
            }
        }

        // if there is at least one good match, the missing marker has been rescued
        if(closestCandidateIdx >= 0) {

            // subpixel refinement
            if(detectorParams.cornerRefinementMethod == (int)CORNER_REFINE_SUBPIX) {
                CV_Assert(detectorParams.cornerRefinementWinSize > 0 &&
                          detectorParams.cornerRefinementMaxIterations > 0 &&
                          detectorParams.cornerRefinementMinAccuracy > 0);

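                // The refinement window adapts to the apparent marker size: it is derived from
                // relativeCornerRefinmentWinSize times the average module size in pixels and then
                // capped by the fixed cornerRefinementWinSize upper bound.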
                std::vector<Point2f> marker(closestRotatedMarker.begin<Point2f>(), closestRotatedMarker.end<Point2f>());
                int cornerRefinementWinSize = std::max(1, cvRound(detectorParams.relativeCornerRefinmentWinSize*
                    getAverageModuleSize(marker, dictionary.markerSize, detectorParams.markerBorderBits)));
                cornerRefinementWinSize = min(cornerRefinementWinSize, detectorParams.cornerRefinementWinSize);
                cornerSubPix(grey, closestRotatedMarker,
                             Size(cornerRefinementWinSize, cornerRefinementWinSize),
                             Size(-1, -1), TermCriteria(TermCriteria::MAX_ITER | TermCriteria::EPS,
                                                        detectorParams.cornerRefinementMaxIterations,
                                                        detectorParams.cornerRefinementMinAccuracy));
            }

            // remove from rejected
            alreadyIdentified[closestCandidateIdx] = true;

            // add to detected
            finalAcceptedCorners.push_back(closestRotatedMarker);
            finalAcceptedIds.push_back(undetectedMarkersIds[i]);

            // add the original index of the candidate
            recoveredIdxs.push_back(closestCandidateIdx);
        }
    }

    // update the output only if at least one marker has been recovered
    if(finalAcceptedIds.size() != _detectedIds.total()) {

        Mat(finalAcceptedIds).copyTo(_detectedIds);
        _copyVector2Output(finalAcceptedCorners, _detectedCorners);

        // recalculate _rejectedCorners based on alreadyIdentified
        vector<vector<Point2f> > finalRejected;
        for(unsigned int i = 0; i < alreadyIdentified.size(); i++) {
            if(!alreadyIdentified[i]) {
                finalRejected.push_back(_rejectedCorners.getMat(i).clone());
            }
        }
        _copyVector2Output(finalRejected, _rejectedCorners);

        if(_recoveredIdxs.needed()) {
            Mat(recoveredIdxs).copyTo(_recoveredIdxs);
        }
    }
}
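
/* Usage sketch (illustrative only, not part of the library): refining rejected candidates
   against a known board. `image`, `gridBoard`, `camMatrix` and `distCoeffs` are assumed to be
   supplied by the caller.

   cv::aruco::ArucoDetector detector(cv::aruco::getPredefinedDictionary(cv::aruco::DICT_6X6_250));
   std::vector<std::vector<cv::Point2f> > corners, rejected;
   std::vector<int> ids;
   detector.detectMarkers(image, corners, ids, rejected);
   detector.refineDetectedMarkers(image, gridBoard, corners, ids, rejected, camMatrix, distCoeffs);
*/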

void ArucoDetector::write(FileStorage &fs) const
{
    arucoDetectorImpl->dictionary.writeDictionary(fs);
    arucoDetectorImpl->detectorParams.writeDetectorParameters(fs);
    arucoDetectorImpl->refineParams.writeRefineParameters(fs);
}

void ArucoDetector::read(const FileNode &fn) {
    arucoDetectorImpl->dictionary.readDictionary(fn);
    arucoDetectorImpl->detectorParams.readDetectorParameters(fn);
    arucoDetectorImpl->refineParams.readRefineParameters(fn);
}
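
/* Serialization sketch (illustrative only): writing the detector configuration to a YAML file
   and reading it back. The file name "aruco_detector.yml" is an arbitrary example.

   cv::FileStorage fsWrite("aruco_detector.yml", cv::FileStorage::WRITE);
   detector.write(fsWrite);
   fsWrite.release();

   cv::FileStorage fsRead("aruco_detector.yml", cv::FileStorage::READ);
   detector.read(fsRead.root());
   fsRead.release();
*/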

const Dictionary& ArucoDetector::getDictionary() const {
    return arucoDetectorImpl->dictionary;
}

void ArucoDetector::setDictionary(const Dictionary& dictionary) {
    arucoDetectorImpl->dictionary = dictionary;
}

const DetectorParameters& ArucoDetector::getDetectorParameters() const {
    return arucoDetectorImpl->detectorParams;
}

void ArucoDetector::setDetectorParameters(const DetectorParameters& detectorParameters) {
    arucoDetectorImpl->detectorParams = detectorParameters;
}

const RefineParameters& ArucoDetector::getRefineParameters() const {
    return arucoDetectorImpl->refineParams;
}

void ArucoDetector::setRefineParameters(const RefineParameters& refineParameters) {
    arucoDetectorImpl->refineParams = refineParameters;
}
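
/* Parameter-tuning sketch (illustrative only): the accessors return const references, so the
   parameters are copied out, modified and set back.

   cv::aruco::DetectorParameters params = detector.getDetectorParameters();
   params.cornerRefinementMethod = (int)cv::aruco::CORNER_REFINE_SUBPIX;
   params.cornerRefinementWinSize = 5;
   detector.setDetectorParameters(params);

   cv::aruco::RefineParameters refine(10.f, 3.f, true);
   detector.setRefineParameters(refine);
*/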

void drawDetectedMarkers(InputOutputArray _image, InputArrayOfArrays _corners,
                         InputArray _ids, Scalar borderColor) {
    CV_Assert(_image.getMat().total() != 0 &&
              (_image.getMat().channels() == 1 || _image.getMat().channels() == 3));
    CV_Assert((_corners.total() == _ids.total()) || _ids.total() == 0);

    // calculate colors
    Scalar textColor, cornerColor;
    textColor = cornerColor = borderColor;
    swap(textColor.val[0], textColor.val[1]);     // text color: swap the first two channels (B and G)
    swap(cornerColor.val[1], cornerColor.val[2]); // corner color: swap the last two channels (G and R)

    int nMarkers = (int)_corners.total();
    for(int i = 0; i < nMarkers; i++) {
        Mat currentMarker = _corners.getMat(i);
        CV_Assert(currentMarker.total() == 4 && currentMarker.channels() == 2);
        if (currentMarker.type() != CV_32SC2)
            currentMarker.convertTo(currentMarker, CV_32SC2);

        // draw marker sides
        for(int j = 0; j < 4; j++) {
            Point p0, p1;
            p0 = currentMarker.ptr<Point>(0)[j];
            p1 = currentMarker.ptr<Point>(0)[(j + 1) % 4];
            line(_image, p0, p1, borderColor, 1);
        }
        // draw first corner mark
        rectangle(_image, currentMarker.ptr<Point>(0)[0] - Point(3, 3),
                  currentMarker.ptr<Point>(0)[0] + Point(3, 3), cornerColor, 1, LINE_AA);

        // draw ID
        if(_ids.total() != 0) {
            Point cent(0, 0);
            for(int p = 0; p < 4; p++)
                cent += currentMarker.ptr<Point>(0)[p];
            cent = cent / 4.;
            stringstream s;
            s << "id=" << _ids.getMat().ptr<int>(0)[i];
            putText(_image, s.str(), cent, FONT_HERSHEY_SIMPLEX, 0.5, textColor, 2);
        }
    }
}
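
/* Drawing sketch (illustrative only): overlaying a detection result on a BGR frame; `frame`,
   `corners` and `ids` are assumed to come from a previous detectMarkers() call.

   cv::Mat vis = frame.clone();
   cv::aruco::drawDetectedMarkers(vis, corners, ids, cv::Scalar(0, 255, 0));
*/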

void generateImageMarker(const Dictionary &dictionary, int id, int sidePixels, OutputArray _img, int borderBits) {
    dictionary.generateImageMarker(id, sidePixels, _img, borderBits);
}
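
/* Marker-image sketch (illustrative only): rendering marker id 23 of DICT_6X6_250 into a
   200x200 image with a 1-module border.

   cv::Mat markerImg;
   cv::aruco::generateImageMarker(cv::aruco::getPredefinedDictionary(cv::aruco::DICT_6X6_250),
                                  23, 200, markerImg, 1);
*/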

}
}