1/*M///////////////////////////////////////////////////////////////////////////////////////
2//
3// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
4//
5// By downloading, copying, installing or using the software you agree to this license.
6// If you do not agree to this license, do not download, install,
7// copy or use the software.
8//
9//
10// License Agreement
11// For Open Source Computer Vision Library
12//
13// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
14// Copyright (C) 2009, Willow Garage Inc., all rights reserved.
15// Third party copyrights are property of their respective owners.
16//
17// Redistribution and use in source and binary forms, with or without modification,
18// are permitted provided that the following conditions are met:
19//
20// * Redistribution's of source code must retain the above copyright notice,
21// this list of conditions and the following disclaimer.
22//
23// * Redistribution's in binary form must reproduce the above copyright notice,
24// this list of conditions and the following disclaimer in the documentation
25// and/or other materials provided with the distribution.
26//
27// * The name of the copyright holders may not be used to endorse or promote products
28// derived from this software without specific prior written permission.
29//
30// This software is provided by the copyright holders and contributors "as is" and
31// any express or implied warranties, including, but not limited to, the implied
32// warranties of merchantability and fitness for a particular purpose are disclaimed.
33// In no event shall the Intel Corporation or contributors be liable for any direct,
34// indirect, incidental, special, exemplary, or consequential damages
35// (including, but not limited to, procurement of substitute goods or services;
36// loss of use, data, or profits; or business interruption) however caused
37// and on any theory of liability, whether in contract, strict liability,
38// or tort (including negligence or otherwise) arising in any way out of
39// the use of this software, even if advised of the possibility of such damage.
40//
41//M*/
42
43#include "opencv2/core/types.hpp"
44#include "precomp.hpp"
45#include "distortion_model.hpp"
46
47#include "calib3d_c_api.h"
48
49#include "undistort.simd.hpp"
50#include "undistort.simd_declarations.hpp" // defines CV_CPU_DISPATCH_MODES_ALL=AVX2,...,BASELINE based on CMakeLists.txt content
51
52namespace cv
53{
54
55Mat getDefaultNewCameraMatrix( InputArray _cameraMatrix, Size imgsize,
56 bool centerPrincipalPoint )
57{
58 Mat cameraMatrix = _cameraMatrix.getMat();
59 if( !centerPrincipalPoint && cameraMatrix.type() == CV_64F )
60 return cameraMatrix;
61
62 Mat newCameraMatrix;
63 cameraMatrix.convertTo(m: newCameraMatrix, CV_64F);
64 if( centerPrincipalPoint )
65 {
66 newCameraMatrix.ptr<double>()[2] = (imgsize.width-1)*0.5;
67 newCameraMatrix.ptr<double>()[5] = (imgsize.height-1)*0.5;
68 }
69 return newCameraMatrix;
70}
71
namespace {
// Builds the parallel body that fills the undistort/rectify maps.
// The per-pixel math lives in undistort.simd.hpp; CV_CPU_DISPATCH picks the
// best compiled SIMD variant (AVX2, ..., baseline) at runtime, per the
// CV_CPU_DISPATCH_MODES_ALL list generated from CMakeLists.txt.
Ptr<ParallelLoopBody> getInitUndistortRectifyMapComputer(Size _size, Mat &_map1, Mat &_map2, int _m1type,
                                                         const double* _ir, Matx33d &_matTilt,
                                                         double _u0, double _v0, double _fx, double _fy,
                                                         double _k1, double _k2, double _p1, double _p2,
                                                         double _k3, double _k4, double _k5, double _k6,
                                                         double _s1, double _s2, double _s3, double _s4)
{
    CV_INSTRUMENT_REGION();

    // The macro expands to return statements for each dispatched variant.
    CV_CPU_DISPATCH(getInitUndistortRectifyMapComputer, (_size, _map1, _map2, _m1type, _ir, _matTilt, _u0, _v0, _fx, _fy, _k1, _k2, _p1, _p2, _k3, _k4, _k5, _k6, _s1, _s2, _s3, _s4),
        CV_CPU_DISPATCH_MODES_ALL);
}
}
86
87void initUndistortRectifyMap( InputArray _cameraMatrix, InputArray _distCoeffs,
88 InputArray _matR, InputArray _newCameraMatrix,
89 Size size, int m1type, OutputArray _map1, OutputArray _map2 )
90{
91 Mat cameraMatrix = _cameraMatrix.getMat(), distCoeffs = _distCoeffs.getMat();
92 Mat matR = _matR.getMat(), newCameraMatrix = _newCameraMatrix.getMat();
93
94 if( m1type <= 0 )
95 m1type = CV_16SC2;
96 CV_Assert( m1type == CV_16SC2 || m1type == CV_32FC1 || m1type == CV_32FC2 );
97 _map1.create( sz: size, type: m1type );
98 Mat map1 = _map1.getMat(), map2;
99 if( m1type != CV_32FC2 )
100 {
101 _map2.create( sz: size, type: m1type == CV_16SC2 ? CV_16UC1 : CV_32FC1 );
102 map2 = _map2.getMat();
103 }
104 else
105 _map2.release();
106
107 Mat_<double> R = Mat_<double>::eye(rows: 3, cols: 3);
108 Mat_<double> A = Mat_<double>(cameraMatrix), Ar;
109
110 if( !newCameraMatrix.empty() )
111 Ar = Mat_<double>(newCameraMatrix);
112 else
113 Ar = getDefaultNewCameraMatrix( cameraMatrix: A, imgsize: size, centerPrincipalPoint: true );
114
115 if( !matR.empty() )
116 R = Mat_<double>(matR);
117
118 if( !distCoeffs.empty() )
119 distCoeffs = Mat_<double>(distCoeffs);
120 else
121 {
122 distCoeffs.create(rows: 14, cols: 1, CV_64F);
123 distCoeffs = 0.;
124 }
125
126 CV_Assert( A.size() == Size(3,3) && A.size() == R.size() );
127 CV_Assert( Ar.size() == Size(3,3) || Ar.size() == Size(4, 3));
128 Mat_<double> iR = (Ar.colRange(startcol: 0,endcol: 3)*R).inv(method: DECOMP_LU);
129 const double* ir = &iR(0,0);
130
131 double u0 = A(0, 2), v0 = A(1, 2);
132 double fx = A(0, 0), fy = A(1, 1);
133
134 CV_Assert( distCoeffs.size() == Size(1, 4) || distCoeffs.size() == Size(4, 1) ||
135 distCoeffs.size() == Size(1, 5) || distCoeffs.size() == Size(5, 1) ||
136 distCoeffs.size() == Size(1, 8) || distCoeffs.size() == Size(8, 1) ||
137 distCoeffs.size() == Size(1, 12) || distCoeffs.size() == Size(12, 1) ||
138 distCoeffs.size() == Size(1, 14) || distCoeffs.size() == Size(14, 1));
139
140 if( distCoeffs.rows != 1 && !distCoeffs.isContinuous() )
141 distCoeffs = distCoeffs.t();
142
143 const double* const distPtr = distCoeffs.ptr<double>();
144 double k1 = distPtr[0];
145 double k2 = distPtr[1];
146 double p1 = distPtr[2];
147 double p2 = distPtr[3];
148 double k3 = distCoeffs.cols + distCoeffs.rows - 1 >= 5 ? distPtr[4] : 0.;
149 double k4 = distCoeffs.cols + distCoeffs.rows - 1 >= 8 ? distPtr[5] : 0.;
150 double k5 = distCoeffs.cols + distCoeffs.rows - 1 >= 8 ? distPtr[6] : 0.;
151 double k6 = distCoeffs.cols + distCoeffs.rows - 1 >= 8 ? distPtr[7] : 0.;
152 double s1 = distCoeffs.cols + distCoeffs.rows - 1 >= 12 ? distPtr[8] : 0.;
153 double s2 = distCoeffs.cols + distCoeffs.rows - 1 >= 12 ? distPtr[9] : 0.;
154 double s3 = distCoeffs.cols + distCoeffs.rows - 1 >= 12 ? distPtr[10] : 0.;
155 double s4 = distCoeffs.cols + distCoeffs.rows - 1 >= 12 ? distPtr[11] : 0.;
156 double tauX = distCoeffs.cols + distCoeffs.rows - 1 >= 14 ? distPtr[12] : 0.;
157 double tauY = distCoeffs.cols + distCoeffs.rows - 1 >= 14 ? distPtr[13] : 0.;
158
159 // Matrix for trapezoidal distortion of tilted image sensor
160 Matx33d matTilt = Matx33d::eye();
161 detail::computeTiltProjectionMatrix(tauX, tauY, matTilt: &matTilt);
162
163 parallel_for_(range: Range(0, size.height), body: *getInitUndistortRectifyMapComputer(
164 size: size, map1&: map1, map2&: map2, m1type: m1type, ir: ir, matTilt&: matTilt, u0: u0, v0: v0,
165 fx: fx, fy: fy, k1: k1, k2: k2, p1: p1, p2: p2, k3: k3, k4: k4, k5: k5, k6: k6, s1: s1, s2: s2, s3: s3, s4: s4));
166}
167
168void initInverseRectificationMap( InputArray _cameraMatrix, InputArray _distCoeffs,
169 InputArray _matR, InputArray _newCameraMatrix,
170 const Size& size, int m1type, OutputArray _map1, OutputArray _map2 )
171{
172 // Parameters
173 Mat cameraMatrix = _cameraMatrix.getMat(), distCoeffs = _distCoeffs.getMat();
174 Mat matR = _matR.getMat(), newCameraMatrix = _newCameraMatrix.getMat();
175
176 // Check m1type validity
177 if( m1type <= 0 )
178 m1type = CV_16SC2;
179 CV_Assert( m1type == CV_16SC2 || m1type == CV_32FC1 || m1type == CV_32FC2 );
180
181 // Init Maps
182 _map1.create( sz: size, type: m1type );
183 Mat map1 = _map1.getMat(), map2;
184 if( m1type != CV_32FC2 )
185 {
186 _map2.create( sz: size, type: m1type == CV_16SC2 ? CV_16UC1 : CV_32FC1 );
187 map2 = _map2.getMat();
188 }
189 else {
190 _map2.release();
191 }
192
193 // Init camera intrinsics
194 Mat_<double> A = Mat_<double>(cameraMatrix), Ar;
195 if( !newCameraMatrix.empty() )
196 Ar = Mat_<double>(newCameraMatrix);
197 else
198 Ar = getDefaultNewCameraMatrix( cameraMatrix: A, imgsize: size, centerPrincipalPoint: true );
199 CV_Assert( A.size() == Size(3,3) );
200 CV_Assert( Ar.size() == Size(3,3) || Ar.size() == Size(4, 3));
201
202 // Init rotation matrix
203 Mat_<double> R = Mat_<double>::eye(rows: 3, cols: 3);
204 if( !matR.empty() )
205 {
206 R = Mat_<double>(matR);
207 //Note, do not inverse
208 }
209 CV_Assert( Size(3,3) == R.size() );
210
211 // Init distortion vector
212 if( !distCoeffs.empty() ){
213 distCoeffs = Mat_<double>(distCoeffs);
214
215 // Fix distortion vector orientation
216 if( distCoeffs.rows != 1 && !distCoeffs.isContinuous() ) {
217 distCoeffs = distCoeffs.t();
218 }
219 }
220
221 // Validate distortion vector size
222 CV_Assert( distCoeffs.empty() || // Empty allows cv::undistortPoints to skip distortion
223 distCoeffs.size() == Size(1, 4) || distCoeffs.size() == Size(4, 1) ||
224 distCoeffs.size() == Size(1, 5) || distCoeffs.size() == Size(5, 1) ||
225 distCoeffs.size() == Size(1, 8) || distCoeffs.size() == Size(8, 1) ||
226 distCoeffs.size() == Size(1, 12) || distCoeffs.size() == Size(12, 1) ||
227 distCoeffs.size() == Size(1, 14) || distCoeffs.size() == Size(14, 1));
228
229 // Create objectPoints
230 std::vector<cv::Point2i> p2i_objPoints;
231 std::vector<cv::Point2f> p2f_objPoints;
232 for (int r = 0; r < size.height; r++)
233 {
234 for (int c = 0; c < size.width; c++)
235 {
236 p2i_objPoints.push_back(x: cv::Point2i(c, r));
237 p2f_objPoints.push_back(x: cv::Point2f(static_cast<float>(c), static_cast<float>(r)));
238 }
239 }
240
241 // Undistort
242 std::vector<cv::Point2f> p2f_objPoints_undistorted;
243 undistortPoints(
244 src: p2f_objPoints,
245 dst: p2f_objPoints_undistorted,
246 cameraMatrix: A,
247 distCoeffs,
248 R: cv::Mat::eye(size: cv::Size(3, 3), CV_64FC1), // R
249 P: cv::Mat::eye(size: cv::Size(3, 3), CV_64FC1) // P = New K
250 );
251
252 // Rectify
253 std::vector<cv::Point2f> p2f_sourcePoints_pinHole;
254 perspectiveTransform(
255 src: p2f_objPoints_undistorted,
256 dst: p2f_sourcePoints_pinHole,
257 m: R
258 );
259
260 // Project points back to camera coordinates.
261 std::vector<cv::Point2f> p2f_sourcePoints;
262 undistortPoints(
263 src: p2f_sourcePoints_pinHole,
264 dst: p2f_sourcePoints,
265 cameraMatrix: cv::Mat::eye(size: cv::Size(3, 3), CV_32FC1), // K
266 distCoeffs: cv::Mat::zeros(size: cv::Size(1, 4), CV_32FC1), // Distortion
267 R: cv::Mat::eye(size: cv::Size(3, 3), CV_32FC1), // R
268 P: Ar // New K
269 );
270
271 // Copy to map
272 if (m1type == CV_16SC2) {
273 for (size_t i=0; i < p2i_objPoints.size(); i++) {
274 map1.at<Vec2s>(i0: p2i_objPoints[i].y, i1: p2i_objPoints[i].x) = Vec2s(saturate_cast<short>(v: p2f_sourcePoints[i].x), saturate_cast<short>(v: p2f_sourcePoints[i].y));
275 }
276 } else if (m1type == CV_32FC2) {
277 for (size_t i=0; i < p2i_objPoints.size(); i++) {
278 map1.at<Vec2f>(i0: p2i_objPoints[i].y, i1: p2i_objPoints[i].x) = Vec2f(p2f_sourcePoints[i]);
279 }
280 } else { // m1type == CV_32FC1
281 for (size_t i=0; i < p2i_objPoints.size(); i++) {
282 map1.at<float>(i0: p2i_objPoints[i].y, i1: p2i_objPoints[i].x) = p2f_sourcePoints[i].x;
283 map2.at<float>(i0: p2i_objPoints[i].y, i1: p2i_objPoints[i].x) = p2f_sourcePoints[i].y;
284 }
285 }
286}
287
288void undistort( InputArray _src, OutputArray _dst, InputArray _cameraMatrix,
289 InputArray _distCoeffs, InputArray _newCameraMatrix )
290{
291 CV_INSTRUMENT_REGION();
292
293 Mat src = _src.getMat(), cameraMatrix = _cameraMatrix.getMat();
294 Mat distCoeffs = _distCoeffs.getMat(), newCameraMatrix = _newCameraMatrix.getMat();
295
296 _dst.create( sz: src.size(), type: src.type() );
297 Mat dst = _dst.getMat();
298
299 CV_Assert( dst.data != src.data );
300
301 int stripe_size0 = std::min(a: std::max(a: 1, b: (1 << 12) / std::max(a: src.cols, b: 1)), b: src.rows);
302 Mat map1(stripe_size0, src.cols, CV_16SC2), map2(stripe_size0, src.cols, CV_16UC1);
303
304 Mat_<double> A, Ar, I = Mat_<double>::eye(rows: 3,cols: 3);
305
306 cameraMatrix.convertTo(m: A, CV_64F);
307 if( !distCoeffs.empty() )
308 distCoeffs = Mat_<double>(distCoeffs);
309 else
310 {
311 distCoeffs.create(rows: 5, cols: 1, CV_64F);
312 distCoeffs = 0.;
313 }
314
315 if( !newCameraMatrix.empty() )
316 newCameraMatrix.convertTo(m: Ar, CV_64F);
317 else
318 A.copyTo(m: Ar);
319
320 double v0 = Ar(1, 2);
321 for( int y = 0; y < src.rows; y += stripe_size0 )
322 {
323 int stripe_size = std::min( a: stripe_size0, b: src.rows - y );
324 Ar(1, 2) = v0 - y;
325 Mat map1_part = map1.rowRange(startrow: 0, endrow: stripe_size),
326 map2_part = map2.rowRange(startrow: 0, endrow: stripe_size),
327 dst_part = dst.rowRange(startrow: y, endrow: y + stripe_size);
328
329 initUndistortRectifyMap( cameraMatrix: A, distCoeffs: distCoeffs, matR: I, newCameraMatrix: Ar, size: Size(src.cols, stripe_size),
330 m1type: map1_part.type(), map1: map1_part, map2: map2_part );
331 remap( src, dst: dst_part, map1: map1_part, map2: map2_part, interpolation: INTER_LINEAR, borderMode: BORDER_CONSTANT );
332 }
333}
334
335}
336
337CV_IMPL void
338cvUndistort2( const CvArr* srcarr, CvArr* dstarr, const CvMat* Aarr, const CvMat* dist_coeffs, const CvMat* newAarr )
339{
340 cv::Mat src = cv::cvarrToMat(arr: srcarr), dst = cv::cvarrToMat(arr: dstarr), dst0 = dst;
341 cv::Mat A = cv::cvarrToMat(arr: Aarr), distCoeffs = cv::cvarrToMat(arr: dist_coeffs), newA;
342 if( newAarr )
343 newA = cv::cvarrToMat(arr: newAarr);
344
345 CV_Assert( src.size() == dst.size() && src.type() == dst.type() );
346 cv::undistort( src: src, dst: dst, cameraMatrix: A, distCoeffs: distCoeffs, newCameraMatrix: newA );
347}
348
349
350CV_IMPL void cvInitUndistortMap( const CvMat* Aarr, const CvMat* dist_coeffs,
351 CvArr* mapxarr, CvArr* mapyarr )
352{
353 cv::Mat A = cv::cvarrToMat(arr: Aarr), distCoeffs = cv::cvarrToMat(arr: dist_coeffs);
354 cv::Mat mapx = cv::cvarrToMat(arr: mapxarr), mapy, mapx0 = mapx, mapy0;
355
356 if( mapyarr )
357 mapy0 = mapy = cv::cvarrToMat(arr: mapyarr);
358
359 cv::initUndistortRectifyMap( cameraMatrix: A, distCoeffs: distCoeffs, matR: cv::Mat(), newCameraMatrix: A,
360 size: mapx.size(), m1type: mapx.type(), map1: mapx, map2: mapy );
361 CV_Assert( mapx0.data == mapx.data && mapy0.data == mapy.data );
362}
363
364void
365cvInitUndistortRectifyMap( const CvMat* Aarr, const CvMat* dist_coeffs,
366 const CvMat *Rarr, const CvMat* ArArr, CvArr* mapxarr, CvArr* mapyarr )
367{
368 cv::Mat A = cv::cvarrToMat(arr: Aarr), distCoeffs, R, Ar;
369 cv::Mat mapx = cv::cvarrToMat(arr: mapxarr), mapy, mapx0 = mapx, mapy0;
370
371 if( mapyarr )
372 mapy0 = mapy = cv::cvarrToMat(arr: mapyarr);
373
374 if( dist_coeffs )
375 distCoeffs = cv::cvarrToMat(arr: dist_coeffs);
376 if( Rarr )
377 R = cv::cvarrToMat(arr: Rarr);
378 if( ArArr )
379 Ar = cv::cvarrToMat(arr: ArArr);
380
381 cv::initUndistortRectifyMap( cameraMatrix: A, distCoeffs: distCoeffs, matR: R, newCameraMatrix: Ar, size: mapx.size(), m1type: mapx.type(), map1: mapx, map2: mapy );
382 CV_Assert( mapx0.data == mapx.data && mapy0.data == mapy.data );
383}
384
385static void cvUndistortPointsInternal( const CvMat* _src, CvMat* _dst, const CvMat* _cameraMatrix,
386 const CvMat* _distCoeffs,
387 const CvMat* matR, const CvMat* matP, cv::TermCriteria criteria)
388{
389 CV_Assert(criteria.isValid());
390 double A[3][3], RR[3][3], k[14]={0,0,0,0,0,0,0,0,0,0,0,0,0,0};
391 CvMat matA=cvMat(rows: 3, cols: 3, CV_64F, data: A), _Dk;
392 CvMat _RR=cvMat(rows: 3, cols: 3, CV_64F, data: RR);
393 cv::Matx33d invMatTilt = cv::Matx33d::eye();
394 cv::Matx33d matTilt = cv::Matx33d::eye();
395
396 CV_Assert( CV_IS_MAT(_src) && CV_IS_MAT(_dst) &&
397 (_src->rows == 1 || _src->cols == 1) &&
398 (_dst->rows == 1 || _dst->cols == 1) &&
399 _src->cols + _src->rows - 1 == _dst->rows + _dst->cols - 1 &&
400 (CV_MAT_TYPE(_src->type) == CV_32FC2 || CV_MAT_TYPE(_src->type) == CV_64FC2) &&
401 (CV_MAT_TYPE(_dst->type) == CV_32FC2 || CV_MAT_TYPE(_dst->type) == CV_64FC2));
402
403 CV_Assert( CV_IS_MAT(_cameraMatrix) &&
404 _cameraMatrix->rows == 3 && _cameraMatrix->cols == 3 );
405
406 cvConvert( _cameraMatrix, &matA );
407
408
409 if( _distCoeffs )
410 {
411 CV_Assert( CV_IS_MAT(_distCoeffs) &&
412 (_distCoeffs->rows == 1 || _distCoeffs->cols == 1) &&
413 (_distCoeffs->rows*_distCoeffs->cols == 4 ||
414 _distCoeffs->rows*_distCoeffs->cols == 5 ||
415 _distCoeffs->rows*_distCoeffs->cols == 8 ||
416 _distCoeffs->rows*_distCoeffs->cols == 12 ||
417 _distCoeffs->rows*_distCoeffs->cols == 14));
418
419 _Dk = cvMat( rows: _distCoeffs->rows, cols: _distCoeffs->cols,
420 CV_MAKETYPE(CV_64F,CV_MAT_CN(_distCoeffs->type)), data: k);
421
422 cvConvert( _distCoeffs, &_Dk );
423 if (k[12] != 0 || k[13] != 0)
424 {
425 cv::detail::computeTiltProjectionMatrix<double>(tauX: k[12], tauY: k[13], NULL, NULL, NULL, invMatTilt: &invMatTilt);
426 cv::detail::computeTiltProjectionMatrix<double>(tauX: k[12], tauY: k[13], matTilt: &matTilt, NULL, NULL);
427 }
428 }
429
430 if( matR )
431 {
432 CV_Assert( CV_IS_MAT(matR) && matR->rows == 3 && matR->cols == 3 );
433 cvConvert( matR, &_RR );
434 }
435 else
436 cvSetIdentity(mat: &_RR);
437
438 if( matP )
439 {
440 double PP[3][3];
441 CvMat _P3x3, _PP=cvMat(rows: 3, cols: 3, CV_64F, data: PP);
442 CV_Assert( CV_IS_MAT(matP) && matP->rows == 3 && (matP->cols == 3 || matP->cols == 4));
443 cvConvert( cvGetCols(matP, &_P3x3, 0, 3), &_PP );
444 cvMatMul( &_PP, &_RR, &_RR );
445 }
446
447 const CvPoint2D32f* srcf = (const CvPoint2D32f*)_src->data.ptr;
448 const CvPoint2D64f* srcd = (const CvPoint2D64f*)_src->data.ptr;
449 CvPoint2D32f* dstf = (CvPoint2D32f*)_dst->data.ptr;
450 CvPoint2D64f* dstd = (CvPoint2D64f*)_dst->data.ptr;
451 int stype = CV_MAT_TYPE(_src->type);
452 int dtype = CV_MAT_TYPE(_dst->type);
453 int sstep = _src->rows == 1 ? 1 : _src->step/CV_ELEM_SIZE(stype);
454 int dstep = _dst->rows == 1 ? 1 : _dst->step/CV_ELEM_SIZE(dtype);
455
456 double fx = A[0][0];
457 double fy = A[1][1];
458 double ifx = 1./fx;
459 double ify = 1./fy;
460 double cx = A[0][2];
461 double cy = A[1][2];
462
463 int n = _src->rows + _src->cols - 1;
464 for( int i = 0; i < n; i++ )
465 {
466 double x, y, x0 = 0, y0 = 0, u, v;
467 if( stype == CV_32FC2 )
468 {
469 x = srcf[i*sstep].x;
470 y = srcf[i*sstep].y;
471 }
472 else
473 {
474 x = srcd[i*sstep].x;
475 y = srcd[i*sstep].y;
476 }
477 u = x; v = y;
478 x = (x - cx)*ifx;
479 y = (y - cy)*ify;
480
481 if( _distCoeffs ) {
482 // compensate tilt distortion
483 cv::Vec3d vecUntilt = invMatTilt * cv::Vec3d(x, y, 1);
484 double invProj = vecUntilt(2) ? 1./vecUntilt(2) : 1;
485 x0 = x = invProj * vecUntilt(0);
486 y0 = y = invProj * vecUntilt(1);
487
488 double error = std::numeric_limits<double>::max();
489 // compensate distortion iteratively
490
491 for( int j = 0; ; j++ )
492 {
493 if ((criteria.type & cv::TermCriteria::COUNT) && j >= criteria.maxCount)
494 break;
495 if ((criteria.type & cv::TermCriteria::EPS) && error < criteria.epsilon)
496 break;
497 double r2 = x*x + y*y;
498 double icdist = (1 + ((k[7]*r2 + k[6])*r2 + k[5])*r2)/(1 + ((k[4]*r2 + k[1])*r2 + k[0])*r2);
499 if (icdist < 0) // test: undistortPoints.regression_14583
500 {
501 x = (u - cx)*ifx;
502 y = (v - cy)*ify;
503 break;
504 }
505 double deltaX = 2*k[2]*x*y + k[3]*(r2 + 2*x*x)+ k[8]*r2+k[9]*r2*r2;
506 double deltaY = k[2]*(r2 + 2*y*y) + 2*k[3]*x*y+ k[10]*r2+k[11]*r2*r2;
507 x = (x0 - deltaX)*icdist;
508 y = (y0 - deltaY)*icdist;
509
510 if(criteria.type & cv::TermCriteria::EPS)
511 {
512 double r4, r6, a1, a2, a3, cdist, icdist2;
513 double xd, yd, xd0, yd0;
514 cv::Vec3d vecTilt;
515
516 r2 = x*x + y*y;
517 r4 = r2*r2;
518 r6 = r4*r2;
519 a1 = 2*x*y;
520 a2 = r2 + 2*x*x;
521 a3 = r2 + 2*y*y;
522 cdist = 1 + k[0]*r2 + k[1]*r4 + k[4]*r6;
523 icdist2 = 1./(1 + k[5]*r2 + k[6]*r4 + k[7]*r6);
524 xd0 = x*cdist*icdist2 + k[2]*a1 + k[3]*a2 + k[8]*r2+k[9]*r4;
525 yd0 = y*cdist*icdist2 + k[2]*a3 + k[3]*a1 + k[10]*r2+k[11]*r4;
526
527 vecTilt = matTilt*cv::Vec3d(xd0, yd0, 1);
528 invProj = vecTilt(2) ? 1./vecTilt(2) : 1;
529 xd = invProj * vecTilt(0);
530 yd = invProj * vecTilt(1);
531
532 double x_proj = xd*fx + cx;
533 double y_proj = yd*fy + cy;
534
535 error = sqrt( x: pow(x: x_proj - u, y: 2) + pow(x: y_proj - v, y: 2) );
536 }
537 }
538 }
539
540 double xx = RR[0][0]*x + RR[0][1]*y + RR[0][2];
541 double yy = RR[1][0]*x + RR[1][1]*y + RR[1][2];
542 double ww = 1./(RR[2][0]*x + RR[2][1]*y + RR[2][2]);
543 x = xx*ww;
544 y = yy*ww;
545
546 if( dtype == CV_32FC2 )
547 {
548 dstf[i*dstep].x = (float)x;
549 dstf[i*dstep].y = (float)y;
550 }
551 else
552 {
553 dstd[i*dstep].x = x;
554 dstd[i*dstep].y = y;
555 }
556 }
557}
558
559void cvUndistortPoints(const CvMat* _src, CvMat* _dst, const CvMat* _cameraMatrix,
560 const CvMat* _distCoeffs,
561 const CvMat* matR, const CvMat* matP)
562{
563 cvUndistortPointsInternal(_src, _dst, _cameraMatrix, _distCoeffs, matR, matP,
564 criteria: cv::TermCriteria(cv::TermCriteria::COUNT, 5, 0.01));
565}
566
567namespace cv {
568
569void undistortPoints(InputArray _src, OutputArray _dst,
570 InputArray _cameraMatrix,
571 InputArray _distCoeffs,
572 InputArray _Rmat,
573 InputArray _Pmat)
574{
575 undistortPoints(src: _src, dst: _dst, cameraMatrix: _cameraMatrix, distCoeffs: _distCoeffs, R: _Rmat, P: _Pmat, criteria: TermCriteria(TermCriteria::MAX_ITER, 5, 0.01));
576}
577
578void undistortPoints(InputArray _src, OutputArray _dst,
579 InputArray _cameraMatrix,
580 InputArray _distCoeffs,
581 InputArray _Rmat,
582 InputArray _Pmat,
583 TermCriteria criteria)
584{
585 Mat src = _src.getMat(), cameraMatrix = _cameraMatrix.getMat();
586 Mat distCoeffs = _distCoeffs.getMat(), R = _Rmat.getMat(), P = _Pmat.getMat();
587
588 int npoints = src.checkVector(elemChannels: 2), depth = src.depth();
589 if (npoints < 0)
590 src = src.t();
591 npoints = src.checkVector(elemChannels: 2);
592 CV_Assert(npoints >= 0 && src.isContinuous() && (depth == CV_32F || depth == CV_64F));
593
594 if (src.cols == 2)
595 src = src.reshape(cn: 2);
596
597 _dst.create(rows: npoints, cols: 1, CV_MAKETYPE(depth, 2), i: -1, allowTransposed: true);
598 Mat dst = _dst.getMat();
599
600 CvMat _csrc = cvMat(m: src), _cdst = cvMat(m: dst), _ccameraMatrix = cvMat(m: cameraMatrix);
601 CvMat matR, matP, _cdistCoeffs, *pR=0, *pP=0, *pD=0;
602 if( !R.empty() )
603 pR = &(matR = cvMat(m: R));
604 if( !P.empty() )
605 pP = &(matP = cvMat(m: P));
606 if( !distCoeffs.empty() )
607 pD = &(_cdistCoeffs = cvMat(m: distCoeffs));
608 cvUndistortPointsInternal(src: &_csrc, dst: &_cdst, cameraMatrix: &_ccameraMatrix, distCoeffs: pD, matR: pR, matP: pP, criteria);
609}
610
611void undistortImagePoints(InputArray src, OutputArray dst, InputArray cameraMatrix, InputArray distCoeffs, TermCriteria termCriteria)
612{
613 undistortPoints(src: src, dst: dst, cameraMatrix: cameraMatrix, distCoeffs: distCoeffs, Rmat: noArray(), Pmat: cameraMatrix, criteria: termCriteria);
614}
615
616static Point2f mapPointSpherical(const Point2f& p, float alpha, Vec4d* J, enum UndistortTypes projType)
617{
618 double x = p.x, y = p.y;
619 double beta = 1 + 2*alpha;
620 double v = x*x + y*y + 1, iv = 1/v;
621 double u = std::sqrt(x: beta*v + alpha*alpha);
622
623 double k = (u - alpha)*iv;
624 double kv = (v*beta/u - (u - alpha)*2)*iv*iv;
625 double kx = kv*x, ky = kv*y;
626
627 if( projType == PROJ_SPHERICAL_ORTHO )
628 {
629 if(J)
630 *J = Vec4d(kx*x + k, kx*y, ky*x, ky*y + k);
631 return Point2f((float)(x*k), (float)(y*k));
632 }
633 if( projType == PROJ_SPHERICAL_EQRECT )
634 {
635 // equirectangular
636 double iR = 1/(alpha + 1);
637 double x1 = std::max(a: std::min(a: x*k*iR, b: 1.), b: -1.);
638 double y1 = std::max(a: std::min(a: y*k*iR, b: 1.), b: -1.);
639
640 if(J)
641 {
642 double fx1 = iR/std::sqrt(x: 1 - x1*x1);
643 double fy1 = iR/std::sqrt(x: 1 - y1*y1);
644 *J = Vec4d(fx1*(kx*x + k), fx1*ky*x, fy1*kx*y, fy1*(ky*y + k));
645 }
646 return Point2f((float)asin(x: x1), (float)asin(x: y1));
647 }
648 CV_Error(Error::StsBadArg, "Unknown projection type");
649}
650
651
652static Point2f invMapPointSpherical(Point2f _p, float alpha, enum UndistortTypes projType)
653{
654 double eps = 1e-12;
655 Vec2d p(_p.x, _p.y), q(_p.x, _p.y), err;
656 Vec4d J;
657 int i, maxiter = 5;
658
659 for( i = 0; i < maxiter; i++ )
660 {
661 Point2f p1 = mapPointSpherical(p: Point2f((float)q[0], (float)q[1]), alpha, J: &J, projType);
662 err = Vec2d(p1.x, p1.y) - p;
663 if( err[0]*err[0] + err[1]*err[1] < eps )
664 break;
665
666 Vec4d JtJ(J[0]*J[0] + J[2]*J[2], J[0]*J[1] + J[2]*J[3],
667 J[0]*J[1] + J[2]*J[3], J[1]*J[1] + J[3]*J[3]);
668 double d = JtJ[0]*JtJ[3] - JtJ[1]*JtJ[2];
669 d = d ? 1./d : 0;
670 Vec4d iJtJ(JtJ[3]*d, -JtJ[1]*d, -JtJ[2]*d, JtJ[0]*d);
671 Vec2d JtErr(J[0]*err[0] + J[2]*err[1], J[1]*err[0] + J[3]*err[1]);
672
673 q -= Vec2d(iJtJ[0]*JtErr[0] + iJtJ[1]*JtErr[1], iJtJ[2]*JtErr[0] + iJtJ[3]*JtErr[1]);
674 //Matx22d J(kx*x + k, kx*y, ky*x, ky*y + k);
675 //q -= Vec2d((J.t()*J).inv()*(J.t()*err));
676 }
677
678 return i < maxiter ? Point2f((float)q[0], (float)q[1]) : Point2f(-FLT_MAX, -FLT_MAX);
679}
680
681float initWideAngleProjMap(InputArray _cameraMatrix0, InputArray _distCoeffs0,
682 Size imageSize, int destImageWidth, int m1type,
683 OutputArray _map1, OutputArray _map2,
684 enum UndistortTypes projType, double _alpha)
685{
686 Mat cameraMatrix0 = _cameraMatrix0.getMat(), distCoeffs0 = _distCoeffs0.getMat();
687 double k[14] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0}, M[9]={0,0,0,0,0,0,0,0,0};
688 Mat distCoeffs(distCoeffs0.rows, distCoeffs0.cols, CV_MAKETYPE(CV_64F,distCoeffs0.channels()), k);
689 Mat cameraMatrix(3,3,CV_64F,M);
690 Point2f scenter((float)cameraMatrix.at<double>(i0: 0,i1: 2), (float)cameraMatrix.at<double>(i0: 1,i1: 2));
691 Point2f dcenter((destImageWidth-1)*0.5f, 0.f);
692 float xmin = FLT_MAX, xmax = -FLT_MAX, ymin = FLT_MAX, ymax = -FLT_MAX;
693 int N = 9;
694 std::vector<Point2f> uvec(1), vvec(1);
695 Mat I = Mat::eye(rows: 3,cols: 3,CV_64F);
696 float alpha = (float)_alpha;
697
698 int ndcoeffs = distCoeffs0.cols*distCoeffs0.rows*distCoeffs0.channels();
699 CV_Assert((distCoeffs0.cols == 1 || distCoeffs0.rows == 1) &&
700 (ndcoeffs == 4 || ndcoeffs == 5 || ndcoeffs == 8 || ndcoeffs == 12 || ndcoeffs == 14));
701 CV_Assert(cameraMatrix0.size() == Size(3,3));
702 distCoeffs0.convertTo(m: distCoeffs,CV_64F);
703 cameraMatrix0.convertTo(m: cameraMatrix,CV_64F);
704
705 alpha = std::min(a: alpha, b: 0.999f);
706
707 for( int i = 0; i < N; i++ )
708 for( int j = 0; j < N; j++ )
709 {
710 Point2f p((float)j*imageSize.width/(N-1), (float)i*imageSize.height/(N-1));
711 uvec[0] = p;
712 undistortPoints(src: uvec, dst: vvec, cameraMatrix: cameraMatrix, distCoeffs: distCoeffs, Rmat: I, Pmat: I);
713 Point2f q = mapPointSpherical(p: vvec[0], alpha, J: 0, projType);
714 if( xmin > q.x ) xmin = q.x;
715 if( xmax < q.x ) xmax = q.x;
716 if( ymin > q.y ) ymin = q.y;
717 if( ymax < q.y ) ymax = q.y;
718 }
719
720 float scale = (float)std::min(a: dcenter.x/fabs(x: xmax), b: dcenter.x/fabs(x: xmin));
721 Size dsize(destImageWidth, cvCeil(value: std::max(a: scale*fabs(x: ymin)*2, b: scale*fabs(x: ymax)*2)));
722 dcenter.y = (dsize.height - 1)*0.5f;
723
724 Mat mapxy(dsize, CV_32FC2);
725 double k1 = k[0], k2 = k[1], k3 = k[2], p1 = k[3], p2 = k[4], k4 = k[5], k5 = k[6], k6 = k[7], s1 = k[8], s2 = k[9], s3 = k[10], s4 = k[11];
726 double fx = cameraMatrix.at<double>(i0: 0,i1: 0), fy = cameraMatrix.at<double>(i0: 1,i1: 1), cx = scenter.x, cy = scenter.y;
727 cv::Matx33d matTilt;
728 cv::detail::computeTiltProjectionMatrix(tauX: k[12], tauY: k[13], matTilt: &matTilt);
729
730 for( int y = 0; y < dsize.height; y++ )
731 {
732 Point2f* mxy = mapxy.ptr<Point2f>(y);
733 for( int x = 0; x < dsize.width; x++ )
734 {
735 Point2f p = (Point2f((float)x, (float)y) - dcenter)*(1.f/scale);
736 Point2f q = invMapPointSpherical(p: p, alpha, projType);
737 if( q.x <= -FLT_MAX && q.y <= -FLT_MAX )
738 {
739 mxy[x] = Point2f(-1.f, -1.f);
740 continue;
741 }
742 double x2 = q.x*q.x, y2 = q.y*q.y;
743 double r2 = x2 + y2, _2xy = 2*q.x*q.y;
744 double kr = 1 + ((k3*r2 + k2)*r2 + k1)*r2/(1 + ((k6*r2 + k5)*r2 + k4)*r2);
745 double xd = (q.x*kr + p1*_2xy + p2*(r2 + 2*x2) + s1*r2+ s2*r2*r2);
746 double yd = (q.y*kr + p1*(r2 + 2*y2) + p2*_2xy + s3*r2+ s4*r2*r2);
747 cv::Vec3d vecTilt = matTilt*cv::Vec3d(xd, yd, 1);
748 double invProj = vecTilt(2) ? 1./vecTilt(2) : 1;
749 double u = fx*invProj*vecTilt(0) + cx;
750 double v = fy*invProj*vecTilt(1) + cy;
751
752 mxy[x] = Point2f((float)u, (float)v);
753 }
754 }
755
756 if(m1type == CV_32FC2)
757 {
758 _map1.create(sz: mapxy.size(), type: mapxy.type());
759 Mat map1 = _map1.getMat();
760 mapxy.copyTo(m: map1);
761 _map2.release();
762 }
763 else
764 convertMaps(map1: mapxy, map2: Mat(), dstmap1: _map1, dstmap2: _map2, dstmap1type: m1type, nninterpolation: false);
765
766 return scale;
767}
768
769} // namespace
770/* End of file */
771

// Extracted from: opencv/modules/calib3d/src/undistort.dispatch.cpp
// (code-browser page footer removed)