/*M///////////////////////////////////////////////////////////////////////////////////////
//
//  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
//  By downloading, copying, installing or using the software you agree to this license.
//  If you do not agree to this license, do not download, install,
//  copy or use the software.
//
//
//                          License Agreement
//                For Open Source Computer Vision Library
//
// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
// Copyright (C) 2009, Willow Garage Inc., all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
//   * Redistribution's of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//
//   * Redistribution's in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//
//   * The name of the copyright holders may not be used to endorse or promote products
//     derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/

#include "precomp.hpp"
#include "opencl_kernels_stitching.hpp"
#include <iostream>
namespace cv {

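// Thin wrapper used by the bindings: maps a warp-type name to the matching
// WarperCreator and instantiates the underlying detail::RotationWarper at the
// given scale. Unknown names raise an error in the constructor.
//
// Illustrative usage (a sketch, not code from this file; the image, K and R
// values are assumed to be prepared by the caller):
//
//   PyRotationWarper warper("spherical", 800.0f);   // scale in pixels
//   Mat K, R;            // 3x3 CV_32F intrinsics and rotation
//   UMat src, dst;
//   Point tl = warper.warp(src, K, R, INTER_LINEAR, BORDER_REFLECT, dst);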
PyRotationWarper::PyRotationWarper(String warp_type, float scale)
{
    Ptr<WarperCreator> warper_creator;
    if (warp_type == "plane")
        warper_creator = makePtr<cv::PlaneWarper>();
    else if (warp_type == "affine")
        warper_creator = makePtr<cv::AffineWarper>();
    else if (warp_type == "cylindrical")
        warper_creator = makePtr<cv::CylindricalWarper>();
    else if (warp_type == "spherical")
        warper_creator = makePtr<cv::SphericalWarper>();
    else if (warp_type == "fisheye")
        warper_creator = makePtr<cv::FisheyeWarper>();
    else if (warp_type == "stereographic")
        warper_creator = makePtr<cv::StereographicWarper>();
    else if (warp_type == "compressedPlaneA2B1")
        warper_creator = makePtr<cv::CompressedRectilinearWarper>(2.0f, 1.0f);
    else if (warp_type == "compressedPlaneA1.5B1")
        warper_creator = makePtr<cv::CompressedRectilinearWarper>(1.5f, 1.0f);
    else if (warp_type == "compressedPlanePortraitA2B1")
        warper_creator = makePtr<cv::CompressedRectilinearPortraitWarper>(2.0f, 1.0f);
    else if (warp_type == "compressedPlanePortraitA1.5B1")
        warper_creator = makePtr<cv::CompressedRectilinearPortraitWarper>(1.5f, 1.0f);
    else if (warp_type == "paniniA2B1")
        warper_creator = makePtr<cv::PaniniWarper>(2.0f, 1.0f);
    else if (warp_type == "paniniA1.5B1")
        warper_creator = makePtr<cv::PaniniWarper>(1.5f, 1.0f);
    else if (warp_type == "paniniPortraitA2B1")
        warper_creator = makePtr<cv::PaniniPortraitWarper>(2.0f, 1.0f);
    else if (warp_type == "paniniPortraitA1.5B1")
        warper_creator = makePtr<cv::PaniniPortraitWarper>(1.5f, 1.0f);
    else if (warp_type == "mercator")
        warper_creator = makePtr<cv::MercatorWarper>();
    else if (warp_type == "transverseMercator")
        warper_creator = makePtr<cv::TransverseMercatorWarper>();

    if (warper_creator.get() != nullptr)
    {
        rw = warper_creator->create(scale);
    }
    else
        CV_Error(Error::StsError, "unknown warper: " + warp_type);
}
Point2f PyRotationWarper::warpPoint(const Point2f &pt, InputArray K, InputArray R)
{
    return rw.get()->warpPoint(pt, K, R);
}

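// warpPointBackward is only exposed outside the 4.x branch, hence the version
// guard below.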
#if CV_VERSION_MAJOR != 4
Point2f PyRotationWarper::warpPointBackward(const Point2f& pt, InputArray K, InputArray R)
{
    return rw.get()->warpPointBackward(pt, K, R);
}
#endif

Rect PyRotationWarper::buildMaps(Size src_size, InputArray K, InputArray R, OutputArray xmap, OutputArray ymap)
{
    return rw.get()->buildMaps(src_size, K, R, xmap, ymap);
}

Point PyRotationWarper::warp(InputArray src, InputArray K, InputArray R, int interp_mode, int border_mode,
    OutputArray dst)
{
    if (rw.get() == nullptr)
        CV_Error(Error::StsError, "Warper is null");
    Point p = rw.get()->warp(src, K, R, interp_mode, border_mode, dst);
    return p;
}

void PyRotationWarper::warpBackward(InputArray src, InputArray K, InputArray R, int interp_mode, int border_mode,
    Size dst_size, OutputArray dst)
{
    return rw.get()->warpBackward(src, K, R, interp_mode, border_mode, dst_size, dst);
}

Rect PyRotationWarper::warpRoi(Size src_size, InputArray K, InputArray R)
{
    return rw.get()->warpRoi(src_size, K, R);
}

namespace detail {

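// Caches the camera parameters in flat float arrays so the per-pixel
// mapForward()/mapBackward() projector functions can avoid Mat accesses:
// k = K, rinv = R^T, r_kinv = R * K^-1 (forward), k_rinv = K * R^T (backward),
// t = translation.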
void ProjectorBase::setCameraParams(InputArray _K, InputArray _R, InputArray _T)
{
    Mat K = _K.getMat(), R = _R.getMat(), T = _T.getMat();

    CV_Assert(K.size() == Size(3, 3) && K.type() == CV_32F);
    CV_Assert(R.size() == Size(3, 3) && R.type() == CV_32F);
    CV_Assert((T.size() == Size(1, 3) || T.size() == Size(3, 1)) && T.type() == CV_32F);

    Mat_<float> K_(K);
    k[0] = K_(0,0); k[1] = K_(0,1); k[2] = K_(0,2);
    k[3] = K_(1,0); k[4] = K_(1,1); k[5] = K_(1,2);
    k[6] = K_(2,0); k[7] = K_(2,1); k[8] = K_(2,2);

    Mat_<float> Rinv = R.t();
    rinv[0] = Rinv(0,0); rinv[1] = Rinv(0,1); rinv[2] = Rinv(0,2);
    rinv[3] = Rinv(1,0); rinv[4] = Rinv(1,1); rinv[5] = Rinv(1,2);
    rinv[6] = Rinv(2,0); rinv[7] = Rinv(2,1); rinv[8] = Rinv(2,2);

    Mat_<float> R_Kinv = R * K.inv();
    r_kinv[0] = R_Kinv(0,0); r_kinv[1] = R_Kinv(0,1); r_kinv[2] = R_Kinv(0,2);
    r_kinv[3] = R_Kinv(1,0); r_kinv[4] = R_Kinv(1,1); r_kinv[5] = R_Kinv(1,2);
    r_kinv[6] = R_Kinv(2,0); r_kinv[7] = R_Kinv(2,1); r_kinv[8] = R_Kinv(2,2);

    Mat_<float> K_Rinv = K * Rinv;
    k_rinv[0] = K_Rinv(0,0); k_rinv[1] = K_Rinv(0,1); k_rinv[2] = K_Rinv(0,2);
    k_rinv[3] = K_Rinv(1,0); k_rinv[4] = K_Rinv(1,1); k_rinv[5] = K_Rinv(1,2);
    k_rinv[6] = K_Rinv(2,0); k_rinv[7] = K_Rinv(2,1); k_rinv[8] = K_Rinv(2,2);

    Mat_<float> T_(T.reshape(0, 3));
    t[0] = T_(0,0); t[1] = T_(1,0); t[2] = T_(2,0);
}

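// PlaneWarper point mapping: the overloads without a translation simply pass
// a zero translation vector to the full (K, R, T) versions.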
Point2f PlaneWarper::warpPoint(const Point2f &pt, InputArray K, InputArray R, InputArray T)
{
    projector_.setCameraParams(K, R, T);
    Point2f uv;
    projector_.mapForward(pt.x, pt.y, uv.x, uv.y);
    return uv;
}

Point2f PlaneWarper::warpPoint(const Point2f &pt, InputArray K, InputArray R)
{
    float tz[] = {0.f, 0.f, 0.f};
    Mat_<float> T(3, 1, tz);
    return warpPoint(pt, K, R, T);
}

Point2f PlaneWarper::warpPointBackward(const Point2f &pt, InputArray K, InputArray R, InputArray T)
{
    projector_.setCameraParams(K, R, T);
    Point2f xy;
    projector_.mapBackward(pt.x, pt.y, xy.x, xy.y);
    return xy;
}

Point2f PlaneWarper::warpPointBackward(const Point2f &pt, InputArray K, InputArray R)
{
    float tz[] = {0.f, 0.f, 0.f};
    Mat_<float> T(3, 1, tz);
    return warpPointBackward(pt, K, R, T);
}

Rect PlaneWarper::buildMaps(Size src_size, InputArray K, InputArray R, OutputArray xmap, OutputArray ymap)
{
    return buildMaps(src_size, K, R, Mat::zeros(3, 1, CV_32FC1), xmap, ymap);
}

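// Builds the backward remap tables for the destination ROI. When OpenCL is
// available the buildWarpPlaneMaps kernel is tried first; if it is missing or
// fails to run (or OpenCL is disabled), the maps are filled on the CPU via
// mapBackward().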
Rect PlaneWarper::buildMaps(Size src_size, InputArray K, InputArray R, InputArray T, OutputArray _xmap, OutputArray _ymap)
{
    projector_.setCameraParams(K, R, T);

    Point dst_tl, dst_br;
    detectResultRoi(src_size, dst_tl, dst_br);

    Size dsize(dst_br.x - dst_tl.x + 1, dst_br.y - dst_tl.y + 1);
    _xmap.create(dsize, CV_32FC1);
    _ymap.create(dsize, CV_32FC1);

#ifdef HAVE_OPENCL
    if (ocl::isOpenCLActivated())
    {
        ocl::Kernel k("buildWarpPlaneMaps", ocl::stitching::warpers_oclsrc);
        if (!k.empty())
        {
            int rowsPerWI = ocl::Device::getDefault().isIntel() ? 4 : 1;
            Mat k_rinv(1, 9, CV_32FC1, projector_.k_rinv), t(1, 3, CV_32FC1, projector_.t);
            UMat uxmap = _xmap.getUMat(), uymap = _ymap.getUMat(),
                uk_rinv = k_rinv.getUMat(ACCESS_READ), ut = t.getUMat(ACCESS_READ);

            k.args(ocl::KernelArg::WriteOnlyNoSize(uxmap), ocl::KernelArg::WriteOnly(uymap),
                   ocl::KernelArg::PtrReadOnly(uk_rinv), ocl::KernelArg::PtrReadOnly(ut),
                   dst_tl.x, dst_tl.y, 1/projector_.scale, rowsPerWI);

            size_t globalsize[2] = { (size_t)dsize.width, ((size_t)dsize.height + rowsPerWI - 1) / rowsPerWI };
            if (k.run(2, globalsize, NULL, true))
            {
                CV_IMPL_ADD(CV_IMPL_OCL);
                return Rect(dst_tl, dst_br);
            }
        }
    }
#endif

    Mat xmap = _xmap.getMat(), ymap = _ymap.getMat();

    float x, y;
    for (int v = dst_tl.y; v <= dst_br.y; ++v)
    {
        for (int u = dst_tl.x; u <= dst_br.x; ++u)
        {
            projector_.mapBackward(static_cast<float>(u), static_cast<float>(v), x, y);
            xmap.at<float>(v - dst_tl.y, u - dst_tl.x) = x;
            ymap.at<float>(v - dst_tl.y, u - dst_tl.x) = y;
        }
    }

    return Rect(dst_tl, dst_br);
}

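// Warps the whole image: build the per-pixel maps for the destination ROI,
// then remap() the source into it. The returned point is the top-left corner
// of the warped image in the destination (panorama) coordinate frame.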
Point PlaneWarper::warp(InputArray src, InputArray K, InputArray R, InputArray T, int interp_mode, int border_mode,
    OutputArray dst)
{
    UMat uxmap, uymap;
    Rect dst_roi = buildMaps(src.size(), K, R, T, uxmap, uymap);

    dst.create(dst_roi.height + 1, dst_roi.width + 1, src.type());
    remap(src, dst, uxmap, uymap, interp_mode, border_mode);

    return dst_roi.tl();
}

Point PlaneWarper::warp(InputArray src, InputArray K, InputArray R,
    int interp_mode, int border_mode, OutputArray dst)
{
    float tz[] = {0.f, 0.f, 0.f};
    Mat_<float> T(3, 1, tz);
    return warp(src, K, R, T, interp_mode, border_mode, dst);
}

Rect PlaneWarper::warpRoi(Size src_size, InputArray K, InputArray R, InputArray T)
{
    projector_.setCameraParams(K, R, T);

    Point dst_tl, dst_br;
    detectResultRoi(src_size, dst_tl, dst_br);

    return Rect(dst_tl, Point(dst_br.x + 1, dst_br.y + 1));
}

Rect PlaneWarper::warpRoi(Size src_size, InputArray K, InputArray R)
{
    float tz[] = {0.f, 0.f, 0.f};
    Mat_<float> T(3, 1, tz);
    return warpRoi(src_size, K, R, T);
}

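// For the plane projection the warped image of the source rectangle is
// bounded by the images of its four corners, so mapping the corners forward
// is enough to find the destination ROI.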
void PlaneWarper::detectResultRoi(Size src_size, Point &dst_tl, Point &dst_br)
{
    float tl_uf = std::numeric_limits<float>::max();
    float tl_vf = std::numeric_limits<float>::max();
    float br_uf = -std::numeric_limits<float>::max();
    float br_vf = -std::numeric_limits<float>::max();

    float u, v;

    projector_.mapForward(0, 0, u, v);
    tl_uf = std::min(tl_uf, u); tl_vf = std::min(tl_vf, v);
    br_uf = std::max(br_uf, u); br_vf = std::max(br_vf, v);

    projector_.mapForward(0, static_cast<float>(src_size.height - 1), u, v);
    tl_uf = std::min(tl_uf, u); tl_vf = std::min(tl_vf, v);
    br_uf = std::max(br_uf, u); br_vf = std::max(br_vf, v);

    projector_.mapForward(static_cast<float>(src_size.width - 1), 0, u, v);
    tl_uf = std::min(tl_uf, u); tl_vf = std::min(tl_vf, v);
    br_uf = std::max(br_uf, u); br_vf = std::max(br_vf, v);

    projector_.mapForward(static_cast<float>(src_size.width - 1), static_cast<float>(src_size.height - 1), u, v);
    tl_uf = std::min(tl_uf, u); tl_vf = std::min(tl_vf, v);
    br_uf = std::max(br_uf, u); br_vf = std::max(br_vf, v);

    dst_tl.x = static_cast<int>(tl_uf);
    dst_tl.y = static_cast<int>(tl_vf);
    dst_br.x = static_cast<int>(br_uf);
    dst_br.y = static_cast<int>(br_vf);
}

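// AffineWarper reuses the PlaneWarper machinery by splitting the 3x3 affine
// transform H into a rotation-like part R and a translation T (see
// getRTfromHomogeneous below).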
Point2f AffineWarper::warpPoint(const Point2f &pt, InputArray K, InputArray H)
{
    Mat R, T;
    getRTfromHomogeneous(H, R, T);
    return PlaneWarper::warpPoint(pt, K, R, T);
}

Point2f AffineWarper::warpPointBackward(const Point2f &pt, InputArray K, InputArray H)
{
    Mat R, T;
    getRTfromHomogeneous(H, R, T);
    return PlaneWarper::warpPointBackward(pt, K, R, T);
}

Rect AffineWarper::buildMaps(Size src_size, InputArray K, InputArray H, OutputArray xmap, OutputArray ymap)
{
    Mat R, T;
    getRTfromHomogeneous(H, R, T);
    return PlaneWarper::buildMaps(src_size, K, R, T, xmap, ymap);
}

Point AffineWarper::warp(InputArray src, InputArray K, InputArray H,
    int interp_mode, int border_mode, OutputArray dst)
{
    Mat R, T;
    getRTfromHomogeneous(H, R, T);
    return PlaneWarper::warp(src, K, R, T, interp_mode, border_mode, dst);
}

Rect AffineWarper::warpRoi(Size src_size, InputArray K, InputArray H)
{
    Mat R, T;
    getRTfromHomogeneous(H, R, T);
    return PlaneWarper::warpRoi(src_size, K, R, T);
}

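// Splits an affine transform H (3x3, CV_32F) into its linear part R and
// translation T, then compensates them (R = R^T, T = -R^T * t) so they can be
// fed to the plane warper, which expects camera-style rotation/translation
// rather than a forward pixel transform.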
void AffineWarper::getRTfromHomogeneous(InputArray H_, Mat &R, Mat &T)
{
    Mat H = H_.getMat();
    CV_Assert(H.size() == Size(3, 3) && H.type() == CV_32F);

    T = Mat::zeros(3, 1, CV_32F);
    R = H.clone();

    T.at<float>(0,0) = R.at<float>(0,2);
    T.at<float>(1,0) = R.at<float>(1,2);
    R.at<float>(0,2) = 0.f;
    R.at<float>(1,2) = 0.f;

    // we want to compensate transform to fit into plane warper
    R = R.t();
    T = (R * T) * -1;
}

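// The border-based ROI estimate can miss the image of a sphere pole. If a
// pole projects inside the source image, the vertical extent is stretched to
// cover it (v = CV_PI * scale for one pole, v = 0 for the other).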
void SphericalWarper::detectResultRoi(Size src_size, Point &dst_tl, Point &dst_br)
{
    detectResultRoiByBorder(src_size, dst_tl, dst_br);

    float tl_uf = static_cast<float>(dst_tl.x);
    float tl_vf = static_cast<float>(dst_tl.y);
    float br_uf = static_cast<float>(dst_br.x);
    float br_vf = static_cast<float>(dst_br.y);

    float x = projector_.rinv[1];
    float y = projector_.rinv[4];
    float z = projector_.rinv[7];
    if (y > 0.f)
    {
        float x_ = (projector_.k[0] * x + projector_.k[1] * y) / z + projector_.k[2];
        float y_ = projector_.k[4] * y / z + projector_.k[5];
        if (x_ > 0.f && x_ < src_size.width && y_ > 0.f && y_ < src_size.height)
        {
            tl_uf = std::min(tl_uf, 0.f); tl_vf = std::min(tl_vf, static_cast<float>(CV_PI * projector_.scale));
            br_uf = std::max(br_uf, 0.f); br_vf = std::max(br_vf, static_cast<float>(CV_PI * projector_.scale));
        }
    }

    x = projector_.rinv[1];
    y = -projector_.rinv[4];
    z = projector_.rinv[7];
    if (y > 0.f)
    {
        float x_ = (projector_.k[0] * x + projector_.k[1] * y) / z + projector_.k[2];
        float y_ = projector_.k[4] * y / z + projector_.k[5];
        if (x_ > 0.f && x_ < src_size.width && y_ > 0.f && y_ < src_size.height)
        {
            tl_uf = std::min(tl_uf, 0.f); tl_vf = std::min(tl_vf, static_cast<float>(0));
            br_uf = std::max(br_uf, 0.f); br_vf = std::max(br_vf, static_cast<float>(0));
        }
    }

    dst_tl.x = static_cast<int>(tl_uf);
    dst_tl.y = static_cast<int>(tl_vf);
    dst_br.x = static_cast<int>(br_uf);
    dst_br.y = static_cast<int>(br_vf);
}

void SphericalPortraitWarper::detectResultRoi(Size src_size, Point &dst_tl, Point &dst_br)
{
    detectResultRoiByBorder(src_size, dst_tl, dst_br);

    float tl_uf = static_cast<float>(dst_tl.x);
    float tl_vf = static_cast<float>(dst_tl.y);
    float br_uf = static_cast<float>(dst_br.x);
    float br_vf = static_cast<float>(dst_br.y);

    float x = projector_.rinv[0];
    float y = projector_.rinv[3];
    float z = projector_.rinv[6];
    if (y > 0.f)
    {
        float x_ = (projector_.k[0] * x + projector_.k[1] * y) / z + projector_.k[2];
        float y_ = projector_.k[4] * y / z + projector_.k[5];
        if (x_ > 0.f && x_ < src_size.width && y_ > 0.f && y_ < src_size.height)
        {
            tl_uf = std::min(tl_uf, 0.f); tl_vf = std::min(tl_vf, static_cast<float>(CV_PI * projector_.scale));
            br_uf = std::max(br_uf, 0.f); br_vf = std::max(br_vf, static_cast<float>(CV_PI * projector_.scale));
        }
    }

    x = projector_.rinv[0];
    y = -projector_.rinv[3];
    z = projector_.rinv[6];
    if (y > 0.f)
    {
        float x_ = (projector_.k[0] * x + projector_.k[1] * y) / z + projector_.k[2];
        float y_ = projector_.k[4] * y / z + projector_.k[5];
        if (x_ > 0.f && x_ < src_size.width && y_ > 0.f && y_ < src_size.height)
        {
            tl_uf = std::min(tl_uf, 0.f); tl_vf = std::min(tl_vf, static_cast<float>(0));
            br_uf = std::max(br_uf, 0.f); br_vf = std::max(br_vf, static_cast<float>(0));
        }
    }

    dst_tl.x = static_cast<int>(tl_uf);
    dst_tl.y = static_cast<int>(tl_vf);
    dst_br.x = static_cast<int>(br_uf);
    dst_br.y = static_cast<int>(br_vf);
}

/////////////////////////////////////////// SphericalWarper ////////////////////////////////////////

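// OpenCL fast path for the spherical backward maps; if the kernel is missing
// or fails to run, fall back to the generic per-pixel implementation in
// RotationWarperBase.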
Rect SphericalWarper::buildMaps(Size src_size, InputArray K, InputArray R, OutputArray xmap, OutputArray ymap)
{
#ifdef HAVE_OPENCL
    if (ocl::isOpenCLActivated())
    {
        ocl::Kernel k("buildWarpSphericalMaps", ocl::stitching::warpers_oclsrc);
        if (!k.empty())
        {
            int rowsPerWI = ocl::Device::getDefault().isIntel() ? 4 : 1;
            projector_.setCameraParams(K, R);

            Point dst_tl, dst_br;
            detectResultRoi(src_size, dst_tl, dst_br);

            Size dsize(dst_br.x - dst_tl.x + 1, dst_br.y - dst_tl.y + 1);
            xmap.create(dsize, CV_32FC1);
            ymap.create(dsize, CV_32FC1);

            Mat k_rinv(1, 9, CV_32FC1, projector_.k_rinv);
            UMat uxmap = xmap.getUMat(), uymap = ymap.getUMat(), uk_rinv = k_rinv.getUMat(ACCESS_READ);

            k.args(ocl::KernelArg::WriteOnlyNoSize(uxmap), ocl::KernelArg::WriteOnly(uymap),
                   ocl::KernelArg::PtrReadOnly(uk_rinv), dst_tl.x, dst_tl.y, 1/projector_.scale, rowsPerWI);

            size_t globalsize[2] = { (size_t)dsize.width, ((size_t)dsize.height + rowsPerWI - 1) / rowsPerWI };
            if (k.run(2, globalsize, NULL, true))
            {
                CV_IMPL_ADD(CV_IMPL_OCL);
                return Rect(dst_tl, dst_br);
            }
        }
    }
#endif
    return RotationWarperBase<SphericalProjector>::buildMaps(src_size, K, R, xmap, ymap);
}

Point SphericalWarper::warp(InputArray src, InputArray K, InputArray R, int interp_mode, int border_mode, OutputArray dst)
{
    UMat uxmap, uymap;
    Rect dst_roi = buildMaps(src.size(), K, R, uxmap, uymap);

    dst.create(dst_roi.height + 1, dst_roi.width + 1, src.type());
    remap(src, dst, uxmap, uymap, interp_mode, border_mode);

    return dst_roi.tl();
}

/////////////////////////////////////////// CylindricalWarper ////////////////////////////////////////

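// Same structure as the spherical case: try the buildWarpCylindricalMaps
// OpenCL kernel first, otherwise use the generic RotationWarperBase fallback.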
Rect CylindricalWarper::buildMaps(Size src_size, InputArray K, InputArray R, OutputArray xmap, OutputArray ymap)
{
#ifdef HAVE_OPENCL
    if (ocl::isOpenCLActivated())
    {
        ocl::Kernel k("buildWarpCylindricalMaps", ocl::stitching::warpers_oclsrc);
        if (!k.empty())
        {
            int rowsPerWI = ocl::Device::getDefault().isIntel() ? 4 : 1;
            projector_.setCameraParams(K, R);

            Point dst_tl, dst_br;
            detectResultRoi(src_size, dst_tl, dst_br);

            Size dsize(dst_br.x - dst_tl.x + 1, dst_br.y - dst_tl.y + 1);
            xmap.create(dsize, CV_32FC1);
            ymap.create(dsize, CV_32FC1);

            Mat k_rinv(1, 9, CV_32FC1, projector_.k_rinv);
            UMat uxmap = xmap.getUMat(), uymap = ymap.getUMat(), uk_rinv = k_rinv.getUMat(ACCESS_READ);

            k.args(ocl::KernelArg::WriteOnlyNoSize(uxmap), ocl::KernelArg::WriteOnly(uymap),
                   ocl::KernelArg::PtrReadOnly(uk_rinv), dst_tl.x, dst_tl.y, 1/projector_.scale,
                   rowsPerWI);

            size_t globalsize[2] = { (size_t)dsize.width, ((size_t)dsize.height + rowsPerWI - 1) / rowsPerWI };
            if (k.run(2, globalsize, NULL, true))
            {
                CV_IMPL_ADD(CV_IMPL_OCL);
                return Rect(dst_tl, dst_br);
            }
        }
    }
#endif
    return RotationWarperBase<CylindricalProjector>::buildMaps(src_size, K, R, xmap, ymap);
}

Point CylindricalWarper::warp(InputArray src, InputArray K, InputArray R, int interp_mode, int border_mode, OutputArray dst)
{
    UMat uxmap, uymap;
    Rect dst_roi = buildMaps(src.size(), K, R, uxmap, uymap);

    dst.create(dst_roi.height + 1, dst_roi.width + 1, src.type());
    remap(src, dst, uxmap, uymap, interp_mode, border_mode);

    return dst_roi.tl();
}

} // namespace detail
} // namespace cv