/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
// Copyright (C) 2009, Willow Garage Inc., all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/

#ifndef OPENCV_STITCHING_STITCHER_HPP
#define OPENCV_STITCHING_STITCHER_HPP

#include "opencv2/core.hpp"
#include "opencv2/features2d.hpp"
#include "opencv2/stitching/warpers.hpp"
#include "opencv2/stitching/detail/matchers.hpp"
#include "opencv2/stitching/detail/motion_estimators.hpp"
#include "opencv2/stitching/detail/exposure_compensate.hpp"
#include "opencv2/stitching/detail/seam_finders.hpp"
#include "opencv2/stitching/detail/blenders.hpp"
#include "opencv2/stitching/detail/camera.hpp"


#if defined(Status)
# warning Detected X11 'Status' macro definition; it can cause build conflicts. Please include this header before any X11 headers.
#endif


/**
@defgroup stitching Images stitching

This figure illustrates the stitching module pipeline implemented in the Stitcher class. Using that
class, it is possible to configure/remove some steps, i.e. adjust the stitching pipeline according to
particular needs. All building blocks from the pipeline are available in the detail namespace,
so one can combine and use them separately.

The implemented stitching pipeline is very similar to the one proposed in @cite BL07 .



Camera models
-------------

There are currently 2 camera models implemented in the stitching pipeline.

- _Homography model_ expecting perspective transformations between images,
implemented in @ref cv::detail::BestOf2NearestMatcher cv::detail::HomographyBasedEstimator
cv::detail::BundleAdjusterReproj cv::detail::BundleAdjusterRay
- _Affine model_ expecting affine transformations with 6 DOF or 4 DOF,
implemented in @ref cv::detail::AffineBestOf2NearestMatcher cv::detail::AffineBasedEstimator
cv::detail::BundleAdjusterAffine cv::detail::BundleAdjusterAffinePartial cv::AffineWarper

The homography model is useful for creating photo panoramas captured by a camera,
while the affine-based model can be used to stitch scans and objects captured by
specialized devices. Use @ref cv::Stitcher::create to get a preconfigured pipeline for one
of these models.
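
A minimal sketch of selecting a preconfigured pipeline for each model (error handling omitted):
@code{.cpp}
    // Homography model: photo panoramas taken by a (mostly) rotating camera
    cv::Ptr<cv::Stitcher> panoStitcher = cv::Stitcher::create(cv::Stitcher::PANORAMA);
    // Affine model: scans or images captured by specialized devices
    cv::Ptr<cv::Stitcher> scanStitcher = cv::Stitcher::create(cv::Stitcher::SCANS);
@endcode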

@note
Certain detailed settings of @ref cv::Stitcher might not make sense. In particular,
you should not mix classes implementing the affine model with classes implementing
the homography model, as they work with different transformations.

@{
    @defgroup stitching_match Features Finding and Images Matching
    @defgroup stitching_rotation Rotation Estimation
    @defgroup stitching_autocalib Autocalibration
    @defgroup stitching_warp Images Warping
    @defgroup stitching_seam Seam Estimation
    @defgroup stitching_exposure Exposure Compensation
    @defgroup stitching_blend Image Blenders
@}
*/

namespace cv {

//! @addtogroup stitching
//! @{

/** @example samples/cpp/stitching.cpp
A basic example on image stitching.
*/

/** @example samples/python/stitching.py
A basic example on image stitching in Python.
*/

/** @example samples/cpp/stitching_detailed.cpp
A detailed example on image stitching.
*/

/** @brief High level image stitcher.

It's possible to use this class without being aware of the entire stitching pipeline. However, to
achieve higher stitching stability and better quality of the final images, it is recommended to be
at least familiar with the theory.
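
A minimal usage sketch (it assumes `imgs` already holds the loaded input images; error handling is
reduced to a single status check):
@code{.cpp}
    std::vector<cv::Mat> imgs; // filled with the input images beforehand
    cv::Mat pano;
    cv::Ptr<cv::Stitcher> stitcher = cv::Stitcher::create(cv::Stitcher::PANORAMA);
    cv::Stitcher::Status status = stitcher->stitch(imgs, pano);
    if (status != cv::Stitcher::OK)
    {
        // handle the failure; status identifies the stitching stage that failed
    }
@endcode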

@note
-   A basic example on image stitching can be found at
    opencv_source_code/samples/cpp/stitching.cpp
-   A basic example on image stitching in Python can be found at
    opencv_source_code/samples/python/stitching.py
-   A detailed example on image stitching can be found at
    opencv_source_code/samples/cpp/stitching_detailed.cpp
*/
class CV_EXPORTS_W Stitcher
{
public:
    /**
     * When setting a resolution for stitching, this value is a placeholder
     * for preserving the original resolution.
     */
#if __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1900/*MSVS 2015*/)
    static constexpr double ORIG_RESOL = -1.0;
#else
    // support MSVS 2013
    static const double ORIG_RESOL; // Initialized in stitcher.cpp
#endif

    enum Status
    {
        OK = 0,
        ERR_NEED_MORE_IMGS = 1,
        ERR_HOMOGRAPHY_EST_FAIL = 2,
        ERR_CAMERA_PARAMS_ADJUST_FAIL = 3
    };

    enum Mode
    {
        /** Mode for creating photo panoramas. Expects images under perspective
        transformation and projects the resulting pano to a sphere.

        @sa detail::BestOf2NearestMatcher SphericalWarper
        */
        PANORAMA = 0,
        /** Mode for composing scans. Expects images under affine transformation and does
        not compensate exposure by default.

        @sa detail::AffineBestOf2NearestMatcher AffineWarper
        */
        SCANS = 1,

    };

    /** @brief Creates a Stitcher configured in one of the stitching modes.

    @param mode Scenario for stitcher operation. This is usually determined by the source of the
    images to stitch and their transformation. Default parameters will be chosen for operation in
    the given scenario.
    @return Stitcher class instance.
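
    A sketch of creating a stitcher for scans and then adjusting one of the preconfigured defaults
    afterwards (the threshold value here is only an illustration, not a recommendation):
    @code{.cpp}
        cv::Ptr<cv::Stitcher> stitcher = cv::Stitcher::create(cv::Stitcher::SCANS);
        stitcher->setPanoConfidenceThresh(1.0); // example tuning of a default parameter
    @endcode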
    */
    CV_WRAP static Ptr<Stitcher> create(Mode mode = Stitcher::PANORAMA);

    CV_WRAP double registrationResol() const { return registr_resol_; }
    CV_WRAP void setRegistrationResol(double resol_mpx) { registr_resol_ = resol_mpx; }

    CV_WRAP double seamEstimationResol() const { return seam_est_resol_; }
    CV_WRAP void setSeamEstimationResol(double resol_mpx) { seam_est_resol_ = resol_mpx; }

    CV_WRAP double compositingResol() const { return compose_resol_; }
    CV_WRAP void setCompositingResol(double resol_mpx) { compose_resol_ = resol_mpx; }

    CV_WRAP double panoConfidenceThresh() const { return conf_thresh_; }
    CV_WRAP void setPanoConfidenceThresh(double conf_thresh) { conf_thresh_ = conf_thresh; }

    CV_WRAP bool waveCorrection() const { return do_wave_correct_; }
    CV_WRAP void setWaveCorrection(bool flag) { do_wave_correct_ = flag; }

    CV_WRAP InterpolationFlags interpolationFlags() const { return interp_flags_; }
    CV_WRAP void setInterpolationFlags(InterpolationFlags interp_flags) { interp_flags_ = interp_flags; }

    detail::WaveCorrectKind waveCorrectKind() const { return wave_correct_kind_; }
    void setWaveCorrectKind(detail::WaveCorrectKind kind) { wave_correct_kind_ = kind; }

    Ptr<Feature2D> featuresFinder() { return features_finder_; }
    Ptr<Feature2D> featuresFinder() const { return features_finder_; }
    void setFeaturesFinder(Ptr<Feature2D> features_finder)
        { features_finder_ = features_finder; }

    Ptr<detail::FeaturesMatcher> featuresMatcher() { return features_matcher_; }
    Ptr<detail::FeaturesMatcher> featuresMatcher() const { return features_matcher_; }
    void setFeaturesMatcher(Ptr<detail::FeaturesMatcher> features_matcher)
        { features_matcher_ = features_matcher; }

    const cv::UMat& matchingMask() const { return matching_mask_; }
    void setMatchingMask(const cv::UMat &mask)
    {
        CV_Assert(mask.type() == CV_8U && mask.cols == mask.rows);
        matching_mask_ = mask.clone();
    }

    Ptr<detail::BundleAdjusterBase> bundleAdjuster() { return bundle_adjuster_; }
    const Ptr<detail::BundleAdjusterBase> bundleAdjuster() const { return bundle_adjuster_; }
    void setBundleAdjuster(Ptr<detail::BundleAdjusterBase> bundle_adjuster)
        { bundle_adjuster_ = bundle_adjuster; }

    Ptr<detail::Estimator> estimator() { return estimator_; }
    const Ptr<detail::Estimator> estimator() const { return estimator_; }
    void setEstimator(Ptr<detail::Estimator> estimator)
        { estimator_ = estimator; }

    Ptr<WarperCreator> warper() { return warper_; }
    const Ptr<WarperCreator> warper() const { return warper_; }
    void setWarper(Ptr<WarperCreator> creator) { warper_ = creator; }

    Ptr<detail::ExposureCompensator> exposureCompensator() { return exposure_comp_; }
    const Ptr<detail::ExposureCompensator> exposureCompensator() const { return exposure_comp_; }
    void setExposureCompensator(Ptr<detail::ExposureCompensator> exposure_comp)
        { exposure_comp_ = exposure_comp; }

    Ptr<detail::SeamFinder> seamFinder() { return seam_finder_; }
    const Ptr<detail::SeamFinder> seamFinder() const { return seam_finder_; }
    void setSeamFinder(Ptr<detail::SeamFinder> seam_finder) { seam_finder_ = seam_finder; }

    Ptr<detail::Blender> blender() { return blender_; }
    const Ptr<detail::Blender> blender() const { return blender_; }
    void setBlender(Ptr<detail::Blender> b) { blender_ = b; }

    /** @brief Tries to match the given images and to estimate the rotation of each camera.

    @note Use this function only if you're aware of the stitching pipeline; otherwise use
    Stitcher::stitch.

    @param images Input images.
    @param masks Masks for each input image specifying where to look for keypoints (optional).
    @return Status code.
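
    A minimal sketch of running registration and compositing as separate steps (it assumes `imgs`
    already holds the input images):
    @code{.cpp}
        cv::Ptr<cv::Stitcher> stitcher = cv::Stitcher::create(cv::Stitcher::PANORAMA);
        cv::Stitcher::Status status = stitcher->estimateTransform(imgs);
        if (status == cv::Stitcher::OK)
        {
            cv::Mat pano;
            status = stitcher->composePanorama(pano);
        }
    @endcode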
    */
    CV_WRAP Status estimateTransform(InputArrayOfArrays images, InputArrayOfArrays masks = noArray());

    /** @brief These functions restore the camera rotation and camera intrinsics of each camera,
    which can be retrieved with a @ref Stitcher::cameras call.

    @param images Input images.
    @param cameras Estimated rotation of cameras for each of the input images.
    @param component Indices (0-based) of images constituting the final panorama (optional).
    @return Status code.
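
    A sketch of reusing parameters estimated earlier by another Stitcher instance (it assumes
    `estimatingStitcher` already ran @ref Stitcher::estimateTransform on the same `imgs`):
    @code{.cpp}
        std::vector<cv::detail::CameraParams> cams = estimatingStitcher->cameras();
        std::vector<int> comp = estimatingStitcher->component();
        cv::Ptr<cv::Stitcher> composingStitcher = cv::Stitcher::create(cv::Stitcher::PANORAMA);
        composingStitcher->setTransform(imgs, cams, comp);
        cv::Mat pano;
        composingStitcher->composePanorama(pano);
    @endcode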
    */
    Status setTransform(InputArrayOfArrays images,
                        const std::vector<detail::CameraParams> &cameras,
                        const std::vector<int> &component);
    /** @overload */
    Status setTransform(InputArrayOfArrays images, const std::vector<detail::CameraParams> &cameras);

    /** @overload */
    CV_WRAP Status composePanorama(OutputArray pano);
    /** @brief These functions try to compose the given images (or images stored internally from other
    function calls) into the final pano under the assumption that the image transformations were
    estimated beforehand.

    @note Use these functions only if you're aware of the stitching pipeline; otherwise use
    Stitcher::stitch.

    @param images Input images.
    @param pano Final pano.
    @return Status code.
    */
    CV_WRAP Status composePanorama(InputArrayOfArrays images, OutputArray pano);

    /** @overload */
    CV_WRAP Status stitch(InputArrayOfArrays images, OutputArray pano);
    /** @brief These functions try to stitch the given images.

    @param images Input images.
    @param masks Masks for each input image specifying where to look for keypoints (optional).
    @param pano Final pano.
    @return Status code.
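
    A sketch of stitching with optional masks that restrict where features are detected (it assumes
    `imgs` and same-sized 8-bit `masks`, non-zero where keypoints may be searched, were prepared
    beforehand):
    @code{.cpp}
        cv::Mat pano;
        cv::Ptr<cv::Stitcher> stitcher = cv::Stitcher::create(cv::Stitcher::PANORAMA);
        cv::Stitcher::Status status = stitcher->stitch(imgs, masks, pano);
    @endcode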
    */
    CV_WRAP Status stitch(InputArrayOfArrays images, InputArrayOfArrays masks, OutputArray pano);

    /** @brief Returns indices of input images used in panorama stitching.
    */
    CV_WRAP std::vector<int> component() const { return indices_; }

    /** Returns estimated camera parameters for all stitched images.
    */
    CV_WRAP std::vector<cv::detail::CameraParams> cameras() const { return cameras_; }
    CV_WRAP double workScale() const { return work_scale_; }

    /** @brief Returns the mask of the panorama.

    The mask is an 8U UMat with the values 0xFF (white) for pixels filled by the input images and
    0 (black) for unused pixels. It can be used as a mask for inpainting.

    @return The mask.
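
    A sketch of filling the unused (black) region of a stitched result by inpainting (it assumes the
    photo module header opencv2/photo.hpp is included, and that `stitcher` and its 8-bit output
    `pano` come from a completed @ref Stitcher::stitch call):
    @code{.cpp}
        cv::Mat inpaintMask;
        cv::compare(stitcher->resultMask(), cv::Scalar::all(0), inpaintMask, cv::CMP_EQ); // 255 where no image data was written
        cv::Mat filled;
        cv::inpaint(pano, inpaintMask, filled, 3.0, cv::INPAINT_TELEA);
    @endcode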
    */
    UMat resultMask() const { return result_mask_; }

private:
    Status matchImages();
    Status estimateCameraParams();

    double registr_resol_;
    double seam_est_resol_;
    double compose_resol_;
    double conf_thresh_;
    InterpolationFlags interp_flags_;
    Ptr<Feature2D> features_finder_;
    Ptr<detail::FeaturesMatcher> features_matcher_;
    cv::UMat matching_mask_;
    Ptr<detail::BundleAdjusterBase> bundle_adjuster_;
    Ptr<detail::Estimator> estimator_;
    bool do_wave_correct_;
    detail::WaveCorrectKind wave_correct_kind_;
    Ptr<WarperCreator> warper_;
    Ptr<detail::ExposureCompensator> exposure_comp_;
    Ptr<detail::SeamFinder> seam_finder_;
    Ptr<detail::Blender> blender_;

    std::vector<cv::UMat> imgs_;
    std::vector<cv::UMat> masks_;
    std::vector<cv::Size> full_img_sizes_;
    std::vector<detail::ImageFeatures> features_;
    std::vector<detail::MatchesInfo> pairwise_matches_;
    std::vector<cv::UMat> seam_est_imgs_;
    std::vector<int> indices_;
    std::vector<detail::CameraParams> cameras_;
    UMat result_mask_;
    double work_scale_;
    double seam_scale_;
    double seam_work_aspect_;
    double warped_image_scale_;
};

/**
 * @deprecated use Stitcher::create
 */
CV_DEPRECATED Ptr<Stitcher> createStitcher(bool try_use_gpu = false);

/**
 * @deprecated use Stitcher::create
 */
CV_DEPRECATED Ptr<Stitcher> createStitcherScans(bool try_use_gpu = false);

//! @} stitching

} // namespace cv

#endif // OPENCV_STITCHING_STITCHER_HPP