1 /*M///////////////////////////////////////////////////////////////////////////////////////
3 // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
5 // By downloading, copying, installing or using the software you agree to this license.
6 // If you do not agree to this license, do not download, install,
7 // copy or use the software.
11 // For Open Source Computer Vision Library
13 // Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
14 // Copyright (C) 2009, Willow Garage Inc., all rights reserved.
15 // Third party copyrights are property of their respective owners.
17 // Redistribution and use in source and binary forms, with or without modification,
18 // are permitted provided that the following conditions are met:
20 // * Redistribution's of source code must retain the above copyright notice,
21 // this list of conditions and the following disclaimer.
23 // * Redistribution's in binary form must reproduce the above copyright notice,
24 // this list of conditions and the following disclaimer in the documentation
25 // and/or other materials provided with the distribution.
27 // * The name of the copyright holders may not be used to endorse or promote products
28 // derived from this software without specific prior written permission.
30 // This software is provided by the copyright holders and contributors "as is" and
31 // any express or implied warranties, including, but not limited to, the implied
32 // warranties of merchantability and fitness for a particular purpose are disclaimed.
33 // In no event shall the Intel Corporation or contributors be liable for any direct,
34 // indirect, incidental, special, exemplary, or consequential damages
35 // (including, but not limited to, procurement of substitute goods or services;
36 // loss of use, data, or profits; or business interruption) however caused
37 // and on any theory of liability, whether in contract, strict liability,
38 // or tort (including negligence or otherwise) arising in any way out of
39 // the use of this software, even if advised of the possibility of such damage.
43 #include "precomp.hpp"
// Factory: builds a Stitcher preconfigured with the library defaults —
// registration at 0.6 Mpx, seam estimation at 0.1 Mpx, compositing at the
// original resolution (ORIG_RESOL), confidence threshold 1, horizontal wave
// correction, best-of-2-nearest feature matching and ray bundle adjustment.
// try_use_gpu: forwarded to the matcher/blender, and (under HAVE_OPENCV_GPU,
// with a CUDA device present) selects GPU variants of the feature finder,
// warper and seam finder. SURF is chosen over ORB when HAVE_OPENCV_NONFREE.
// NOTE(review): partial listing — the embedded original line numbers jump
// (e.g. 59 -> 61), so braces, #else/#endif lines, the declaration of
// `stitcher` and the final `return stitcher;` are not visible here.
49 Stitcher Stitcher::createDefault(bool try_use_gpu)
52 stitcher.setRegistrationResol(0.6);
53 stitcher.setSeamEstimationResol(0.1);
54 stitcher.setCompositingResol(ORIG_RESOL);
55 stitcher.setPanoConfidenceThresh(1);
56 stitcher.setWaveCorrection(true);
57 stitcher.setWaveCorrectKind(detail::WAVE_CORRECT_HORIZ);
58 stitcher.setFeaturesMatcher(new detail::BestOf2NearestMatcher(try_use_gpu));
59 stitcher.setBundleAdjuster(new detail::BundleAdjusterRay());
// GPU path: only taken when a CUDA-capable device is actually available.
61 #ifdef HAVE_OPENCV_GPU
62 if (try_use_gpu && gpu::getCudaEnabledDeviceCount() > 0)
64 #ifdef HAVE_OPENCV_NONFREE
65 stitcher.setFeaturesFinder(new detail::SurfFeaturesFinderGpu());
// presumably an #else sits on the dropped line 66 — ORB is the
// nonfree-unavailable fallback; verify against the full source.
67 stitcher.setFeaturesFinder(new detail::OrbFeaturesFinder());
69 stitcher.setWarper(new SphericalWarperGpu());
70 stitcher.setSeamFinder(new detail::GraphCutSeamFinderGpu());
// CPU fallback path (dropped lines 71-74 presumably close the GPU branch
// and open an else — TODO confirm against the full source).
75 #ifdef HAVE_OPENCV_NONFREE
76 stitcher.setFeaturesFinder(new detail::SurfFeaturesFinder());
78 stitcher.setFeaturesFinder(new detail::OrbFeaturesFinder());
80 stitcher.setWarper(new SphericalWarper());
81 stitcher.setSeamFinder(new detail::GraphCutSeamFinder(detail::GraphCutSeamFinderBase::COST_COLOR));
// Common to both paths: block-wise gain compensation and multi-band blending.
84 stitcher.setExposureCompensator(new detail::BlocksGainCompensator());
85 stitcher.setBlender(new detail::MultiBandBlender(try_use_gpu));
// Convenience overload: registers `images` with no region-of-interest
// restriction by delegating to the (images, rois) overload with an empty
// ROI list. Returns the status produced by that overload.
91 Stitcher::Status Stitcher::estimateTransform(InputArray images)
93 return estimateTransform(images, vector<vector<Rect> >());
// Registration stage: stores the input frames in imgs_, matches features
// across them (matchImages), and, when matching succeeds, estimates the
// per-image camera parameters (estimateCameraParams).
// rois: per-image rectangles restricting feature search (used downstream in
// matchImages via rois_ — the assignment line is not visible in this listing).
// Returns OK on success, otherwise the failure status from matchImages().
// NOTE(review): partial listing — lines 98-106 are partly dropped (braces,
// rois_/status declarations, the early return and the final `return OK;`).
97 Stitcher::Status Stitcher::estimateTransform(InputArray images, const vector<vector<Rect> > &rois)
99 images.getMatVector(imgs_);
104 if ((status = matchImages()) != OK)
107 estimateCameraParams();
// Convenience overload: composes the panorama from the already-registered
// images (estimateTransform must have been called) by delegating to the
// (images, pano) overload with an empty image list.
114 Stitcher::Status Stitcher::composePanorama(OutputArray pano)
116 return composePanorama(vector<Mat>(), pano);
// Compositing stage. When `images` is non-empty it must have exactly as many
// frames as the registered set (CV_Assert below); the frames replace imgs_
// and the seam-scale copies are rebuilt, then re-subset to the connected
// component selected during registration (indices_).
// Visible pipeline: (1) warp seam-scale images and full-white masks with a
// RotationWarper built at warped_image_scale_ * seam_work_aspect_;
// (2) feed the exposure compensator and run the seam finder on the CV_32F
// warped images; (3) per image, rescale intrinsics/corners to compose scale,
// re-warp at full compose resolution, apply exposure compensation, AND the
// warped mask with the dilated+resized seam mask, and feed the blender;
// (4) blend and convert the CV_16SC3 result to CV_8U into `pano`.
// NOTE(review): partial listing — the embedded original line numbers jump,
// so braces, else branches, some declarations (e.g. `Mat full_img, img;`)
// and the final `return OK;` are not visible here.
120 Stitcher::Status Stitcher::composePanorama(InputArray images, OutputArray pano)
122 LOGLN("Warping images (auxiliary)... ");
125 images.getMatVector(imgs);
// Replacement frames must correspond 1:1 with the registered ones.
128 CV_Assert(imgs.size() == imgs_.size());
131 seam_est_imgs_.resize(imgs.size());
// Rebuild the downscaled copies used for seam estimation.
133 for (size_t i = 0; i < imgs.size(); ++i)
136 resize(imgs[i], img, Size(), seam_scale_, seam_scale_);
137 seam_est_imgs_[i] = img.clone();
// Keep only the images that survived leaveBiggestComponent() in matchImages.
140 vector<Mat> seam_est_imgs_subset;
141 vector<Mat> imgs_subset;
143 for (size_t i = 0; i < indices_.size(); ++i)
145 imgs_subset.push_back(imgs_[indices_[i]]);
146 seam_est_imgs_subset.push_back(seam_est_imgs_[indices_[i]]);
149 seam_est_imgs_ = seam_est_imgs_subset;
153 Mat &pano_ = pano.getMatRef();
155 int64 t = getTickCount();
157 vector<Point> corners(imgs_.size());
158 vector<Mat> masks_warped(imgs_.size());
159 vector<Mat> images_warped(imgs_.size());
160 vector<Size> sizes(imgs_.size());
161 vector<Mat> masks(imgs_.size());
163 // Prepare image masks
164 for (size_t i = 0; i < imgs_.size(); ++i)
166 masks[i].create(seam_est_imgs_[i].size(), CV_8U);
167 masks[i].setTo(Scalar::all(255));
170 // Warp images and their masks
171 Ptr<detail::RotationWarper> w = warper_->create(float(warped_image_scale_ * seam_work_aspect_));
172 for (size_t i = 0; i < imgs_.size(); ++i)
// Scale the intrinsic matrix from work scale down to seam-estimation scale.
175 cameras_[i].K().convertTo(K, CV_32F);
176 K(0,0) *= (float)seam_work_aspect_;
177 K(0,2) *= (float)seam_work_aspect_;
178 K(1,1) *= (float)seam_work_aspect_;
179 K(1,2) *= (float)seam_work_aspect_;
181 corners[i] = w->warp(seam_est_imgs_[i], K, cameras_[i].R, INTER_LINEAR, BORDER_REFLECT, images_warped[i]);
182 sizes[i] = images_warped[i].size();
184 w->warp(masks[i], K, cameras_[i].R, INTER_NEAREST, BORDER_CONSTANT, masks_warped[i]);
// Seam finders operate on float images.
187 vector<Mat> images_warped_f(imgs_.size());
188 for (size_t i = 0; i < imgs_.size(); ++i)
189 images_warped[i].convertTo(images_warped_f[i], CV_32F);
191 LOGLN("Warping images, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec");
194 exposure_comp_->feed(corners, images_warped, masks_warped);
195 seam_finder_->find(images_warped_f, corners, masks_warped);
197 // Release unused memory
198 seam_est_imgs_.clear();
199 images_warped.clear();
200 images_warped_f.clear();
203 LOGLN("Compositing...");
206 Mat img_warped, img_warped_s;
207 Mat dilated_mask, seam_mask, mask, mask_warped;
209 //double compose_seam_aspect = 1;
210 double compose_work_aspect = 1;
211 bool is_blender_prepared = false;
213 double compose_scale = 1;
214 bool is_compose_scale_set = false;
217 for (size_t img_idx = 0; img_idx < imgs_.size(); ++img_idx)
219 LOGLN("Compositing image #" << indices_[img_idx] + 1);
221 // Read image and resize it if necessary
222 full_img = imgs_[img_idx];
// One-time setup on the first iteration: pick the compose scale and rescale
// every camera's intrinsics, corner and size from work scale to compose scale.
223 if (!is_compose_scale_set)
225 if (compose_resol_ > 0)
226 compose_scale = min(1.0, sqrt(compose_resol_ * 1e6 / full_img.size().area()));
227 is_compose_scale_set = true;
229 // Compute relative scales
230 //compose_seam_aspect = compose_scale / seam_scale_;
231 compose_work_aspect = compose_scale / work_scale_;
233 // Update warped image scale
234 warped_image_scale_ *= static_cast<float>(compose_work_aspect);
235 w = warper_->create((float)warped_image_scale_);
237 // Update corners and sizes
238 for (size_t i = 0; i < imgs_.size(); ++i)
241 cameras_[i].focal *= compose_work_aspect;
242 cameras_[i].ppx *= compose_work_aspect;
243 cameras_[i].ppy *= compose_work_aspect;
245 // Update corner and size
246 Size sz = full_img_sizes_[i];
// Only bother rescaling when the compose scale differs noticeably from 1.
247 if (std::abs(compose_scale - 1) > 1e-1)
249 sz.width = cvRound(full_img_sizes_[i].width * compose_scale);
250 sz.height = cvRound(full_img_sizes_[i].height * compose_scale);
254 cameras_[i].K().convertTo(K, CV_32F);
255 Rect roi = w->warpRoi(sz, K, cameras_[i].R);
256 corners[i] = roi.tl();
257 sizes[i] = roi.size();
260 if (std::abs(compose_scale - 1) > 1e-1)
261 resize(full_img, img, Size(), compose_scale, compose_scale);
265 Size img_size = img.size();
268 cameras_[img_idx].K().convertTo(K, CV_32F);
270 // Warp the current image
271 w->warp(img, K, cameras_[img_idx].R, INTER_LINEAR, BORDER_REFLECT, img_warped);
273 // Warp the current image mask
274 mask.create(img_size, CV_8U);
275 mask.setTo(Scalar::all(255));
276 w->warp(mask, K, cameras_[img_idx].R, INTER_NEAREST, BORDER_CONSTANT, mask_warped);
278 // Compensate exposure
279 exposure_comp_->apply((int)img_idx, corners[img_idx], img_warped, mask_warped);
// Blender input format is CV_16S; release the float copy promptly.
281 img_warped.convertTo(img_warped_s, CV_16S);
282 img_warped.release();
286 // Make sure seam mask has proper size
287 dilate(masks_warped[img_idx], dilated_mask, Mat());
288 resize(dilated_mask, seam_mask, mask_warped.size());
290 mask_warped = seam_mask & mask_warped;
// Blender is prepared lazily, once corners/sizes reflect compose scale.
292 if (!is_blender_prepared)
294 blender_->prepare(corners, sizes);
295 is_blender_prepared = true;
298 // Blend the current image
299 blender_->feed(img_warped_s, mask_warped, corners[img_idx]);
302 Mat result, result_mask;
303 blender_->blend(result, result_mask);
305 LOGLN("Compositing, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec");
307 // Preliminary result is in CV_16SC3 format, but all values are in [0,255] range,
308 // so convert it to avoid user confusing
309 result.convertTo(pano_, CV_8U);
// One-shot pipeline: register `images` (estimateTransform) and, on success,
// compose the panorama into `pano`. Returns the registration status when it
// is not OK (the early-return line is dropped from this listing), otherwise
// the status of composePanorama().
315 Stitcher::Status Stitcher::stitch(InputArray images, OutputArray pano)
317 Status status = estimateTransform(images);
320 return composePanorama(pano);
// Same one-shot pipeline as stitch(images, pano), but restricts feature
// detection to the supplied per-image ROIs during registration.
// Returns the registration status on failure (early-return line dropped from
// this listing), otherwise the status of composePanorama().
324 Stitcher::Status Stitcher::stitch(InputArray images, const vector<vector<Rect> > &rois, OutputArray pano)
326 Status status = estimateTransform(images, rois);
329 return composePanorama(pano);
// Feature detection and pairwise matching over imgs_.
// Per image: compute the work scale from registr_resol_ (and the seam scale /
// seam_work_aspect_ from seam_est_resol_) on first use, detect features on
// the work-scale image — with rois_[i] when ROIs were supplied (the guarding
// condition sits on dropped lines) — and cache a seam-scale copy.
// Then match all pairs, keep only the largest confident component
// (leaveBiggestComponent with conf_thresh_), and subset imgs_/seam images/
// sizes accordingly, recording the kept indices in indices_.
// Returns ERR_NEED_MORE_IMGS when fewer than two images are supplied or
// survive filtering; otherwise OK (the `return OK;` line is dropped from
// this listing, as are several braces/else lines — original numbers jump).
333 Stitcher::Status Stitcher::matchImages()
335 if ((int)imgs_.size() < 2)
337 LOGLN("Need more images");
338 return ERR_NEED_MORE_IMGS;
342 seam_work_aspect_ = 1;
344 bool is_work_scale_set = false;
345 bool is_seam_scale_set = false;
347 features_.resize(imgs_.size());
348 seam_est_imgs_.resize(imgs_.size());
349 full_img_sizes_.resize(imgs_.size());
351 LOGLN("Finding features...");
352 int64 t = getTickCount();
354 for (size_t i = 0; i < imgs_.size(); ++i)
357 full_img_sizes_[i] = full_img.size();
// Negative registration resolution means: work at full resolution
// (the work_scale_ = 1 assignment sits on a dropped line — TODO confirm).
359 if (registr_resol_ < 0)
363 is_work_scale_set = true;
367 if (!is_work_scale_set)
// Scale so the working image has about registr_resol_ megapixels.
369 work_scale_ = min(1.0, sqrt(registr_resol_ * 1e6 / full_img.size().area()));
370 is_work_scale_set = true;
372 resize(full_img, img, Size(), work_scale_, work_scale_);
374 if (!is_seam_scale_set)
376 seam_scale_ = min(1.0, sqrt(seam_est_resol_ * 1e6 / full_img.size().area()));
377 seam_work_aspect_ = seam_scale_ / work_scale_;
378 is_seam_scale_set = true;
// ROI-restricted detection when rois_ is populated; the selecting condition
// is on dropped lines 380-383 — verify against the full source.
382 (*features_finder_)(img, features_[i]);
384 (*features_finder_)(img, features_[i], rois_[i]);
385 features_[i].img_idx = (int)i;
386 LOGLN("Features in image #" << i+1 << ": " << features_[i].keypoints.size());
388 resize(full_img, img, Size(), seam_scale_, seam_scale_);
389 seam_est_imgs_[i] = img.clone();
392 // Do it to save memory
393 features_finder_->collectGarbage();
397 LOGLN("Finding features, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec");
399 LOG("Pairwise matching");
401 (*features_matcher_)(features_, pairwise_matches_, matching_mask_);
402 features_matcher_->collectGarbage();
403 LOGLN("Pairwise matching, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec");
405 // Leave only images we are sure are from the same panorama
406 indices_ = detail::leaveBiggestComponent(features_, pairwise_matches_, (float)conf_thresh_);
407 vector<Mat> seam_est_imgs_subset;
408 vector<Mat> imgs_subset;
409 vector<Size> full_img_sizes_subset;
410 for (size_t i = 0; i < indices_.size(); ++i)
412 imgs_subset.push_back(imgs_[indices_[i]]);
413 seam_est_imgs_subset.push_back(seam_est_imgs_[indices_[i]]);
414 full_img_sizes_subset.push_back(full_img_sizes_[indices_[i]]);
416 seam_est_imgs_ = seam_est_imgs_subset;
418 full_img_sizes_ = full_img_sizes_subset;
// Filtering may have discarded images; re-check the minimum count.
420 if ((int)imgs_.size() < 2)
422 LOGLN("Need more images");
423 return ERR_NEED_MORE_IMGS;
// Camera parameter estimation for the registered image set:
// (1) homography-based initial estimate, with rotations converted to CV_32F;
// (2) bundle adjustment refinement at conf_thresh_;
// (3) warped_image_scale_ set to the median focal length (mean of the two
//     middle values for an even count);
// (4) optional wave correction of the rotation matrices when
//     do_wave_correct_ is set.
// NOTE(review): partial listing — braces/else and some declarations (e.g.
// the `rmats` vector) sit on dropped lines, and the function's closing lines
// are beyond the visible range of this chunk.
430 void Stitcher::estimateCameraParams()
432 detail::HomographyBasedEstimator estimator;
433 estimator(features_, pairwise_matches_, cameras_);
435 for (size_t i = 0; i < cameras_.size(); ++i)
// Bundle adjustment expects single-precision rotation matrices.
438 cameras_[i].R.convertTo(R, CV_32F);
440 LOGLN("Initial intrinsic parameters #" << indices_[i] + 1 << ":\n " << cameras_[i].K());
443 bundle_adjuster_->setConfThresh(conf_thresh_);
444 (*bundle_adjuster_)(features_, pairwise_matches_, cameras_);
446 // Find median focal length and use it as final image scale
447 vector<double> focals;
448 for (size_t i = 0; i < cameras_.size(); ++i)
450 LOGLN("Camera #" << indices_[i] + 1 << ":\n" << cameras_[i].K());
451 focals.push_back(cameras_[i].focal);
454 sort(focals.begin(), focals.end());
455 if (focals.size() % 2 == 1)
456 warped_image_scale_ = static_cast<float>(focals[focals.size() / 2]);
458 warped_image_scale_ = static_cast<float>(focals[focals.size() / 2 - 1] + focals[focals.size() / 2]) * 0.5f;
460 if (do_wave_correct_)
463 for (size_t i = 0; i < cameras_.size(); ++i)
464 rmats.push_back(cameras_[i].R);
465 detail::waveCorrect(rmats, wave_correct_kind_);
466 for (size_t i = 0; i < cameras_.size(); ++i)
467 cameras_[i].R = rmats[i];