/*M///////////////////////////////////////////////////////////////////////////////////////
//
//  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
//  By downloading, copying, installing or using the software you agree to this license.
//  If you do not agree to this license, do not download, install,
//  copy or use the software.
//
//
//                          License Agreement
//                For Open Source Computer Vision Library
//
// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
// Copyright (C) 2009, Willow Garage Inc., all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
//   * Redistribution's of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//
//   * Redistribution's in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//
//   * The name of the copyright holders may not be used to endorse or promote products
//     derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/

#include "precomp.hpp"

using namespace std;

namespace cv {

Stitcher Stitcher::createDefault(bool try_use_gpu)
{
    Stitcher stitcher;
    stitcher.setRegistrationResol(0.6);
    stitcher.setSeamEstimationResol(0.1);
    stitcher.setCompositingResol(ORIG_RESOL);
    stitcher.setPanoConfidenceThresh(1);
    stitcher.setWaveCorrection(true);
    stitcher.setWaveCorrectKind(detail::WAVE_CORRECT_HORIZ);
    stitcher.setFeaturesMatcher(new detail::BestOf2NearestMatcher(try_use_gpu));
    stitcher.setBundleAdjuster(new detail::BundleAdjusterRay());

#ifdef HAVE_OPENCV_GPU
    if (try_use_gpu && gpu::getCudaEnabledDeviceCount() > 0)
    {
#ifdef HAVE_OPENCV_NONFREE
        stitcher.setFeaturesFinder(new detail::SurfFeaturesFinderGpu());
#else
        stitcher.setFeaturesFinder(new detail::OrbFeaturesFinder());
#endif
        stitcher.setWarper(new SphericalWarperGpu());
        stitcher.setSeamFinder(new detail::GraphCutSeamFinderGpu());
    }
    else
#endif
    {
#ifdef HAVE_OPENCV_NONFREE
        stitcher.setFeaturesFinder(new detail::SurfFeaturesFinder());
#else
        stitcher.setFeaturesFinder(new detail::OrbFeaturesFinder());
#endif
        stitcher.setWarper(new SphericalWarper());
        stitcher.setSeamFinder(new detail::GraphCutSeamFinder(detail::GraphCutSeamFinderBase::COST_COLOR));
    }

    stitcher.setExposureCompensator(new detail::BlocksGainCompensator());
    stitcher.setBlender(new detail::MultiBandBlender(try_use_gpu));

    return stitcher;
}

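// Illustrative usage of the high-level API (kept inside a comment so it does
// not affect compilation; the variable names below are hypothetical):
//
//     vector<Mat> imgs = ...;   // two or more overlapping images
//     Mat pano;
//     Stitcher stitcher = Stitcher::createDefault(false /* try_use_gpu */);
//     Stitcher::Status status = stitcher.stitch(imgs, pano);
//     if (status != Stitcher::OK)
//         ; // handle ERR_NEED_MORE_IMGS or any other failure
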
Stitcher::Status Stitcher::estimateTransform(InputArray images)
{
    return estimateTransform(images, vector<vector<Rect> >());
}


Stitcher::Status Stitcher::estimateTransform(InputArray images, const vector<vector<Rect> > &rois)
{
    images.getMatVector(imgs_);
    rois_ = rois;

    Status status;

    if ((status = matchImages()) != OK)
        return status;

    estimateCameraParams();

    return OK;
}

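// Note on the two-phase API above: estimateTransform() performs registration
// (feature finding, matching and camera estimation) on downscaled copies of
// the inputs, while composePanorama() does the warping, exposure compensation,
// seam finding and blending. A sketch of using the split explicitly
// (illustrative only; variable names are hypothetical):
//
//     Stitcher stitcher = Stitcher::createDefault(false);
//     if (stitcher.estimateTransform(imgs) == Stitcher::OK)
//         stitcher.composePanorama(pano);   // reuses the estimated cameras
//
// composePanorama(images, pano) additionally accepts a new image set of the
// same size and order, so compositing can be run on, e.g., full-resolution
// versions of the images used for registration.
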
Stitcher::Status Stitcher::composePanorama(OutputArray pano)
{
    return composePanorama(vector<Mat>(), pano);
}

Stitcher::Status Stitcher::composePanorama(InputArray images, OutputArray pano)
{
    LOGLN("Warping images (auxiliary)... ");

    vector<Mat> imgs;
    images.getMatVector(imgs);
    if (!imgs.empty())
    {
        CV_Assert(imgs.size() == imgs_.size());

        Mat img;
        seam_est_imgs_.resize(imgs.size());

        for (size_t i = 0; i < imgs.size(); ++i)
        {
            imgs_[i] = imgs[i];
            resize(imgs[i], img, Size(), seam_scale_, seam_scale_);
            seam_est_imgs_[i] = img.clone();
        }

        vector<Mat> seam_est_imgs_subset;
        vector<Mat> imgs_subset;

        for (size_t i = 0; i < indices_.size(); ++i)
        {
            imgs_subset.push_back(imgs_[indices_[i]]);
            seam_est_imgs_subset.push_back(seam_est_imgs_[indices_[i]]);
        }

        seam_est_imgs_ = seam_est_imgs_subset;
        imgs_ = imgs_subset;
    }

    Mat &pano_ = pano.getMatRef();

    int64 t = getTickCount();

    vector<Point> corners(imgs_.size());
    vector<Mat> masks_warped(imgs_.size());
    vector<Mat> images_warped(imgs_.size());
    vector<Size> sizes(imgs_.size());
    vector<Mat> masks(imgs_.size());

    // Prepare image masks
    for (size_t i = 0; i < imgs_.size(); ++i)
    {
        masks[i].create(seam_est_imgs_[i].size(), CV_8U);
        masks[i].setTo(Scalar::all(255));
    }

    // Warp images and their masks
    Ptr<detail::RotationWarper> w = warper_->create(float(warped_image_scale_ * seam_work_aspect_));
    for (size_t i = 0; i < imgs_.size(); ++i)
    {
        Mat_<float> K;
        cameras_[i].K().convertTo(K, CV_32F);

        // Intrinsics were estimated at the registration (work) scale; rescale
        // them by seam_work_aspect_ (= seam_scale_ / work_scale_) so they match
        // the seam estimation images warped here.
        K(0,0) *= (float)seam_work_aspect_;
        K(0,2) *= (float)seam_work_aspect_;
        K(1,1) *= (float)seam_work_aspect_;
        K(1,2) *= (float)seam_work_aspect_;

        corners[i] = w->warp(seam_est_imgs_[i], K, cameras_[i].R, INTER_LINEAR, BORDER_REFLECT, images_warped[i]);
        sizes[i] = images_warped[i].size();

        w->warp(masks[i], K, cameras_[i].R, INTER_NEAREST, BORDER_CONSTANT, masks_warped[i]);
    }

    vector<Mat> images_warped_f(imgs_.size());
    for (size_t i = 0; i < imgs_.size(); ++i)
        images_warped[i].convertTo(images_warped_f[i], CV_32F);

    LOGLN("Warping images, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec");

    // Find seams
    exposure_comp_->feed(corners, images_warped, masks_warped);
    seam_finder_->find(images_warped_f, corners, masks_warped);

    // Release unused memory
    seam_est_imgs_.clear();
    images_warped.clear();
    images_warped_f.clear();
    masks.clear();

    LOGLN("Compositing...");
    t = getTickCount();

    Mat img_warped, img_warped_s;
    Mat dilated_mask, seam_mask, mask, mask_warped;

    //double compose_seam_aspect = 1;
    double compose_work_aspect = 1;
    bool is_blender_prepared = false;

    double compose_scale = 1;
    bool is_compose_scale_set = false;

    Mat full_img, img;
    for (size_t img_idx = 0; img_idx < imgs_.size(); ++img_idx)
    {
        LOGLN("Compositing image #" << indices_[img_idx] + 1);

        // Read image and resize it if necessary
        full_img = imgs_[img_idx];
        if (!is_compose_scale_set)
        {
            if (compose_resol_ > 0)
                compose_scale = min(1.0, sqrt(compose_resol_ * 1e6 / full_img.size().area()));
            is_compose_scale_set = true;

            // Compute relative scales
            //compose_seam_aspect = compose_scale / seam_scale_;
            compose_work_aspect = compose_scale / work_scale_;

            // Update warped image scale
            warped_image_scale_ *= static_cast<float>(compose_work_aspect);
            w = warper_->create((float)warped_image_scale_);

            // Update corners and sizes
            for (size_t i = 0; i < imgs_.size(); ++i)
            {
                // Update intrinsics
                cameras_[i].focal *= compose_work_aspect;
                cameras_[i].ppx *= compose_work_aspect;
                cameras_[i].ppy *= compose_work_aspect;

                // Update corner and size
                Size sz = full_img_sizes_[i];
                if (std::abs(compose_scale - 1) > 1e-1)
                {
                    sz.width = cvRound(full_img_sizes_[i].width * compose_scale);
                    sz.height = cvRound(full_img_sizes_[i].height * compose_scale);
                }

                Mat K;
                cameras_[i].K().convertTo(K, CV_32F);
                Rect roi = w->warpRoi(sz, K, cameras_[i].R);
                corners[i] = roi.tl();
                sizes[i] = roi.size();
            }
        }
        if (std::abs(compose_scale - 1) > 1e-1)
            resize(full_img, img, Size(), compose_scale, compose_scale);
        else
            img = full_img;
        full_img.release();
        Size img_size = img.size();

        Mat K;
        cameras_[img_idx].K().convertTo(K, CV_32F);

        // Warp the current image
        w->warp(img, K, cameras_[img_idx].R, INTER_LINEAR, BORDER_REFLECT, img_warped);

        // Warp the current image mask
        mask.create(img_size, CV_8U);
        mask.setTo(Scalar::all(255));
        w->warp(mask, K, cameras_[img_idx].R, INTER_NEAREST, BORDER_CONSTANT, mask_warped);

        // Compensate exposure
        exposure_comp_->apply((int)img_idx, corners[img_idx], img_warped, mask_warped);

        img_warped.convertTo(img_warped_s, CV_16S);
        img_warped.release();
        img.release();
        mask.release();

        // Make sure seam mask has proper size
        dilate(masks_warped[img_idx], dilated_mask, Mat());
        resize(dilated_mask, seam_mask, mask_warped.size());

        mask_warped = seam_mask & mask_warped;

        if (!is_blender_prepared)
        {
            blender_->prepare(corners, sizes);
            is_blender_prepared = true;
        }

        // Blend the current image
        blender_->feed(img_warped_s, mask_warped, corners[img_idx]);
    }

    Mat result, result_mask;
    blender_->blend(result, result_mask);

    LOGLN("Compositing, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec");

    // The preliminary result is in CV_16SC3 format, but all values lie in the
    // [0, 255] range, so convert it to CV_8U to avoid confusing the user.
    result.convertTo(pano_, CV_8U);

    return OK;
}

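// The blending stage above is pluggable: createDefault() installs a
// detail::MultiBandBlender, and a lighter-weight alternative can be selected
// before stitching. A sketch, assuming detail::FeatherBlender is available in
// this build (illustrative only):
//
//     stitcher.setBlender(new detail::FeatherBlender());
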
Stitcher::Status Stitcher::stitch(InputArray images, OutputArray pano)
{
    Status status = estimateTransform(images);
    if (status != OK)
        return status;
    return composePanorama(pano);
}


Stitcher::Status Stitcher::stitch(InputArray images, const vector<vector<Rect> > &rois, OutputArray pano)
{
    Status status = estimateTransform(images, rois);
    if (status != OK)
        return status;
    return composePanorama(pano);
}

Stitcher::Status Stitcher::matchImages()
{
    if ((int)imgs_.size() < 2)
    {
        LOGLN("Need more images");
        return ERR_NEED_MORE_IMGS;
    }

    work_scale_ = 1;
    seam_work_aspect_ = 1;
    seam_scale_ = 1;
    bool is_work_scale_set = false;
    bool is_seam_scale_set = false;
    Mat full_img, img;
    features_.resize(imgs_.size());
    seam_est_imgs_.resize(imgs_.size());
    full_img_sizes_.resize(imgs_.size());

    LOGLN("Finding features...");
    int64 t = getTickCount();

    for (size_t i = 0; i < imgs_.size(); ++i)
    {
        full_img = imgs_[i];
        full_img_sizes_[i] = full_img.size();

        if (registr_resol_ < 0)
        {
            img = full_img;
            work_scale_ = 1;
            is_work_scale_set = true;
        }
        else
        {
            if (!is_work_scale_set)
            {
                work_scale_ = min(1.0, sqrt(registr_resol_ * 1e6 / full_img.size().area()));
                is_work_scale_set = true;
            }
            resize(full_img, img, Size(), work_scale_, work_scale_);
        }
        if (!is_seam_scale_set)
        {
            seam_scale_ = min(1.0, sqrt(seam_est_resol_ * 1e6 / full_img.size().area()));
            seam_work_aspect_ = seam_scale_ / work_scale_;
            is_seam_scale_set = true;
        }

        if (rois_.empty())
            (*features_finder_)(img, features_[i]);
        else
            (*features_finder_)(img, features_[i], rois_[i]);
        features_[i].img_idx = (int)i;
        LOGLN("Features in image #" << i+1 << ": " << features_[i].keypoints.size());

        resize(full_img, img, Size(), seam_scale_, seam_scale_);
        seam_est_imgs_[i] = img.clone();
    }

    // Release temporary buffers to save memory
    features_finder_->collectGarbage();
    full_img.release();
    img.release();

    LOGLN("Finding features, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec");

    LOG("Pairwise matching");
    t = getTickCount();
    (*features_matcher_)(features_, pairwise_matches_, matching_mask_);
    features_matcher_->collectGarbage();
    LOGLN("Pairwise matching, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec");

    // Keep only the images we are confident belong to the same panorama
    indices_ = detail::leaveBiggestComponent(features_, pairwise_matches_, (float)conf_thresh_);
    vector<Mat> seam_est_imgs_subset;
    vector<Mat> imgs_subset;
    vector<Size> full_img_sizes_subset;
    for (size_t i = 0; i < indices_.size(); ++i)
    {
        imgs_subset.push_back(imgs_[indices_[i]]);
        seam_est_imgs_subset.push_back(seam_est_imgs_[indices_[i]]);
        full_img_sizes_subset.push_back(full_img_sizes_[indices_[i]]);
    }
    seam_est_imgs_ = seam_est_imgs_subset;
    imgs_ = imgs_subset;
    full_img_sizes_ = full_img_sizes_subset;

    if ((int)imgs_.size() < 2)
    {
        LOGLN("Need more images");
        return ERR_NEED_MORE_IMGS;
    }

    return OK;
}

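// Note: leaveBiggestComponent() keeps only the largest group of images that
// are connected by matches with confidence above conf_thresh_, so stitching
// can fail with ERR_NEED_MORE_IMGS even when many images were supplied.
// The threshold is user-tunable (illustrative sketch, value chosen
// arbitrarily):
//
//     stitcher.setPanoConfidenceThresh(0.5);  // keep more loosely matched images
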
void Stitcher::estimateCameraParams()
{
    detail::HomographyBasedEstimator estimator;
    estimator(features_, pairwise_matches_, cameras_);

    for (size_t i = 0; i < cameras_.size(); ++i)
    {
        Mat R;
        cameras_[i].R.convertTo(R, CV_32F);
        cameras_[i].R = R;
        LOGLN("Initial intrinsic parameters #" << indices_[i] + 1 << ":\n " << cameras_[i].K());
    }

    bundle_adjuster_->setConfThresh(conf_thresh_);
    (*bundle_adjuster_)(features_, pairwise_matches_, cameras_);

    // Find median focal length and use it as final image scale
    vector<double> focals;
    for (size_t i = 0; i < cameras_.size(); ++i)
    {
        LOGLN("Camera #" << indices_[i] + 1 << ":\n" << cameras_[i].K());
        focals.push_back(cameras_[i].focal);
    }

    sort(focals.begin(), focals.end());
    if (focals.size() % 2 == 1)
        warped_image_scale_ = static_cast<float>(focals[focals.size() / 2]);
    else
        warped_image_scale_ = static_cast<float>(focals[focals.size() / 2 - 1] + focals[focals.size() / 2]) * 0.5f;

    if (do_wave_correct_)
    {
        vector<Mat> rmats;
        for (size_t i = 0; i < cameras_.size(); ++i)
            rmats.push_back(cameras_[i].R);
        detail::waveCorrect(rmats, wave_correct_kind_);
        for (size_t i = 0; i < cameras_.size(); ++i)
            cameras_[i].R = rmats[i];
    }
}

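// Wave correction above straightens the estimated camera rotations so the
// panorama does not show a "wavy" horizon; it is enabled by default in
// createDefault(). It can be disabled or switched to vertical correction
// before stitching (illustrative sketch):
//
//     stitcher.setWaveCorrection(false);
//     // or:
//     stitcher.setWaveCorrectKind(detail::WAVE_CORRECT_VERT);
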
} // namespace cv