Fixed warper selection bug in stitching_detailed. Removed estimation of aspect ratio...
author    Alexey Spizhevoy <no@email>
Mon, 19 Sep 2011 06:22:40 +0000 (06:22 +0000)
committer Alexey Spizhevoy <no@email>
Mon, 19 Sep 2011 06:22:40 +0000 (06:22 +0000)
modules/stitching/include/opencv2/stitching/detail/motion_estimators.hpp
modules/stitching/src/motion_estimators.cpp
modules/stitching/src/stitcher.cpp
samples/cpp/stitching_detailed.cpp
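
For reference, a minimal caller-side sketch of the new estimator as it appears at the call sites changed below (stitcher.cpp and stitching_detailed.cpp). The surrounding setup of features, pairwise_matches, cameras and conf_thresh is assumed to come from the usual stitching pipeline in those files and is not shown here:

    // Old interface (removed in this patch):
    //   detail::BundleAdjuster adjuster(detail::BundleAdjuster::FOCAL_RAY_SPACE, conf_thresh);
    //
    // New interface: a single reprojection-error minimizer. Each camera now contributes
    // six parameters to the LM solver: focal, ppx, ppy and a 3-component Rodrigues
    // rotation vector; the seventh parameter (aspect ratio) is no longer estimated.
    detail::BundleAdjusterReproj adjuster(conf_thresh);  // conf_thresh: pair confidence threshold, default 1.f
    adjuster(features, pairwise_matches, cameras);       // refines cameras[i].focal, ppx, ppy and R in place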

diff --git a/modules/stitching/include/opencv2/stitching/detail/motion_estimators.hpp b/modules/stitching/include/opencv2/stitching/detail/motion_estimators.hpp
index 2ec1a4e..fcfb50e 100644
--- a/modules/stitching/include/opencv2/stitching/detail/motion_estimators.hpp
+++ b/modules/stitching/include/opencv2/stitching/detail/motion_estimators.hpp
@@ -80,15 +80,11 @@ private:
 };\r
 \r
 \r
-class CV_EXPORTS BundleAdjuster : public Estimator\r
+// Minimizes reprojection error\r
+class CV_EXPORTS BundleAdjusterReproj : public Estimator\r
 {\r
 public:\r
-    enum { NO, RAY_SPACE, FOCAL_RAY_SPACE };\r
-\r
-    BundleAdjuster(int cost_space = FOCAL_RAY_SPACE, float conf_thresh = 1.f) \r
-        : cost_space_(cost_space), conf_thresh_(conf_thresh) {}\r
-\r
-    Mat K;\r
+    BundleAdjusterReproj(float conf_thresh = 1.f) : conf_thresh_(conf_thresh) {}\r
 \r
 private:\r
     void estimate(const std::vector<ImageFeatures> &features, const std::vector<MatchesInfo> &pairwise_matches, \r
@@ -104,7 +100,6 @@ private:
     Mat cameras_;\r
     std::vector<std::pair<int,int> > edges_;\r
 \r
-    int cost_space_;\r
     float conf_thresh_;\r
     Mat err_, err1_, err2_;\r
     Mat J_;\r
diff --git a/modules/stitching/src/motion_estimators.cpp b/modules/stitching/src/motion_estimators.cpp
index 4d5503b..9ebe341 100644
--- a/modules/stitching/src/motion_estimators.cpp
+++ b/modules/stitching/src/motion_estimators.cpp
@@ -155,12 +155,10 @@ void HomographyBasedEstimator::estimate(const vector<ImageFeatures> &features, c
 \r
 //////////////////////////////////////////////////////////////////////////////\r
 \r
-void BundleAdjuster::estimate(const vector<ImageFeatures> &features, const vector<MatchesInfo> &pairwise_matches,\r
-                              vector<CameraParams> &cameras)\r
+void BundleAdjusterReproj::estimate(const vector<ImageFeatures> &features,\r
+                                    const vector<MatchesInfo> &pairwise_matches,\r
+                                    vector<CameraParams> &cameras)\r
 {\r
-    if (cost_space_ == NO)\r
-        return;\r
-\r
     LOG("Bundle adjustment");\r
     int64 t = getTickCount();\r
 \r
@@ -169,14 +167,13 @@ void BundleAdjuster::estimate(const vector<ImageFeatures> &features, const vecto
     pairwise_matches_ = &pairwise_matches[0];\r
 \r
     // Prepare focals and rotations\r
-    cameras_.create(num_images_ * 7, 1, CV_64F);\r
+    cameras_.create(num_images_ * 6, 1, CV_64F);\r
     SVD svd;\r
     for (int i = 0; i < num_images_; ++i)\r
     {\r
-        cameras_.at<double>(i * 7, 0) = cameras[i].focal;\r
-        cameras_.at<double>(i * 7 + 1, 0) = cameras[i].ppx;\r
-        cameras_.at<double>(i * 7 + 2, 0) = cameras[i].ppy;\r
-        cameras_.at<double>(i * 7 + 3, 0) = cameras[i].aspect;\r
+        cameras_.at<double>(i * 6, 0) = cameras[i].focal;\r
+        cameras_.at<double>(i * 6 + 1, 0) = cameras[i].ppx;\r
+        cameras_.at<double>(i * 6 + 2, 0) = cameras[i].ppy;\r
 \r
         svd(cameras[i].R, SVD::FULL_UV);\r
         Mat R = svd.u * svd.vt;\r
@@ -185,9 +182,9 @@ void BundleAdjuster::estimate(const vector<ImageFeatures> &features, const vecto
 \r
         Mat rvec;\r
         Rodrigues(R, rvec); CV_Assert(rvec.type() == CV_32F);\r
-        cameras_.at<double>(i * 7 + 4, 0) = rvec.at<float>(0, 0);\r
-        cameras_.at<double>(i * 7 + 5, 0) = rvec.at<float>(1, 0);\r
-        cameras_.at<double>(i * 7 + 6, 0) = rvec.at<float>(2, 0);\r
+        cameras_.at<double>(i * 6 + 3, 0) = rvec.at<float>(0, 0);\r
+        cameras_.at<double>(i * 6 + 4, 0) = rvec.at<float>(1, 0);\r
+        cameras_.at<double>(i * 6 + 5, 0) = rvec.at<float>(2, 0);\r
     }\r
 \r
     // Select only consistent image pairs for further adjustment\r
@@ -207,7 +204,7 @@ void BundleAdjuster::estimate(const vector<ImageFeatures> &features, const vecto
     for (size_t i = 0; i < edges_.size(); ++i)\r
         total_num_matches_ += static_cast<int>(pairwise_matches[edges_[i].first * num_images_ + edges_[i].second].num_inliers);\r
 \r
-    CvLevMarq solver(num_images_ * 7, total_num_matches_ * 2,\r
+    CvLevMarq solver(num_images_ * 6, total_num_matches_ * 2,\r
                      cvTermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 1000, DBL_EPSILON));\r
 \r
     CvMat matParams = cameras_;\r
@@ -250,14 +247,13 @@ void BundleAdjuster::estimate(const vector<ImageFeatures> &features, const vecto
     // Obtain global motion\r
     for (int i = 0; i < num_images_; ++i)\r
     {\r
-        cameras[i].focal = cameras_.at<double>(i * 7, 0);\r
-        cameras[i].ppx = cameras_.at<double>(i * 7 + 1, 0);\r
-        cameras[i].ppy = cameras_.at<double>(i * 7 + 2, 0);\r
-        cameras[i].aspect = cameras_.at<double>(i * 7 + 3, 0);\r
+        cameras[i].focal = cameras_.at<double>(i * 6, 0);\r
+        cameras[i].ppx = cameras_.at<double>(i * 6 + 1, 0);\r
+        cameras[i].ppy = cameras_.at<double>(i * 6 + 2, 0);\r
         Mat rvec(3, 1, CV_64F);\r
-        rvec.at<double>(0, 0) = cameras_.at<double>(i * 7 + 4, 0);\r
-        rvec.at<double>(1, 0) = cameras_.at<double>(i * 7 + 5, 0);\r
-        rvec.at<double>(2, 0) = cameras_.at<double>(i * 7 + 6, 0);\r
+        rvec.at<double>(0, 0) = cameras_.at<double>(i * 6 + 3, 0);\r
+        rvec.at<double>(1, 0) = cameras_.at<double>(i * 6 + 4, 0);\r
+        rvec.at<double>(2, 0) = cameras_.at<double>(i * 6 + 5, 0);\r
         Rodrigues(rvec, cameras[i].R);\r
         Mat Mf;\r
         cameras[i].R.convertTo(Mf, CV_32F);\r
@@ -276,7 +272,7 @@ void BundleAdjuster::estimate(const vector<ImageFeatures> &features, const vecto
 }\r
 \r
 \r
-void BundleAdjuster::calcError(Mat &err)\r
+void BundleAdjusterReproj::calcError(Mat &err)\r
 {\r
     err.create(total_num_matches_ * 2, 1, CV_64F);\r
 \r
@@ -285,28 +281,26 @@ void BundleAdjuster::calcError(Mat &err)
     {\r
         int i = edges_[edge_idx].first;\r
         int j = edges_[edge_idx].second;\r
-        double f1 = cameras_.at<double>(i * 7, 0);\r
-        double f2 = cameras_.at<double>(j * 7, 0);\r
-        double ppx1 = cameras_.at<double>(i * 7 + 1, 0);\r
-        double ppx2 = cameras_.at<double>(j * 7 + 1, 0);\r
-        double ppy1 = cameras_.at<double>(i * 7 + 2, 0);\r
-        double ppy2 = cameras_.at<double>(j * 7 + 2, 0);\r
-        double a1 = cameras_.at<double>(i * 7 + 3, 0);\r
-        double a2 = cameras_.at<double>(j * 7 + 3, 0);\r
+        double f1 = cameras_.at<double>(i * 6, 0);\r
+        double f2 = cameras_.at<double>(j * 6, 0);\r
+        double ppx1 = cameras_.at<double>(i * 6 + 1, 0);\r
+        double ppx2 = cameras_.at<double>(j * 6 + 1, 0);\r
+        double ppy1 = cameras_.at<double>(i * 6 + 2, 0);\r
+        double ppy2 = cameras_.at<double>(j * 6 + 2, 0);\r
 \r
         double R1[9];\r
         Mat R1_(3, 3, CV_64F, R1);\r
         Mat rvec(3, 1, CV_64F);\r
-        rvec.at<double>(0, 0) = cameras_.at<double>(i * 7 + 4, 0);\r
-        rvec.at<double>(1, 0) = cameras_.at<double>(i * 7 + 5, 0);\r
-        rvec.at<double>(2, 0) = cameras_.at<double>(i * 7 + 6, 0);\r
+        rvec.at<double>(0, 0) = cameras_.at<double>(i * 6 + 3, 0);\r
+        rvec.at<double>(1, 0) = cameras_.at<double>(i * 6 + 4, 0);\r
+        rvec.at<double>(2, 0) = cameras_.at<double>(i * 6 + 5, 0);\r
         Rodrigues(rvec, R1_);\r
 \r
         double R2[9];\r
         Mat R2_(3, 3, CV_64F, R2);\r
-        rvec.at<double>(0, 0) = cameras_.at<double>(j * 7 + 4, 0);\r
-        rvec.at<double>(1, 0) = cameras_.at<double>(j * 7 + 5, 0);\r
-        rvec.at<double>(2, 0) = cameras_.at<double>(j * 7 + 6, 0);\r
+        rvec.at<double>(0, 0) = cameras_.at<double>(j * 6 + 3, 0);\r
+        rvec.at<double>(1, 0) = cameras_.at<double>(j * 6 + 4, 0);\r
+        rvec.at<double>(2, 0) = cameras_.at<double>(j * 6 + 5, 0);\r
         Rodrigues(rvec, R2_);\r
 \r
         const ImageFeatures& features1 = features_[i];\r
@@ -315,11 +309,11 @@ void BundleAdjuster::calcError(Mat &err)
 \r
         Mat_<double> K1 = Mat::eye(3, 3, CV_64F);\r
         K1(0,0) = f1; K1(0,2) = ppx1;\r
-        K1(1,1) = f1*a1; K1(1,2) = ppy1;\r
+        K1(1,1) = f1; K1(1,2) = ppy1;\r
 \r
         Mat_<double> K2 = Mat::eye(3, 3, CV_64F);\r
         K2(0,0) = f2; K2(0,2) = ppx2;\r
-        K2(1,1) = f2*a2; K2(1,2) = ppy2;\r
+        K2(1,1) = f2; K2(1,2) = ppy2;\r
 \r
         Mat_<double> H = K2 * R2_.inv() * R1_ * K1.inv();\r
 \r
@@ -329,8 +323,8 @@ void BundleAdjuster::calcError(Mat &err)
                 continue;\r
             const DMatch& m = matches_info.matches[k];\r
 \r
-            Point2d p1 = features1.keypoints[m.queryIdx].pt;\r
-            Point2d p2 = features2.keypoints[m.trainIdx].pt;\r
+            Point2f p1 = features1.keypoints[m.queryIdx].pt;\r
+            Point2f p2 = features2.keypoints[m.trainIdx].pt;\r
             double x = H(0,0)*p1.x + H(0,1)*p1.y + H(0,2);\r
             double y = H(1,0)*p1.x + H(1,1)*p1.y + H(1,2);\r
             double z = H(2,0)*p1.x + H(2,1)*p1.y + H(2,2);\r
@@ -343,24 +337,24 @@ void BundleAdjuster::calcError(Mat &err)
 }\r
 \r
 \r
-void BundleAdjuster::calcJacobian()\r
+void BundleAdjusterReproj::calcJacobian()\r
 {\r
-    J_.create(total_num_matches_ * 2, num_images_ * 7, CV_64F);\r
+    J_.create(total_num_matches_ * 2, num_images_ * 6, CV_64F);\r
 \r
     double val;\r
-    const double step = 1e-3;\r
+    const double step = 1e-4;\r
 \r
     for (int i = 0; i < num_images_; ++i)\r
     {\r
-        for (int j = 0; j < 7; ++j)\r
+        for (int j = 0; j < 6; ++j)\r
         {\r
-            val = cameras_.at<double>(i * 7 + j, 0);\r
-            cameras_.at<double>(i * 7+ j, 0) = val - step;\r
+            val = cameras_.at<double>(i * 6 + j, 0);\r
+            cameras_.at<double>(i * 6 + j, 0) = val - step;\r
             calcError(err1_);\r
-            cameras_.at<double>(i * 7 + j, 0) = val + step;\r
+            cameras_.at<double>(i * 6 + j, 0) = val + step;\r
             calcError(err2_);\r
-            calcDeriv(err1_, err2_, 2 * step, J_.col(i * 7 + j));\r
-            cameras_.at<double>(i * 7 + j, 0) = val;\r
+            calcDeriv(err1_, err2_, 2 * step, J_.col(i * 6 + j));\r
+            cameras_.at<double>(i * 6 + j, 0) = val;\r
         }\r
     }\r
 }\r
diff --git a/modules/stitching/src/stitcher.cpp b/modules/stitching/src/stitcher.cpp
index 45a9f97..f8362f6 100644
--- a/modules/stitching/src/stitcher.cpp
+++ b/modules/stitching/src/stitcher.cpp
@@ -189,7 +189,7 @@ Stitcher::Status Stitcher::stitch(InputArray imgs_, OutputArray pano_)
         LOGLN("Initial intrinsic parameters #" << indices[i]+1 << ":\n " << cameras[i].K());
     }
 
-    detail::BundleAdjuster adjuster(detail::BundleAdjuster::FOCAL_RAY_SPACE, conf_thresh_);
+    detail::BundleAdjusterReproj adjuster(conf_thresh_);
     adjuster(features, pairwise_matches, cameras);
 
     // Find median focal length
diff --git a/samples/cpp/stitching_detailed.cpp b/samples/cpp/stitching_detailed.cpp
index fc5653c..5ed3d29 100644
--- a/samples/cpp/stitching_detailed.cpp
+++ b/samples/cpp/stitching_detailed.cpp
@@ -79,8 +79,6 @@ void printUsage()
         "  --conf_thresh <float>\n"
         "      Threshold for two images are from the same panorama confidence.\n"
         "      The default is 1.0.\n"
-        "  --ba (no|ray|focal_ray)\n"
-        "      Bundle adjustment cost function. The default is 'focal_ray'.\n"
         "  --wave_correct (no|yes)\n"
         "      Perform wave effect correction. The default is 'yes'.\n"
         "  --save_graph <file_name>\n"
@@ -115,7 +113,6 @@ bool try_gpu = false;
 double work_megapix = 0.6;
 double seam_megapix = 0.1;
 double compose_megapix = -1;
-int ba_space = BundleAdjuster::FOCAL_RAY_SPACE;
 float conf_thresh = 1.f;
 bool wave_correct = true;
 bool save_graph = false;
@@ -184,21 +181,6 @@ int parseCmdArgs(int argc, char** argv)
             match_conf = static_cast<float>(atof(argv[i + 1]));
             i++;
         }
-        else if (string(argv[i]) == "--ba")
-        {
-            if (string(argv[i + 1]) == "no")
-                ba_space = BundleAdjuster::NO;
-            else if (string(argv[i + 1]) == "ray")
-                ba_space = BundleAdjuster::RAY_SPACE;
-            else if (string(argv[i + 1]) == "focal_ray")
-                ba_space = BundleAdjuster::FOCAL_RAY_SPACE;
-            else
-            {
-                cout << "Bad bundle adjustment space\n";
-                return -1;
-            }
-            i++;
-        }
         else if (string(argv[i]) == "--conf_thresh")
         {
             conf_thresh = static_cast<float>(atof(argv[i + 1]));
@@ -431,14 +413,14 @@ int main(int argc, char* argv[])
         LOGLN("Initial focal length #" << indices[i]+1 << ": " << cameras[i].focal);
     }
 
-    BundleAdjuster adjuster(ba_space, conf_thresh);
+    BundleAdjusterReproj adjuster(conf_thresh);
     adjuster(features, pairwise_matches, cameras);
 
     // Find median focal length
     vector<double> focals;
     for (size_t i = 0; i < cameras.size(); ++i)
     {
-        LOGLN("Camera #" << indices[i]+1 << " focal length: " << cameras[i].focal);
+        LOGLN("Camera #" << indices[i]+1 << ":\n" << cameras[i].K());
         focals.push_back(cameras[i].focal);
     }
     nth_element(focals.begin(), focals.begin() + focals.size()/2, focals.end());
@@ -476,16 +458,16 @@ int main(int argc, char* argv[])
 #ifndef ANDROID
     if (try_gpu && gpu::getCudaEnabledDeviceCount() > 0)
     {
-        if (warp_type == "plane") warper_creator = new cv::PlaneWarper();
-        else if (warp_type == "cylindrical") warper_creator = new cv::CylindricalWarper();
-        else if (warp_type == "spherical") warper_creator = new cv::SphericalWarper();
+        if (warp_type == "plane") warper_creator = new cv::PlaneWarperGpu();
+        else if (warp_type == "cylindrical") warper_creator = new cv::CylindricalWarperGpu();
+        else if (warp_type == "spherical") warper_creator = new cv::SphericalWarperGpu();
     }
     else
 #endif
     {
-        if (warp_type == "plane") warper_creator = new cv::PlaneWarperGpu();
-        else if (warp_type == "cylindrical") warper_creator = new cv::CylindricalWarperGpu();
-        else if (warp_type == "spherical") warper_creator = new cv::SphericalWarperGpu();
+        if (warp_type == "plane") warper_creator = new cv::PlaneWarper();
+        else if (warp_type == "cylindrical") warper_creator = new cv::CylindricalWarper();
+        else if (warp_type == "spherical") warper_creator = new cv::SphericalWarper();
     }
 
     if (warper_creator.empty())