From: Vadim Pisarevsky
Date: Wed, 29 Jul 2020 13:28:01 +0000 (+0300)
Subject: Merge pull request #17816 from vpisarev:essential_2cameras
X-Git-Tag: submit/tizen/20210224.033012~2^2~102
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=a2f7ef9d210e5be5741c6e3109c233672534a1ed;p=platform%2Fupstream%2Fopencv.git

Merge pull request #17816 from vpisarev:essential_2cameras

* add findEssentialMat for two different cameras

* added smoke test for the newly added variant of findEssentialMat

Co-authored-by: tompollok
---

diff --git a/modules/calib3d/include/opencv2/calib3d.hpp b/modules/calib3d/include/opencv2/calib3d.hpp
index 79cc992..17544cb 100644
--- a/modules/calib3d/include/opencv2/calib3d.hpp
+++ b/modules/calib3d/include/opencv2/calib3d.hpp
@@ -2522,6 +2522,57 @@ CV_EXPORTS_W Mat findEssentialMat( InputArray points1, InputArray points2,
                                  int method = RANSAC,
                                  double prob = 0.999, double threshold = 1.0,
                                  OutputArray mask = noArray() );
 
+/** @brief Calculates an essential matrix from corresponding points in two images taken with potentially two different cameras.
+
+@param points1 Array of N (N \>= 5) 2D points from the first image. The point coordinates should
+be floating-point (single or double precision).
+@param points2 Array of the second image points of the same size and format as points1.
+@param cameraMatrix1 Camera matrix of the first camera \f$K_1 = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\f$ .
+If points1 are already given in normalized image coordinates (for example, produced by
+`undistortPoints()` with `P = cv::NoArray()`), pass the identity matrix for this parameter.
+@param distCoeffs1 Input vector of distortion coefficients of the first camera
+\f$(k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\f$
+of 4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, zero distortion coefficients are assumed.
+@param cameraMatrix2 Camera matrix of the second camera \f$K_2 = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\f$ .
+If points2 are already given in normalized image coordinates (for example, produced by
+`undistortPoints()` with `P = cv::NoArray()`), pass the identity matrix for this parameter.
+@param distCoeffs2 Input vector of distortion coefficients of the second camera
+\f$(k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\f$
+of 4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, zero distortion coefficients are assumed.
+@param method Method for computing an essential matrix.
+-   **RANSAC** for the RANSAC algorithm.
+-   **LMEDS** for the LMedS algorithm.
+@param prob Parameter used for the RANSAC or LMedS methods only. It specifies a desirable level of
+confidence (probability) that the estimated matrix is correct.
+@param threshold Parameter used for RANSAC. It is the maximum distance from a point to an epipolar
+line in pixels, beyond which the point is considered an outlier and is not used for computing the
+final essential matrix. It can be set to something like 1-3, depending on the accuracy of the
+point localization, image resolution, and the image noise.
+@param mask Output array of N elements, every element of which is set to 0 for outliers and to 1
+for the other points. The array is computed only in the RANSAC and LMedS methods.
+
+This function estimates the essential matrix based on the five-point algorithm solver in @cite Nister03 .
+@cite SteweniusCFS is also a related solution. The epipolar geometry is described by the following equation:
+
+\f[[p_2; 1]^T K_2^{-T} E K_1^{-1} [p_1; 1] = 0\f]
+
+where \f$E\f$ is the essential matrix, \f$K_1\f$ and \f$K_2\f$ are the camera matrices, and \f$p_1\f$ and
+\f$p_2\f$ are corresponding points in the first and the second images, respectively. The result of this
+function may be passed further to decomposeEssentialMat or recoverPose to recover the relative pose
+between cameras.
+ */
+CV_EXPORTS_W Mat findEssentialMat( InputArray points1, InputArray points2,
+                                 InputArray cameraMatrix1, InputArray distCoeffs1,
+                                 InputArray cameraMatrix2, InputArray distCoeffs2,
+                                 int method = RANSAC,
+                                 double prob = 0.999, double threshold = 1.0,
+                                 OutputArray mask = noArray() );
+
 /** @brief Decompose an essential matrix to possible rotations and translation.
 
 @param E The input essential matrix.
diff --git a/modules/calib3d/src/five-point.cpp b/modules/calib3d/src/five-point.cpp
index 5909c2e..c9847b4 100644
--- a/modules/calib3d/src/five-point.cpp
+++ b/modules/calib3d/src/five-point.cpp
@@ -458,6 +458,35 @@ cv::Mat cv::findEssentialMat( InputArray _points1, InputArray _points2, double f
     return cv::findEssentialMat(_points1, _points2, cameraMatrix, method, prob, threshold, _mask);
 }
 
+cv::Mat cv::findEssentialMat( InputArray _points1, InputArray _points2,
+                              InputArray cameraMatrix1, InputArray distCoeffs1,
+                              InputArray cameraMatrix2, InputArray distCoeffs2,
+                              int method, double prob, double threshold, OutputArray _mask)
+{
+    CV_INSTRUMENT_REGION();
+
+    // Undistort the image points, bringing them to normalized coordinates
+    // (i.e. valid for the 3x3 identity "camera matrix").
+    Mat _pointsUndistorted1, _pointsUndistorted2;
+    undistortPoints(_points1, _pointsUndistorted1, cameraMatrix1, distCoeffs1);
+    undistortPoints(_points2, _pointsUndistorted2, cameraMatrix2, distCoeffs2);
+
+    // Scale the points back using the arithmetic mean of the two supplied camera matrices.
+    // Thanks to this two-stage procedure the RANSAC threshold still makes sense, because the
+    // undistorted and rescaled points have a value range similar to the original ones.
+    Mat cm1 = cameraMatrix1.getMat(), cm2 = cameraMatrix2.getMat(), cm0;
+    Mat(cm1 + cm2).convertTo(cm0, CV_64F, 0.5);
+    CV_Assert(cm0.rows == 3 && cm0.cols == 3);
+    CV_Assert(std::abs(cm0.at<double>(2, 0)) < 1e-3 &&
+              std::abs(cm0.at<double>(2, 1)) < 1e-3 &&
+              std::abs(cm0.at<double>(2, 2) - 1.) < 1e-3);
+    Mat affine = cm0.rowRange(0, 2);
+
+    transform(_pointsUndistorted1, _pointsUndistorted1, affine);
+    transform(_pointsUndistorted2, _pointsUndistorted2, affine);
+
+    return findEssentialMat(_pointsUndistorted1, _pointsUndistorted2, cm0, method, prob, threshold, _mask);
+}
+
 int cv::recoverPose( InputArray E, InputArray _points1, InputArray _points2,
                             InputArray _cameraMatrix, OutputArray _R, OutputArray _t, double distanceThresh,
                      InputOutputArray _mask, OutputArray triangulatedPoints)
diff --git a/modules/calib3d/test/test_cameracalibration.cpp b/modules/calib3d/test/test_cameracalibration.cpp
index dbad5c1..9f3663e 100644
--- a/modules/calib3d/test/test_cameracalibration.cpp
+++ b/modules/calib3d/test/test_cameracalibration.cpp
@@ -2142,6 +2142,7 @@ TEST(CV_RecoverPoseTest, regression_15341)
     // camera matrix with both focal lengths = 1, and principal point = (0, 0)
     const Mat cameraMatrix = Mat::eye(3, 3, CV_64F);
+    const Mat zeroDistCoeffs = Mat::zeros(1, 5, CV_64F);
 
     int Inliers = 0;
 
@@ -2156,8 +2157,11 @@ TEST(CV_RecoverPoseTest, regression_15341)
             vector<Point2f> points2(_points2);
 
             // Estimation of fundamental matrix using the RANSAC algorithm
-            Mat E, R, t;
+            Mat E, E2, R, t;
             E = findEssentialMat(points1, points2, cameraMatrix, RANSAC, 0.999, 1.0, mask);
+            E2 = findEssentialMat(points1, points2, cameraMatrix, zeroDistCoeffs, cameraMatrix, zeroDistCoeffs, RANSAC, 0.999, 1.0, mask);
+            EXPECT_LT(cv::norm(E, E2, NORM_INF), 1e-4) <<
+                "Too big difference between essential matrices computed using different functions, testcase " << testcase;
             EXPECT_EQ(0, (int)mask[13]) << "Detecting outliers in function findEssentialMat failed, testcase " << testcase;
             points2[12] = Point2f(0.0f, 0.0f); // provoke another outlier detection for recoverPose
             Inliers = recoverPose(E, points1, points2, cameraMatrix, R, t, mask);
@@ -2180,8 +2184,11 @@ TEST(CV_RecoverPoseTest, regression_15341)
         }
 
         // Estimation of fundamental matrix using the RANSAC algorithm
-        Mat E, R, t;
+        Mat E, E2, R, t;
         E = findEssentialMat(points1, points2, cameraMatrix, RANSAC, 0.999, 1.0, mask);
+        E2 = findEssentialMat(points1, points2, cameraMatrix, zeroDistCoeffs, cameraMatrix, zeroDistCoeffs, RANSAC, 0.999, 1.0, mask);
+        EXPECT_LT(cv::norm(E, E2, NORM_INF), 1e-4) <<
+            "Too big difference between essential matrices computed using different functions, testcase " << testcase;
         EXPECT_EQ(0, (int)mask.at<unsigned char>(13)) << "Detecting outliers in function findEssentialMat failed, testcase " << testcase;
         points2.at<Point2f>(12) = Point2f(0.0f, 0.0f); // provoke an outlier detection
        Inliers = recoverPose(E, points1, points2, cameraMatrix, R, t, mask);
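
For readers of this patch, below is a minimal, self-contained sketch (not part of the commit) of how the new two-camera overload might be called. All intrinsics, distortion coefficients and poses in it are made-up placeholder values; real code would obtain the correspondences from feature matching rather than from synthetic projections.

    #include <opencv2/calib3d.hpp>
    #include <opencv2/core.hpp>
    #include <cstdio>
    #include <vector>

    int main()
    {
        using namespace cv;

        // Placeholder intrinsics and distortion of two different cameras.
        Mat K1 = (Mat_<double>(3, 3) << 800, 0, 320,  0, 800, 240,  0, 0, 1);
        Mat K2 = (Mat_<double>(3, 3) << 600, 0, 640,  0, 600, 360,  0, 0, 1);
        Mat dist1 = (Mat_<double>(1, 5) << -0.10,  0.02, 0, 0, 0);
        Mat dist2 = (Mat_<double>(1, 5) <<  0.05, -0.01, 0, 0, 0);

        // Synthesize 3D points and a relative pose, then project them into both
        // cameras; projectPoints() applies the respective distortion models.
        std::vector<Point3f> objectPoints;
        RNG rng(12345);
        for (int i = 0; i < 50; i++)
            objectPoints.push_back(Point3f(rng.uniform(-1.f, 1.f),
                                           rng.uniform(-1.f, 1.f),
                                           rng.uniform(4.f, 8.f)));

        Mat rvec1 = Mat::zeros(3, 1, CV_64F), tvec1 = Mat::zeros(3, 1, CV_64F);
        Mat rvec2 = (Mat_<double>(3, 1) << 0, 0.1, 0);   // small rotation about y
        Mat tvec2 = (Mat_<double>(3, 1) << -0.5, 0, 0);  // baseline along x

        std::vector<Point2f> pts1, pts2;
        projectPoints(objectPoints, rvec1, tvec1, K1, dist1, pts1);
        projectPoints(objectPoints, rvec2, tvec2, K2, dist2, pts2);

        // New overload: distorted pixel coordinates plus per-camera intrinsics
        // and distortion coefficients; the threshold stays in pixel units.
        Mat mask;
        Mat E = findEssentialMat(pts1, pts2, K1, dist1, K2, dist2,
                                 RANSAC, 0.999, 1.0, mask);

        std::printf("inliers: %d of %d\n", countNonZero(mask), (int)pts1.size());
        // E can be passed further to decomposeEssentialMat() or recoverPose().
        return 0;
    }

Averaging the two camera matrices before scaling the normalized points back (see the five-point.cpp hunk above) is what lets the pixel-valued RANSAC threshold keep its usual meaning even though the two cameras have different intrinsics.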