Merge remote-tracking branch 'upstream/3.4' into merge-3.4
author Alexander Alekhin <alexander.a.alekhin@gmail.com>
Tue, 27 Dec 2022 08:58:28 +0000 (08:58 +0000)
committer Alexander Alekhin <alexander.a.alekhin@gmail.com>
Tue, 27 Dec 2022 08:58:28 +0000 (08:58 +0000)
modules/calib3d/misc/java/test/Calib3dTest.java
modules/calib3d/src/fisheye.cpp
modules/calib3d/test/test_fisheye.cpp

@@@ -3,7 -3,6 +3,7 @@@ package org.opencv.test.calib3d
  import java.util.ArrayList;
  
  import org.opencv.calib3d.Calib3d;
 +import org.opencv.core.Core;
  import org.opencv.core.CvType;
  import org.opencv.core.Mat;
  import org.opencv.core.MatOfDouble;
@@@ -17,15 -16,6 +17,15 @@@ import org.opencv.imgproc.Imgproc
  
  public class Calib3dTest extends OpenCVTestCase {
  
 +    Size size;
 +
 +    @Override
 +    protected void setUp() throws Exception {
 +        super.setUp();
 +
 +        size = new Size(3, 3);
 +    }
 +
      public void testCalibrateCameraListOfMatListOfMatSizeMatMatListOfMatListOfMat() {
          fail("Not yet implemented");
      }
          assertMatEqual(truth_tvec, tvecs.get(0), 1000 * EPS);
      }
  
 +    public void testGetDefaultNewCameraMatrixMat() {
 +        Mat mtx = Calib3d.getDefaultNewCameraMatrix(gray0);
 +
 +        assertFalse(mtx.empty());
 +        assertEquals(0, Core.countNonZero(mtx));
 +    }
 +
 +    public void testGetDefaultNewCameraMatrixMatSizeBoolean() {
 +        Mat mtx = Calib3d.getDefaultNewCameraMatrix(gray0, size, true);
 +
 +        assertFalse(mtx.empty());
 +        assertFalse(0 == Core.countNonZero(mtx));
 +        // TODO_: write better test
 +    }
 +
 +    public void testInitUndistortRectifyMap() {
 +        fail("Not yet implemented");
 +        Mat cameraMatrix = new Mat(3, 3, CvType.CV_32F);
 +        cameraMatrix.put(0, 0, 1, 0, 1);
 +        cameraMatrix.put(1, 0, 0, 1, 1);
 +        cameraMatrix.put(2, 0, 0, 0, 1);
 +
 +        Mat R = new Mat(3, 3, CvType.CV_32F, new Scalar(2));
 +        Mat newCameraMatrix = new Mat(3, 3, CvType.CV_32F, new Scalar(3));
 +
 +        Mat distCoeffs = new Mat();
 +        Mat map1 = new Mat();
 +        Mat map2 = new Mat();
 +
 +        // TODO: complete this test
 +        Calib3d.initUndistortRectifyMap(cameraMatrix, distCoeffs, R, newCameraMatrix, size, CvType.CV_32F, map1, map2);
 +    }
 +
 +    public void testInitWideAngleProjMapMatMatSizeIntIntMatMat() {
 +        fail("Not yet implemented");
 +        Mat cameraMatrix = new Mat(3, 3, CvType.CV_32F);
 +        Mat distCoeffs = new Mat(1, 4, CvType.CV_32F);
 +        // Size imageSize = new Size(2, 2);
 +
 +        cameraMatrix.put(0, 0, 1, 0, 1);
 +        cameraMatrix.put(1, 0, 0, 1, 2);
 +        cameraMatrix.put(2, 0, 0, 0, 1);
 +
 +        distCoeffs.put(0, 0, 1, 3, 2, 4);
 +        truth = new Mat(3, 3, CvType.CV_32F);
 +        truth.put(0, 0, 0, 0, 0);
 +        truth.put(1, 0, 0, 0, 0);
 +        truth.put(2, 0, 0, 3, 0);
 +        // TODO: No documentation for this function
 +        // Calib3d.initWideAngleProjMap(cameraMatrix, distCoeffs, imageSize,
 +        // 5, m1type, truthput1, truthput2);
 +    }
 +
 +    public void testInitWideAngleProjMapMatMatSizeIntIntMatMatInt() {
 +        fail("Not yet implemented");
 +    }
 +
 +    public void testInitWideAngleProjMapMatMatSizeIntIntMatMatIntDouble() {
 +        fail("Not yet implemented");
 +    }
 +
 +    public void testUndistortMatMatMatMat() {
 +        Mat src = new Mat(3, 3, CvType.CV_32F, new Scalar(3));
 +        Mat cameraMatrix = new Mat(3, 3, CvType.CV_32F) {
 +            {
 +                put(0, 0, 1, 0, 1);
 +                put(1, 0, 0, 1, 2);
 +                put(2, 0, 0, 0, 1);
 +            }
 +        };
 +        Mat distCoeffs = new Mat(1, 4, CvType.CV_32F) {
 +            {
 +                put(0, 0, 1, 3, 2, 4);
 +            }
 +        };
 +
 +        Calib3d.undistort(src, dst, cameraMatrix, distCoeffs);
 +
 +        truth = new Mat(3, 3, CvType.CV_32F) {
 +            {
 +                put(0, 0, 0, 0, 0);
 +                put(1, 0, 0, 0, 0);
 +                put(2, 0, 0, 3, 0);
 +            }
 +        };
 +        assertMatEqual(truth, dst, EPS);
 +    }
 +
 +    public void testUndistortMatMatMatMatMat() {
 +        Mat src = new Mat(3, 3, CvType.CV_32F, new Scalar(3));
 +        Mat cameraMatrix = new Mat(3, 3, CvType.CV_32F) {
 +            {
 +                put(0, 0, 1, 0, 1);
 +                put(1, 0, 0, 1, 2);
 +                put(2, 0, 0, 0, 1);
 +            }
 +        };
 +        Mat distCoeffs = new Mat(1, 4, CvType.CV_32F) {
 +            {
 +                put(0, 0, 2, 1, 4, 5);
 +            }
 +        };
 +        Mat newCameraMatrix = new Mat(3, 3, CvType.CV_32F, new Scalar(1));
 +
 +        Calib3d.undistort(src, dst, cameraMatrix, distCoeffs, newCameraMatrix);
 +
 +        truth = new Mat(3, 3, CvType.CV_32F, new Scalar(3));
 +        assertMatEqual(truth, dst, EPS);
 +    }
 +
 +    //undistortPoints(List<Point> src, List<Point> dst, Mat cameraMatrix, Mat distCoeffs)
 +    public void testUndistortPointsListOfPointListOfPointMatMat() {
 +        MatOfPoint2f src = new MatOfPoint2f(new Point(1, 2), new Point(3, 4), new Point(-1, -1));
 +        MatOfPoint2f dst = new MatOfPoint2f();
 +        Mat cameraMatrix = Mat.eye(3, 3, CvType.CV_64FC1);
 +        Mat distCoeffs = new Mat(8, 1, CvType.CV_64FC1, new Scalar(0));
 +
 +        Calib3d.undistortPoints(src, dst, cameraMatrix, distCoeffs);
 +
 +        assertEquals(src.size(), dst.size());
 +        for(int i=0; i<src.toList().size(); i++) {
 +            //Log.d("UndistortPoints", "s="+src.get(i)+", d="+dst.get(i));
 +            assertTrue(src.toList().get(i).equals(dst.toList().get(i)));
 +        }
 +    }
 +
      public void testEstimateNewCameraMatrixForUndistortRectify() {
          Mat K = new Mat().eye(3, 3, CvType.CV_64FC1);
          Mat K_new = new Mat().eye(3, 3, CvType.CV_64FC1);
          D.put(2,0,-0.021509225493198905);
          D.put(3,0,0.0043378096628297145);
  
-         K_new_truth.put(0,0, 387.4809086880343);
-         K_new_truth.put(0,2, 1036.669802754649);
-         K_new_truth.put(1,1, 373.6375700303157);
-         K_new_truth.put(1,2, 538.8373261247601);
+         K_new_truth.put(0,0, 387.5118215642316);
+         K_new_truth.put(0,2, 1033.936556777084);
+         K_new_truth.put(1,1, 373.6673784974842);
+         K_new_truth.put(1,2, 538.794152656429);
  
          Calib3d.fisheye_estimateNewCameraMatrixForUndistortRectify(K,D,new Size(1920,1080),
                      new Mat().eye(3, 3, CvType.CV_64F), K_new, 0.0, new Size(1920,1080));
  
          assertMatEqual(K_new, K_new_truth, EPS);
      }
 +
  }
@@@ -318,8 -318,7 +318,8 @@@ void cv::fisheye::distortPoints(InputAr
  //////////////////////////////////////////////////////////////////////////////////////////////////////////////
  /// cv::fisheye::undistortPoints
  
 -void cv::fisheye::undistortPoints( InputArray distorted, OutputArray undistorted, InputArray K, InputArray D, InputArray R, InputArray P)
 +void cv::fisheye::undistortPoints( InputArray distorted, OutputArray undistorted, InputArray K, InputArray D,
 +                                   InputArray R, InputArray P, TermCriteria criteria)
  {
      CV_INSTRUMENT_REGION();
  
      CV_Assert(R.empty() || R.size() == Size(3, 3) || R.total() * R.channels() == 3);
      CV_Assert(D.total() == 4 && K.size() == Size(3, 3) && (K.depth() == CV_32F || K.depth() == CV_64F));
  
 +    CV_Assert(criteria.isValid());
 +
      cv::Vec2d f, c;
      if (K.depth() == CV_32F)
      {
      size_t n = distorted.total();
      int sdepth = distorted.depth();
  
 +    const bool isEps = (criteria.type & TermCriteria::EPS) != 0;
 +
 +    /* Define max count for solver iterations */
 +    int maxCount = std::numeric_limits<int>::max();
 +    if (criteria.type & TermCriteria::MAX_ITER) {
 +        maxCount = criteria.maxCount;
 +    }
 +
 +
      for(size_t i = 0; i < n; i++ )
      {
          Vec2d pi = sdepth == CV_32F ? (Vec2d)srcf[i] : srcd[i];  // image point
          Vec2d pw((pi[0] - c[0])/f[0], (pi[1] - c[1])/f[1]);      // world point
  
 -        double scale = 1.0;
 -
          double theta_d = sqrt(pw[0]*pw[0] + pw[1]*pw[1]);
  
          // the current camera model is only valid up to a 180 degree FOV
          // clip values so we still get plausible results for super fisheye images beyond 180 degrees
          theta_d = min(max(-CV_PI/2., theta_d), CV_PI/2.);
  
 -        if (theta_d > 1e-8)
 +        bool converged = false;
 +        double theta = theta_d;
 +
 +        double scale = 0.0;
 +
 +        if (!isEps || fabs(theta_d) > criteria.epsilon)
          {
-             // compensate distortion iteratively
+             // compensate distortion iteratively using the Newton method
 -            double theta = theta_d;
  
 -            const double EPS = 1e-8; // or std::numeric_limits<double>::epsilon();
 -            for (int j = 0; j < 10; j++)
 +            for (int j = 0; j < maxCount; j++)
              {
                  double theta2 = theta*theta, theta4 = theta2*theta2, theta6 = theta4*theta2, theta8 = theta6*theta2;
                  double k0_theta2 = k[0] * theta2, k1_theta4 = k[1] * theta4, k2_theta6 = k[2] * theta6, k3_theta8 = k[3] * theta8;
                  double theta_fix = (theta * (1 + k0_theta2 + k1_theta4 + k2_theta6 + k3_theta8) - theta_d) /
                                     (1 + 3*k0_theta2 + 5*k1_theta4 + 7*k2_theta6 + 9*k3_theta8);
                  theta = theta - theta_fix;
 -                if (fabs(theta_fix) < EPS)
 +
 +                if (isEps && (fabs(theta_fix) < criteria.epsilon))
 +                {
 +                    converged = true;
                      break;
 +                }
              }
  
              scale = std::tan(theta) / theta_d;
          }
 +        else
 +        {
 +            converged = true;
 +        }
  
 -        Vec2d pu = pw * scale; //undistorted point
 +        // theta is monotonically increasing or decreasing depending on the sign of theta_d
 +        // if theta has flipped sign, the iteration may still converge due to symmetry, but to the point mirrored about the camera center
 +        // so we check whether theta changed sign during the optimization
 +        bool theta_flipped = ((theta_d < 0 && theta > 0) || (theta_d > 0 && theta < 0));
 +
 +        if ((converged || !isEps) && !theta_flipped)
 +        {
 +            Vec2d pu = pw * scale; //undistorted point
  
 -        // reproject
 -        Vec3d pr = RR * Vec3d(pu[0], pu[1], 1.0); // rotated point optionally multiplied by new camera matrix
 -        Vec2d fi(pr[0]/pr[2], pr[1]/pr[2]);       // final
 +            // reproject
 +            Vec3d pr = RR * Vec3d(pu[0], pu[1], 1.0); // rotated point optionally multiplied by new camera matrix
 +            Vec2d fi(pr[0]/pr[2], pr[1]/pr[2]);       // final
  
 -        if( sdepth == CV_32F )
 -            dstf[i] = fi;
 +            if( sdepth == CV_32F )
 +                dstf[i] = fi;
 +            else
 +                dstd[i] = fi;
 +        }
          else
 -            dstd[i] = fi;
 +        {
 +            // Vec2d fi(std::numeric_limits<double>::quiet_NaN(), std::numeric_limits<double>::quiet_NaN());
 +            Vec2d fi(-1000000.0, -1000000.0);
 +
 +            if( sdepth == CV_32F )
 +                dstf[i] = fi;
 +            else
 +                dstd[i] = fi;
 +        }
      }
  }
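
For reference, the loop above inverts the fisheye projection model with a Newton iteration; restating the code (k[0]..k[3] are k1..k4):

    \theta_d = \theta\,(1 + k_1\theta^2 + k_2\theta^4 + k_3\theta^6 + k_4\theta^8)

    \theta \leftarrow \theta - \frac{\theta\,(1 + k_1\theta^2 + k_2\theta^4 + k_3\theta^6 + k_4\theta^8) - \theta_d}{1 + 3k_1\theta^2 + 5k_2\theta^4 + 7k_3\theta^6 + 9k_4\theta^8}

The iteration stops once |theta_fix| drops below criteria.epsilon (when TermCriteria::EPS is set) or after criteria.maxCount steps (when TermCriteria::MAX_ITER is set). Points whose theta changes sign, and non-converged points under an EPS criterion, are written out as the sentinel (-1000000, -1000000).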
  
@@@ -611,7 -572,7 +611,7 @@@ void cv::fisheye::estimateNewCameraMatr
                                                  : K.getMat().at<double>(0,0)/K.getMat().at<double>(1,1);
  
      // convert to identity ratio
-     cn[0] *= aspect_ratio;
+     cn[1] *= aspect_ratio;
      for(size_t i = 0; i < points.total(); ++i)
          pptr[i][1] *= aspect_ratio;
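
The one-character fix above keeps the principal point consistent with the image points: with aspect ratio a = f_x / f_y, the y-coordinates are rescaled into an identity-aspect frame, so the center's y-coordinate (cn[1]) must be rescaled as well, not its x-coordinate. Restating the code:

    a = \frac{f_x}{f_y}, \qquad y_i' = a\,y_i, \qquad c_y' = a\,c_y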
  
@@@ -765,8 -726,8 +765,8 @@@ double cv::fisheye::calibrate(InputArra
      IntrinsicParams currentParam;
      IntrinsicParams errors;
  
 -    finalParam.isEstimate[0] = 1;
 -    finalParam.isEstimate[1] = 1;
 +    finalParam.isEstimate[0] = flags & CALIB_FIX_FOCAL_LENGTH ? 0 : 1;
 +    finalParam.isEstimate[1] = flags & CALIB_FIX_FOCAL_LENGTH ? 0 : 1;
      finalParam.isEstimate[2] = flags & CALIB_FIX_PRINCIPAL_POINT ? 0 : 1;
      finalParam.isEstimate[3] = flags & CALIB_FIX_PRINCIPAL_POINT ? 0 : 1;
      finalParam.isEstimate[4] = flags & CALIB_FIX_SKEW ? 0 : 1;
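
With this change, CALIB_FIX_FOCAL_LENGTH keeps fx and fy at their initial values, which only makes sense together with CALIB_USE_INTRINSIC_GUESS; the CalibrationWithFixedFocalLength test further down exercises exactly this path. A minimal call sketch (the point containers, image size, and initial K are illustrative placeholders, not from this commit):

    #include <opencv2/calib3d.hpp>
    #include <vector>

    // Hypothetical inputs, assumed to be filled from detected calibration patterns.
    std::vector<std::vector<cv::Point3d> > objectPoints;
    std::vector<std::vector<cv::Point2d> > imagePoints;
    cv::Size imageSize(1280, 800);

    void calibrateWithFixedFocalLength()
    {
        // Initial guess: fx and fy stay exactly at these values because of CALIB_FIX_FOCAL_LENGTH.
        cv::Matx33d K(600.0, 0.0, 640.0,
                      0.0, 600.0, 400.0,
                      0.0, 0.0, 1.0);
        cv::Vec4d D;

        int flags = cv::fisheye::CALIB_USE_INTRINSIC_GUESS
                  | cv::fisheye::CALIB_FIX_FOCAL_LENGTH
                  | cv::fisheye::CALIB_RECOMPUTE_EXTRINSIC
                  | cv::fisheye::CALIB_FIX_SKEW;

        cv::fisheye::calibrate(objectPoints, imagePoints, imageSize, K, D,
                               cv::noArray(), cv::noArray(), flags,
                               cv::TermCriteria(cv::TermCriteria::COUNT + cv::TermCriteria::EPS, 20, 1e-6));
        // K(0,0) and K(1,1) are unchanged; the principal point and D are re-estimated
        // (skew is fixed at zero here by CALIB_FIX_SKEW).
    }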
@@@ -886,13 -847,6 +886,13 @@@ double cv::fisheye::stereoCalibrate(Inp
                                      InputOutputArray K1, InputOutputArray D1, InputOutputArray K2, InputOutputArray D2, Size imageSize,
                                      OutputArray R, OutputArray T, int flags, TermCriteria criteria)
  {
 +    return cv::fisheye::stereoCalibrate(objectPoints, imagePoints1, imagePoints2, K1, D1, K2, D2, imageSize, R, T, noArray(), noArray(), flags, criteria);
 +}
 +
 +double cv::fisheye::stereoCalibrate(InputArrayOfArrays objectPoints, InputArrayOfArrays imagePoints1, InputArrayOfArrays imagePoints2,
 +                                    InputOutputArray K1, InputOutputArray D1, InputOutputArray K2, InputOutputArray D2, Size imageSize,
 +                                    OutputArray R, OutputArray T, OutputArrayOfArrays rvecs, OutputArrayOfArrays tvecs, int flags, TermCriteria criteria)
 +{
      CV_INSTRUMENT_REGION();
  
      CV_Assert(!objectPoints.empty() && !imagePoints1.empty() && !imagePoints2.empty());
      if (D2.needed()) cv::Mat(intrinsicRight.k).convertTo(D2, D2.empty() ? CV_64FC1 : D2.type());
      if (R.needed()) _R.convertTo(R, R.empty() ? CV_64FC1 : R.type());
      if (T.needed()) cv::Mat(Tcur).convertTo(T, T.empty() ? CV_64FC1 : T.type());
 +    if (rvecs.isMatVector())
 +    {
 +        if(rvecs.empty())
 +            rvecs.create(n_images, 1, CV_64FC3);
 +
 +        if(tvecs.empty())
 +            tvecs.create(n_images, 1, CV_64FC3);
 +
 +        for(int i = 0; i < n_images; i++ )
 +        {
 +            rvecs.create(3, 1, CV_64F, i, true);
 +            tvecs.create(3, 1, CV_64F, i, true);
 +            memcpy(rvecs.getMat(i).ptr(), rvecs1[i].val, sizeof(Vec3d));
 +            memcpy(tvecs.getMat(i).ptr(), tvecs1[i].val, sizeof(Vec3d));
 +        }
 +    }
 +    else
 +    {
 +        if (rvecs.needed()) cv::Mat(rvecs1).convertTo(rvecs, rvecs.empty() ? CV_64FC3 : rvecs.type());
 +        if (tvecs.needed()) cv::Mat(tvecs1).convertTo(tvecs, tvecs.empty() ? CV_64FC3 : tvecs.type());
 +    }
  
      return rms;
  }
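
The added overload also reports the pose of the calibration target in the first camera's frame for every view. A minimal call sketch (data loading omitted; the containers and image size are illustrative placeholders); the right camera's per-view pose follows as R2 = R*R1 and t2 = R*t1 + T, as the new test below does:

    #include <opencv2/calib3d.hpp>
    #include <vector>

    // Hypothetical inputs, assumed to be filled from detected calibration patterns.
    std::vector<std::vector<cv::Point3d> > objectPoints;
    std::vector<std::vector<cv::Point2d> > leftPoints, rightPoints;
    cv::Size imageSize(1280, 800);

    void stereoCalibrateWithPerViewPoses()
    {
        cv::Matx33d K1, K2, R;
        cv::Vec4d D1, D2;
        cv::Vec3d T;
        std::vector<cv::Mat> rvecs, tvecs;   // per-view target pose in the left camera frame

        double rms = cv::fisheye::stereoCalibrate(
            objectPoints, leftPoints, rightPoints,
            K1, D1, K2, D2, imageSize, R, T,
            rvecs, tvecs,                    // new per-view outputs
            cv::fisheye::CALIB_RECOMPUTE_EXTRINSIC | cv::fisheye::CALIB_FIX_SKEW,
            cv::TermCriteria(cv::TermCriteria::COUNT + cv::TermCriteria::EPS, 100, 1e-10));
        (void)rms;
    }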
@@@ -101,57 -101,17 +101,66 @@@ TEST_F(fisheyeTest, projectPoints
      EXPECT_MAT_NEAR(distorted0, distorted2, 1e-10);
  }
  
 -// we use it to reduce patch size for images in testdata
 -static void throwAwayHalf(Mat img)
 +TEST_F(fisheyeTest, distortUndistortPoints)
  {
 -    int whalf = img.cols / 2, hhalf = img.rows / 2;
 -    Rect tl(0, 0, whalf, hhalf), br(whalf, hhalf, whalf, hhalf);
 -    img(tl) = 0;
 -    img(br) = 0;
 -};
 +    int width = imageSize.width;
 +    int height = imageSize.height;
 +
 +    /* Create test points */
 +    std::vector<cv::Point2d> points0Vector;
 +    cv::Mat principalPoints = (cv::Mat_<double>(5, 2) << K(0, 2), K(1, 2), // (cx, cy)
 +                                                                    /* Image corners */
 +                                                                    0, 0,
 +                                                                    0, height,
 +                                                                    width, 0,
 +                                                                    width, height
 +                                                                    );
 +
 +    /* Random points inside image */
 +    cv::Mat xy[2] = {};
 +    xy[0].create(100, 1, CV_64F);
 +    theRNG().fill(xy[0], cv::RNG::UNIFORM, 0, width); // x
 +    xy[1].create(100, 1, CV_64F);
 +    theRNG().fill(xy[1], cv::RNG::UNIFORM, 0, height); // y
 +
 +    cv::Mat randomPoints;
 +    merge(xy, 2, randomPoints);
 +
 +    cv::Mat points0;
 +    cv::vconcat(principalPoints.reshape(2), randomPoints, points0);
 +
 +    /* Test with random D set */
 +    for (size_t i = 0; i < 10; ++i) {
 +        cv::Mat distortion(1, 4, CV_64F);
 +        theRNG().fill(distortion, cv::RNG::UNIFORM, -0.00001, 0.00001);
 +
 +        /* Distort -> Undistort */
 +        cv::Mat distortedPoints;
 +        cv::fisheye::distortPoints(points0, distortedPoints, K, distortion);
 +        cv::Mat undistortedPoints;
 +        cv::fisheye::undistortPoints(distortedPoints, undistortedPoints, K, distortion);
 +
 +        EXPECT_MAT_NEAR(points0, undistortedPoints, 1e-8);
 +
 +        /* Undistort -> Distort */
 +        cv::fisheye::undistortPoints(points0, undistortedPoints, K, distortion);
 +        cv::fisheye::distortPoints(undistortedPoints, distortedPoints, K, distortion);
 +
 +        EXPECT_MAT_NEAR(points0, distortedPoints, 1e-8);
 +    }
 +}
  
  TEST_F(fisheyeTest, undistortImage)
  {
++    // we use it to reduce patch size for images in testdata
++    auto throwAwayHalf = [](Mat img)
++    {
++        int whalf = img.cols / 2, hhalf = img.rows / 2;
++        Rect tl(0, 0, whalf, hhalf), br(whalf, hhalf, whalf, hhalf);
++        img(tl) = 0;
++        img(br) = 0;
++    };
++
      cv::Matx33d theK = this->K;
      cv::Mat theD = cv::Mat(this->D);
      std::string file = combine(datasets_repository_path, "/calib-3_stereo_from_JY/left/stereo_pair_014.jpg");
          newK(0, 0) = 100;
          newK(1, 1) = 100;
          cv::fisheye::undistortImage(distorted, undistorted, theK, theD, newK);
-         cv::Mat correct = cv::imread(combine(datasets_repository_path, "new_f_100.png"));
-         if (correct.empty())
-             CV_Assert(cv::imwrite(combine(datasets_repository_path, "new_f_100.png"), undistorted));
-         else
-             EXPECT_MAT_NEAR(correct, undistorted, 1e-10);
+         std::string imageFilename = combine(datasets_repository_path, "new_f_100.png");
+         cv::Mat correct = cv::imread(imageFilename);
+         ASSERT_FALSE(correct.empty()) << "Correct image " << imageFilename.c_str() << " cannot be read" << std::endl;
+         throwAwayHalf(correct);
+         throwAwayHalf(undistorted);
+         EXPECT_MAT_NEAR(correct, undistorted, 1e-10);
      }
      {
          double balance = 1.0;
          cv::fisheye::estimateNewCameraMatrixForUndistortRectify(theK, theD, distorted.size(), cv::noArray(), newK, balance);
          cv::fisheye::undistortImage(distorted, undistorted, theK, theD, newK);
-         cv::Mat correct = cv::imread(combine(datasets_repository_path, "balance_1.0.png"));
-         if (correct.empty())
-             CV_Assert(cv::imwrite(combine(datasets_repository_path, "balance_1.0.png"), undistorted));
-         else
-             EXPECT_MAT_NEAR(correct, undistorted, 1e-10);
+         std::string imageFilename = combine(datasets_repository_path, "balance_1.0.png");
+         cv::Mat correct = cv::imread(imageFilename);
+         ASSERT_FALSE(correct.empty()) << "Correct image " << imageFilename.c_str() << " cannot be read" << std::endl;
+         throwAwayHalf(correct);
+         throwAwayHalf(undistorted);
+         EXPECT_MAT_NEAR(correct, undistorted, 1e-10);
      }
  
      {
          double balance = 0.0;
          cv::fisheye::estimateNewCameraMatrixForUndistortRectify(theK, theD, distorted.size(), cv::noArray(), newK, balance);
          cv::fisheye::undistortImage(distorted, undistorted, theK, theD, newK);
-         cv::Mat correct = cv::imread(combine(datasets_repository_path, "balance_0.0.png"));
-         if (correct.empty())
-             CV_Assert(cv::imwrite(combine(datasets_repository_path, "balance_0.0.png"), undistorted));
-         else
-             EXPECT_MAT_NEAR(correct, undistorted, 1e-10);
+         std::string imageFilename = combine(datasets_repository_path, "balance_0.0.png");
+         cv::Mat correct = cv::imread(imageFilename);
+         ASSERT_FALSE(correct.empty()) << "Correct image " << imageFilename.c_str() << " cannot be read" << std::endl;
+         throwAwayHalf(correct);
+         throwAwayHalf(undistorted);
+         EXPECT_MAT_NEAR(correct, undistorted, 1e-10);
      }
  }
  
-     CV_Assert(cv::imwrite(combine(datasets_repository_path, "new_distortion.png"), image_projected));
 +TEST_F(fisheyeTest, undistortAndDistortImage)
 +{
 +    cv::Matx33d K_src = this->K;
 +    cv::Mat D_src = cv::Mat(this->D);
 +    std::string file = combine(datasets_repository_path, "/calib-3_stereo_from_JY/left/stereo_pair_014.jpg");
 +    cv::Matx33d K_dst = K_src;
 +    cv::Mat image = cv::imread(file), image_projected;
 +    cv::Vec4d D_dst_vec (-1.0, 0.0, 0.0, 0.0);
 +    cv::Mat D_dst = cv::Mat(D_dst_vec);
 +
 +    int imageWidth = (int)this->imageSize.width;
 +    int imageHeight = (int)this->imageSize.height;
 +
 +    cv::Mat imagePoints(imageHeight, imageWidth, CV_32FC2), undPoints, distPoints;
 +    cv::Vec2f* pts = imagePoints.ptr<cv::Vec2f>();
 +
 +    for(int y = 0, k = 0; y < imageHeight; ++y)
 +    {
 +        for(int x = 0; x < imageWidth; ++x)
 +        {
 +            cv::Vec2f point((float)x, (float)y);
 +            pts[k++] = point;
 +        }
 +    }
 +
 +    cv::fisheye::undistortPoints(imagePoints, undPoints, K_dst, D_dst);
 +    cv::fisheye::distortPoints(undPoints, distPoints, K_src, D_src);
 +    cv::remap(image, image_projected, distPoints, cv::noArray(), cv::INTER_LINEAR);
 +
 +    float dx, dy, r_sq;
 +    float R_MAX = 250;
 +    float imageCenterX = (float)imageWidth / 2;
 +    float imageCenterY = (float)imageHeight / 2;
 +
 +    cv::Mat undPointsGt(imageHeight, imageWidth, CV_32FC2);
 +    cv::Mat imageGt(imageHeight, imageWidth, CV_8UC3);
 +
 +    for(int y = 0; y < imageHeight; ++y)
 +    {
 +        for(int x = 0; x < imageWidth; ++x)
 +        {
 +            dx = x - imageCenterX;
 +            dy = y - imageCenterY;
 +            r_sq = dy * dy + dx * dx;
 +
 +            Vec2f & und_vec = undPoints.at<Vec2f>(y,x);
 +            Vec3b & pixel = image_projected.at<Vec3b>(y,x);
 +
 +            Vec2f & undist_vec_gt = undPointsGt.at<Vec2f>(y,x);
 +            Vec3b & pixel_gt = imageGt.at<Vec3b>(y,x);
 +
 +            if (r_sq > R_MAX * R_MAX)
 +            {
 +
 +                undist_vec_gt[0] = -1e6;
 +                undist_vec_gt[1] = -1e6;
 +
 +                pixel_gt[0] = 0;
 +                pixel_gt[1] = 0;
 +                pixel_gt[2] = 0;
 +            }
 +            else
 +            {
 +                undist_vec_gt[0] = und_vec[0];
 +                undist_vec_gt[1] = und_vec[1];
 +
 +                pixel_gt[0] = pixel[0];
 +                pixel_gt[1] = pixel[1];
 +                pixel_gt[2] = pixel[2];
 +            }
 +
 +        }
 +    }
 +
 +    EXPECT_MAT_NEAR(undPoints, undPointsGt, 1e-10);
 +    EXPECT_MAT_NEAR(image_projected, imageGt, 1e-10);
 +
 +    Vec2f dist_point_1 = distPoints.at<Vec2f>(400, 640);
 +    Vec2f dist_point_1_gt(640.044f, 400.041f);
 +
 +    Vec2f dist_point_2 = distPoints.at<Vec2f>(400, 440);
 +    Vec2f dist_point_2_gt(409.731f, 403.029f);
 +
 +    Vec2f dist_point_3 = distPoints.at<Vec2f>(200, 640);
 +    Vec2f dist_point_3_gt(643.341f, 168.896f);
 +
 +    Vec2f dist_point_4 = distPoints.at<Vec2f>(300, 480);
 +    Vec2f dist_point_4_gt(463.402f, 290.317f);
 +
 +    Vec2f dist_point_5 = distPoints.at<Vec2f>(550, 750);
 +    Vec2f dist_point_5_gt(797.51f, 611.637f);
 +
 +    EXPECT_MAT_NEAR(dist_point_1, dist_point_1_gt, 1e-2);
 +    EXPECT_MAT_NEAR(dist_point_2, dist_point_2_gt, 1e-2);
 +    EXPECT_MAT_NEAR(dist_point_3, dist_point_3_gt, 1e-2);
 +    EXPECT_MAT_NEAR(dist_point_4, dist_point_4_gt, 1e-2);
 +    EXPECT_MAT_NEAR(dist_point_5, dist_point_5_gt, 1e-2);
 +
++    // Add the "--test_debug" to arguments for file output
++    if (cvtest::debugLevel > 0)
++        cv::imwrite(combine(datasets_repository_path, "new_distortion.png"), image_projected);
 +}
 +
  TEST_F(fisheyeTest, jacobians)
  {
      int n = 10;
@@@ -393,7 -261,7 +413,7 @@@ TEST_F(fisheyeTest, Calibration
      std::vector<std::vector<cv::Point2d> > imagePoints(n_images);
      std::vector<std::vector<cv::Point3d> > objectPoints(n_images);
  
 -    const std::string folder =combine(datasets_repository_path, "calib-3_stereo_from_JY");
 +    const std::string folder = combine(datasets_repository_path, "calib-3_stereo_from_JY");
      cv::FileStorage fs_left(combine(folder, "left.xml"), cv::FileStorage::READ);
      CV_Assert(fs_left.isOpened());
      for(int i = 0; i < n_images; ++i)
      EXPECT_MAT_NEAR(theD, this->D, 1e-10);
  }
  
 +TEST_F(fisheyeTest, CalibrationWithFixedFocalLength)
 +{
 +    const int n_images = 34;
 +
 +    std::vector<std::vector<cv::Point2d> > imagePoints(n_images);
 +    std::vector<std::vector<cv::Point3d> > objectPoints(n_images);
 +
 +    const std::string folder = combine(datasets_repository_path, "calib-3_stereo_from_JY");
 +    cv::FileStorage fs_left(combine(folder, "left.xml"), cv::FileStorage::READ);
 +    CV_Assert(fs_left.isOpened());
 +    for(int i = 0; i < n_images; ++i)
 +        fs_left[cv::format("image_%d", i )] >> imagePoints[i];
 +    fs_left.release();
 +
 +    cv::FileStorage fs_object(combine(folder, "object.xml"), cv::FileStorage::READ);
 +    CV_Assert(fs_object.isOpened());
 +    for(int i = 0; i < n_images; ++i)
 +        fs_object[cv::format("image_%d", i )] >> objectPoints[i];
 +    fs_object.release();
 +
 +    int flag = 0;
 +    flag |= cv::fisheye::CALIB_RECOMPUTE_EXTRINSIC;
 +    flag |= cv::fisheye::CALIB_CHECK_COND;
 +    flag |= cv::fisheye::CALIB_FIX_SKEW;
 +    flag |= cv::fisheye::CALIB_FIX_FOCAL_LENGTH;
 +    flag |= cv::fisheye::CALIB_USE_INTRINSIC_GUESS;
 +
 +    cv::Matx33d theK = this->K;
 +    const cv::Matx33d newK(
 +        558.478088, 0.000000, 620.458461,
 +        0.000000, 560.506767, 381.939362,
 +        0.000000, 0.000000, 1.000000);
 +
 +    cv::Vec4d theD;
 +    const cv::Vec4d newD(-0.001461, -0.003298, 0.006057, -0.003742);
 +
 +    cv::fisheye::calibrate(objectPoints, imagePoints, imageSize, theK, theD,
 +                           cv::noArray(), cv::noArray(), flag, cv::TermCriteria(3, 20, 1e-6));
 +
 +    // ensure that CALIB_FIX_FOCAL_LENGTH works and the focal length has not changed
 +    EXPECT_EQ(theK(0,0), K(0,0));
 +    EXPECT_EQ(theK(1,1), K(1,1));
 +
 +    EXPECT_MAT_NEAR(theK, newK, 1e-6);
 +    EXPECT_MAT_NEAR(theD, newD, 1e-6);
 +}
 +
  TEST_F(fisheyeTest, Homography)
  {
      const int n_images = 1;
      std::vector<std::vector<cv::Point2d> > imagePoints(n_images);
      std::vector<std::vector<cv::Point3d> > objectPoints(n_images);
  
 -    const std::string folder =combine(datasets_repository_path, "calib-3_stereo_from_JY");
 +    const std::string folder = combine(datasets_repository_path, "calib-3_stereo_from_JY");
      cv::FileStorage fs_left(combine(folder, "left.xml"), cv::FileStorage::READ);
      CV_Assert(fs_left.isOpened());
      for(int i = 0; i < n_images; ++i)
@@@ -593,7 -414,7 +613,7 @@@ TEST_F(fisheyeTest, stereoRectify
          "For the purpose of continuity the following should be true: cv::CALIB_ZERO_DISPARITY == cv::fisheye::CALIB_ZERO_DISPARITY"
      );
  
 -    const std::string folder =combine(datasets_repository_path, "calib-3_stereo_from_JY");
 +    const std::string folder = combine(datasets_repository_path, "calib-3_stereo_from_JY");
  
      cv::Size calibration_size = this->imageSize, requested_size = calibration_size;
      cv::Matx33d K1 = this->K, K2 = K1;
          0.002076471801477729, 0.006463478587068991, 0.9999769555891836
      );
      cv::Matx34d P1_ref(
-         420.8551870450913, 0, 586.501617798451, 0,
-         0, 420.8551870450913, 374.7667511986098, 0,
+         420.9684016542647, 0, 586.3059567784627, 0,
+         0, 420.9684016542647, 374.8571836462291, 0,
          0, 0, 1, 0
      );
      cv::Matx34d P2_ref(
-         420.8551870450913, 0, 586.501617798451, -41.77758076597302,
-         0, 420.8551870450913, 374.7667511986098, 0,
+         420.9684016542647, 0, 586.3059567784627, -41.78881938824554,
+         0, 420.9684016542647, 374.8571836462291, 0,
          0, 0, 1, 0
      );
      cv::Matx44d Q_ref(
-         1, 0, 0, -586.501617798451,
-         0, 1, 0, -374.7667511986098,
-         0, 0, 0, 420.8551870450913,
+         1, 0, 0, -586.3059567784627,
+         0, 1, 0, -374.8571836462291,
+         0, 0, 0, 420.9684016542647,
          0, 0, 10.07370889670733, -0
      );
  
          cv::Mat rectification;
          merge4(l, r, lundist, rundist, rectification);
  
-         cv::imwrite(cv::format("fisheye_rectification_AB_%03d.png", i), rectification);
+         // Add the "--test_debug" to arguments for file output
+         if (cvtest::debugLevel > 0)
+             cv::imwrite(cv::format("fisheye_rectification_AB_%03d.png", i), rectification);
      }
  }
  
@@@ -694,7 -517,7 +716,7 @@@ TEST_F(fisheyeTest, stereoCalibrate
  {
      const int n_images = 34;
  
 -    const std::string folder =combine(datasets_repository_path, "calib-3_stereo_from_JY");
 +    const std::string folder = combine(datasets_repository_path, "calib-3_stereo_from_JY");
  
      std::vector<std::vector<cv::Point2d> > leftPoints(n_images);
      std::vector<std::vector<cv::Point2d> > rightPoints(n_images);
@@@ -761,7 -584,7 +783,7 @@@ TEST_F(fisheyeTest, stereoCalibrateFixI
  {
      const int n_images = 34;
  
 -    const std::string folder =combine(datasets_repository_path, "calib-3_stereo_from_JY");
 +    const std::string folder = combine(datasets_repository_path, "calib-3_stereo_from_JY");
  
      std::vector<std::vector<cv::Point2d> > leftPoints(n_images);
      std::vector<std::vector<cv::Point2d> > rightPoints(n_images);
@@@ -861,113 -684,6 +883,113 @@@ TEST_F(fisheyeTest, CalibrationWithDiff
          cv::noArray(), cv::noArray(), flag, cv::TermCriteria(3, 20, 1e-6));
  }
  
 +TEST_F(fisheyeTest, stereoCalibrateWithPerViewTransformations)
 +{
 +    const int n_images = 34;
 +
 +    const std::string folder = combine(datasets_repository_path, "calib-3_stereo_from_JY");
 +
 +    std::vector<std::vector<cv::Point2d> > leftPoints(n_images);
 +    std::vector<std::vector<cv::Point2d> > rightPoints(n_images);
 +    std::vector<std::vector<cv::Point3d> > objectPoints(n_images);
 +
 +    cv::FileStorage fs_left(combine(folder, "left.xml"), cv::FileStorage::READ);
 +    CV_Assert(fs_left.isOpened());
 +    for(int i = 0; i < n_images; ++i)
 +        fs_left[cv::format("image_%d", i )] >> leftPoints[i];
 +    fs_left.release();
 +
 +    cv::FileStorage fs_right(combine(folder, "right.xml"), cv::FileStorage::READ);
 +    CV_Assert(fs_right.isOpened());
 +    for(int i = 0; i < n_images; ++i)
 +        fs_right[cv::format("image_%d", i )] >> rightPoints[i];
 +    fs_right.release();
 +
 +    cv::FileStorage fs_object(combine(folder, "object.xml"), cv::FileStorage::READ);
 +    CV_Assert(fs_object.isOpened());
 +    for(int i = 0; i < n_images; ++i)
 +        fs_object[cv::format("image_%d", i )] >> objectPoints[i];
 +    fs_object.release();
 +
 +    cv::Matx33d K1, K2, theR;
 +    cv::Vec3d theT;
 +    cv::Vec4d D1, D2;
 +
 +    std::vector<cv::Mat> rvecs, tvecs;
 +
 +    int flag = 0;
 +    flag |= cv::fisheye::CALIB_RECOMPUTE_EXTRINSIC;
 +    flag |= cv::fisheye::CALIB_CHECK_COND;
 +    flag |= cv::fisheye::CALIB_FIX_SKEW;
 +
 +    double rmsErrorStereoCalib = cv::fisheye::stereoCalibrate(objectPoints, leftPoints, rightPoints,
 +                    K1, D1, K2, D2, imageSize, theR, theT, rvecs, tvecs, flag,
 +                    cv::TermCriteria(3, 12, 0));
 +
 +    std::vector<cv::Point2d> reprojectedImgPts[2] = {std::vector<cv::Point2d>(n_images), std::vector<cv::Point2d>(n_images)};
 +    size_t totalPoints = 0;
 +    double totalMSError[2] = { 0, 0 };
 +    for( size_t i = 0; i < n_images; i++ )
 +    {
 +        cv::Matx33d viewRotMat1, viewRotMat2;
 +        cv::Vec3d viewT1, viewT2;
 +        cv::Mat rVec;
 +        cv::Rodrigues( rvecs[i], rVec );
 +        rVec.convertTo(viewRotMat1, CV_64F);
 +        tvecs[i].convertTo(viewT1, CV_64F);
 +
 +        viewRotMat2 = theR * viewRotMat1;
 +        cv::Vec3d T2t = theR * viewT1;
 +        viewT2 = T2t + theT;
 +
 +        cv::Vec3d viewRotVec1, viewRotVec2;
 +        cv::Rodrigues(viewRotMat1, viewRotVec1);
 +        cv::Rodrigues(viewRotMat2, viewRotVec2);
 +
 +        double alpha1 = K1(0, 1) / K1(0, 0);
 +        double alpha2 = K2(0, 1) / K2(0, 0);
 +        cv::fisheye::projectPoints(objectPoints[i], reprojectedImgPts[0], viewRotVec1, viewT1, K1, D1, alpha1);
 +        cv::fisheye::projectPoints(objectPoints[i], reprojectedImgPts[1], viewRotVec2, viewT2, K2, D2, alpha2);
 +
 +        double viewMSError[2] = {
 +            cv::norm(leftPoints[i], reprojectedImgPts[0], cv::NORM_L2SQR),
 +            cv::norm(rightPoints[i], reprojectedImgPts[1], cv::NORM_L2SQR)
 +        };
 +
 +        size_t n = objectPoints[i].size();
 +        totalMSError[0] += viewMSError[0];
 +        totalMSError[1] += viewMSError[1];
 +        totalPoints += n;
 +    }
 +    double rmsErrorFromReprojectedImgPts = std::sqrt((totalMSError[0] + totalMSError[1]) / (2 * totalPoints));
 +
 +    cv::Matx33d R_correct(   0.9975587205950972,   0.06953016383322372, 0.006492709911733523,
 +                           -0.06956823121068059,    0.9975601387249519, 0.005833595226966235,
 +                          -0.006071257768382089, -0.006271040135405457, 0.9999619062167968);
 +    cv::Vec3d T_correct(-0.099402724724121, 0.00270812139265413, 0.00129330292472699);
 +    cv::Matx33d K1_correct (561.195925927249,                0, 621.282400272412,
 +                                   0, 562.849402029712, 380.555455380889,
 +                                   0,                0,                1);
 +
 +    cv::Matx33d K2_correct (560.395452535348,                0, 678.971652040359,
 +                                   0,  561.90171021422, 380.401340535339,
 +                                   0,                0,                1);
 +
 +    cv::Vec4d D1_correct (-7.44253716539556e-05, -0.00702662033932424, 0.00737569823650885, -0.00342230256441771);
 +    cv::Vec4d D2_correct (-0.0130785435677431, 0.0284434505383497, -0.0360333869900506, 0.0144724062347222);
 +
 +    EXPECT_MAT_NEAR(theR, R_correct, 1e-10);
 +    EXPECT_MAT_NEAR(theT, T_correct, 1e-10);
 +
 +    EXPECT_MAT_NEAR(K1, K1_correct, 1e-10);
 +    EXPECT_MAT_NEAR(K2, K2_correct, 1e-10);
 +
 +    EXPECT_MAT_NEAR(D1, D1_correct, 1e-10);
 +    EXPECT_MAT_NEAR(D2, D2_correct, 1e-10);
 +
 +    EXPECT_NEAR(rmsErrorStereoCalib, rmsErrorFromReprojectedImgPts, 1e-4);
 +}
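
For reference, the consistency check above recomputes the stereo RMS from the per-view poses, restating the code: each view's right-camera pose is R2 = R*R1, t2 = R*t1 + T, and with N total object points summed over all views the error is

    \mathrm{rms} = \sqrt{\frac{\sum_i \lVert x_i^{L} - \hat{x}_i^{L} \rVert^2 + \sum_i \lVert x_i^{R} - \hat{x}_i^{R} \rVert^2}{2N}}

which EXPECT_NEAR compares against the value returned by stereoCalibrate.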
 +
  TEST_F(fisheyeTest, estimateNewCameraMatrixForUndistortRectify)
  {
      cv::Size size(1920, 1080);
  
      cv::Mat K_new_truth(3, 3, cv::DataType<double>::type);
  
-     K_new_truth.at<double>(0, 0) = 387.4809086880343;
+     K_new_truth.at<double>(0, 0) = 387.5118215642316;
      K_new_truth.at<double>(0, 1) = 0.0;
-     K_new_truth.at<double>(0, 2) = 1036.669802754649;
+     K_new_truth.at<double>(0, 2) = 1033.936556777084;
  
      K_new_truth.at<double>(1, 0) = 0.0;
-     K_new_truth.at<double>(1, 1) = 373.6375700303157;
-     K_new_truth.at<double>(1, 2) = 538.8373261247601;
+     K_new_truth.at<double>(1, 1) = 373.6673784974842;
+     K_new_truth.at<double>(1, 2) = 538.794152656429;
  
      K_new_truth.at<double>(2, 0) = 0.0;
      K_new_truth.at<double>(2, 1) = 0.0;
@@@ -1024,7 -740,6 +1046,7 @@@ const cv::Matx33d fisheyeTest::K(558.47
  
  const cv::Vec4d fisheyeTest::D(-0.0014613319981768, -0.00329861110580401, 0.00605760088590183, -0.00374209380722371);
  
 +
  const cv::Matx33d fisheyeTest::R ( 9.9756700084424932e-01, 6.9698277640183867e-02, 1.4929569991321144e-03,
                              -6.9711825162322980e-02, 9.9748249845531767e-01, 1.2997180766418455e-02,
                              -5.8331736398316541e-04,-1.3069635393884985e-02, 9.9991441852366736e-01);