InputArray R, InputArray newCameraMatrix,
Size size, int m1type, OutputArray map1, OutputArray map2);
+/** @brief Computes the projection and inverse-rectification transformation map. In essence, this is the inverse of
+#initUndistortRectifyMap to accommodate stereo-rectification of projectors ('inverse-cameras') in projector-camera pairs.
+
+The function computes the joint projection and inverse rectification transformation and represents the
+result in the form of maps for #remap. The projected image looks like a distorted version of the original, which,
+once projected by a projector, should visually match the original. In case of a monocular camera, newCameraMatrix
+is usually equal to cameraMatrix, or it can be computed by
+#getOptimalNewCameraMatrix for better control over scaling. In case of a projector-camera pair,
+newCameraMatrix is normally set to P1 or P2 computed by #stereoRectify .
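+
+For the monocular case mentioned above, a minimal sketch might look like this (K, dist and imgSize are assumed to
+come from a prior calibration and are illustrative only):
+@code{.cpp}
+    cv::Mat newK = cv::getOptimalNewCameraMatrix(K, dist, imgSize, 1.0);
+    cv::Mat map1, map2;
+    cv::initInverseRectificationMap(K, dist, cv::noArray(), newK, imgSize, CV_32FC1, map1, map2);
+@endcode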
+
+The projector is oriented differently in the coordinate space, according to R. In case of projector-camera pairs,
+this helps align the projector (in the same manner as #initUndistortRectifyMap for the camera) to create a stereo-rectified pair. This
+allows epipolar lines on both images to become horizontal and have the same y-coordinate (in case of a horizontally aligned projector-camera pair).
+
+The function builds the maps for the inverse mapping algorithm that is used by #remap. That
+is, for each pixel \f$(u, v)\f$ in the destination (projected and inverse-rectified) image, the function
+computes the corresponding coordinates in the source image (that is, in the original digital image). The following process is applied:
+
+\f[
+\begin{array}{l}
+\text{cameraMatrix}\\
+x \leftarrow (u - c_x)/f_x \\
+y \leftarrow (v - c_y)/f_y \\
+
+\\\text{Undistortion}
+\\\scriptsize{\textit{though the equation shown is for radial undistortion only, the function implements cv::undistortPoints()}}\\
+r^2 \leftarrow x^2 + y^2 \\
+\theta \leftarrow \frac{1 + k_1 r^2 + k_2 r^4 + k_3 r^6}{1 + k_4 r^2 + k_5 r^4 + k_6 r^6}\\
+x' \leftarrow \frac{x}{\theta} \\
+y' \leftarrow \frac{y}{\theta} \\
+
+\\\text{Rectification}\\
+{[X\,Y\,W]} ^T \leftarrow R*[x' \, y' \, 1]^T \\
+x'' \leftarrow X/W \\
+y'' \leftarrow Y/W \\
+
+\\\text{newCameraMatrix}\\
+map_x(u,v) \leftarrow x'' {f'}_x + {c'}_x \\
+map_y(u,v) \leftarrow y'' {f'}_y + {c'}_y
+\end{array}
+\f]
+where \f$(k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\f$
+are the distortion coefficients in the vector distCoeffs.
+
+In case of a stereo-rectified projector-camera pair, this function is called for the projector while #initUndistortRectifyMap is called for the camera head.
+This is done after #stereoRectify, which in turn is called after #stereoCalibrate. If the projector-camera pair
+is not calibrated, it is still possible to compute the rectification transformations directly from
+the fundamental matrix using #stereoRectifyUncalibrated. For the projector and camera, that function computes
+a homography H as the rectification transformation in a pixel domain, not a rotation matrix R in 3D
+space. R can then be computed from H as
+\f[\texttt{R} = \texttt{cameraMatrix} ^{-1} \cdot \texttt{H} \cdot \texttt{cameraMatrix}\f]
+where cameraMatrix can be chosen arbitrarily.
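+
+As a rough usage sketch for a calibrated, stereo-rectified projector-camera pair (the variable names below are
+illustrative only, not part of the API):
+@code{.cpp}
+    // R and T relate the camera (1) and the projector (2), e.g. from a prior stereoCalibrate().
+    cv::Mat R1, R2, P1, P2, Q;
+    cv::stereoRectify(camK, camDist, projK, projDist, imgSize, R, T, R1, R2, P1, P2, Q);
+
+    // Camera side: forward rectification map.
+    cv::Mat camMap1, camMap2;
+    cv::initUndistortRectifyMap(camK, camDist, R1, P1, imgSize, CV_16SC2, camMap1, camMap2);
+
+    // Projector side: inverse rectification map.
+    cv::Mat projMap1, projMap2;
+    cv::initInverseRectificationMap(projK, projDist, R2, P2, projSize, CV_16SC2, projMap1, projMap2);
+
+    // Pre-distort the pattern so that, once physically projected, it appears rectified.
+    cv::Mat predistorted;
+    cv::remap(pattern, predistorted, projMap1, projMap2, cv::INTER_LINEAR);
+@endcode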
+
+@param cameraMatrix Input camera matrix \f$A=\vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\f$ .
+@param distCoeffs Input vector of distortion coefficients
+\f$(k_1, k_2, p_1, p_2[, k_3[, k_4, k_5, k_6[, s_1, s_2, s_3, s_4[, \tau_x, \tau_y]]]])\f$
+of 4, 5, 8, 12 or 14 elements. If the vector is NULL/empty, the zero distortion coefficients are assumed.
+@param R Optional rectification transformation in the object space (3x3 matrix). R1 or R2,
+computed by #stereoRectify, can be passed here. If the matrix is empty, the identity transformation
+is assumed.
+@param newCameraMatrix New camera matrix \f$A'=\vecthreethree{f_x'}{0}{c_x'}{0}{f_y'}{c_y'}{0}{0}{1}\f$.
+@param size Distorted image size.
+@param m1type Type of the first output map that can be CV_32FC1, CV_32FC2 or CV_16SC2, see #convertMaps
+@param map1 The first output map for #remap.
+@param map2 The second output map for #remap.
+ */
+CV_EXPORTS_W
+void initInverseRectificationMap( InputArray cameraMatrix, InputArray distCoeffs,
+ InputArray R, InputArray newCameraMatrix,
+ Size size, int m1type, OutputArray map1, OutputArray map2 );
+
//! initializes maps for #remap for wide-angle
CV_EXPORTS
float initWideAngleProjMap(InputArray cameraMatrix, InputArray distCoeffs,
SANITY_CHECK_NOTHING();
}
+PERF_TEST(Undistort, InitInverseRectificationMap)
+{
+ Size size_w_h(512 + 3, 512);
+ Mat k(3, 3, CV_32FC1);
+ Mat d(1, 14, CV_64FC1);
+ Mat dst(size_w_h, CV_32FC2);
+ declare.in(k, d, WARMUP_RNG).out(dst);
+ TEST_CYCLE() initInverseRectificationMap(k, d, noArray(), k, size_w_h, CV_32FC2, dst, noArray());
+ SANITY_CHECK_NOTHING();
+}
+
} // namespace
fx, fy, k1, k2, p1, p2, k3, k4, k5, k6, s1, s2, s3, s4));
}
+void initInverseRectificationMap( InputArray _cameraMatrix, InputArray _distCoeffs,
+ InputArray _matR, InputArray _newCameraMatrix,
+ Size size, int m1type, OutputArray _map1, OutputArray _map2 )
+{
+ // Parameters
+ Mat cameraMatrix = _cameraMatrix.getMat(), distCoeffs = _distCoeffs.getMat();
+ Mat matR = _matR.getMat(), newCameraMatrix = _newCameraMatrix.getMat();
+
+ // Check m1type validity
+ if( m1type <= 0 )
+ m1type = CV_16SC2;
+ CV_Assert( m1type == CV_16SC2 || m1type == CV_32FC1 || m1type == CV_32FC2 );
+
+ // Init Maps
+ _map1.create( size, m1type );
+ Mat map1 = _map1.getMat(), map2;
+ if( m1type != CV_32FC2 )
+ {
+ _map2.create( size, m1type == CV_16SC2 ? CV_16UC1 : CV_32FC1 );
+ map2 = _map2.getMat();
+ }
+ else {
+ _map2.release();
+ }
+
+ // Init camera intrinsics
+ Mat_<double> A = Mat_<double>(cameraMatrix), Ar;
+ if( !newCameraMatrix.empty() )
+ Ar = Mat_<double>(newCameraMatrix);
+ else
+ Ar = getDefaultNewCameraMatrix( A, size, true );
+ CV_Assert( A.size() == Size(3,3) );
+ CV_Assert( Ar.size() == Size(3,3) || Ar.size() == Size(4, 3));
+
+ // Init rotation matrix
+ Mat_<double> R = Mat_<double>::eye(3, 3);
+ if( !matR.empty() )
+ {
+ R = Mat_<double>(matR);
+        // Note: unlike initUndistortRectifyMap, R is applied directly and must not be inverted here
+ }
+ CV_Assert( Size(3,3) == R.size() );
+
+ // Init distortion vector
+ if( !distCoeffs.empty() )
+ distCoeffs = Mat_<double>(distCoeffs);
+ else
+ {
+ distCoeffs.create(14, 1, CV_64F);
+ distCoeffs = 0.;
+ }
+
+ // Validate distortion vector size
+ CV_Assert( distCoeffs.size() == Size(1, 4) || distCoeffs.size() == Size(4, 1) ||
+ distCoeffs.size() == Size(1, 5) || distCoeffs.size() == Size(5, 1) ||
+ distCoeffs.size() == Size(1, 8) || distCoeffs.size() == Size(8, 1) ||
+ distCoeffs.size() == Size(1, 12) || distCoeffs.size() == Size(12, 1) ||
+ distCoeffs.size() == Size(1, 14) || distCoeffs.size() == Size(14, 1));
+
+ // Fix distortion vector orientation
+ if( distCoeffs.rows != 1 && !distCoeffs.isContinuous() )
+ distCoeffs = distCoeffs.t();
+
+ // Create objectPoints
+ std::vector<cv::Point2i> p2i_objPoints;
+ std::vector<cv::Point2f> p2f_objPoints;
+ for (int r = 0; r < size.height; r++)
+ {
+ for (int c = 0; c < size.width; c++)
+ {
+ p2i_objPoints.push_back(cv::Point2i(c, r));
+ p2f_objPoints.push_back(cv::Point2f(static_cast<float>(c), static_cast<float>(r)));
+ }
+ }
+
+    // Undistort: normalize the destination grid points with cameraMatrix and remove the distCoeffs distortion
+ std::vector<cv::Point2f> p2f_objPoints_undistorted;
+ undistortPoints(
+ p2f_objPoints,
+ p2f_objPoints_undistorted,
+ A,
+ distCoeffs,
+ cv::Mat::eye(cv::Size(3, 3), CV_64FC1), // R
+ cv::Mat::eye(cv::Size(3, 3), CV_64FC1) // P = New K
+ );
+
+ // Rectify
+ std::vector<cv::Point2f> p2f_sourcePoints_pinHole;
+ perspectiveTransform(
+ p2f_objPoints_undistorted,
+ p2f_sourcePoints_pinHole,
+ R
+ );
+
+    // Project the rectified, normalized points into the source image plane using the new camera matrix Ar.
+ std::vector<cv::Point2f> p2f_sourcePoints;
+ undistortPoints(
+ p2f_sourcePoints_pinHole,
+ p2f_sourcePoints,
+ cv::Mat::eye(cv::Size(3, 3), CV_32FC1), // K
+ cv::Mat::zeros(cv::Size(1, 4), CV_32FC1), // Distortion
+ cv::Mat::eye(cv::Size(3, 3), CV_32FC1), // R
+ Ar // New K
+ );
+
+ // Copy to map
+ if (m1type == CV_16SC2) {
+ for (size_t i=0; i < p2i_objPoints.size(); i++) {
+ map1.at<Vec2s>(p2i_objPoints[i].y, p2i_objPoints[i].x) = Vec2s(saturate_cast<short>(p2f_sourcePoints[i].x), saturate_cast<short>(p2f_sourcePoints[i].y));
+ }
+ } else if (m1type == CV_32FC2) {
+ for (size_t i=0; i < p2i_objPoints.size(); i++) {
+ map1.at<Vec2f>(p2i_objPoints[i].y, p2i_objPoints[i].x) = Vec2f(p2f_sourcePoints[i]);
+ }
+ } else { // m1type == CV_32FC1
+ for (size_t i=0; i < p2i_objPoints.size(); i++) {
+ map1.at<float>(p2i_objPoints[i].y, p2i_objPoints[i].x) = p2f_sourcePoints[i].x;
+ map2.at<float>(p2i_objPoints[i].y, p2i_objPoints[i].x) = p2f_sourcePoints[i].y;
+ }
+ }
+}
void undistort( InputArray _src, OutputArray _dst, InputArray _cameraMatrix,
InputArray _distCoeffs, InputArray _newCameraMatrix )
return 8;
}
+//------------------------------------------------------
+
+class CV_InitInverseRectificationMapTest : public cvtest::ArrayTest
+{
+public:
+ CV_InitInverseRectificationMapTest();
+protected:
+ int prepare_test_case (int test_case_idx);
+ void prepare_to_validation( int test_case_idx );
+ void get_test_array_types_and_sizes( int test_case_idx, vector<vector<Size> >& sizes, vector<vector<int> >& types );
+ double get_success_error_level( int test_case_idx, int i, int j );
+ void run_func();
+
+private:
+ static const int MAX_X = 1024;
+ static const int MAX_Y = 1024;
+ bool zero_new_cam;
+ bool zero_distortion;
+ bool zero_R;
+
+ cv::Size img_size;
+ int map_type;
+};
+
+CV_InitInverseRectificationMapTest::CV_InitInverseRectificationMapTest()
+{
+ test_array[INPUT].push_back(NULL); // camera matrix
+ test_array[INPUT].push_back(NULL); // distortion coeffs
+ test_array[INPUT].push_back(NULL); // R matrix
+ test_array[INPUT].push_back(NULL); // new camera matrix
+ test_array[OUTPUT].push_back(NULL); // inverse rectified mapx
+ test_array[OUTPUT].push_back(NULL); // inverse rectified mapy
+ test_array[REF_OUTPUT].push_back(NULL);
+ test_array[REF_OUTPUT].push_back(NULL);
+
+ zero_distortion = zero_new_cam = zero_R = false;
+ map_type = 0;
+}
+
+void CV_InitInverseRectificationMapTest::get_test_array_types_and_sizes( int test_case_idx, vector<vector<Size> >& sizes, vector<vector<int> >& types )
+{
+ cvtest::ArrayTest::get_test_array_types_and_sizes(test_case_idx,sizes,types);
+ RNG& rng = ts->get_rng();
+ //rng.next();
+
+ map_type = CV_32F;
+ types[OUTPUT][0] = types[OUTPUT][1] = types[REF_OUTPUT][0] = types[REF_OUTPUT][1] = map_type;
+
+ img_size.width = cvtest::randInt(rng) % MAX_X + 1;
+ img_size.height = cvtest::randInt(rng) % MAX_Y + 1;
+
+ types[INPUT][0] = cvtest::randInt(rng)%2 ? CV_64F : CV_32F;
+ types[INPUT][1] = cvtest::randInt(rng)%2 ? CV_64F : CV_32F;
+ types[INPUT][2] = cvtest::randInt(rng)%2 ? CV_64F : CV_32F;
+ types[INPUT][3] = cvtest::randInt(rng)%2 ? CV_64F : CV_32F;
+
+ sizes[OUTPUT][0] = sizes[OUTPUT][1] = sizes[REF_OUTPUT][0] = sizes[REF_OUTPUT][1] = img_size;
+ sizes[INPUT][0] = sizes[INPUT][2] = sizes[INPUT][3] = cvSize(3,3);
+
+ Size dsize;
+
+ if (cvtest::randInt(rng)%2)
+ {
+ if (cvtest::randInt(rng)%2)
+ {
+ dsize = Size(1,4);
+ }
+ else
+ {
+ dsize = Size(1,5);
+ }
+ }
+ else
+ {
+ if (cvtest::randInt(rng)%2)
+ {
+ dsize = Size(4,1);
+ }
+ else
+ {
+ dsize = Size(5,1);
+ }
+ }
+ sizes[INPUT][1] = dsize;
+}
+
+
+int CV_InitInverseRectificationMapTest::prepare_test_case(int test_case_idx)
+{
+ RNG& rng = ts->get_rng();
+ int code = cvtest::ArrayTest::prepare_test_case( test_case_idx );
+
+ if (code <= 0)
+ return code;
+
+ int dist_size = test_mat[INPUT][1].cols > test_mat[INPUT][1].rows ? test_mat[INPUT][1].cols : test_mat[INPUT][1].rows;
+ double cam[9] = {0,0,0,0,0,0,0,0,1};
+ vector<double> dist(dist_size);
+ vector<double> new_cam(test_mat[INPUT][3].cols * test_mat[INPUT][3].rows);
+
+ Mat _camera(3,3,CV_64F,cam);
+ Mat _distort(test_mat[INPUT][1].size(),CV_64F,&dist[0]);
+ Mat _new_cam(test_mat[INPUT][3].size(),CV_64F,&new_cam[0]);
+
+ //Generating camera matrix
+ double sz = MAX(img_size.width,img_size.height);
+ double aspect_ratio = cvtest::randReal(rng)*0.6 + 0.7;
+ cam[2] = (img_size.width - 1)*0.5 + cvtest::randReal(rng)*10 - 5;
+ cam[5] = (img_size.height - 1)*0.5 + cvtest::randReal(rng)*10 - 5;
+ cam[0] = sz/(0.9 - cvtest::randReal(rng)*0.6);
+ cam[4] = aspect_ratio*cam[0];
+
+ //Generating distortion coeffs
+ dist[0] = cvtest::randReal(rng)*0.06 - 0.03;
+ dist[1] = cvtest::randReal(rng)*0.06 - 0.03;
+ if( dist[0]*dist[1] > 0 )
+ dist[1] = -dist[1];
+ if( cvtest::randInt(rng)%4 != 0 )
+ {
+ dist[2] = cvtest::randReal(rng)*0.004 - 0.002;
+ dist[3] = cvtest::randReal(rng)*0.004 - 0.002;
+ if (dist_size > 4)
+ dist[4] = cvtest::randReal(rng)*0.004 - 0.002;
+ }
+ else
+ {
+ dist[2] = dist[3] = 0;
+ if (dist_size > 4)
+ dist[4] = 0;
+ }
+
+ //Generating new camera matrix
+ _new_cam = Scalar::all(0);
+ new_cam[8] = 1;
+
+ // If P == K
+ //new_cam[0] = cam[0];
+ //new_cam[4] = cam[4];
+ //new_cam[2] = cam[2];
+ //new_cam[5] = cam[5];
+
+ // If P != K
+ new_cam[0] = cam[0] + (cvtest::randReal(rng) - (double)0.5)*0.2*cam[0]; //10%
+ new_cam[4] = cam[4] + (cvtest::randReal(rng) - (double)0.5)*0.2*cam[4]; //10%
+ new_cam[2] = cam[2] + (cvtest::randReal(rng) - (double)0.5)*0.3*img_size.width; //15%
+ new_cam[5] = cam[5] + (cvtest::randReal(rng) - (double)0.5)*0.3*img_size.height; //15%
+
+ //Generating R matrix
+ Mat _rot(3,3,CV_64F);
+ Mat rotation(1,3,CV_64F);
+ rotation.at<double>(0) = CV_PI/8*(cvtest::randReal(rng) - (double)0.5); // phi
+ rotation.at<double>(1) = CV_PI/8*(cvtest::randReal(rng) - (double)0.5); // ksi
+ rotation.at<double>(2) = CV_PI/3*(cvtest::randReal(rng) - (double)0.5); //khi
+ cvtest::Rodrigues(rotation, _rot);
+
+ //cvSetIdentity(_rot);
+ //copying data
+ cvtest::convert( _camera, test_mat[INPUT][0], test_mat[INPUT][0].type());
+ cvtest::convert( _distort, test_mat[INPUT][1], test_mat[INPUT][1].type());
+ cvtest::convert( _rot, test_mat[INPUT][2], test_mat[INPUT][2].type());
+ cvtest::convert( _new_cam, test_mat[INPUT][3], test_mat[INPUT][3].type());
+
+    zero_distortion = (cvtest::randInt(rng)%2) != 0;
+    zero_new_cam = (cvtest::randInt(rng)%2) != 0;
+    zero_R = (cvtest::randInt(rng)%2) != 0;
+
+ return code;
+}
+
+void CV_InitInverseRectificationMapTest::prepare_to_validation(int/* test_case_idx*/)
+{
+ cvtest::initInverseRectificationMap(test_mat[INPUT][0],
+ zero_distortion ? cv::Mat() : test_mat[INPUT][1],
+ zero_R ? cv::Mat() : test_mat[INPUT][2],
+ zero_new_cam ? test_mat[INPUT][0] : test_mat[INPUT][3],
+ img_size, test_mat[REF_OUTPUT][0], test_mat[REF_OUTPUT][1],
+ test_mat[REF_OUTPUT][0].type());
+}
+
+void CV_InitInverseRectificationMapTest::run_func()
+{
+ cv::Mat camera_mat = test_mat[INPUT][0];
+ cv::Mat dist = zero_distortion ? cv::Mat() : test_mat[INPUT][1];
+ cv::Mat R = zero_R ? cv::Mat() : test_mat[INPUT][2];
+ cv::Mat new_cam = zero_new_cam ? cv::Mat() : test_mat[INPUT][3];
+ cv::Mat& mapx = test_mat[OUTPUT][0], &mapy = test_mat[OUTPUT][1];
+ cv::initInverseRectificationMap(camera_mat,dist,R,new_cam,img_size,map_type,mapx,mapy);
+}
+
+double CV_InitInverseRectificationMapTest::get_success_error_level( int /*test_case_idx*/, int /*i*/, int /*j*/ )
+{
+ return 8;
+}
+
//////////////////////////////////////////////////////////////////////////////////////////////////////
TEST(Calib3d_DefaultNewCameraMatrix, accuracy) { CV_DefaultNewCameraMatrixTest test; test.safe_run(); }
TEST(Calib3d_UndistortPoints, accuracy) { CV_UndistortPointsTest test; test.safe_run(); }
TEST(Calib3d_InitUndistortRectifyMap, accuracy) { CV_InitUndistortRectifyMapTest test; test.safe_run(); }
+TEST(Calib3d_InitInverseRectificationMap, accuracy) { CV_InitInverseRectificationMapTest test; test.safe_run(); }
////////////////////////////// undistort /////////////////////////////////
Mat calcLaplaceKernel2D( int aperture_size );
void initUndistortMap( const Mat& a, const Mat& k, const Mat& R, const Mat& new_a, Size sz, Mat& mapx, Mat& mapy, int map_type );
+void initInverseRectificationMap( const Mat& a, const Mat& k, const Mat& R, const Mat& new_a, Size sz, Mat& mapx, Mat& mapy, int map_type );
void minMaxLoc(const Mat& src, double* minval, double* maxval,
vector<int>* minloc, vector<int>* maxloc, const Mat& mask=Mat());
_mapy.convertTo(__mapy, map_type);
}
+void initInverseRectificationMap( const Mat& _a0, const Mat& _k0, const Mat& _R0, const Mat& _new_cam0, Size sz, Mat& __mapx, Mat& __mapy, int map_type )
+{
+ Mat _mapx(sz, CV_32F), _mapy(sz, CV_32F);
+
+ double a[9], k[5]={0,0,0,0,0}, iR[9]={1, 0, 0, 0, 1, 0, 0, 0, 1}, a1[9];
+ Mat _a(3, 3, CV_64F, a), _a1(3, 3, CV_64F, a1);
+ Mat _k(_k0.rows,_k0.cols, CV_MAKETYPE(CV_64F,_k0.channels()),k);
+ Mat _iR(3, 3, CV_64F, iR);
+ double fx, fy, cx, cy, ifx, ify, cxn, cyn;
+
+ // Camera matrix
+ CV_Assert(_a0.size() == Size(3, 3));
+ _a0.convertTo(_a, CV_64F);
+ if( !_new_cam0.empty() )
+ {
+ CV_Assert(_new_cam0.size() == Size(3, 3));
+ _new_cam0.convertTo(_a1, CV_64F);
+ }
+ else
+ {
+ _a.copyTo(_a1);
+ }
+
+ // Distortion
+ CV_Assert(_k0.empty() ||
+ _k0.size() == Size(5, 1) ||
+ _k0.size() == Size(1, 5) ||
+ _k0.size() == Size(4, 1) ||
+ _k0.size() == Size(1, 4));
+ if( !_k0.empty() )
+ _k0.convertTo(_k, CV_64F);
+
+    // Rotation (note: unlike initUndistortMap, R is used directly and is not inverted)
+    if( !_R0.empty() )
+    {
+        CV_Assert(_R0.size() == Size(3, 3));
+        _R0.convertTo(_iR, CV_64F);
+    }
+
+ // Copy camera matrix
+ fx = a[0]; fy = a[4]; cx = a[2]; cy = a[5];
+
+ // Copy new camera matrix
+ ifx = a1[0]; ify = a1[4]; cxn = a1[2]; cyn = a1[5];
+
+ // Undistort
+ for( int v = 0; v < sz.height; v++ )
+ {
+ for( int u = 0; u < sz.width; u++ )
+ {
+ // Convert from image to pin-hole coordinates
+ double x = (u - cx)/fx;
+ double y = (v - cy)/fy;
+
+ // Undistort
+ double x2 = x*x, y2 = y*y;
+ double r2 = x2 + y2;
+            double cdist = 1./(1 + (k[0] + (k[1] + k[4]*r2)*r2)*r2); // the rational-model denominator (1 + k4 r^2 + k5 r^4 + k6 r^6) is 1 here, since only 4-5 coefficients are supported
+ double x_ = x*cdist - k[2]*2*x*y + k[3]*(r2 + 2*x2);
+ double y_ = y*cdist - k[3]*2*x*y + k[2]*(r2 + 2*y2);
+
+ // Rectify
+ double X = iR[0]*x_ + iR[1]*y_ + iR[2];
+ double Y = iR[3]*x_ + iR[4]*y_ + iR[5];
+ double Z = iR[6]*x_ + iR[7]*y_ + iR[8];
+ double x__ = X/Z;
+ double y__ = Y/Z;
+
+ // Convert from pin-hole to image coordinates
+ _mapy.at<float>(v, u) = (float)(y__*ify + cyn);
+ _mapx.at<float>(v, u) = (float)(x__*ifx + cxn);
+ }
+ }
+
+ _mapx.convertTo(__mapx, map_type);
+ _mapy.convertTo(__mapy, map_type);
+}
+
std::ostream& operator << (std::ostream& out, const MatInfo& m)
{
if( !m.m || m.m->empty() )