Merge pull request #22519 from stefan-spiss:stereo_calib_per_obj_extr_ret

Stereo Calibration: Return rotation and translation vectors for each calibration object
Committed 2 years ago by Alexander Smorkalov via GitHub (commit fcf9f117b0)
  1. modules/calib3d/include/opencv2/calib3d.hpp (37 changed lines)
  2. modules/calib3d/src/calibration.cpp (128 changed lines)
  3. modules/calib3d/src/fisheye.cpp (28 changed lines)
  4. modules/calib3d/test/test_cameracalibration.cpp (165 changed lines)
  5. modules/calib3d/test/test_fisheye.cpp (105 changed lines)

@@ -1748,6 +1748,15 @@ second camera coordinate system.
@param T Output translation vector, see description above.
@param E Output essential matrix.
@param F Output fundamental matrix.
@param rvecs Output vector of rotation vectors ( @ref Rodrigues ) estimated for each pattern view in the
coordinate system of the first camera of the stereo pair (e.g. std::vector<cv::Mat>). In more detail, each
i-th rotation vector together with the corresponding i-th translation vector (see the next output parameter
description) brings the calibration pattern from the object coordinate space (in which object points are
specified) to the camera coordinate space of the first camera of the stereo pair. In more technical terms,
the tuple of the i-th rotation and translation vector performs a change of basis from object coordinate space
to camera coordinate space of the first camera of the stereo pair.
@param tvecs Output vector of translation vectors estimated for each pattern view, see the description
of the previous output parameter ( rvecs ).
@param perViewErrors Output vector of the RMS re-projection error estimated for each pattern view.
@param flags Different flags that may be zero or a combination of the following values:
- @ref CALIB_FIX_INTRINSIC Fix cameraMatrix? and distCoeffs? so that only R, T, E, and F
@@ -1844,8 +1853,8 @@ CV_EXPORTS_AS(stereoCalibrateExtended) double stereoCalibrate( InputArrayOfArray
InputArrayOfArrays imagePoints1, InputArrayOfArrays imagePoints2,
InputOutputArray cameraMatrix1, InputOutputArray distCoeffs1,
InputOutputArray cameraMatrix2, InputOutputArray distCoeffs2,
Size imageSize, InputOutputArray R,InputOutputArray T, OutputArray E, OutputArray F,
OutputArray perViewErrors, int flags = CALIB_FIX_INTRINSIC,
Size imageSize, InputOutputArray R, InputOutputArray T, OutputArray E, OutputArray F,
OutputArrayOfArrays rvecs, OutputArrayOfArrays tvecs, OutputArray perViewErrors, int flags = CALIB_FIX_INTRINSIC,
TermCriteria criteria = TermCriteria(TermCriteria::COUNT+TermCriteria::EPS, 30, 1e-6) );
/// @overload
@@ -1857,6 +1866,15 @@ CV_EXPORTS_W double stereoCalibrate( InputArrayOfArrays objectPoints,
int flags = CALIB_FIX_INTRINSIC,
TermCriteria criteria = TermCriteria(TermCriteria::COUNT+TermCriteria::EPS, 30, 1e-6) );
/// @overload
CV_EXPORTS_W double stereoCalibrate( InputArrayOfArrays objectPoints,
InputArrayOfArrays imagePoints1, InputArrayOfArrays imagePoints2,
InputOutputArray cameraMatrix1, InputOutputArray distCoeffs1,
InputOutputArray cameraMatrix2, InputOutputArray distCoeffs2,
Size imageSize, InputOutputArray R, InputOutputArray T, OutputArray E, OutputArray F,
OutputArray perViewErrors, int flags = CALIB_FIX_INTRINSIC,
TermCriteria criteria = TermCriteria(TermCriteria::COUNT+TermCriteria::EPS, 30, 1e-6) );
/** @brief Computes rectification transforms for each head of a calibrated stereo camera.
@param cameraMatrix1 First camera intrinsic matrix.
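
Not part of the patch, but for reference: a minimal usage sketch of the extended stereoCalibrate overload above. The helper name, the empty intrinsics and flags = 0 (letting the function estimate the intrinsics) are placeholder choices; the sketch also shows how the new per-view rvecs/tvecs combine with R and T to give the pattern pose in the second camera's frame, as described in the parameter documentation.

#include <opencv2/calib3d.hpp>
#include <vector>

// Usage sketch: objectPoints/imagePoints1/imagePoints2 are assumed to already
// hold the detected calibration pattern points for every view.
double calibrateStereoPair(const std::vector<std::vector<cv::Point3f> >& objectPoints,
                           const std::vector<std::vector<cv::Point2f> >& imagePoints1,
                           const std::vector<std::vector<cv::Point2f> >& imagePoints2,
                           cv::Size imageSize)
{
    cv::Mat K1, D1, K2, D2, R, T, E, F, perViewErrors;
    std::vector<cv::Mat> rvecs, tvecs;   // per-view outputs added by this PR

    double rms = cv::stereoCalibrate(objectPoints, imagePoints1, imagePoints2,
                                     K1, D1, K2, D2, imageSize, R, T, E, F,
                                     rvecs, tvecs, perViewErrors,
                                     0 /* estimate intrinsics as well */);

    for (size_t i = 0; i < rvecs.size(); i++)
    {
        cv::Mat R1;
        cv::Rodrigues(rvecs[i], R1);      // pattern -> first camera rotation
        cv::Mat R2 = R * R1;              // pattern -> second camera rotation
        cv::Mat t2 = R * tvecs[i] + T;    // pattern -> second camera translation
        CV_UNUSED(R2); CV_UNUSED(t2);
    }
    return rms;
}
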
@@ -3977,6 +3995,15 @@ optimization. It is the \f$max(width,height)/\pi\f$ or the provided \f$f_x\f$, \
@param imageSize Size of the image used only to initialize camera intrinsic matrix.
@param R Output rotation matrix between the 1st and the 2nd camera coordinate systems.
@param T Output translation vector between the coordinate systems of the cameras.
@param rvecs Output vector of rotation vectors ( @ref Rodrigues ) estimated for each pattern view in the
coordinate system of the first camera of the stereo pair (e.g. std::vector<cv::Mat>). In more detail, each
i-th rotation vector together with the corresponding i-th translation vector (see the next output parameter
description) brings the calibration pattern from the object coordinate space (in which object points are
specified) to the camera coordinate space of the first camera of the stereo pair. In more technical terms,
the tuple of the i-th rotation and translation vector performs a change of basis from object coordinate space
to camera coordinate space of the first camera of the stereo pair.
@param tvecs Output vector of translation vectors estimated for each pattern view, see the description
of the previous output parameter ( rvecs ).
@param flags Different flags that may be zero or a combination of the following values:
- @ref fisheye::CALIB_FIX_INTRINSIC Fix K1, K2? and D1, D2? so that only R, T matrices
are estimated.
@@ -3991,6 +4018,12 @@ optimization. It is the \f$max(width,height)/\pi\f$ or the provided \f$f_x\f$, \
zero.
@param criteria Termination criteria for the iterative optimization algorithm.
*/
CV_EXPORTS_W double stereoCalibrate(InputArrayOfArrays objectPoints, InputArrayOfArrays imagePoints1, InputArrayOfArrays imagePoints2,
InputOutputArray K1, InputOutputArray D1, InputOutputArray K2, InputOutputArray D2, Size imageSize,
OutputArray R, OutputArray T, OutputArrayOfArrays rvecs, OutputArrayOfArrays tvecs, int flags = fisheye::CALIB_FIX_INTRINSIC,
TermCriteria criteria = TermCriteria(TermCriteria::COUNT + TermCriteria::EPS, 100, DBL_EPSILON));
/// @overload
CV_EXPORTS_W double stereoCalibrate(InputArrayOfArrays objectPoints, InputArrayOfArrays imagePoints1, InputArrayOfArrays imagePoints2,
InputOutputArray K1, InputOutputArray D1, InputOutputArray K2, InputOutputArray D2, Size imageSize,
OutputArray R, OutputArray T, int flags = fisheye::CALIB_FIX_INTRINSIC,
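
Likewise, a minimal sketch for the new fisheye overload declared above (not part of the patch; the helper name and inputs are placeholders, and CALIB_RECOMPUTE_EXTRINSIC / CALIB_FIX_SKEW are just example flags). It also shows a per-view pose being moved into the right camera's frame before reprojection, which is what the new fisheye test does.

#include <opencv2/calib3d.hpp>
#include <vector>

// Fisheye usage sketch: point containers are assumed to be filled; at least
// one view is assumed when reprojecting view 0 below.
void calibrateFisheyePair(const std::vector<std::vector<cv::Point3d> >& objectPoints,
                          const std::vector<std::vector<cv::Point2d> >& leftPoints,
                          const std::vector<std::vector<cv::Point2d> >& rightPoints,
                          cv::Size imageSize)
{
    cv::Matx33d K1, K2, R;
    cv::Vec4d D1, D2;
    cv::Vec3d T;
    std::vector<cv::Mat> rvecs, tvecs;   // per-view pattern pose in the left camera frame

    int flags = cv::fisheye::CALIB_RECOMPUTE_EXTRINSIC | cv::fisheye::CALIB_FIX_SKEW;
    cv::fisheye::stereoCalibrate(objectPoints, leftPoints, rightPoints,
                                 K1, D1, K2, D2, imageSize, R, T, rvecs, tvecs, flags);

    // Reproject view 0 into the right camera using the composed pose.
    cv::Mat R1, rvec2;
    cv::Rodrigues(rvecs[0], R1);
    cv::Mat R2 = cv::Mat(R) * R1;
    cv::Rodrigues(R2, rvec2);
    cv::Mat tvec2 = cv::Mat(R) * tvecs[0] + cv::Mat(T);
    std::vector<cv::Point2d> reprojected;
    cv::fisheye::projectPoints(objectPoints[0], reprojected, rvec2, tvec2, K2, D2);
}
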

@@ -1973,7 +1973,7 @@ static double cvStereoCalibrateImpl( const CvMat* _objectPoints, const CvMat* _i
CvMat* _cameraMatrix2, CvMat* _distCoeffs2,
CvSize imageSize, CvMat* matR, CvMat* matT,
CvMat* matE, CvMat* matF,
CvMat* perViewErr, int flags,
CvMat* rvecs, CvMat* tvecs, CvMat* perViewErr, int flags,
CvTermCriteria termCrit )
{
const int NINTRINSIC = 18;
@@ -2013,6 +2013,28 @@ static double cvStereoCalibrateImpl( const CvMat* _objectPoints, const CvMat* _i
cvConvert( _objectPoints, objectPoints );
cvReshape( objectPoints, objectPoints, 3, 1 );
if( rvecs )
{
int cn = CV_MAT_CN(rvecs->type);
if( !CV_IS_MAT(rvecs) ||
(CV_MAT_DEPTH(rvecs->type) != CV_32F && CV_MAT_DEPTH(rvecs->type) != CV_64F) ||
((rvecs->rows != nimages || (rvecs->cols*cn != 3 && rvecs->cols*cn != 9)) &&
(rvecs->rows != 1 || rvecs->cols != nimages || cn != 3)) )
CV_Error( CV_StsBadArg, "the output array of rotation vectors must be 3-channel "
"1xn or nx1 array or 1-channel nx3 or nx9 array, where n is the number of views" );
}
if( tvecs )
{
int cn = CV_MAT_CN(tvecs->type);
if( !CV_IS_MAT(tvecs) ||
(CV_MAT_DEPTH(tvecs->type) != CV_32F && CV_MAT_DEPTH(tvecs->type) != CV_64F) ||
((tvecs->rows != nimages || tvecs->cols*cn != 3) &&
(tvecs->rows != 1 || tvecs->cols != nimages || cn != 3)) )
CV_Error( CV_StsBadArg, "the output array of translation vectors must be 3-channel "
"1xn or nx1 array or 1-channel nx3 array, where n is the number of views" );
}
for( k = 0; k < 2; k++ )
{
const CvMat* points = k == 0 ? _imagePoints1 : _imagePoints2;
@@ -2440,6 +2462,39 @@ static double cvStereoCalibrateImpl( const CvMat* _objectPoints, const CvMat* _i
}
}
CvMat tmp = cvMat(3, 3, CV_64F);
for( i = 0; i < nimages; i++ )
{
CvMat src, dst;
if( rvecs )
{
src = cvMat(3, 1, CV_64F, solver.param->data.db+(i+1)*6);
if( rvecs->rows == nimages && rvecs->cols*CV_MAT_CN(rvecs->type) == 9 )
{
dst = cvMat(3, 3, CV_MAT_DEPTH(rvecs->type),
rvecs->data.ptr + rvecs->step*i);
cvRodrigues2( &src, &tmp );
cvConvert( &tmp, &dst );
}
else
{
dst = cvMat(3, 1, CV_MAT_DEPTH(rvecs->type), rvecs->rows == 1 ?
rvecs->data.ptr + i*CV_ELEM_SIZE(rvecs->type) :
rvecs->data.ptr + rvecs->step*i);
cvConvert( &src, &dst );
}
}
if( tvecs )
{
src = cvMat(3, 1,CV_64F,solver.param->data.db+(i+1)*6+3);
dst = cvMat(3, 1, CV_MAT_DEPTH(tvecs->type), tvecs->rows == 1 ?
tvecs->data.ptr + i*CV_ELEM_SIZE(tvecs->type) :
tvecs->data.ptr + tvecs->step*i);
cvConvert( &src, &dst );
}
}
return std::sqrt(reprojErr/(pointsTotal*2));
}
double cvStereoCalibrate( const CvMat* _objectPoints, const CvMat* _imagePoints1,
@@ -2453,7 +2508,7 @@ double cvStereoCalibrate( const CvMat* _objectPoints, const CvMat* _imagePoints1
{
return cvStereoCalibrateImpl(_objectPoints, _imagePoints1, _imagePoints2, _npoints, _cameraMatrix1,
_distCoeffs1, _cameraMatrix2, _distCoeffs2, imageSize, matR, matT, matE,
matF, NULL, flags, termCrit);
matF, NULL, NULL, NULL, flags, termCrit);
}
static void
@@ -3886,7 +3941,22 @@ double cv::stereoCalibrate( InputArrayOfArrays _objectPoints,
InputOutputArray _cameraMatrix2, InputOutputArray _distCoeffs2,
Size imageSize, InputOutputArray _Rmat, InputOutputArray _Tmat,
OutputArray _Emat, OutputArray _Fmat,
OutputArray _perViewErrors, int flags ,
OutputArray _perViewErrors, int flags,
TermCriteria criteria)
{
return stereoCalibrate(_objectPoints, _imagePoints1, _imagePoints2, _cameraMatrix1, _distCoeffs1,
_cameraMatrix2, _distCoeffs2, imageSize, _Rmat, _Tmat, _Emat, _Fmat,
noArray(), noArray(), _perViewErrors, flags, criteria);
}
double cv::stereoCalibrate( InputArrayOfArrays _objectPoints,
InputArrayOfArrays _imagePoints1,
InputArrayOfArrays _imagePoints2,
InputOutputArray _cameraMatrix1, InputOutputArray _distCoeffs1,
InputOutputArray _cameraMatrix2, InputOutputArray _distCoeffs2,
Size imageSize, InputOutputArray _Rmat, InputOutputArray _Tmat,
OutputArray _Emat, OutputArray _Fmat,
OutputArrayOfArrays _rvecs, OutputArrayOfArrays _tvecs, OutputArray _perViewErrors, int flags,
TermCriteria criteria)
{
int rtype = CV_64F;
@@ -3901,19 +3971,22 @@ double cv::stereoCalibrate( InputArrayOfArrays _objectPoints,
if( !(flags & CALIB_RATIONAL_MODEL) &&
(!(flags & CALIB_THIN_PRISM_MODEL)) &&
(!(flags & CALIB_TILTED_MODEL)))
(!(flags & CALIB_TILTED_MODEL)) )
{
distCoeffs1 = distCoeffs1.rows == 1 ? distCoeffs1.colRange(0, 5) : distCoeffs1.rowRange(0, 5);
distCoeffs2 = distCoeffs2.rows == 1 ? distCoeffs2.colRange(0, 5) : distCoeffs2.rowRange(0, 5);
}
if((flags & CALIB_USE_EXTRINSIC_GUESS) == 0)
if( (flags & CALIB_USE_EXTRINSIC_GUESS) == 0 )
{
_Rmat.create(3, 3, rtype);
_Tmat.create(3, 1, rtype);
}
Mat objPt, imgPt, imgPt2, npoints;
int nimages = int(_objectPoints.total());
CV_Assert( nimages > 0 );
Mat objPt, imgPt, imgPt2, npoints, rvecLM, tvecLM;
collectCalibrationData( _objectPoints, _imagePoints1, _imagePoints2,
objPt, imgPt, &imgPt2, npoints );
@@ -3923,7 +3996,7 @@ double cv::stereoCalibrate( InputArrayOfArrays _objectPoints,
Mat matR_ = _Rmat.getMat(), matT_ = _Tmat.getMat();
CvMat c_matR = cvMat(matR_), c_matT = cvMat(matT_), c_matE, c_matF, c_matErr;
bool E_needed = _Emat.needed(), F_needed = _Fmat.needed(), errors_needed = _perViewErrors.needed();
bool E_needed = _Emat.needed(), F_needed = _Fmat.needed(), rvecs_needed = _rvecs.needed(), tvecs_needed = _tvecs.needed(), errors_needed = _perViewErrors.needed();
Mat matE_, matF_, matErr_;
if( E_needed )
@@ -3939,9 +4012,31 @@ double cv::stereoCalibrate( InputArrayOfArrays _objectPoints,
c_matF = cvMat(matF_);
}
bool rvecs_mat_vec = _rvecs.isMatVector();
bool tvecs_mat_vec = _tvecs.isMatVector();
if( rvecs_needed )
{
_rvecs.create(nimages, 1, CV_64FC3);
if( rvecs_mat_vec )
rvecLM.create(nimages, 3, CV_64F);
else
rvecLM = _rvecs.getMat();
}
if( tvecs_needed )
{
_tvecs.create(nimages, 1, CV_64FC3);
if( tvecs_mat_vec )
tvecLM.create(nimages, 3, CV_64F);
else
tvecLM = _tvecs.getMat();
}
CvMat c_rvecLM = cvMat(rvecLM), c_tvecLM = cvMat(tvecLM);
if( errors_needed )
{
int nimages = int(_objectPoints.total());
_perViewErrors.create(nimages, 2, CV_64F);
matErr_ = _perViewErrors.getMat();
c_matErr = cvMat(matErr_);
@@ -3950,6 +4045,7 @@ double cv::stereoCalibrate( InputArrayOfArrays _objectPoints,
double err = cvStereoCalibrateImpl(&c_objPt, &c_imgPt, &c_imgPt2, &c_npoints, &c_cameraMatrix1,
&c_distCoeffs1, &c_cameraMatrix2, &c_distCoeffs2, cvSize(imageSize), &c_matR,
&c_matT, E_needed ? &c_matE : NULL, F_needed ? &c_matF : NULL,
rvecs_needed ? &c_rvecLM : NULL, tvecs_needed ? &c_tvecLM : NULL,
errors_needed ? &c_matErr : NULL, flags, cvTermCriteria(criteria));
cameraMatrix1.copyTo(_cameraMatrix1);
@@ -3957,6 +4053,22 @@ double cv::stereoCalibrate( InputArrayOfArrays _objectPoints,
distCoeffs1.copyTo(_distCoeffs1);
distCoeffs2.copyTo(_distCoeffs2);
for(int i = 0; i < nimages; i++ )
{
if( rvecs_needed && rvecs_mat_vec )
{
_rvecs.create(3, 1, CV_64F, i, true);
Mat rv = _rvecs.getMat(i);
memcpy(rv.ptr(), rvecLM.ptr(i), 3*sizeof(double));
}
if( tvecs_needed && tvecs_mat_vec )
{
_tvecs.create(3, 1, CV_64F, i, true);
Mat tv = _tvecs.getMat(i);
memcpy(tv.ptr(), tvecLM.ptr(i), 3*sizeof(double));
}
}
return err;
}
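
A note on how the error outputs above fit together (this follows from the existing definitions in cvStereoCalibrateImpl, not from new behaviour added here): with \f$n_i\f$ the number of points in view \f$i\f$ and \f$e_{1,i}\f$, \f$e_{2,i}\f$ the per-view RMS errors written to perViewErr for the two cameras, the value returned by cvStereoCalibrateImpl is

\f[ \mathrm{rms} = \sqrt{ \frac{ \sum_i n_i \left( e_{1,i}^2 + e_{2,i}^2 \right) }{ 2 \sum_i n_i } } \f]

i.e. the overall RMS over both cameras and all points, which is exactly the quantity the new test recomputes from image points reprojected with the returned per-view rvecs/tvecs.
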

@@ -885,6 +885,13 @@ double cv::fisheye::calibrate(InputArrayOfArrays objectPoints, InputArrayOfArray
double cv::fisheye::stereoCalibrate(InputArrayOfArrays objectPoints, InputArrayOfArrays imagePoints1, InputArrayOfArrays imagePoints2,
InputOutputArray K1, InputOutputArray D1, InputOutputArray K2, InputOutputArray D2, Size imageSize,
OutputArray R, OutputArray T, int flags, TermCriteria criteria)
{
return cv::fisheye::stereoCalibrate(objectPoints, imagePoints1, imagePoints2, K1, D1, K2, D2, imageSize, R, T, noArray(), noArray(), flags, criteria);
}
double cv::fisheye::stereoCalibrate(InputArrayOfArrays objectPoints, InputArrayOfArrays imagePoints1, InputArrayOfArrays imagePoints2,
InputOutputArray K1, InputOutputArray D1, InputOutputArray K2, InputOutputArray D2, Size imageSize,
OutputArray R, OutputArray T, OutputArrayOfArrays rvecs, OutputArrayOfArrays tvecs, int flags, TermCriteria criteria)
{
CV_INSTRUMENT_REGION();
@@ -1116,6 +1123,27 @@ double cv::fisheye::stereoCalibrate(InputArrayOfArrays objectPoints, InputArrayO
if (D2.needed()) cv::Mat(intrinsicRight.k).convertTo(D2, D2.empty() ? CV_64FC1 : D2.type());
if (R.needed()) _R.convertTo(R, R.empty() ? CV_64FC1 : R.type());
if (T.needed()) cv::Mat(Tcur).convertTo(T, T.empty() ? CV_64FC1 : T.type());
if (rvecs.isMatVector())
{
if(rvecs.empty())
rvecs.create(n_images, 1, CV_64FC3);
if(tvecs.empty())
tvecs.create(n_images, 1, CV_64FC3);
for(int i = 0; i < n_images; i++ )
{
rvecs.create(3, 1, CV_64F, i, true);
tvecs.create(3, 1, CV_64F, i, true);
memcpy(rvecs.getMat(i).ptr(), rvecs1[i].val, sizeof(Vec3d));
memcpy(tvecs.getMat(i).ptr(), tvecs1[i].val, sizeof(Vec3d));
}
}
else
{
if (rvecs.needed()) cv::Mat(rvecs1).convertTo(rvecs, rvecs.empty() ? CV_64FC3 : rvecs.type());
if (tvecs.needed()) cv::Mat(tvecs1).convertTo(tvecs, tvecs.empty() ? CV_64FC3 : tvecs.type());
}
return rms;
}

@@ -1237,7 +1237,10 @@ protected:
Mat& cameraMatrix1, Mat& distCoeffs1,
Mat& cameraMatrix2, Mat& distCoeffs2,
Size imageSize, Mat& R, Mat& T,
Mat& E, Mat& F, TermCriteria criteria, int flags ) = 0;
Mat& E, Mat& F,
std::vector<RotMat>& rotationMatrices, std::vector<Vec3d>& translationVectors,
vector<double>& perViewErrors1, vector<double>& perViewErrors2,
TermCriteria criteria, int flags ) = 0;
virtual void rectify( const Mat& cameraMatrix1, const Mat& distCoeffs1,
const Mat& cameraMatrix2, const Mat& distCoeffs2,
Size imageSize, const Mat& R, const Mat& T,
@@ -1253,6 +1256,8 @@ protected:
virtual void correct( const Mat& F,
const Mat &points1, const Mat &points2,
Mat &newPoints1, Mat &newPoints2 ) = 0;
int compare(double* val, double* refVal, int len,
double eps, const char* paramName);
void run(int);
};
@@ -1319,12 +1324,19 @@ bool CV_StereoCalibrationTest::checkPandROI( int test_case_idx, const Mat& M, co
return true;
}
int CV_StereoCalibrationTest::compare(double* val, double* ref_val, int len,
double eps, const char* param_name )
{
return cvtest::cmpEps2_64f( ts, val, ref_val, len, eps, param_name );
}
void CV_StereoCalibrationTest::run( int )
{
const int ntests = 1;
const double maxReprojErr = 2;
const double maxScanlineDistErr_c = 3;
const double maxScanlineDistErr_uc = 4;
const double maxDiffBtwRmsErrors = 1e-4;
FILE* f = 0;
for(int testcase = 1; testcase <= ntests; testcase++)
@@ -1401,13 +1413,23 @@ void CV_StereoCalibrationTest::run( int )
objpt[i].push_back(Point3f((float)(j%patternSize.width), (float)(j/patternSize.width), 0.f));
}
vector<RotMat> rotMats1(nframes);
vector<Vec3d> transVecs1(nframes);
vector<RotMat> rotMats2(nframes);
vector<Vec3d> transVecs2(nframes);
vector<double> rmsErrorPerView1(nframes);
vector<double> rmsErrorPerView2(nframes);
vector<double> rmsErrorPerViewFromReprojectedImgPts1(nframes);
vector<double> rmsErrorPerViewFromReprojectedImgPts2(nframes);
// rectify (calibrated)
Mat M1 = Mat::eye(3,3,CV_64F), M2 = Mat::eye(3,3,CV_64F), D1(5,1,CV_64F), D2(5,1,CV_64F), R, T, E, F;
M1.at<double>(0,2) = M2.at<double>(0,2)=(imgsize.width-1)*0.5;
M1.at<double>(1,2) = M2.at<double>(1,2)=(imgsize.height-1)*0.5;
D1 = Scalar::all(0);
D2 = Scalar::all(0);
double err = calibrateStereoCamera(objpt, imgpt1, imgpt2, M1, D1, M2, D2, imgsize, R, T, E, F,
double rmsErrorFromStereoCalib = calibrateStereoCamera(objpt, imgpt1, imgpt2, M1, D1, M2, D2, imgsize, R, T, E, F,
rotMats1, transVecs1, rmsErrorPerView1, rmsErrorPerView2,
TermCriteria(TermCriteria::MAX_ITER+TermCriteria::EPS, 30, 1e-6),
CV_CALIB_SAME_FOCAL_LENGTH
//+ CV_CALIB_FIX_ASPECT_RATIO
@@ -1416,11 +1438,89 @@ void CV_StereoCalibrationTest::run( int )
+ CV_CALIB_FIX_K3
+ CV_CALIB_FIX_K4 + CV_CALIB_FIX_K5 //+ CV_CALIB_FIX_K6
);
err /= nframes*npoints;
if( err > maxReprojErr )
/* rmsErrorFromStereoCalib /= nframes*npoints; */
if (rmsErrorFromStereoCalib > maxReprojErr)
{
ts->printf( cvtest::TS::LOG, "The average reprojection error is too big (=%g), testcase %d\n", err, testcase);
ts->set_failed_test_info( cvtest::TS::FAIL_INVALID_OUTPUT );
ts->printf(cvtest::TS::LOG, "The average reprojection error is too big (=%g), testcase %d\n",
rmsErrorFromStereoCalib, testcase);
ts->set_failed_test_info(cvtest::TS::FAIL_INVALID_OUTPUT);
return;
}
double rmsErrorFromReprojectedImgPts = 0.0f;
if (rotMats1.empty() || transVecs1.empty())
{
rmsErrorPerViewFromReprojectedImgPts1 = rmsErrorPerView1;
rmsErrorPerViewFromReprojectedImgPts2 = rmsErrorPerView2;
rmsErrorFromReprojectedImgPts = rmsErrorFromStereoCalib;
}
else
{
vector<Point2f > reprojectedImgPts[2] = {vector<Point2f>(nframes), vector<Point2f>(nframes)};
size_t totalPoints = 0;
double totalErr[2] = { 0, 0 }, viewErr[2];
for (size_t i = 0; i < objpt.size(); ++i) {
RotMat r1 = rotMats1[i];
Vec3d t1 = transVecs1[i];
RotMat r2 = Mat(R * r1);
Mat T2t = R * t1;
Vec3d t2 = Mat(T2t + T);
projectPoints(objpt[i], r1, t1, M1, D1, reprojectedImgPts[0]);
projectPoints(objpt[i], r2, t2, M2, D2, reprojectedImgPts[1]);
viewErr[0] = cv::norm(imgpt1[i], reprojectedImgPts[0], cv::NORM_L2SQR);
viewErr[1] = cv::norm(imgpt2[i], reprojectedImgPts[1], cv::NORM_L2SQR);
size_t n = objpt[i].size();
totalErr[0] += viewErr[0];
totalErr[1] += viewErr[1];
totalPoints += n;
rmsErrorPerViewFromReprojectedImgPts1[i] = sqrt(viewErr[0] / n);
rmsErrorPerViewFromReprojectedImgPts2[i] = sqrt(viewErr[1] / n);
}
rmsErrorFromReprojectedImgPts = std::sqrt((totalErr[0] + totalErr[1]) / (2 * totalPoints));
}
if (abs(rmsErrorFromStereoCalib - rmsErrorFromReprojectedImgPts) > maxDiffBtwRmsErrors)
{
ts->printf(cvtest::TS::LOG,
"The difference of the average reprojection error from the calibration function and from the "
"reprojected image points is too big (|%g - %g| = %g), testcase %d\n",
rmsErrorFromStereoCalib, rmsErrorFromReprojectedImgPts,
(rmsErrorFromStereoCalib - rmsErrorFromReprojectedImgPts), testcase);
ts->set_failed_test_info(cvtest::TS::FAIL_INVALID_OUTPUT);
return;
}
/* ----- Compare per view rms re-projection errors ----- */
CV_Assert(rmsErrorPerView1.size() == (size_t)nframes);
CV_Assert(rmsErrorPerViewFromReprojectedImgPts1.size() == (size_t)nframes);
CV_Assert(rmsErrorPerView2.size() == (size_t)nframes);
CV_Assert(rmsErrorPerViewFromReprojectedImgPts2.size() == (size_t)nframes);
int code1 = compare(&rmsErrorPerView1[0], &rmsErrorPerViewFromReprojectedImgPts1[0], nframes,
maxDiffBtwRmsErrors, "per view errors vector");
int code2 = compare(&rmsErrorPerView2[0], &rmsErrorPerViewFromReprojectedImgPts2[0], nframes,
maxDiffBtwRmsErrors, "per view errors vector");
if (code1 < 0)
{
ts->printf(cvtest::TS::LOG,
"Some of the per view rms reprojection errors differ between calibration function and reprojected "
"points, for the first camera, testcase %d\n",
testcase);
ts->set_failed_test_info(code1);
return;
}
if (code2 < 0)
{
ts->printf(cvtest::TS::LOG,
"Some of the per view rms reprojection errors differ between calibration function and reprojected "
"points, for the second camera, testcase %d\n",
testcase);
ts->set_failed_test_info(code2);
return;
}
@@ -1640,7 +1740,10 @@ protected:
Mat& cameraMatrix1, Mat& distCoeffs1,
Mat& cameraMatrix2, Mat& distCoeffs2,
Size imageSize, Mat& R, Mat& T,
Mat& E, Mat& F, TermCriteria criteria, int flags );
Mat& E, Mat& F,
std::vector<RotMat>& rotationMatrices, std::vector<Vec3d>& translationVectors,
vector<double>& perViewErrors1, vector<double>& perViewErrors2,
TermCriteria criteria, int flags );
virtual void rectify( const Mat& cameraMatrix1, const Mat& distCoeffs1,
const Mat& cameraMatrix2, const Mat& distCoeffs2,
Size imageSize, const Mat& R, const Mat& T,
@@ -1664,11 +1767,53 @@ double CV_StereoCalibrationTest_CPP::calibrateStereoCamera( const vector<vector<
Mat& cameraMatrix1, Mat& distCoeffs1,
Mat& cameraMatrix2, Mat& distCoeffs2,
Size imageSize, Mat& R, Mat& T,
Mat& E, Mat& F, TermCriteria criteria, int flags )
Mat& E, Mat& F,
std::vector<RotMat>& rotationMatrices, std::vector<Vec3d>& translationVectors,
vector<double>& perViewErrors1, vector<double>& perViewErrors2,
TermCriteria criteria, int flags )
{
return stereoCalibrate( objectPoints, imagePoints1, imagePoints2,
vector<Mat> rvecs, tvecs;
Mat perViewErrorsMat;
double avgErr = stereoCalibrate( objectPoints, imagePoints1, imagePoints2,
cameraMatrix1, distCoeffs1, cameraMatrix2, distCoeffs2,
imageSize, R, T, E, F, flags, criteria );
imageSize, R, T, E, F,
rvecs, tvecs, perViewErrorsMat,
flags, criteria );
size_t numImgs = imagePoints1.size();
if (perViewErrors1.size() != numImgs)
{
perViewErrors1.resize(numImgs);
}
if (perViewErrors2.size() != numImgs)
{
perViewErrors2.resize(numImgs);
}
for (size_t i = 0; i<numImgs; i++)
{
perViewErrors1[i] = perViewErrorsMat.at<double>(i, 0);
perViewErrors2[i] = perViewErrorsMat.at<double>(i, 1);
}
if (rotationMatrices.size() != numImgs)
{
rotationMatrices.resize(numImgs);
}
if (translationVectors.size() != numImgs)
{
translationVectors.resize(numImgs);
}
for( size_t i = 0; i < numImgs; i++ )
{
Mat r9;
cv::Rodrigues( rvecs[i], r9 );
r9.convertTo(rotationMatrices[i], CV_64F);
tvecs[i].convertTo(translationVectors[i], CV_64F);
}
return avgErr;
}
void CV_StereoCalibrationTest_CPP::rectify( const Mat& cameraMatrix1, const Mat& distCoeffs1,
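
The per-view consistency check performed by the test above can also be written compactly against the public API. A sketch, assuming stereoCalibrate has already been run and its outputs are at hand (the helper name and the 1e-4 tolerance, matching maxDiffBtwRmsErrors in the test, are illustrative):

#include <opencv2/calib3d.hpp>
#include <vector>
#include <cmath>

// Recompute each view's RMS reprojection error from the returned per-view
// extrinsics and compare it with the corresponding row of perViewErrors.
void checkPerViewErrors(const std::vector<std::vector<cv::Point3f> >& objectPoints,
                        const std::vector<std::vector<cv::Point2f> >& imagePoints1,
                        const std::vector<std::vector<cv::Point2f> >& imagePoints2,
                        const cv::Mat& K1, const cv::Mat& D1,
                        const cv::Mat& K2, const cv::Mat& D2,
                        const cv::Mat& R, const cv::Mat& T,
                        const std::vector<cv::Mat>& rvecs,
                        const std::vector<cv::Mat>& tvecs,
                        const cv::Mat& perViewErrors)   // nviews x 2, CV_64F
{
    for (int i = 0; i < (int)objectPoints.size(); i++)
    {
        cv::Mat R1, rvec2;
        cv::Rodrigues(rvecs[i], R1);
        cv::Mat R2 = R * R1;                    // pattern -> second camera rotation
        cv::Rodrigues(R2, rvec2);
        cv::Mat tvec2 = R * tvecs[i] + T;       // pattern -> second camera translation

        std::vector<cv::Point2f> proj1, proj2;
        cv::projectPoints(objectPoints[i], rvecs[i], tvecs[i], K1, D1, proj1);
        cv::projectPoints(objectPoints[i], rvec2, tvec2, K2, D2, proj2);

        double n = (double)objectPoints[i].size();
        double e1 = std::sqrt(cv::norm(imagePoints1[i], proj1, cv::NORM_L2SQR) / n);
        double e2 = std::sqrt(cv::norm(imagePoints2[i], proj2, cv::NORM_L2SQR) / n);

        CV_Assert(std::abs(e1 - perViewErrors.at<double>(i, 0)) < 1e-4);
        CV_Assert(std::abs(e2 - perViewErrors.at<double>(i, 1)) < 1e-4);
    }
}
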

@@ -861,6 +861,111 @@ TEST_F(fisheyeTest, CalibrationWithDifferentPointsNumber)
cv::noArray(), cv::noArray(), flag, cv::TermCriteria(3, 20, 1e-6));
}
TEST_F(fisheyeTest, stereoCalibrateWithPerViewTransformations)
{
const int n_images = 34;
const std::string folder = combine(datasets_repository_path, "calib-3_stereo_from_JY");
std::vector<std::vector<cv::Point2d> > leftPoints(n_images);
std::vector<std::vector<cv::Point2d> > rightPoints(n_images);
std::vector<std::vector<cv::Point3d> > objectPoints(n_images);
cv::FileStorage fs_left(combine(folder, "left.xml"), cv::FileStorage::READ);
CV_Assert(fs_left.isOpened());
for(int i = 0; i < n_images; ++i)
fs_left[cv::format("image_%d", i )] >> leftPoints[i];
fs_left.release();
cv::FileStorage fs_right(combine(folder, "right.xml"), cv::FileStorage::READ);
CV_Assert(fs_right.isOpened());
for(int i = 0; i < n_images; ++i)
fs_right[cv::format("image_%d", i )] >> rightPoints[i];
fs_right.release();
cv::FileStorage fs_object(combine(folder, "object.xml"), cv::FileStorage::READ);
CV_Assert(fs_object.isOpened());
for(int i = 0; i < n_images; ++i)
fs_object[cv::format("image_%d", i )] >> objectPoints[i];
fs_object.release();
cv::Matx33d K1, K2, theR;
cv::Vec3d theT;
cv::Vec4d D1, D2;
std::vector<cv::Mat> rvecs, tvecs;
int flag = 0;
flag |= cv::fisheye::CALIB_RECOMPUTE_EXTRINSIC;
flag |= cv::fisheye::CALIB_CHECK_COND;
flag |= cv::fisheye::CALIB_FIX_SKEW;
double rmsErrorStereoCalib = cv::fisheye::stereoCalibrate(objectPoints, leftPoints, rightPoints,
K1, D1, K2, D2, imageSize, theR, theT, rvecs, tvecs, flag,
cv::TermCriteria(3, 12, 0));
std::vector<cv::Point2d> reprojectedImgPts[2] = {std::vector<cv::Point2d>(n_images), std::vector<cv::Point2d>(n_images)};
size_t totalPoints = 0;
float totalMSError[2] = { 0, 0 }, viewMSError[2];
for( size_t i = 0; i < n_images; i++ )
{
cv::Matx33d viewRotMat1, viewRotMat2;
cv::Vec3d viewT1, viewT2;
cv::Mat rVec;
cv::Rodrigues( rvecs[i], rVec );
rVec.convertTo(viewRotMat1, CV_64F);
tvecs[i].convertTo(viewT1, CV_64F);
viewRotMat2 = theR * viewRotMat1;
cv::Vec3d T2t = theR * viewT1;
viewT2 = T2t + theT;
cv::Vec3d viewRotVec1, viewRotVec2;
cv::Rodrigues(viewRotMat1, viewRotVec1);
cv::Rodrigues(viewRotMat2, viewRotVec2);
double alpha1 = K1(0, 1) / K1(0, 0);
double alpha2 = K2(0, 1) / K2(0, 0);
cv::fisheye::projectPoints(objectPoints[i], reprojectedImgPts[0], viewRotVec1, viewT1, K1, D1, alpha1);
cv::fisheye::projectPoints(objectPoints[i], reprojectedImgPts[1], viewRotVec2, viewT2, K2, D2, alpha2);
viewMSError[0] = cv::norm(leftPoints[i], reprojectedImgPts[0], cv::NORM_L2SQR);
viewMSError[1] = cv::norm(rightPoints[i], reprojectedImgPts[1], cv::NORM_L2SQR);
size_t n = objectPoints[i].size();
totalMSError[0] += viewMSError[0];
totalMSError[1] += viewMSError[1];
totalPoints += n;
}
double rmsErrorFromReprojectedImgPts = std::sqrt((totalMSError[0] + totalMSError[1]) / (2 * totalPoints));
cv::Matx33d R_correct( 0.9975587205950972, 0.06953016383322372, 0.006492709911733523,
-0.06956823121068059, 0.9975601387249519, 0.005833595226966235,
-0.006071257768382089, -0.006271040135405457, 0.9999619062167968);
cv::Vec3d T_correct(-0.099402724724121, 0.00270812139265413, 0.00129330292472699);
cv::Matx33d K1_correct (561.195925927249, 0, 621.282400272412,
0, 562.849402029712, 380.555455380889,
0, 0, 1);
cv::Matx33d K2_correct (560.395452535348, 0, 678.971652040359,
0, 561.90171021422, 380.401340535339,
0, 0, 1);
cv::Vec4d D1_correct (-7.44253716539556e-05, -0.00702662033932424, 0.00737569823650885, -0.00342230256441771);
cv::Vec4d D2_correct (-0.0130785435677431, 0.0284434505383497, -0.0360333869900506, 0.0144724062347222);
EXPECT_MAT_NEAR(theR, R_correct, 1e-10);
EXPECT_MAT_NEAR(theT, T_correct, 1e-10);
EXPECT_MAT_NEAR(K1, K1_correct, 1e-10);
EXPECT_MAT_NEAR(K2, K2_correct, 1e-10);
EXPECT_MAT_NEAR(D1, D1_correct, 1e-10);
EXPECT_MAT_NEAR(D2, D2_correct, 1e-10);
EXPECT_NEAR(rmsErrorStereoCalib, rmsErrorFromReprojectedImgPts, 1e-4);
}
TEST_F(fisheyeTest, estimateNewCameraMatrixForUndistortRectify)
{
cv::Size size(1920, 1080);
