Merge pull request #18203 from catree:feat_robot_world_hand_eye_calib

Alexander Alekhin 5 years ago
commit 192e5c8166
Changed files:
1. doc/opencv.bib (18 lines changed)
2. modules/calib3d/doc/pics/robot-world_hand-eye_figure.png (BIN)
3. modules/calib3d/include/opencv2/calib3d.hpp (172 lines changed)
4. modules/calib3d/src/calibration_handeye.cpp (246 lines changed)
5. modules/calib3d/test/test_calibration_hand_eye.cpp (810 lines changed)

@@ -217,7 +217,7 @@
hal_id = {inria-00350283},
hal_version = {v1},
}
@article{Collins14,
year = {2014},
issn = {0920-5691},
journal = {International Journal of Computer Vision},
@@ -612,6 +612,14 @@
number = {8},
publisher = {IOP Publishing Ltd}
}
@article{Li2010SimultaneousRA,
title = {Simultaneous robot-world and hand-eye calibration using dual-quaternions and Kronecker product},
author = {Aiguo Li and Lin Wang and Defeng Wu},
journal = {International Journal of Physical Sciences},
year = {2010},
volume = {5},
pages = {1530-1536}
}
@article{LibSVM,
author = {Chang, Chih-Chung and Lin, Chih-Jen},
title = {LIBSVM: a library for support vector machines},
@@ -922,6 +930,14 @@
number = {2},
publisher = {Springer}
}
@article{Shah2013SolvingTR,
title = {Solving the Robot-World/Hand-Eye Calibration Problem Using the Kronecker Product},
author = {Mili Shah},
journal = {Journal of Mechanisms and Robotics},
year = {2013},
volume = {5},
pages = {031007}
}
@inproceedings{Shi94,
author = {Shi, Jianbo and Tomasi, Carlo},
title = {Good features to track},

Binary file modules/calib3d/doc/pics/robot-world_hand-eye_figure.png not shown (new image, 23 KiB).

@@ -535,6 +535,12 @@ enum HandEyeCalibrationMethod
    CALIB_HAND_EYE_DANIILIDIS  = 4  //!< Hand-Eye Calibration Using Dual Quaternions @cite Daniilidis98
};
enum RobotWorldHandEyeCalibrationMethod
{
    CALIB_ROBOT_WORLD_HAND_EYE_SHAH = 0, //!< Solving the robot-world/hand-eye calibration problem using the Kronecker product @cite Shah2013SolvingTR
    CALIB_ROBOT_WORLD_HAND_EYE_LI   = 1  //!< Simultaneous robot-world and hand-eye calibration using dual-quaternions and Kronecker product @cite Li2010SimultaneousRA
};
enum SamplingMethod { SAMPLING_UNIFORM, SAMPLING_PROGRESSIVE_NAPSAC, SAMPLING_NAPSAC,
                      SAMPLING_PROSAC };
enum LocalOptimMethod {LOCAL_OPTIM_NULL, LOCAL_OPTIM_INNER_LO, LOCAL_OPTIM_INNER_AND_ITER_LO,
@@ -2288,12 +2294,16 @@ rotation then the translation (separable solutions) and the following methods ar
  - R. Horaud, F. Dornaika Hand-Eye Calibration \cite Horaud95

Another approach consists in estimating simultaneously the rotation and the translation (simultaneous solutions),
with the following implemented methods:
  - N. Andreff, R. Horaud, B. Espiau On-line Hand-Eye Calibration \cite Andreff99
  - K. Daniilidis Hand-Eye Calibration Using Dual Quaternions \cite Daniilidis98

The following picture describes the Hand-Eye calibration problem where the transformation between a camera ("eye")
mounted on a robot gripper ("hand") has to be estimated. This configuration is called eye-in-hand.

The eye-to-hand configuration consists of a static camera observing a calibration pattern mounted on the robot
end-effector. The transformation from the camera to the robot base frame can then be estimated by inputting
the suitable transformations to the function, see below.

![](pics/hand-eye_figure.png)
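A minimal eye-to-hand sketch (assuming `R_gripper2base`/`t_gripper2base` come from the robot kinematics and `R_target2cam`/`t_target2cam` from a pose estimation method; the variable names are illustrative, only cv::calibrateHandEye is the actual API):

    // Invert the gripper->base poses so that calibrateHandEye() solves for the
    // camera pose in the robot base frame (eye-to-hand) instead of the gripper frame.
    std::vector<cv::Mat> R_base2gripper, t_base2gripper;
    for (size_t i = 0; i < R_gripper2base.size(); i++)
    {
        cv::Mat R = R_gripper2base[i].t();  // inverse rotation: R^-1 = R^T
        cv::Mat t = -R * t_gripper2base[i]; // inverse translation: -R^T * t
        R_base2gripper.push_back(R);
        t_base2gripper.push_back(t);
    }
    cv::Mat R_cam2base, t_cam2base;
    cv::calibrateHandEye(R_base2gripper, t_base2gripper, R_target2cam, t_target2cam,
                         R_cam2base, t_cam2base); // default method: CALIB_HAND_EYE_TSAI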
@@ -2366,6 +2376,7 @@ The Hand-Eye calibration procedure returns the following homogeneous transformat
\f]

This problem is also known as solving the \f$\mathbf{A}\mathbf{X}=\mathbf{X}\mathbf{B}\f$ equation:
  - for an eye-in-hand configuration
\f[
  \begin{align*}
  ^{b}{\textrm{T}_g}^{(1)} \hspace{0.2em} ^{g}\textrm{T}_c \hspace{0.2em} ^{c}{\textrm{T}_t}^{(1)} &=
@@ -2378,6 +2389,19 @@ This problem is also known as solving the \f$\mathbf{A}\mathbf{X}=\mathbf{X}\mat
  \end{align*}
\f]
- for an eye-to-hand configuration
\f[
\begin{align*}
^{g}{\textrm{T}_b}^{(1)} \hspace{0.2em} ^{b}\textrm{T}_c \hspace{0.2em} ^{c}{\textrm{T}_t}^{(1)} &=
\hspace{0.1em} ^{g}{\textrm{T}_b}^{(2)} \hspace{0.2em} ^{b}\textrm{T}_c \hspace{0.2em} ^{c}{\textrm{T}_t}^{(2)} \\
(^{g}{\textrm{T}_b}^{(2)})^{-1} \hspace{0.2em} ^{g}{\textrm{T}_b}^{(1)} \hspace{0.2em} ^{b}\textrm{T}_c &=
\hspace{0.1em} ^{b}\textrm{T}_c \hspace{0.2em} ^{c}{\textrm{T}_t}^{(2)} (^{c}{\textrm{T}_t}^{(1)})^{-1} \\
\textrm{A}_i \textrm{X} &= \textrm{X} \textrm{B}_i \\
\end{align*}
\f]
\note
Additional information can be found on this [website](http://campar.in.tum.de/Chair/HandEyeCalibration).
\note
@@ -2390,6 +2414,150 @@ CV_EXPORTS_W void calibrateHandEye( InputArrayOfArrays R_gripper2base, InputArra
                                    OutputArray R_cam2gripper, OutputArray t_cam2gripper,
                                    HandEyeCalibrationMethod method=CALIB_HAND_EYE_TSAI );
/** @brief Computes Robot-World/Hand-Eye calibration: \f$_{}^{w}\textrm{T}_b\f$ and \f$_{}^{c}\textrm{T}_g\f$
@param[in] R_world2cam Rotation part extracted from the homogeneous matrix that transforms a point
expressed in the world frame to the camera frame (\f$_{}^{c}\textrm{T}_w\f$).
This is a vector (`vector<Mat>`) that contains the rotation, `(3x3)` rotation matrices or `(3x1)` rotation vectors,
for all the transformations from world frame to the camera frame.
@param[in] t_world2cam Translation part extracted from the homogeneous matrix that transforms a point
expressed in the world frame to the camera frame (\f$_{}^{c}\textrm{T}_w\f$).
This is a vector (`vector<Mat>`) that contains the `(3x1)` translation vectors for all the transformations
from world frame to the camera frame.
@param[in] R_base2gripper Rotation part extracted from the homogeneous matrix that transforms a point
expressed in the robot base frame to the gripper frame (\f$_{}^{g}\textrm{T}_b\f$).
This is a vector (`vector<Mat>`) that contains the rotation, `(3x3)` rotation matrices or `(3x1)` rotation vectors,
for all the transformations from robot base frame to the gripper frame.
@param[in] t_base2gripper Translation part extracted from the homogeneous matrix that transforms a point
expressed in the robot base frame to the gripper frame (\f$_{}^{g}\textrm{T}_b\f$).
This is a vector (`vector<Mat>`) that contains the `(3x1)` translation vectors for all the transformations
from robot base frame to the gripper frame.
@param[out] R_base2world Estimated `(3x3)` rotation part extracted from the homogeneous matrix that transforms a point
expressed in the robot base frame to the world frame (\f$_{}^{w}\textrm{T}_b\f$).
@param[out] t_base2world Estimated `(3x1)` translation part extracted from the homogeneous matrix that transforms a point
expressed in the robot base frame to the world frame (\f$_{}^{w}\textrm{T}_b\f$).
@param[out] R_gripper2cam Estimated `(3x3)` rotation part extracted from the homogeneous matrix that transforms a point
expressed in the gripper frame to the camera frame (\f$_{}^{c}\textrm{T}_g\f$).
@param[out] t_gripper2cam Estimated `(3x1)` translation part extracted from the homogeneous matrix that transforms a point
expressed in the gripper frame to the camera frame (\f$_{}^{c}\textrm{T}_g\f$).
@param[in] method One of the implemented Robot-World/Hand-Eye calibration methods, see cv::RobotWorldHandEyeCalibrationMethod
The function performs the Robot-World/Hand-Eye calibration using various methods. One approach consists in estimating the
rotation then the translation (separable solutions):
- M. Shah, Solving the robot-world/hand-eye calibration problem using the Kronecker product \cite Shah2013SolvingTR
Another approach consists in estimating simultaneously the rotation and the translation (simultaneous solutions),
with the following implemented method:
- A. Li, L. Wang, and D. Wu, Simultaneous robot-world and hand-eye calibration using dual-quaternions and Kronecker product \cite Li2010SimultaneousRA
The following picture describes the Robot-World/Hand-Eye calibration problem where the transformations between a robot and a world frame
and between a robot gripper ("hand") and a camera ("eye") mounted at the robot end-effector have to be estimated.
![](pics/robot-world_hand-eye_figure.png)
The calibration procedure is the following:
- a static calibration pattern is used to estimate the transformation between the target frame
and the camera frame
- the robot gripper is moved in order to acquire several poses
- for each pose, the homogeneous transformation between the gripper frame and the robot base frame is recorded using,
for instance, the robot kinematics
\f[
\begin{bmatrix}
X_g\\
Y_g\\
Z_g\\
1
\end{bmatrix}
=
\begin{bmatrix}
_{}^{g}\textrm{R}_b & _{}^{g}\textrm{t}_b \\
0_{1 \times 3} & 1
\end{bmatrix}
\begin{bmatrix}
X_b\\
Y_b\\
Z_b\\
1
\end{bmatrix}
\f]
- for each pose, the homogeneous transformation between the calibration target frame (the world frame) and the camera frame is recorded using,
for instance, a pose estimation method (PnP) from 2D-3D point correspondences (see the sketch below)
\f[
\begin{bmatrix}
X_c\\
Y_c\\
Z_c\\
1
\end{bmatrix}
=
\begin{bmatrix}
_{}^{c}\textrm{R}_w & _{}^{c}\textrm{t}_w \\
0_{1 \times 3} & 1
\end{bmatrix}
\begin{bmatrix}
X_w\\
Y_w\\
Z_w\\
1
\end{bmatrix}
\f]
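For instance, each world-to-camera pose can be obtained with a PnP solver; a minimal sketch, assuming `objectPoints` (3D corners of the calibration pattern expressed in the world frame), `imagePoints` (the detected 2D corners), and the camera intrinsics `cameraMatrix`/`distCoeffs` are available:

    // One calibration image: estimate the pose of the world (target) frame in the camera frame.
    cv::Mat rvec, tvec;
    cv::solvePnP(objectPoints, imagePoints, cameraMatrix, distCoeffs, rvec, tvec);
    // rvec/tvec are one sample of R_world2cam / t_world2cam; the rotation may be
    // passed to the calibration either as a (3x1) rvec or as a (3x3) matrix.
    R_world2cam.push_back(rvec);
    t_world2cam.push_back(tvec);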
The Robot-World/Hand-Eye calibration procedure returns the following homogeneous transformations
\f[
\begin{bmatrix}
X_w\\
Y_w\\
Z_w\\
1
\end{bmatrix}
=
\begin{bmatrix}
_{}^{w}\textrm{R}_b & _{}^{w}\textrm{t}_b \\
0_{1 \times 3} & 1
\end{bmatrix}
\begin{bmatrix}
X_b\\
Y_b\\
Z_b\\
1
\end{bmatrix}
\f]
\f[
\begin{bmatrix}
X_c\\
Y_c\\
Z_c\\
1
\end{bmatrix}
=
\begin{bmatrix}
_{}^{c}\textrm{R}_g & _{}^{c}\textrm{t}_g \\
0_{1 \times 3} & 1
\end{bmatrix}
\begin{bmatrix}
X_g\\
Y_g\\
Z_g\\
1
\end{bmatrix}
\f]
This problem is also known as solving the \f$\mathbf{A}\mathbf{X}=\mathbf{Z}\mathbf{B}\f$ equation, with:
- \f$\mathbf{A} \Leftrightarrow \hspace{0.1em} _{}^{c}\textrm{T}_w\f$
- \f$\mathbf{X} \Leftrightarrow \hspace{0.1em} _{}^{w}\textrm{T}_b\f$
- \f$\mathbf{Z} \Leftrightarrow \hspace{0.1em} _{}^{c}\textrm{T}_g\f$
- \f$\mathbf{B} \Leftrightarrow \hspace{0.1em} _{}^{g}\textrm{T}_b\f$
\note
At least 3 measurements are required (input vector sizes must be greater than or equal to 3).
*/
CV_EXPORTS_W void calibrateRobotWorldHandEye( InputArrayOfArrays R_world2cam, InputArrayOfArrays t_world2cam,
InputArrayOfArrays R_base2gripper, InputArrayOfArrays t_base2gripper,
OutputArray R_base2world, OutputArray t_base2world,
OutputArray R_gripper2cam, OutputArray t_gripper2cam,
RobotWorldHandEyeCalibrationMethod method=CALIB_ROBOT_WORLD_HAND_EYE_SHAH );
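A minimal usage sketch (the input vectors are assumed to be filled as described above, one entry per robot pose):

    // Inputs: at least 3 poses of the robot, with the matching camera observations.
    std::vector<cv::Mat> R_world2cam, t_world2cam;       // e.g. from solvePnP
    std::vector<cv::Mat> R_base2gripper, t_base2gripper; // e.g. from the robot kinematics

    // Outputs: X = base->world and Z = gripper->camera transformations.
    cv::Mat R_base2world, t_base2world;
    cv::Mat R_gripper2cam, t_gripper2cam;
    cv::calibrateRobotWorldHandEye(R_world2cam, t_world2cam,
                                   R_base2gripper, t_base2gripper,
                                   R_base2world, t_base2world,
                                   R_gripper2cam, t_gripper2cam,
                                   cv::CALIB_ROBOT_WORLD_HAND_EYE_SHAH);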
/** @brief Converts points from Euclidean to homogeneous space.

@param src Input vector of N-dimensional points.

@@ -514,10 +514,32 @@ static void calibrateHandEyeHoraud(const std::vector<Mat>& Hg, const std::vector
    t_cam2gripper = t;
}
static Mat_<double> normalizeRotation(const Mat_<double>& R_)
{
    // Make R unit determinant
    Mat_<double> R = R_.clone();
    double det = determinant(R);
    if (std::fabs(det) < FLT_EPSILON)
    {
        CV_Error(Error::StsNoConv, "Rotation normalization issue: determinant(R) is null");
    }
    R = std::cbrt(std::copysign(1, det) / std::fabs(det)) * R;

    // Make R orthogonal
    Mat w, u, vt;
    SVDecomp(R, w, u, vt);
    R = u*vt;

    // Handle reflection case
    if (determinant(R) < 0)
    {
        Matx33d diag(1.0, 0.0, 0.0,
                     0.0, 1.0, 0.0,
                     0.0, 0.0, -1.0);
        R = u*diag*vt;
    }

    return R;
}
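For reference, the two steps above are standard linear algebra (not specific to this PR): the cube-root scaling makes the determinant unit since \f$\det(cR) = c^3 \det(R)\f$, and the SVD step projects onto the nearest orthogonal matrix in the Frobenius norm:

\f[
c = \sqrt[3]{\frac{\operatorname{sign}(\det R)}{\lvert \det R \rvert}} \;\Rightarrow\; \det(cR) = 1,
\qquad
R^{\ast} = \underset{Q \in O(3)}{\arg\min} \lVert Q - cR \rVert_F = U V^T \quad \text{with } cR = U \Sigma V^T,
\f]

flipping the sign of the last column of \f$U\f$ when \f$\det(UV^T) < 0\f$ so the result is a proper rotation.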
//Reference:
@@ -573,29 +595,8 @@ static void calibrateHandEyeAndreff(const std::vector<Mat>& Hg, const std::vector
    int newSize[] = {3, 3};
    R = R.reshape(1, 2, newSize);
    //Eq 15
    R_cam2gripper = normalizeRotation(R);
    t_cam2gripper = X(Rect(0, 9, 1, 3));
}
//Reference:
@@ -708,7 +709,7 @@ void calibrateHandEye(InputArrayOfArrays R_gripper2base, InputArrayOfArrays t_gr
    CV_Assert(R_gripper2base_.size() == t_gripper2base_.size() &&
              R_target2cam_.size() == t_target2cam_.size() &&
              R_gripper2base_.size() == R_target2cam_.size());
    CV_Check(R_gripper2base_.size(), R_gripper2base_.size() >= 3, "At least 3 measurements are needed");

    //Notation used in Tsai paper
    //Defines coordinate transformation from G (gripper) to RW (robot base)
@@ -779,4 +780,195 @@ void calibrateHandEye(InputArrayOfArrays R_gripper2base, InputArrayOfArrays t_gr
    Rcg.copyTo(R_cam2gripper);
    Tcg.copyTo(t_cam2gripper);
}
//Reference:
//M. Shah, "Solving the robot-world/hand-eye calibration problem using the kronecker product"
//Journal of Mechanisms and Robotics, vol. 5, p. 031007, 2013.
//Matlab code: http://math.loyola.edu/~mili/Calibration/
static void calibrateRobotWorldHandEyeShah(const std::vector<Mat_<double>>& cRw, const std::vector<Mat_<double>>& ctw,
const std::vector<Mat_<double>>& gRb, const std::vector<Mat_<double>>& gtb,
Matx33d& wRb, Matx31d& wtb, Matx33d& cRg, Matx31d& ctg)
{
Mat_<double> T = Mat_<double>::zeros(9, 9);
for (size_t i = 0; i < cRw.size(); i++)
{
T += kron(gRb[i], cRw[i]);
}
Mat_<double> w, u, vt;
SVDecomp(T, w, u, vt);
Mat_<double> RX(3,3), RZ(3,3);
for (int i = 0; i < 3; i++)
{
for (int j = 0; j < 3; j++)
{
RX(j,i) = vt(0, i*3+j);
RZ(j,i) = u(i*3+j, 0);
}
}
wRb = normalizeRotation(RX);
cRg = normalizeRotation(RZ);
Mat_<double> Z = Mat(cRg.t()).reshape(1, 9);
const int n = static_cast<int>(cRw.size());
Mat_<double> A = Mat_<double>::zeros(3*n, 6);
Mat_<double> b = Mat_<double>::zeros(3*n, 1);
Mat_<double> I3 = Mat_<double>::eye(3,3);
for (int i = 0; i < n; i++)
{
Mat cRw_ = -cRw[i];
cRw_.copyTo(A(Range(i*3, (i+1)*3), Range(0,3)));
I3.copyTo(A(Range(i*3, (i+1)*3), Range(3,6)));
Mat ctw_ = ctw[i] - kron(gtb[i].t(), I3) * Z;
ctw_.copyTo(b(Range(i*3, (i+1)*3), Range::all()));
}
Mat_<double> t;
solve(A, b, t, DECOMP_SVD);
for (int i = 0; i < 3; i++)
{
wtb(i) = t(i);
ctg(i) = t(i+3);
}
}
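As a reading aid (standard Kronecker-product algebra with column-major \f$\operatorname{vec}\f$, matching the indexing in the loop above): the rotation part of \f$\mathbf{A}\mathbf{X}=\mathbf{Z}\mathbf{B}\f$ gives, for each pose,

\f[
{}^{c}\textrm{R}_w \, {}^{w}\textrm{R}_b = {}^{c}\textrm{R}_g \, {}^{g}\textrm{R}_b
\;\Rightarrow\;
\operatorname{vec}({}^{c}\textrm{R}_g) = ({}^{g}\textrm{R}_b \otimes {}^{c}\textrm{R}_w) \operatorname{vec}({}^{w}\textrm{R}_b),
\f]

so in the noise-free case \f$T = \sum_i {}^{g}\textrm{R}_b^{(i)} \otimes {}^{c}\textrm{R}_w^{(i)}\f$ maps \f$\operatorname{vec}({}^{w}\textrm{R}_b)\f$ to \f$n \operatorname{vec}({}^{c}\textrm{R}_g)\f$; the dominant right and left singular vectors of \f$T\f$ recover the two rotations up to scale, which normalizeRotation() then projects back onto \f$SO(3)\f$ before the translations are solved linearly.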
//Reference:
//A. Li, L. Wang, and D. Wu, "Simultaneous robot-world and hand-eye calibration using dual-quaternions and kronecker product"
//International Journal of Physical Sciences, vol. 5, pp. 1530–1536, 2010.
//Matlab code: http://math.loyola.edu/~mili/Calibration/
static void calibrateRobotWorldHandEyeLi(const std::vector<Mat_<double>>& cRw, const std::vector<Mat_<double>>& ctw,
const std::vector<Mat_<double>>& gRb, const std::vector<Mat_<double>>& gtb,
Matx33d& wRb, Matx31d& wtb, Matx33d& cRg, Matx31d& ctg)
{
const int n = static_cast<int>(cRw.size());
Mat_<double> A = Mat_<double>::zeros(12*n, 24);
Mat_<double> b = Mat_<double>::zeros(12*n, 1);
Mat_<double> I3 = Mat_<double>::eye(3,3);
for (int i = 0; i < n; i++)
{
//Eq 19
kron(cRw[i], I3).copyTo(A(Range(i*12, i*12 + 9), Range(0, 9)));
kron(-I3, gRb[i].t()).copyTo(A(Range(i*12, i*12 + 9), Range(9, 18)));
kron(I3, gtb[i].t()).copyTo(A(Range(i*12 + 9, (i+1)*12), Range(9, 18)));
Mat cRw_ = -cRw[i];
cRw_.copyTo(A(Range(i*12 + 9, (i+1)*12), Range(18, 21)));
I3.copyTo(A(Range(i*12 + 9, (i+1)*12), Range(21, 24)));
ctw[i].copyTo(b(Range(i*12 + 9, i*12+12), Range::all()));
}
Mat_<double> x;
solve(A, b, x, DECOMP_SVD);
Mat_<double> RX = x(Range(0,9), Range::all()).reshape(3, 3);
wRb = normalizeRotation(RX);
x(Range(18,21), Range::all()).copyTo(wtb);
Mat_<double> RZ = x(Range(9,18), Range::all()).reshape(3, 3);
cRg = normalizeRotation(RZ);
x(Range(21,24), Range::all()).copyTo(ctg);
}
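Similarly, a sketch of the stacked linear system solved above: the unknown is

\f[
x = \begin{bmatrix} \operatorname{vec}({}^{w}\textrm{R}_b) \\ \operatorname{vec}({}^{c}\textrm{R}_g) \\ {}^{w}\textrm{t}_b \\ {}^{c}\textrm{t}_g \end{bmatrix} \in \mathbb{R}^{24},
\f]

with 9 rotation rows and 3 translation rows per pose (Eq 19 of the paper), solved in the least-squares sense with DECOMP_SVD; the two 9-vectors are then re-projected onto rotations by normalizeRotation().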
void calibrateRobotWorldHandEye(InputArrayOfArrays R_world2cam, InputArrayOfArrays t_world2cam,
InputArrayOfArrays R_base2gripper, InputArrayOfArrays t_base2gripper,
OutputArray R_base2world, OutputArray t_base2world,
OutputArray R_gripper2cam, OutputArray t_gripper2cam,
RobotWorldHandEyeCalibrationMethod method)
{
CV_Assert(R_base2gripper.isMatVector() && t_base2gripper.isMatVector() &&
R_world2cam.isMatVector() && t_world2cam.isMatVector());
std::vector<Mat> R_base2gripper_tmp, t_base2gripper_tmp;
R_base2gripper.getMatVector(R_base2gripper_tmp);
t_base2gripper.getMatVector(t_base2gripper_tmp);
std::vector<Mat> R_world2cam_tmp, t_world2cam_tmp;
R_world2cam.getMatVector(R_world2cam_tmp);
t_world2cam.getMatVector(t_world2cam_tmp);
CV_Assert(R_base2gripper_tmp.size() == t_base2gripper_tmp.size() &&
R_world2cam_tmp.size() == t_world2cam_tmp.size() &&
R_base2gripper_tmp.size() == R_world2cam_tmp.size());
CV_Check(R_base2gripper_tmp.size(), R_base2gripper_tmp.size() >= 3, "At least 3 measurements are needed");
// Convert to double
std::vector<Mat_<double>> R_base2gripper_, t_base2gripper_;
std::vector<Mat_<double>> R_world2cam_, t_world2cam_;
R_base2gripper_.reserve(R_base2gripper_tmp.size());
t_base2gripper_.reserve(R_base2gripper_tmp.size());
R_world2cam_.reserve(R_world2cam_tmp.size());
t_world2cam_.reserve(R_base2gripper_tmp.size());
// Convert to rotation matrix if needed
for (size_t i = 0; i < R_base2gripper_tmp.size(); i++)
{
{
Mat rot = R_base2gripper_tmp[i];
Mat R(3, 3, CV_64FC1);
if (rot.size() == Size(3,3))
{
rot.convertTo(R, CV_64F);
R_base2gripper_.push_back(R);
}
else
{
Rodrigues(rot, R);
R_base2gripper_.push_back(R);
}
Mat tvec = t_base2gripper_tmp[i];
Mat t;
tvec.convertTo(t, CV_64F);
t_base2gripper_.push_back(t);
}
{
Mat rot = R_world2cam_tmp[i];
Mat R(3, 3, CV_64FC1);
if (rot.size() == Size(3,3))
{
rot.convertTo(R, CV_64F);
R_world2cam_.push_back(R);
}
else
{
Rodrigues(rot, R);
R_world2cam_.push_back(R);
}
Mat tvec = t_world2cam_tmp[i];
Mat t;
tvec.convertTo(t, CV_64F);
t_world2cam_.push_back(t);
}
}
CV_Assert(R_world2cam_.size() == t_world2cam_.size() &&
R_base2gripper_.size() == t_base2gripper_.size() &&
R_world2cam_.size() == R_base2gripper_.size());
Matx33d wRb, cRg;
Matx31d wtb, ctg;
switch (method)
{
case CALIB_ROBOT_WORLD_HAND_EYE_SHAH:
calibrateRobotWorldHandEyeShah(R_world2cam_, t_world2cam_, R_base2gripper_, t_base2gripper_, wRb, wtb, cRg, ctg);
break;
case CALIB_ROBOT_WORLD_HAND_EYE_LI:
calibrateRobotWorldHandEyeLi(R_world2cam_, t_world2cam_, R_base2gripper_, t_base2gripper_, wRb, wtb, cRg, ctg);
break;
}
Mat(wRb).copyTo(R_base2world);
Mat(wtb).copyTo(t_base2world);
Mat(cRg).copyTo(R_gripper2cam);
Mat(ctg).copyTo(t_gripper2cam);
}
}

@@ -7,6 +7,224 @@
namespace opencv_test { namespace {
static void generatePose(RNG& rng, double min_theta, double max_theta,
double min_tx, double max_tx,
double min_ty, double max_ty,
double min_tz, double max_tz,
Mat& R, Mat& tvec,
bool random_sign)
{
Mat axis(3, 1, CV_64FC1);
for (int i = 0; i < 3; i++)
{
axis.at<double>(i,0) = rng.uniform(-1.0, 1.0);
}
double theta = rng.uniform(min_theta, max_theta);
if (random_sign)
{
theta *= std::copysign(1.0, rng.uniform(-1.0, 1.0));
}
Mat rvec(3, 1, CV_64FC1);
rvec.at<double>(0,0) = theta*axis.at<double>(0,0);
rvec.at<double>(1,0) = theta*axis.at<double>(1,0);
rvec.at<double>(2,0) = theta*axis.at<double>(2,0);
tvec.create(3, 1, CV_64FC1);
tvec.at<double>(0,0) = rng.uniform(min_tx, max_tx);
tvec.at<double>(1,0) = rng.uniform(min_ty, max_ty);
tvec.at<double>(2,0) = rng.uniform(min_tz, max_tz);
if (random_sign)
{
tvec.at<double>(0,0) *= std::copysign(1.0, rng.uniform(-1.0, 1.0));
tvec.at<double>(1,0) *= std::copysign(1.0, rng.uniform(-1.0, 1.0));
tvec.at<double>(2,0) *= std::copysign(1.0, rng.uniform(-1.0, 1.0));
}
cv::Rodrigues(rvec, R);
}
static Mat homogeneousInverse(const Mat& T)
{
CV_Assert( T.rows == 4 && T.cols == 4 );
Mat R = T(Rect(0, 0, 3, 3));
Mat t = T(Rect(3, 0, 1, 3));
Mat Rt = R.t();
Mat tinv = -Rt * t;
Mat Tinv = Mat::eye(4, 4, T.type());
Rt.copyTo(Tinv(Rect(0, 0, 3, 3)));
tinv.copyTo(Tinv(Rect(3, 0, 1, 3)));
return Tinv;
}
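This is the standard closed-form inverse of a rigid transformation:

\f[
T = \begin{bmatrix} R & t \\ 0_{1 \times 3} & 1 \end{bmatrix}
\quad\Rightarrow\quad
T^{-1} = \begin{bmatrix} R^T & -R^T t \\ 0_{1 \times 3} & 1 \end{bmatrix}.
\f]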
static void simulateDataEyeInHand(RNG& rng, int nPoses,
std::vector<Mat> &R_gripper2base, std::vector<Mat> &t_gripper2base,
std::vector<Mat> &R_target2cam, std::vector<Mat> &t_target2cam,
bool noise, Mat& R_cam2gripper, Mat& t_cam2gripper)
{
//to avoid generating values close to zero,
//we use positive range values and randomize the sign
const bool random_sign = true;
generatePose(rng, 10.0*CV_PI/180.0, 50.0*CV_PI/180.0,
0.05, 0.5, 0.05, 0.5, 0.05, 0.5,
R_cam2gripper, t_cam2gripper, random_sign);
Mat R_target2base, t_target2base;
generatePose(rng, 5.0*CV_PI/180.0, 85.0*CV_PI/180.0,
0.5, 3.5, 0.5, 3.5, 0.5, 3.5,
R_target2base, t_target2base, random_sign);
for (int i = 0; i < nPoses; i++)
{
Mat R_gripper2base_, t_gripper2base_;
generatePose(rng, 5.0*CV_PI/180.0, 45.0*CV_PI/180.0,
0.5, 1.5, 0.5, 1.5, 0.5, 1.5,
R_gripper2base_, t_gripper2base_, random_sign);
R_gripper2base.push_back(R_gripper2base_);
t_gripper2base.push_back(t_gripper2base_);
Mat T_cam2gripper = Mat::eye(4, 4, CV_64FC1);
R_cam2gripper.copyTo(T_cam2gripper(Rect(0, 0, 3, 3)));
t_cam2gripper.copyTo(T_cam2gripper(Rect(3, 0, 1, 3)));
Mat T_gripper2base = Mat::eye(4, 4, CV_64FC1);
R_gripper2base_.copyTo(T_gripper2base(Rect(0, 0, 3, 3)));
t_gripper2base_.copyTo(T_gripper2base(Rect(3, 0, 1, 3)));
Mat T_base2cam = homogeneousInverse(T_cam2gripper) * homogeneousInverse(T_gripper2base);
Mat T_target2base = Mat::eye(4, 4, CV_64FC1);
R_target2base.copyTo(T_target2base(Rect(0, 0, 3, 3)));
t_target2base.copyTo(T_target2base(Rect(3, 0, 1, 3)));
Mat T_target2cam = T_base2cam * T_target2base;
if (noise)
{
//Add some noise for the transformation between the target and the camera
Mat R_target2cam_noise = T_target2cam(Rect(0, 0, 3, 3));
Mat rvec_target2cam_noise;
cv::Rodrigues(R_target2cam_noise, rvec_target2cam_noise);
rvec_target2cam_noise.at<double>(0,0) += rng.gaussian(0.002);
rvec_target2cam_noise.at<double>(1,0) += rng.gaussian(0.002);
rvec_target2cam_noise.at<double>(2,0) += rng.gaussian(0.002);
cv::Rodrigues(rvec_target2cam_noise, R_target2cam_noise);
Mat t_target2cam_noise = T_target2cam(Rect(3, 0, 1, 3));
t_target2cam_noise.at<double>(0,0) += rng.gaussian(0.005);
t_target2cam_noise.at<double>(1,0) += rng.gaussian(0.005);
t_target2cam_noise.at<double>(2,0) += rng.gaussian(0.005);
//Add some noise for the transformation between the gripper and the robot base
Mat R_gripper2base_noise = T_gripper2base(Rect(0, 0, 3, 3));
Mat rvec_gripper2base_noise;
cv::Rodrigues(R_gripper2base_noise, rvec_gripper2base_noise);
rvec_gripper2base_noise.at<double>(0,0) += rng.gaussian(0.001);
rvec_gripper2base_noise.at<double>(1,0) += rng.gaussian(0.001);
rvec_gripper2base_noise.at<double>(2,0) += rng.gaussian(0.001);
cv::Rodrigues(rvec_gripper2base_noise, R_gripper2base_noise);
Mat t_gripper2base_noise = T_gripper2base(Rect(3, 0, 1, 3));
t_gripper2base_noise.at<double>(0,0) += rng.gaussian(0.001);
t_gripper2base_noise.at<double>(1,0) += rng.gaussian(0.001);
t_gripper2base_noise.at<double>(2,0) += rng.gaussian(0.001);
}
//Test rvec representation
Mat rvec_target2cam;
cv::Rodrigues(T_target2cam(Rect(0, 0, 3, 3)), rvec_target2cam);
R_target2cam.push_back(rvec_target2cam);
t_target2cam.push_back(T_target2cam(Rect(3, 0, 1, 3)));
}
}
static void simulateDataEyeToHand(RNG& rng, int nPoses,
std::vector<Mat> &R_base2gripper, std::vector<Mat> &t_base2gripper,
std::vector<Mat> &R_target2cam, std::vector<Mat> &t_target2cam,
bool noise, Mat& R_cam2base, Mat& t_cam2base)
{
//to avoid generating values close to zero,
//we use positive range values and randomize the sign
const bool random_sign = true;
generatePose(rng, 10.0*CV_PI/180.0, 50.0*CV_PI/180.0,
0.5, 3.5, 0.5, 3.5, 0.5, 3.5,
R_cam2base, t_cam2base, random_sign);
Mat R_target2gripper, t_target2gripper;
generatePose(rng, 5.0*CV_PI/180.0, 85.0*CV_PI/180.0,
0.05, 0.5, 0.05, 0.5, 0.05, 0.5,
R_target2gripper, t_target2gripper, random_sign);
Mat T_target2gripper = Mat::eye(4, 4, CV_64FC1);
R_target2gripper.copyTo(T_target2gripper(Rect(0, 0, 3, 3)));
t_target2gripper.copyTo(T_target2gripper(Rect(3, 0, 1, 3)));
for (int i = 0; i < nPoses; i++)
{
Mat R_gripper2base_, t_gripper2base_;
generatePose(rng, 5.0*CV_PI/180.0, 45.0*CV_PI/180.0,
0.5, 1.5, 0.5, 1.5, 0.5, 1.5,
R_gripper2base_, t_gripper2base_, random_sign);
Mat R_base2gripper_ = R_gripper2base_.t();
Mat t_base2gripper_ = -R_base2gripper_ * t_gripper2base_;
Mat T_gripper2base = Mat::eye(4, 4, CV_64FC1);
R_gripper2base_.copyTo(T_gripper2base(Rect(0, 0, 3, 3)));
t_gripper2base_.copyTo(T_gripper2base(Rect(3, 0, 1, 3)));
Mat T_cam2base = Mat::eye(4, 4, CV_64FC1);
R_cam2base.copyTo(T_cam2base(Rect(0, 0, 3, 3)));
t_cam2base.copyTo(T_cam2base(Rect(3, 0, 1, 3)));
Mat T_target2cam = homogeneousInverse(T_cam2base) * T_gripper2base * T_target2gripper;
if (noise)
{
//Add some noise for the transformation between the target and the camera
Mat R_target2cam_noise = T_target2cam(Rect(0, 0, 3, 3));
Mat rvec_target2cam_noise;
cv::Rodrigues(R_target2cam_noise, rvec_target2cam_noise);
rvec_target2cam_noise.at<double>(0,0) += rng.gaussian(0.002);
rvec_target2cam_noise.at<double>(1,0) += rng.gaussian(0.002);
rvec_target2cam_noise.at<double>(2,0) += rng.gaussian(0.002);
cv::Rodrigues(rvec_target2cam_noise, R_target2cam_noise);
Mat t_target2cam_noise = T_target2cam(Rect(3, 0, 1, 3));
t_target2cam_noise.at<double>(0,0) += rng.gaussian(0.005);
t_target2cam_noise.at<double>(1,0) += rng.gaussian(0.005);
t_target2cam_noise.at<double>(2,0) += rng.gaussian(0.005);
//Add some noise for the transformation between the robot base and the gripper
Mat rvec_base2gripper_noise;
cv::Rodrigues(R_base2gripper_, rvec_base2gripper_noise);
rvec_base2gripper_noise.at<double>(0,0) += rng.gaussian(0.001);
rvec_base2gripper_noise.at<double>(1,0) += rng.gaussian(0.001);
rvec_base2gripper_noise.at<double>(2,0) += rng.gaussian(0.001);
cv::Rodrigues(rvec_base2gripper_noise, R_base2gripper_);
t_base2gripper_.at<double>(0,0) += rng.gaussian(0.001);
t_base2gripper_.at<double>(1,0) += rng.gaussian(0.001);
t_base2gripper_.at<double>(2,0) += rng.gaussian(0.001);
}
R_base2gripper.push_back(R_base2gripper_);
t_base2gripper.push_back(t_base2gripper_);
//Test rvec representation
Mat rvec_target2cam;
cv::Rodrigues(T_target2cam(Rect(0, 0, 3, 3)), rvec_target2cam);
R_target2cam.push_back(rvec_target2cam);
t_target2cam.push_back(T_target2cam(Rect(3, 0, 1, 3)));
}
}
static std::string getMethodName(HandEyeCalibrationMethod method)
{
    std::string method_name = "";
@@ -39,10 +257,158 @@ static std::string getMethodName(HandEyeCalibrationMethod method)
    return method_name;
}
static std::string getMethodName(RobotWorldHandEyeCalibrationMethod method)
{
std::string method_name = "";
switch (method)
{
case CALIB_ROBOT_WORLD_HAND_EYE_SHAH:
method_name = "Shah";
break;
case CALIB_ROBOT_WORLD_HAND_EYE_LI:
method_name = "Li";
break;
default:
break;
}
return method_name;
}
static void printStats(const std::string& methodName, const std::vector<double>& rvec_diff, const std::vector<double>& tvec_diff)
{
double max_rvec_diff = *std::max_element(rvec_diff.begin(), rvec_diff.end());
double mean_rvec_diff = std::accumulate(rvec_diff.begin(),
rvec_diff.end(), 0.0) / rvec_diff.size();
double sq_sum_rvec_diff = std::inner_product(rvec_diff.begin(), rvec_diff.end(),
rvec_diff.begin(), 0.0);
double std_rvec_diff = std::sqrt(sq_sum_rvec_diff / rvec_diff.size() - mean_rvec_diff * mean_rvec_diff);
double max_tvec_diff = *std::max_element(tvec_diff.begin(), tvec_diff.end());
double mean_tvec_diff = std::accumulate(tvec_diff.begin(),
tvec_diff.end(), 0.0) / tvec_diff.size();
double sq_sum_tvec_diff = std::inner_product(tvec_diff.begin(), tvec_diff.end(),
tvec_diff.begin(), 0.0);
double std_tvec_diff = std::sqrt(sq_sum_tvec_diff / tvec_diff.size() - mean_tvec_diff * mean_tvec_diff);
std::cout << "Method " << methodName << ":\n"
<< "Max rvec error: " << max_rvec_diff << ", Mean rvec error: " << mean_rvec_diff
<< ", Std rvec error: " << std_rvec_diff << "\n"
<< "Max tvec error: " << max_tvec_diff << ", Mean tvec error: " << mean_tvec_diff
<< ", Std tvec error: " << std_tvec_diff << std::endl;
}
static void loadDataset(std::vector<Mat>& R_target2cam, std::vector<Mat>& t_target2cam,
std::vector<Mat>& R_base2gripper, std::vector<Mat>& t_base2gripper)
{
const std::string camera_poses_filename = findDataFile("cv/robot_world_hand_eye_calibration/cali.txt");
const std::string end_effector_poses = findDataFile("cv/robot_world_hand_eye_calibration/robot_cali.txt");
// Parse camera poses, the pose of the chessboard in the camera frame
{
std::ifstream file(camera_poses_filename);
ASSERT_TRUE(file.is_open());
int ndata = 0;
file >> ndata;
R_target2cam.reserve(ndata);
t_target2cam.reserve(ndata);
std::string image_name;
Matx33d cameraMatrix;
Matx33d R;
Matx31d t;
Matx16d distCoeffs;
Matx13d distCoeffs2;
while (file >> image_name >>
cameraMatrix(0,0) >> cameraMatrix(0,1) >> cameraMatrix(0,2) >>
cameraMatrix(1,0) >> cameraMatrix(1,1) >> cameraMatrix(1,2) >>
cameraMatrix(2,0) >> cameraMatrix(2,1) >> cameraMatrix(2,2) >>
R(0,0) >> R(0,1) >> R(0,2) >>
R(1,0) >> R(1,1) >> R(1,2) >>
R(2,0) >> R(2,1) >> R(2,2) >>
t(0) >> t(1) >> t(2) >>
distCoeffs(0) >> distCoeffs(1) >> distCoeffs(2) >> distCoeffs(3) >> distCoeffs(4) >>
distCoeffs2(0) >> distCoeffs2(1) >> distCoeffs2(2)) {
R_target2cam.push_back(Mat(R));
t_target2cam.push_back(Mat(t));
}
}
// Parse robot poses, the pose of the robot base in the robot hand frame
{
std::ifstream file(end_effector_poses);
ASSERT_TRUE(file.is_open());
int ndata = 0;
file >> ndata;
R_base2gripper.reserve(ndata);
t_base2gripper.reserve(ndata);
Matx33d R;
Matx31d t;
Matx14d last_row;
while (file >>
R(0,0) >> R(0,1) >> R(0,2) >> t(0) >>
R(1,0) >> R(1,1) >> R(1,2) >> t(1) >>
R(2,0) >> R(2,1) >> R(2,2) >> t(2) >>
last_row(0) >> last_row(1) >> last_row(2) >> last_row(3)) {
R_base2gripper.push_back(Mat(R));
t_base2gripper.push_back(Mat(t));
}
}
}
static void loadResults(Matx33d& wRb, Matx31d& wtb, Matx33d& cRg, Matx31d& ctg)
{
const std::string transformations_filename = findDataFile("cv/robot_world_hand_eye_calibration/rwhe_AA_RPI/transformations.txt");
std::ifstream file(transformations_filename);
ASSERT_TRUE(file.is_open());
std::string str;
//Parse X
file >> str;
Matx44d wTb;
for (int i = 0; i < 4; i++)
{
for (int j = 0; j < 4; j++)
{
file >> wTb(i,j);
}
}
//Parse Z
file >> str;
int cam_num = 0;
//Parse camera number
file >> cam_num;
Matx44d cTg;
for (int i = 0; i < 4; i++)
{
for (int j = 0; j < 4; j++)
{
file >> cTg(i,j);
}
}
for (int i = 0; i < 3; i++)
{
for (int j = 0; j < 3; j++)
{
wRb(i,j) = wTb(i,j);
cRg(i,j) = cTg(i,j);
}
wtb(i) = wTb(i,3);
ctg(i) = cTg(i,3);
}
}
class CV_CalibrateHandEyeTest : public cvtest::BaseTest
{
public:
    CV_CalibrateHandEyeTest(bool eyeToHand) : eyeToHandConfig(eyeToHand) {
        eps_rvec[CALIB_HAND_EYE_TSAI] = 1.0e-8;
        eps_rvec[CALIB_HAND_EYE_PARK] = 1.0e-8;
        eps_rvec[CALIB_HAND_EYE_HORAUD] = 1.0e-8;
@@ -64,24 +430,20 @@ public:
        eps_tvec_noise[CALIB_HAND_EYE_TSAI] = 5.0e-2;
        eps_tvec_noise[CALIB_HAND_EYE_PARK] = 5.0e-2;
        eps_tvec_noise[CALIB_HAND_EYE_HORAUD] = 5.0e-2;
        if (eyeToHandConfig)
        {
            eps_tvec_noise[CALIB_HAND_EYE_ANDREFF] = 7.0e-2;
        }
        else
        {
            eps_tvec_noise[CALIB_HAND_EYE_ANDREFF] = 5.0e-2;
        }
        eps_tvec_noise[CALIB_HAND_EYE_DANIILIDIS] = 5.0e-2;
    }

protected:
    virtual void run(int);

    bool eyeToHandConfig;

    double eps_rvec[5];
    double eps_tvec[5];
    double eps_rvec_noise[5];
@@ -110,28 +472,31 @@ void CV_CalibrateHandEyeTest::run(int)
    for (int i = 0; i < nTests; i++)
    {
        const int nPoses = 10;
        if (eyeToHandConfig)
        {
            {
                //No noise
                std::vector<Mat> R_base2gripper, t_base2gripper;
                std::vector<Mat> R_target2cam, t_target2cam;

                Mat R_cam2base_true, t_cam2base_true;

                const bool noise = false;
                simulateDataEyeToHand(rng, nPoses, R_base2gripper, t_base2gripper, R_target2cam, t_target2cam, noise,
                                      R_cam2base_true, t_cam2base_true);

                for (size_t idx = 0; idx < methods.size(); idx++)
                {
                    Mat rvec_cam2base_true;
                    cv::Rodrigues(R_cam2base_true, rvec_cam2base_true);

                    Mat R_cam2base_est, t_cam2base_est;
                    calibrateHandEye(R_base2gripper, t_base2gripper, R_target2cam, t_target2cam, R_cam2base_est, t_cam2base_est, methods[idx]);

                    Mat rvec_cam2base_est;
                    cv::Rodrigues(R_cam2base_est, rvec_cam2base_est);

                    double rvecDiff = cvtest::norm(rvec_cam2base_true, rvec_cam2base_est, NORM_L2);
                    double tvecDiff = cvtest::norm(t_cam2base_true, t_cam2base_est, NORM_L2);

                    vec_rvec_diff[idx].push_back(rvecDiff);
                    vec_tvec_diff[idx].push_back(tvecDiff);
@@ -150,27 +515,28 @@
            }

            {
                //Gaussian noise on transformations between calibration target frame and camera frame and between robot base and gripper frames
                std::vector<Mat> R_base2gripper, t_base2gripper;
                std::vector<Mat> R_target2cam, t_target2cam;

                Mat R_cam2base_true, t_cam2base_true;

                const bool noise = true;
                simulateDataEyeToHand(rng, nPoses, R_base2gripper, t_base2gripper, R_target2cam, t_target2cam, noise,
                                      R_cam2base_true, t_cam2base_true);

                for (size_t idx = 0; idx < methods.size(); idx++)
                {
                    Mat rvec_cam2base_true;
                    cv::Rodrigues(R_cam2base_true, rvec_cam2base_true);

                    Mat R_cam2base_est, t_cam2base_est;
                    calibrateHandEye(R_base2gripper, t_base2gripper, R_target2cam, t_target2cam, R_cam2base_est, t_cam2base_est, methods[idx]);

                    Mat rvec_cam2base_est;
                    cv::Rodrigues(R_cam2base_est, rvec_cam2base_est);

                    double rvecDiff = cvtest::norm(rvec_cam2base_true, rvec_cam2base_est, NORM_L2);
                    double tvecDiff = cvtest::norm(t_cam2base_true, t_cam2base_est, NORM_L2);

                    vec_rvec_diff_noise[idx].push_back(rvecDiff);
                    vec_tvec_diff_noise[idx].push_back(tvecDiff);
@@ -188,277 +554,205 @@
                }
            }
        }
        else
        {
            {
                //No noise
                std::vector<Mat> R_gripper2base, t_gripper2base;
                std::vector<Mat> R_target2cam, t_target2cam;

                Mat R_cam2gripper_true, t_cam2gripper_true;

                const bool noise = false;
                simulateDataEyeInHand(rng, nPoses, R_gripper2base, t_gripper2base, R_target2cam, t_target2cam, noise,
                                      R_cam2gripper_true, t_cam2gripper_true);

                for (size_t idx = 0; idx < methods.size(); idx++)
                {
                    Mat rvec_cam2gripper_true;
                    cv::Rodrigues(R_cam2gripper_true, rvec_cam2gripper_true);

                    Mat R_cam2gripper_est, t_cam2gripper_est;
                    calibrateHandEye(R_gripper2base, t_gripper2base, R_target2cam, t_target2cam, R_cam2gripper_est, t_cam2gripper_est, methods[idx]);

                    Mat rvec_cam2gripper_est;
                    cv::Rodrigues(R_cam2gripper_est, rvec_cam2gripper_est);

                    double rvecDiff = cvtest::norm(rvec_cam2gripper_true, rvec_cam2gripper_est, NORM_L2);
                    double tvecDiff = cvtest::norm(t_cam2gripper_true, t_cam2gripper_est, NORM_L2);

                    vec_rvec_diff[idx].push_back(rvecDiff);
                    vec_tvec_diff[idx].push_back(tvecDiff);

                    const double epsilon_rvec = eps_rvec[idx];
                    const double epsilon_tvec = eps_tvec[idx];

                    //Maybe a better accuracy test would be to compare the mean and std errors with some thresholds?
                    if (rvecDiff > epsilon_rvec || tvecDiff > epsilon_tvec)
                    {
                        ts->printf(cvtest::TS::LOG, "Invalid accuracy (no noise) for method: %s, rvecDiff: %f, epsilon_rvec: %f, tvecDiff: %f, epsilon_tvec: %f\n",
                                   getMethodName(methods[idx]).c_str(), rvecDiff, epsilon_rvec, tvecDiff, epsilon_tvec);
                        ts->set_failed_test_info(cvtest::TS::FAIL_BAD_ACCURACY);
                    }
                }
            }

            {
                //Gaussian noise on transformations between calibration target frame and camera frame and between gripper and robot base frames
                std::vector<Mat> R_gripper2base, t_gripper2base;
                std::vector<Mat> R_target2cam, t_target2cam;

                Mat R_cam2gripper_true, t_cam2gripper_true;

                const bool noise = true;
                simulateDataEyeInHand(rng, nPoses, R_gripper2base, t_gripper2base, R_target2cam, t_target2cam, noise,
                                      R_cam2gripper_true, t_cam2gripper_true);

                for (size_t idx = 0; idx < methods.size(); idx++)
                {
                    Mat rvec_cam2gripper_true;
                    cv::Rodrigues(R_cam2gripper_true, rvec_cam2gripper_true);

                    Mat R_cam2gripper_est, t_cam2gripper_est;
                    calibrateHandEye(R_gripper2base, t_gripper2base, R_target2cam, t_target2cam, R_cam2gripper_est, t_cam2gripper_est, methods[idx]);

                    Mat rvec_cam2gripper_est;
                    cv::Rodrigues(R_cam2gripper_est, rvec_cam2gripper_est);

                    double rvecDiff = cvtest::norm(rvec_cam2gripper_true, rvec_cam2gripper_est, NORM_L2);
                    double tvecDiff = cvtest::norm(t_cam2gripper_true, t_cam2gripper_est, NORM_L2);

                    vec_rvec_diff_noise[idx].push_back(rvecDiff);
                    vec_tvec_diff_noise[idx].push_back(tvecDiff);

                    const double epsilon_rvec = eps_rvec_noise[idx];
                    const double epsilon_tvec = eps_tvec_noise[idx];

                    //Maybe a better accuracy test would be to compare the mean and std errors with some thresholds?
                    if (rvecDiff > epsilon_rvec || tvecDiff > epsilon_tvec)
                    {
                        ts->printf(cvtest::TS::LOG, "Invalid accuracy (noise) for method: %s, rvecDiff: %f, epsilon_rvec: %f, tvecDiff: %f, epsilon_tvec: %f\n",
                                   getMethodName(methods[idx]).c_str(), rvecDiff, epsilon_rvec, tvecDiff, epsilon_tvec);
                        ts->set_failed_test_info(cvtest::TS::FAIL_BAD_ACCURACY);
                    }
                }
            }
        }
    }

    for (size_t idx = 0; idx < methods.size(); idx++)
    {
        std::cout << std::endl;
        printStats(getMethodName(methods[idx]), vec_rvec_diff[idx], vec_tvec_diff[idx]);
        printStats("(noise) " + getMethodName(methods[idx]), vec_rvec_diff_noise[idx], vec_tvec_diff_noise[idx]);
    }
}

///////////////////////////////////////////////////////////////////////////////////////////////////

TEST(Calib3d_CalibrateHandEye, regression_eye_in_hand)
{
    //Eye-in-Hand configuration (camera mounted on the robot end-effector observing a static calibration pattern)
    const bool eyeToHand = false;
    CV_CalibrateHandEyeTest test(eyeToHand);
    test.safe_run();
}

TEST(Calib3d_CalibrateHandEye, regression_eye_to_hand)
{
    //Eye-to-Hand configuration (static camera observing a calibration pattern mounted on the robot end-effector)
    const bool eyeToHand = true;
    CV_CalibrateHandEyeTest test(eyeToHand);
    test.safe_run();
}

TEST(Calib3d_CalibrateHandEye, regression_17986)
{
    std::vector<Mat> R_target2cam, t_target2cam;
    // Dataset contains transformation from base to gripper frame since it contains data for the AX = ZB calibration problem
    std::vector<Mat> R_base2gripper, t_base2gripper;
    loadDataset(R_target2cam, t_target2cam, R_base2gripper, t_base2gripper);

    std::vector<HandEyeCalibrationMethod> methods = {CALIB_HAND_EYE_TSAI,
                                                     CALIB_HAND_EYE_PARK,
                                                     CALIB_HAND_EYE_HORAUD,
                                                     CALIB_HAND_EYE_ANDREFF,
                                                     CALIB_HAND_EYE_DANIILIDIS};

    for (auto method : methods) {
        SCOPED_TRACE(cv::format("method=%s", getMethodName(method).c_str()));

        Matx33d R_cam2base_est;
        Matx31d t_cam2base_est;
        calibrateHandEye(R_base2gripper, t_base2gripper, R_target2cam, t_target2cam, R_cam2base_est, t_cam2base_est, method);

        EXPECT_TRUE(checkRange(R_cam2base_est));
        EXPECT_TRUE(checkRange(t_cam2base_est));
    }
}

TEST(Calib3d_CalibrateRobotWorldHandEye, regression)
{
    std::vector<Mat> R_world2cam, t_world2cam;
    std::vector<Mat> R_base2gripper, t_base2gripper;
    loadDataset(R_world2cam, t_world2cam, R_base2gripper, t_base2gripper);

    std::vector<Mat> rvec_R_world2cam;
    rvec_R_world2cam.reserve(R_world2cam.size());
    for (size_t i = 0; i < R_world2cam.size(); i++)
    {
        Mat rvec;
        cv::Rodrigues(R_world2cam[i], rvec);
        rvec_R_world2cam.push_back(rvec);
    }

    std::vector<RobotWorldHandEyeCalibrationMethod> methods = {CALIB_ROBOT_WORLD_HAND_EYE_SHAH,
                                                               CALIB_ROBOT_WORLD_HAND_EYE_LI};

    Matx33d wRb, cRg;
    Matx31d wtb, ctg;
    loadResults(wRb, wtb, cRg, ctg);

    for (auto method : methods) {
        SCOPED_TRACE(cv::format("method=%s", getMethodName(method).c_str()));

        Matx33d wRb_est, cRg_est;
        Matx31d wtb_est, ctg_est;
        calibrateRobotWorldHandEye(rvec_R_world2cam, t_world2cam, R_base2gripper, t_base2gripper,
                                   wRb_est, wtb_est, cRg_est, ctg_est, method);

        EXPECT_TRUE(checkRange(wRb_est));
        EXPECT_TRUE(checkRange(wtb_est));
        EXPECT_TRUE(checkRange(cRg_est));
        EXPECT_TRUE(checkRange(ctg_est));

        //Arbitrary thresholds
        const double rotation_threshold = 1.0; //1deg
        const double translation_threshold = 50.0; //5cm

        //X
        //rotation error
        Matx33d wRw_est = wRb * wRb_est.t();
        Matx31d rvec_wRw_est;
        cv::Rodrigues(wRw_est, rvec_wRw_est);
        double X_rotation_error = cv::norm(rvec_wRw_est)*180/CV_PI;
        //translation error
        double X_t_error = cv::norm(wtb_est - wtb);
        SCOPED_TRACE(cv::format("X rotation error=%f", X_rotation_error));
        SCOPED_TRACE(cv::format("X translation error=%f", X_t_error));
        EXPECT_TRUE(X_rotation_error < rotation_threshold);
        EXPECT_TRUE(X_t_error < translation_threshold);

        //Z
        //rotation error
        Matx33d cRc_est = cRg * cRg_est.t();
        Matx31d rvec_cMc_est;
        cv::Rodrigues(cRc_est, rvec_cMc_est);
        double Z_rotation_error = cv::norm(rvec_cMc_est)*180/CV_PI;
        //translation error
        double Z_t_error = cv::norm(ctg_est - ctg);
        SCOPED_TRACE(cv::format("Z rotation error=%f", Z_rotation_error));
        SCOPED_TRACE(cv::format("Z translation error=%f", Z_t_error));
        EXPECT_TRUE(Z_rotation_error < rotation_threshold);
        EXPECT_TRUE(Z_t_error < translation_threshold);
    }
}
