Merge pull request #3256 from AleksandrPanov:fix_aruco_axes_docs

fix axes and docs

* fix axes docs, tutorial and add estimateParameters, change estimateParameters in test

* update docs, add singlemarkersaxes2.jpg

* fix docs
Alexander Panov 2 years ago committed by GitHub
parent 63cab1b0ee
commit 0eda296f40
  1. modules/aruco/include/opencv2/aruco.hpp (68 lines changed)
  2. modules/aruco/src/aruco.cpp (43 lines changed)
  3. modules/aruco/test/test_charucodetection.cpp (6 lines changed)
  4. modules/aruco/tutorials/aruco_board_detection/images/singlemarkersaxes2.jpg (BIN)
  5. modules/aruco/tutorials/aruco_detection/aruco_detection.markdown (5 lines changed)

@@ -40,6 +40,7 @@ the use of this software, even if advised of the possibility of such damage.
#define __OPENCV_ARUCO_HPP__
#include <opencv2/core.hpp>
#include <opencv2/calib3d.hpp>
#include <vector>
#include "opencv2/aruco/dictionary.hpp"
@@ -219,7 +220,55 @@ struct CV_EXPORTS_W DetectorParameters {
CV_EXPORTS_W void detectMarkers(InputArray image, const Ptr<Dictionary> &dictionary, OutputArrayOfArrays corners,
OutputArray ids, const Ptr<DetectorParameters> &parameters = DetectorParameters::create(),
OutputArrayOfArrays rejectedImgPoints = noArray());
/** @brief
* rvec/tvec define the right-handed coordinate system of the marker.
* PatternPos defines the center of this system and the direction of its axes.
* Axis X (red color) - first coordinate, axis Y (green color) - second coordinate,
* axis Z (blue color) - third coordinate.
* @sa estimatePoseSingleMarkers(), @ref tutorial_aruco_detection
*/
enum PatternPos {
/** @brief The marker coordinate system is centered on the middle of the marker.
* The coordinates of the four corners (CCW order) of the marker in its own coordinate system are:
* (-markerLength/2, markerLength/2, 0), (markerLength/2, markerLength/2, 0),
* (markerLength/2, -markerLength/2, 0), (-markerLength/2, -markerLength/2, 0).
*
* These pattern points define this coordinate system:
* ![Image with axes drawn](images/singlemarkersaxes.jpg)
*/
CCW_center,
/** @brief The marker coordinate system is centered on the top-left corner of the marker.
* The coordinates of the four corners (CW order) of the marker in its own coordinate system are:
* (0, 0, 0), (markerLength, 0, 0),
* (markerLength, markerLength, 0), (0, markerLength, 0).
*
* These pattern points define this coordinate system:
* ![Image with axes drawn](images/singlemarkersaxes2.jpg)
*/
CW_top_left_corner
};
/** @brief
* Pose estimation parameters
* @param pattern Defines the center of the coordinate system and the direction of its axes (default PatternPos::CCW_center).
* @param useExtrinsicGuess Parameter used for SOLVEPNP_ITERATIVE. If true (1), the function uses the provided
* rvec and tvec values as initial approximations of the rotation and translation vectors, respectively, and further
* optimizes them (default false).
* @param solvePnPMethod Method for solving a PnP problem: see @ref calib3d_solvePnP_flags (default SOLVEPNP_ITERATIVE).
* @sa PatternPos, solvePnP(), @ref tutorial_aruco_detection
*/
struct CV_EXPORTS_W EstimateParameters {
CV_PROP_RW PatternPos pattern;
CV_PROP_RW bool useExtrinsicGuess;
CV_PROP_RW SolvePnPMethod solvePnPMethod;
EstimateParameters(): pattern(CCW_center), useExtrinsicGuess(false),
solvePnPMethod(SOLVEPNP_ITERATIVE) {}
CV_WRAP static Ptr<EstimateParameters> create() {
return makePtr<EstimateParameters>();
}
};
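
For reference, a minimal usage sketch of the new struct (not part of the patch; assumes `<opencv2/aruco.hpp>` is included):

```cpp
// Build pose-estimation parameters and opt out of the new CCW_center default,
// restoring the top-left-corner, clockwise convention used before this patch.
cv::Ptr<cv::aruco::EstimateParameters> params = cv::aruco::EstimateParameters::create();
params->pattern = cv::aruco::CW_top_left_corner; // origin in the top-left corner
// useExtrinsicGuess (false) and solvePnPMethod (cv::SOLVEPNP_ITERATIVE)
// keep their defaults here.
```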
/**
@@ -240,21 +289,28 @@ CV_EXPORTS_W void detectMarkers(InputArray image, const Ptr<Dictionary> &diction
* @param tvecs array of output translation vectors (e.g. std::vector<cv::Vec3d>).
* Each element in tvecs corresponds to the specific marker in imgPoints.
* @param _objPoints array of object points of all the marker corners
* @param estimateParameters sets the origin of the coordinate system and the coordinates of the four corners of the marker
* (default estimateParameters.pattern = PatternPos::CCW_center, estimateParameters.useExtrinsicGuess = false,
* estimateParameters.solvePnPMethod = SOLVEPNP_ITERATIVE).
*
* This function receives the detected markers and returns their pose estimation with respect to
* the camera individually. So for each marker, one rotation and one translation vector is returned.
* The returned transformation is the one that transforms points from each marker coordinate system
* to the camera coordinate system.
* The marker coordinate system is centered on the middle of the marker, with the Z axis
* perpendicular to the marker plane.
* The coordinates of the four corners of the marker in its own coordinate system are:
* (0, 0, 0), (markerLength, 0, 0),
* (markerLength, markerLength, 0), (0, markerLength, 0)
* The marker coordinate system is centered on the middle (by default) or on the top-left corner of the marker,
* with the Z axis perpendicular to the marker plane.
* estimateParameters defines the coordinates of the four corners of the marker in its own coordinate system; by default these are:
* (-markerLength/2, markerLength/2, 0), (markerLength/2, markerLength/2, 0),
* (markerLength/2, -markerLength/2, 0), (-markerLength/2, -markerLength/2, 0)
* @sa use cv::drawFrameAxes to draw the world coordinate system axes for the object points
* @sa @ref tutorial_aruco_detection
* @sa EstimateParameters
* @sa PatternPos
*/
CV_EXPORTS_W void estimatePoseSingleMarkers(InputArrayOfArrays corners, float markerLength,
InputArray cameraMatrix, InputArray distCoeffs,
OutputArray rvecs, OutputArray tvecs, OutputArray _objPoints = noArray());
OutputArray rvecs, OutputArray tvecs, OutputArray _objPoints = noArray(),
Ptr<EstimateParameters> estimateParameters = EstimateParameters::create());
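
To see the extended signature in context, here is a hedged end-to-end sketch (the image, the calibration data and the 5 cm marker length are assumed inputs, not part of this patch):

```cpp
#include <opencv2/aruco.hpp>
#include <opencv2/calib3d.hpp>
#include <vector>

// Detect markers, estimate each pose with the default EstimateParameters
// (PatternPos::CCW_center), and draw the axes for a visual check.
void estimateAndDraw(cv::Mat& image, const cv::Mat& cameraMatrix, const cv::Mat& distCoeffs) {
    cv::Ptr<cv::aruco::Dictionary> dict =
        cv::aruco::getPredefinedDictionary(cv::aruco::DICT_6X6_250);
    std::vector<std::vector<cv::Point2f> > corners;
    std::vector<int> ids;
    cv::aruco::detectMarkers(image, dict, corners, ids);
    if (ids.empty()) return;

    const float markerLength = 0.05f; // metres; tvecs are returned in the same unit
    std::vector<cv::Vec3d> rvecs, tvecs;
    cv::aruco::estimatePoseSingleMarkers(corners, markerLength, cameraMatrix, distCoeffs,
                                         rvecs, tvecs);
    for (size_t i = 0; i < rvecs.size(); i++)
        cv::drawFrameAxes(image, cameraMatrix, distCoeffs, rvecs[i], tvecs[i],
                          markerLength * 0.5f);
}
```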

@@ -810,19 +810,31 @@ static void _identifyCandidates(InputArray _image, vector< vector< vector< Point
/**
* @brief Return object points for the system centered in a single marker, given the marker length
* @brief Return object points for the system centered in the middle (by default) or in the
* top-left corner of a single marker, given the marker length
*/
static void _getSingleMarkerObjectPoints(float markerLength, OutputArray _objPoints) {
static void _getSingleMarkerObjectPoints(float markerLength, OutputArray _objPoints,
EstimateParameters estimateParameters) {
CV_Assert(markerLength > 0);
_objPoints.create(4, 1, CV_32FC3);
Mat objPoints = _objPoints.getMat();
// set coordinate system in the top-left corner of the marker, with Z pointing out
objPoints.ptr< Vec3f >(0)[0] = Vec3f(0.f, 0.f, 0);
objPoints.ptr< Vec3f >(0)[1] = Vec3f(markerLength, 0.f, 0);
objPoints.ptr< Vec3f >(0)[2] = Vec3f(markerLength, markerLength, 0);
objPoints.ptr< Vec3f >(0)[3] = Vec3f(0.f, markerLength, 0);
if (estimateParameters.pattern == CW_top_left_corner) {
objPoints.ptr<Vec3f>(0)[0] = Vec3f(0.f, 0.f, 0);
objPoints.ptr<Vec3f>(0)[1] = Vec3f(markerLength, 0.f, 0);
objPoints.ptr<Vec3f>(0)[2] = Vec3f(markerLength, markerLength, 0);
objPoints.ptr<Vec3f>(0)[3] = Vec3f(0.f, markerLength, 0);
}
else if (estimateParameters.pattern == CCW_center) {
objPoints.ptr<Vec3f>(0)[0] = Vec3f(-markerLength/2.f, markerLength/2.f, 0);
objPoints.ptr<Vec3f>(0)[1] = Vec3f(markerLength/2.f, markerLength/2.f, 0);
objPoints.ptr<Vec3f>(0)[2] = Vec3f(markerLength/2.f, -markerLength/2.f, 0);
objPoints.ptr<Vec3f>(0)[3] = Vec3f(-markerLength/2.f, -markerLength/2.f, 0);
}
else
CV_Error(Error::StsBadArg, "Unknown estimateParameters pattern");
}
@@ -1221,17 +1233,17 @@ class SinglePoseEstimationParallel : public ParallelLoopBody {
public:
SinglePoseEstimationParallel(Mat& _markerObjPoints, InputArrayOfArrays _corners,
InputArray _cameraMatrix, InputArray _distCoeffs,
Mat& _rvecs, Mat& _tvecs)
Mat& _rvecs, Mat& _tvecs, EstimateParameters _estimateParameters)
: markerObjPoints(_markerObjPoints), corners(_corners), cameraMatrix(_cameraMatrix),
distCoeffs(_distCoeffs), rvecs(_rvecs), tvecs(_tvecs) {}
distCoeffs(_distCoeffs), rvecs(_rvecs), tvecs(_tvecs), estimateParameters(_estimateParameters) {}
void operator()(const Range &range) const CV_OVERRIDE {
const int begin = range.start;
const int end = range.end;
for(int i = begin; i < end; i++) {
solvePnP(markerObjPoints, corners.getMat(i), cameraMatrix, distCoeffs,
rvecs.at<Vec3d>(i), tvecs.at<Vec3d>(i));
solvePnP(markerObjPoints, corners.getMat(i), cameraMatrix, distCoeffs, rvecs.at<Vec3d>(i),
tvecs.at<Vec3d>(i), estimateParameters.useExtrinsicGuess, estimateParameters.solvePnPMethod);
}
}
@@ -1242,21 +1254,20 @@ class SinglePoseEstimationParallel : public ParallelLoopBody {
InputArrayOfArrays corners;
InputArray cameraMatrix, distCoeffs;
Mat& rvecs, tvecs;
EstimateParameters estimateParameters;
};
/**
*/
void estimatePoseSingleMarkers(InputArrayOfArrays _corners, float markerLength,
InputArray _cameraMatrix, InputArray _distCoeffs,
OutputArray _rvecs, OutputArray _tvecs, OutputArray _objPoints) {
OutputArray _rvecs, OutputArray _tvecs, OutputArray _objPoints,
Ptr<EstimateParameters> estimateParameters) {
CV_Assert(markerLength > 0);
Mat markerObjPoints;
_getSingleMarkerObjectPoints(markerLength, markerObjPoints);
_getSingleMarkerObjectPoints(markerLength, markerObjPoints, *estimateParameters);
int nMarkers = (int)_corners.total();
_rvecs.create(nMarkers, 1, CV_64FC3);
_tvecs.create(nMarkers, 1, CV_64FC3);
@@ -1272,7 +1283,7 @@ void estimatePoseSingleMarkers(InputArrayOfArrays _corners, float markerLength,
// this is the parallel call for the previous commented loop (result is equivalent)
parallel_for_(Range(0, nMarkers),
SinglePoseEstimationParallel(markerObjPoints, _corners, _cameraMatrix,
_distCoeffs, rvecs, tvecs));
_distCoeffs, rvecs, tvecs, *estimateParameters));
if(_objPoints.needed()){
markerObjPoints.convertTo(_objPoints, -1);
}

@@ -439,10 +439,12 @@ void CV_CharucoDiamondDetection::run(int) {
}
}
Ptr<aruco::EstimateParameters> estimateParameters = aruco::EstimateParameters::create();
estimateParameters->pattern = aruco::CW_top_left_corner;
// estimate diamond pose
vector< Vec3d > estimatedRvec, estimatedTvec;
aruco::estimatePoseSingleMarkers(diamondCorners, squareLength, cameraMatrix,
distCoeffs, estimatedRvec, estimatedTvec);
aruco::estimatePoseSingleMarkers(diamondCorners, squareLength, cameraMatrix, distCoeffs, estimatedRvec,
estimatedTvec, noArray(), estimateParameters);
// check result
vector< Point2f > projectedDiamondCornersPose;

Binary file not shown (new image: singlemarkersaxes2.jpg, 78 KiB).

@@ -286,8 +286,9 @@ translation vectors of the estimated poses will be in the same unit
- The output parameters `rvecs` and `tvecs` are the rotation and translation vectors respectively, for each of the markers
in `markerCorners`.
The marker coordinate system that is assumed by this function is placed at the center of the marker
with the Z axis pointing out, as in the following image. Axis-color correspondences are X: red, Y: green, Z: blue. Note the axis directions of the rotated markers in this image.
The marker coordinate system that is assumed by this function is placed in the center (by default) or
in the top-left corner of the marker with the Z axis pointing out, as in the following image.
Axis-color correspondences are X: red, Y: green, Z: blue. Note the axis directions of the rotated markers in this image.
![Image with axes drawn](images/singlemarkersaxes.jpg)
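
Switching the assumed convention is then a matter of passing an `EstimateParameters` object, for example (a sketch reusing `markerCorners` from above; `cameraMatrix` and `distCoeffs` are assumed calibration variables):

```cpp
std::vector<cv::Vec3d> rvecs, tvecs;
cv::Ptr<cv::aruco::EstimateParameters> estimateParams = cv::aruco::EstimateParameters::create();
estimateParams->pattern = cv::aruco::CW_top_left_corner; // instead of the default CCW_center
cv::aruco::estimatePoseSingleMarkers(markerCorners, 0.05f, cameraMatrix, distCoeffs,
                                     rvecs, tvecs, cv::noArray(), estimateParams);
```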
