fix axes and docs #3256
@@ -40,6 +40,7 @@ the use of this software, even if advised of the possibility of such damage.
#define __OPENCV_ARUCO_HPP__

#include <opencv2/core.hpp>
#include <opencv2/calib3d.hpp>
#include <vector>
#include "opencv2/aruco/dictionary.hpp"
@@ -219,7 +220,55 @@ struct CV_EXPORTS_W DetectorParameters {
CV_EXPORTS_W void detectMarkers(InputArray image, const Ptr<Dictionary> &dictionary, OutputArrayOfArrays corners,
                                OutputArray ids, const Ptr<DetectorParameters> &parameters = DetectorParameters::create(),
                                OutputArrayOfArrays rejectedImgPoints = noArray());
/** @brief
 * rvec/tvec define the right handed coordinate system of the marker.
 * PatternPos defines the center of this system and the direction of its axes.
 * Axis X (red color) - first coordinate, axis Y (green color) - second coordinate,
 * axis Z (blue color) - third coordinate.
 * @sa estimatePoseSingleMarkers(), @ref tutorial_aruco_detection
 */
enum PatternPos {
    /** @brief The marker coordinate system is centered on the middle of the marker.
     * The coordinates of the four corners (CCW order) of the marker in its own coordinate system are:
     * (-markerLength/2, markerLength/2, 0), (markerLength/2, markerLength/2, 0),
     * (markerLength/2, -markerLength/2, 0), (-markerLength/2, -markerLength/2, 0).
     *
     * These pattern points define this coordinate system:
     * 
     */
    CCW_center,
    /** @brief The marker coordinate system is centered on the top-left corner of the marker.
     * The coordinates of the four corners (CW order) of the marker in its own coordinate system are:
     * (0, 0, 0), (markerLength, 0, 0),
     * (markerLength, markerLength, 0), (0, markerLength, 0).
     *
     * These pattern points define this coordinate system:
     * 
     */
    CW_top_left_corner
};

Review comment: I think that CCW and CW meaning should be covered too.
Reply: fixed
/** @brief
 * Pose estimation parameters
 * @param pattern Defines the center of the marker coordinate system and the direction of its axes (default PatternPos::CCW_center).
 * @param useExtrinsicGuess Parameter used for SOLVEPNP_ITERATIVE. If true (1), the function uses the provided
 * rvec and tvec values as initial approximations of the rotation and translation vectors, respectively, and further
 * optimizes them (default false).
 * @param solvePnPMethod Method for solving a PnP problem: see @ref calib3d_solvePnP_flags (default SOLVEPNP_ITERATIVE).
 * @sa PatternPos, solvePnP(), @ref tutorial_aruco_detection
 */
struct CV_EXPORTS_W EstimateParameters {
    CV_PROP_RW PatternPos pattern;
    CV_PROP_RW bool useExtrinsicGuess;
    CV_PROP_RW SolvePnPMethod solvePnPMethod;

    EstimateParameters(): pattern(CCW_center), useExtrinsicGuess(false),
        solvePnPMethod(SOLVEPNP_ITERATIVE) {}

    CV_WRAP static Ptr<EstimateParameters> create() {
        return makePtr<EstimateParameters>();
    }
};
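For readers of this diff, a minimal usage sketch of the new struct follows. It is not part of the patch and assumes the patch is applied (EstimateParameters, PatternPos and create() only exist with these changes); it simply shows how the fields are meant to be set.

```cpp
// Hypothetical usage sketch (not part of this patch).
#include <opencv2/aruco.hpp>

void configurePoseParameters()
{
    // Create the new pose-estimation parameters and switch the marker
    // coordinate system to the top-left-corner convention added by this PR.
    cv::Ptr<cv::aruco::EstimateParameters> estimateParams =
        cv::aruco::EstimateParameters::create();
    estimateParams->pattern = cv::aruco::CW_top_left_corner;  // default is CCW_center
    estimateParams->useExtrinsicGuess = false;                // no initial rvec/tvec guess
    estimateParams->solvePnPMethod = cv::SOLVEPNP_ITERATIVE;  // default solver
}
```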
/**
@@ -240,21 +289,28 @@ CV_EXPORTS_W void detectMarkers(InputArray image, const Ptr<Dictionary> &diction
 * @param tvecs array of output translation vectors (e.g. std::vector<cv::Vec3d>).
 * Each element in tvecs corresponds to the specific marker in imgPoints.
 * @param _objPoints array of object points of all the marker corners
 * @param estimateParameters set the origin of coordinate system and the coordinates of the four corners of the marker
 * (default estimateParameters.pattern = PatternPos::CCW_center, estimateParameters.useExtrinsicGuess = false,
 * estimateParameters.solvePnPMethod = SOLVEPNP_ITERATIVE).
 *
 * This function receives the detected markers and returns their pose estimation with respect to
 * the camera individually. So for each marker, one rotation and translation vector is returned.
 * The returned transformation is the one that transforms points from each marker coordinate system
 * to the camera coordinate system.
 * The marker coordinate system is centered on the middle of the marker, with the Z axis
 * perpendicular to the marker plane.
 * The coordinates of the four corners of the marker in its own coordinate system are:
 * (0, 0, 0), (markerLength, 0, 0),
 * (markerLength, markerLength, 0), (0, markerLength, 0)
 * The marker coordinate system is centered on the middle (by default) or on the top-left corner of the marker,
 * with the Z axis perpendicular to the marker plane.
 * estimateParameters defines the coordinates of the four corners of the marker in its own coordinate system; by default they are:
 * (-markerLength/2, markerLength/2, 0), (markerLength/2, markerLength/2, 0),
 * (markerLength/2, -markerLength/2, 0), (-markerLength/2, -markerLength/2, 0)
 * @sa use cv::drawFrameAxes to get world coordinate system axis for object points
 * @sa @ref tutorial_aruco_detection
 * @sa EstimateParameters
 * @sa PatternPos
 */
CV_EXPORTS_W void estimatePoseSingleMarkers(InputArrayOfArrays corners, float markerLength,
                                            InputArray cameraMatrix, InputArray distCoeffs,
                                            OutputArray rvecs, OutputArray tvecs, OutputArray _objPoints = noArray());
                                            OutputArray rvecs, OutputArray tvecs, OutputArray _objPoints = noArray(),
                                            Ptr<EstimateParameters> estimateParameters = EstimateParameters::create());
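As a hedged illustration of how the extended signature can be called end to end (a sketch only; camMatrix and distCoeffs are assumed to come from a prior camera calibration, and the marker side length of 0.05 m is an arbitrary example value):

```cpp
// Sketch: detect markers and estimate poses using the new EstimateParameters argument.
#include <opencv2/aruco.hpp>
#include <opencv2/calib3d.hpp>
#include <vector>

void detectAndEstimate(const cv::Mat &image, const cv::Mat &camMatrix, const cv::Mat &distCoeffs)
{
    cv::Ptr<cv::aruco::Dictionary> dictionary =
        cv::aruco::getPredefinedDictionary(cv::aruco::DICT_6X6_250);

    std::vector<int> ids;
    std::vector<std::vector<cv::Point2f>> corners;
    cv::aruco::detectMarkers(image, dictionary, corners, ids);
    if (ids.empty()) return;

    // Keep the default CCW_center pattern; with centered square object points
    // the IPPE square solver is a valid choice.
    cv::Ptr<cv::aruco::EstimateParameters> estimateParams = cv::aruco::EstimateParameters::create();
    estimateParams->solvePnPMethod = cv::SOLVEPNP_IPPE_SQUARE;

    std::vector<cv::Vec3d> rvecs, tvecs;
    cv::aruco::estimatePoseSingleMarkers(corners, 0.05f /* marker side in metres */,
                                         camMatrix, distCoeffs, rvecs, tvecs,
                                         cv::noArray(), estimateParams);
}
```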
@@ -810,19 +810,31 @@ static void _identifyCandidates(InputArray _image, vector< vector< vector< Point
/**
 * @brief Return object points for the system centered in a single marker, given the marker length
 * @brief Return object points for the system centered in the middle (by default) or in the top-left corner of a single
 * marker, given the marker length
 */
static void _getSingleMarkerObjectPoints(float markerLength, OutputArray _objPoints) {
static void _getSingleMarkerObjectPoints(float markerLength, OutputArray _objPoints,
                                         EstimateParameters estimateParameters) {

Review comment: const reference.

    CV_Assert(markerLength > 0);

    _objPoints.create(4, 1, CV_32FC3);
    Mat objPoints = _objPoints.getMat();
    // set coordinate system in the top-left corner of the marker, with Z pointing out
    objPoints.ptr< Vec3f >(0)[0] = Vec3f(0.f, 0.f, 0);
    objPoints.ptr< Vec3f >(0)[1] = Vec3f(markerLength, 0.f, 0);
    objPoints.ptr< Vec3f >(0)[2] = Vec3f(markerLength, markerLength, 0);
    objPoints.ptr< Vec3f >(0)[3] = Vec3f(0.f, markerLength, 0);
    if (estimateParameters.pattern == CW_top_left_corner) {
        objPoints.ptr<Vec3f>(0)[0] = Vec3f(0.f, 0.f, 0);
        objPoints.ptr<Vec3f>(0)[1] = Vec3f(markerLength, 0.f, 0);
        objPoints.ptr<Vec3f>(0)[2] = Vec3f(markerLength, markerLength, 0);
        objPoints.ptr<Vec3f>(0)[3] = Vec3f(0.f, markerLength, 0);
    }
    else if (estimateParameters.pattern == CCW_center) {
        objPoints.ptr<Vec3f>(0)[0] = Vec3f(-markerLength/2.f, markerLength/2.f, 0);
        objPoints.ptr<Vec3f>(0)[1] = Vec3f(markerLength/2.f, markerLength/2.f, 0);
        objPoints.ptr<Vec3f>(0)[2] = Vec3f(markerLength/2.f, -markerLength/2.f, 0);
        objPoints.ptr<Vec3f>(0)[3] = Vec3f(-markerLength/2.f, -markerLength/2.f, 0);
    }
    else
        CV_Error(Error::StsBadArg, "Unknown estimateParameters pattern");
}
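The reviewer's note above presumably asks for the parameter struct to be passed by const reference rather than by value. A sketch of that variant (an assumption about the requested change, not code in this patch; the body would stay exactly as in the diff) could be:

```cpp
// Reviewer-suggested signature (sketch): take EstimateParameters by const
// reference so the struct is not copied on every call; behaviour is unchanged.
static void _getSingleMarkerObjectPoints(float markerLength, OutputArray _objPoints,
                                         const EstimateParameters &estimateParameters);
```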
@@ -1221,17 +1233,17 @@ class SinglePoseEstimationParallel : public ParallelLoopBody {
    public:
    SinglePoseEstimationParallel(Mat& _markerObjPoints, InputArrayOfArrays _corners,
                                 InputArray _cameraMatrix, InputArray _distCoeffs,
                                 Mat& _rvecs, Mat& _tvecs)
                                 Mat& _rvecs, Mat& _tvecs, EstimateParameters _estimateParameters)

Review comment: const reference

        : markerObjPoints(_markerObjPoints), corners(_corners), cameraMatrix(_cameraMatrix),
          distCoeffs(_distCoeffs), rvecs(_rvecs), tvecs(_tvecs) {}
          distCoeffs(_distCoeffs), rvecs(_rvecs), tvecs(_tvecs), estimateParameters(_estimateParameters) {}

    void operator()(const Range &range) const CV_OVERRIDE {
        const int begin = range.start;
        const int end = range.end;

        for(int i = begin; i < end; i++) {
            solvePnP(markerObjPoints, corners.getMat(i), cameraMatrix, distCoeffs,
                     rvecs.at<Vec3d>(i), tvecs.at<Vec3d>(i));
            solvePnP(markerObjPoints, corners.getMat(i), cameraMatrix, distCoeffs, rvecs.at<Vec3d>(i),
                     tvecs.at<Vec3d>(i), estimateParameters.useExtrinsicGuess, estimateParameters.solvePnPMethod);
        }
    }
@@ -1242,21 +1254,20 @@ class SinglePoseEstimationParallel : public ParallelLoopBody {
    InputArrayOfArrays corners;
    InputArray cameraMatrix, distCoeffs;
    Mat& rvecs, tvecs;
    EstimateParameters estimateParameters;

Review comment: const reference

};
/**
 */
void estimatePoseSingleMarkers(InputArrayOfArrays _corners, float markerLength,
                               InputArray _cameraMatrix, InputArray _distCoeffs,
                               OutputArray _rvecs, OutputArray _tvecs, OutputArray _objPoints) {
                               OutputArray _rvecs, OutputArray _tvecs, OutputArray _objPoints,
                               Ptr<EstimateParameters> estimateParameters) {

    CV_Assert(markerLength > 0);

    Mat markerObjPoints;
    _getSingleMarkerObjectPoints(markerLength, markerObjPoints);
    _getSingleMarkerObjectPoints(markerLength, markerObjPoints, *estimateParameters);

Review comment: unchecked pointer dereference.

    int nMarkers = (int)_corners.total();
    _rvecs.create(nMarkers, 1, CV_64FC3);
    _tvecs.create(nMarkers, 1, CV_64FC3);

@@ -1272,7 +1283,7 @@ void estimatePoseSingleMarkers(InputArrayOfArrays _corners, float markerLength,
    // this is the parallel call for the previous commented loop (result is equivalent)
    parallel_for_(Range(0, nMarkers),
                  SinglePoseEstimationParallel(markerObjPoints, _corners, _cameraMatrix,
                                               _distCoeffs, rvecs, tvecs));
                                               _distCoeffs, rvecs, tvecs, *estimateParameters));
    if(_objPoints.needed()){
        markerObjPoints.convertTo(_objPoints, -1);
    }
Review comment: Missing prefix for enum and its values violates the OpenCV coding style guide.
Constants MUST be UPPERCASE (defined by the same coding style guide).