Merge 7f5478d2ab into ce3c6681c9
commit 28215b3d47
2 changed files with 1085 additions and 0 deletions
@@ -0,0 +1,579 @@
#include <opencv2/opencv.hpp>
#include <opencv2/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/calib3d/calib3d.hpp>

#include <iostream>
#include <stdio.h>
#include <vector>

using namespace cv;

struct CameraParams
{
    double ppx = 0.0;
    double ppy = 0.0;
    double focal = 0.0;
    double aspect = 0.0;
    Mat camera_mat;

    CameraParams(const Mat& camera)
    {
        ppx = camera.at<double>(0, 2);
        ppy = camera.at<double>(1, 2);
        focal = camera.at<double>(0, 0);
        aspect = camera.at<double>(1, 1) / camera.at<double>(0, 0);
        camera_mat = camera.clone();
    }

    CameraParams() {}

    void setCameraParams(const Mat& camera)
    {
        ppx = camera.at<double>(0, 2);
        ppy = camera.at<double>(1, 2);
        focal = camera.at<double>(0, 0);
        aspect = camera.at<double>(1, 1) / camera.at<double>(0, 0);
        camera_mat = camera.clone();
    }

    Mat getCameraParamsMat() const
    {
        return camera_mat;
    }
};

struct Quaternion
{
    double w = 0.0;
    double x = 0.0;
    double y = 0.0;
    double z = 0.0;

    Quaternion(const std::vector<double>& q_vec)
    {
        x = q_vec[0];
        y = q_vec[1];
        z = q_vec[2];
        w = q_vec[3];
    }
};

struct Mat_xy
{
    Mat x;
    Mat y;
};

// Sparse Levenberg-Marquardt bundle adjustment: each camera is parameterized by a
// quaternion plus a camera center (7 parameters), each structure point by 3 coordinates.
class LevMarqBA
{
public:
    LevMarqBA(std::vector<Point3f> points3d,
              std::vector<std::vector<Point2f> > points2d, // i = views
              std::vector<Mat> rotations_,
              std::vector<Mat> translations_,
              Mat camera_params_);
              //Mat dist_coeffs,
              //std::vector<std::vector<int> > visibility_indices_

//protected:
    Point3f computeProjection(const Mat& camera_matrix,
                              const Mat& rotation,
                              const Mat& translation,
                              const Point3f X);

    Mat computeErrors(const std::vector<std::vector<Point2f> >& X,
                      const std::vector<std::vector<Point2f> >& X_hat);

    Mat computedfdR(const Point3f xyz_point,
                    const Point3f uvw_point,
                    const Point3f C,
                    const Mat& R,
                    const CameraParams& camera);

    Mat computedfdX(const Point3f xyz_point,
                    const Point3f uvw_point,
                    const Mat& R,
                    const CameraParams& camera);

    Mat computedRdq(const Quaternion q);

    Mat computedfdq(const Point3f xyz_point,
                    const Point3f uvw_point,
                    const Point3f C,
                    const Mat& R,
                    const CameraParams& camera);

    Mat computedfdC(const Point3f xyz_point,
                    const Point3f uvw_point,
                    const Mat& R,
                    const CameraParams& camera);

    Mat computeDiagCovarMatrix(const std::vector<std::vector<Point2f> >& X);

    Mat augmentateNonZeroElements(const Mat& A, const double mu);

    Mat computeJacobian(const std::vector<Point3f>& X_3d,
                        const std::vector<std::vector<int> >& visibility_indices,
                        const std::vector<Mat>& R,
                        const std::vector<Mat>& t,
                        const CameraParams& camera);

    void findDelta(const double mu);
    Quaternion RMatToQuaternion(Mat R);

private:
    CameraParams camera_params;
    std::vector<std::vector<Point2f> > X, X_hat;
    std::vector<Point3f> X_3d;
    std::vector<std::vector<int> > visibility_indices;
    std::vector<Mat> rotations, translations;

    size_t n_views, n_points;
    Mat A, B, J;    // A: camera-parameter blocks, B: point blocks, J = [A B]
    //Mat diag_cov, diag_cov_inv;
    Mat U, V, W;
    Mat U_augm, V_augm;
    Mat Y;

    Mat errors;
    Mat errors_a, errors_b;

    /*
    Mat A_x, A_y, B_x, B_y, J_x, J_y;
    Mat U_x, U_y, V_x, V_y, W_x, W_y;
    Mat U_x_augm, U_y_augm, V_x_augm, V_y_augm;
    Mat Y_x, Y_y;
    Mat diag_cov_x, diag_cov_inv_x;
    Mat diag_cov_y, diag_cov_inv_y;

    Mat errors_x, errors_y;
    Mat errors_a_x, errors_a_y;
    Mat errors_b_x, errors_b_y;
    */
};

LevMarqBA::LevMarqBA(std::vector<Point3f> points3d,
                     std::vector<std::vector<Point2f> > points2d,
                     std::vector<Mat> rotations_,
                     std::vector<Mat> translations_,
                     Mat camera_params_)
{
    X_3d = points3d;
    X = points2d;

    rotations = rotations_;
    translations = translations_;

    camera_params.setCameraParams(camera_params_);

    n_views = rotations.size();
    n_points = X_3d.size();
}

// Projects a 3D point with P = K * [R|t] and returns the homogeneous image point (u, v, w).
Point3f LevMarqBA::computeProjection(const Mat& camera_matrix,
                                     const Mat& rotation,
                                     const Mat& translation,
                                     const Point3f X)
{
    Mat rot_transl;
    hconcat(rotation, translation, rot_transl);

    Mat X_mat(4, 1, CV_64F);
    X_mat.at<double>(0, 0) = X.x;
    X_mat.at<double>(1, 0) = X.y;
    X_mat.at<double>(2, 0) = X.z;
    X_mat.at<double>(3, 0) = 1.0;

    Mat uvw_mat = camera_matrix * rot_transl * X_mat;

    Point3f uvw_point(uvw_mat);
    return uvw_point;
}

// Per-observation reprojection error; missing observations are marked as Point2f(-1, -1)
// and contribute zero error.
Mat LevMarqBA::computeErrors(const std::vector<std::vector<Point2f> >& X,
                             const std::vector<std::vector<Point2f> >& X_hat)
{
    Mat errs(n_views, n_points, CV_64F);

    for (int i = 0; i < errs.rows; i++)
    {
        for (int j = 0; j < errs.cols; j++)
        {
            if (X[i][j] != Point2f(-1.0, -1.0))
                errs.at<double>(i, j) = norm(X[i][j] - X_hat[i][j]);
            else
                errs.at<double>(i, j) = 0.0;
        }
    }
    return errs;
}

//TODO: input mat of 2f points
Mat LevMarqBA::computeDiagCovarMatrix(const std::vector<std::vector<Point2f> >& X)
{
    Mat cov, mean;

    calcCovarMatrix(X, cov, mean, COVAR_NORMAL | COVAR_COLS);

    int nsamples = n_views * n_points;
    cov = cov / (nsamples - 1);

    // One 2x2 isotropic block per observation (x and y share the same variance).
    Mat diag_cov = Mat::zeros(2 * nsamples, 2 * nsamples, CV_64F);
    Mat identity_mat = Mat::eye(2, 2, CV_64F);
    for (int i = 0; i < nsamples; i++)
    {
        Mat cov_block = cov.at<double>(i, i) * identity_mat;
        cov_block.copyTo(diag_cov(Rect(2 * i, 2 * i, 2, 2)));
    }
    return diag_cov;
}

/*
void LevMarqBA::computeDiagCovarMatrix(const std::vector<std::vector<Point2d> >& X)
{
    Mat X_mat;

    Mat(X).reshape(X.size()).convertTo(X_mat, CV_64F);
    std::vector<Mat> channels(2);
    split(X_mat, channels);

    //std::cout << "X mat = " << X_mat << std::endl;
    Mat cov_x, mean_x;
    Mat cov_y, mean_y;

    calcCovarMatrix(channels[0], cov_x, mean_x, COVAR_NORMAL | COVAR_COLS);
    calcCovarMatrix(channels[1], cov_y, mean_y, COVAR_NORMAL | COVAR_COLS);

    int nsamples = X_mat.cols;
    cov_x = cov_x / (nsamples - 1);
    cov_y = cov_y / (nsamples - 1);

    diag_cov_x = Mat::zeros(nsamples, nsamples, CV_64F);
    diag_cov_inv_x = diag_cov_x.clone();

    diag_cov_y = Mat::zeros(nsamples, nsamples, CV_64F);
    diag_cov_inv_y = diag_cov_y.clone();

    for (int i = 0; i < nsamples; i++)
    {
        diag_cov_x.at<double>(i, i) = cov_x.at<double>(i, i);
        diag_cov_inv_x.at<double>(i, i) = 1 / diag_cov_x.at<double>(i, i);

        diag_cov_y.at<double>(i, i) = cov_y.at<double>(i, i);
        diag_cov_inv_y.at<double>(i, i) = 1 / diag_cov_y.at<double>(i, i);
    }
*/
/*
    int nsamples = X.size();

    Mat X_mat(X);
    std::vector<Mat> channels(2);
    split(X_mat, channels);

    diag_cov_x = Mat::zeros(nsamples, nsamples, CV_64F);
    diag_cov_inv_x = diag_cov_x.clone();

    diag_cov_y = Mat::zeros(nsamples, nsamples, CV_64F);
    diag_cov_inv_y = diag_cov_y.clone();

    double mean_x = sum(mean(channels[0]))[0];
    double mean_y = sum(mean(channels[1]))[0];

    for (int i = 0; i < nsamples; i++)
    {
        double t_x = channels[0].at<double>(i, 0) - mean_x;
        diag_cov_x.at<double>(i, i) = t_x * t_x / (nsamples - 1);
        diag_cov_inv_x.at<double>(i, i) = 1 / diag_cov_x.at<double>(i, i);

        double t_y = channels[1].at<double>(i, 0) - mean_y;
        diag_cov_y.at<double>(i, i) = t_y * t_y / (nsamples - 1);
        diag_cov_inv_y.at<double>(i, i) = 1 / diag_cov_y.at<double>(i, i);
    }
*/
/*
    std::cout << "diag_cov_x = " << diag_cov_x << std::endl;
    std::cout << "diag_cov_y = " << diag_cov_y << std::endl;
    return;
}
*/
// Derivative of the rotation matrix entries (row-major r00..r22) with respect to the
// quaternion components, columns ordered (x, y, z, w). Assumes the standard
// unit-quaternion rotation matrix R(q).
Mat LevMarqBA::computedRdq(const Quaternion q)
{
    Mat dRdq(9, 4, CV_64F);

    Mat((Mat_<double>(1, 4) << 0, -4 * q.y, -4 * q.z, 0)).copyTo(dRdq.row(0));
    Mat((Mat_<double>(1, 4) << 2 * q.y, 2 * q.x, -2 * q.w, -2 * q.z)).copyTo(dRdq.row(1));
    Mat((Mat_<double>(1, 4) << 2 * q.z, 2 * q.w, 2 * q.x, 2 * q.y)).copyTo(dRdq.row(2));
    Mat((Mat_<double>(1, 4) << 2 * q.y, 2 * q.x, 2 * q.w, 2 * q.z)).copyTo(dRdq.row(3));
    Mat((Mat_<double>(1, 4) << -4 * q.x, 0, -4 * q.z, 0)).copyTo(dRdq.row(4));
    Mat((Mat_<double>(1, 4) << -2 * q.w, 2 * q.z, 2 * q.y, -2 * q.x)).copyTo(dRdq.row(5));
    Mat((Mat_<double>(1, 4) << 2 * q.z, -2 * q.w, 2 * q.x, -2 * q.y)).copyTo(dRdq.row(6));
    Mat((Mat_<double>(1, 4) << 2 * q.w, 2 * q.z, 2 * q.y, 2 * q.x)).copyTo(dRdq.row(7));
    Mat((Mat_<double>(1, 4) << -4 * q.x, -4 * q.y, 0, 0)).copyTo(dRdq.row(8));

    //std::cout << "dfdq = " << dRdq << std::endl;
    return dRdq;
}

// Derivative of the projection f = (u/w, v/w) with respect to the 9 entries of R (row-major),
// using the parameterization (u, v, w)^T = K * R * (X - C) with C = -R^T * t.
Mat LevMarqBA::computedfdR(const Point3f xyz_point,
                           const Point3f uvw_point,
                           const Point3f C,
                           const Mat& R,
                           const CameraParams& camera)
{
    Mat du_dr = (Mat_<double>(1, 9) << camera.focal * (xyz_point.x - C.x),
                                       camera.focal * (xyz_point.y - C.y),
                                       camera.focal * (xyz_point.z - C.z),
                                       0.0, 0.0, 0.0,
                                       camera.ppx * (xyz_point.x - C.x),
                                       camera.ppx * (xyz_point.y - C.y),
                                       camera.ppx * (xyz_point.z - C.z));

    Mat dv_dr = (Mat_<double>(1, 9) << 0.0, 0.0, 0.0,
                                       camera.focal * (xyz_point.x - C.x),
                                       camera.focal * (xyz_point.y - C.y),
                                       camera.focal * (xyz_point.z - C.z),
                                       camera.ppy * (xyz_point.x - C.x),
                                       camera.ppy * (xyz_point.y - C.y),
                                       camera.ppy * (xyz_point.z - C.z));

    Mat dw_dr = (Mat_<double>(1, 9) << 0.0, 0.0, 0.0,
                                       0.0, 0.0, 0.0,
                                       xyz_point.x - C.x,
                                       xyz_point.y - C.y,
                                       xyz_point.z - C.z);

    Mat dfdR(2, 9, CV_64F);

    // Quotient rule: d(u/w) = (w * du - u * dw) / w^2, likewise for v/w.
    Mat((uvw_point.z * du_dr - uvw_point.x * dw_dr) / (uvw_point.z * uvw_point.z)).row(0).copyTo(dfdR.row(0));
    Mat((uvw_point.z * dv_dr - uvw_point.y * dw_dr) / (uvw_point.z * uvw_point.z)).row(0).copyTo(dfdR.row(1));
    return dfdR;
}

Mat LevMarqBA::computedfdX(const Point3f xyz_point,
                           const Point3f uvw_point,
                           const Mat& R,
                           const CameraParams& camera)
{
    Mat du_dx = (Mat_<double>(1, 3) << camera.focal * R.at<double>(0, 0) + camera.ppx * R.at<double>(2, 0),
                                       camera.focal * R.at<double>(0, 1) + camera.ppx * R.at<double>(2, 1),
                                       camera.focal * R.at<double>(0, 2) + camera.ppx * R.at<double>(2, 2));

    Mat dv_dx = (Mat_<double>(1, 3) << camera.focal * R.at<double>(1, 0) + camera.ppy * R.at<double>(2, 0),
                                       camera.focal * R.at<double>(1, 1) + camera.ppy * R.at<double>(2, 1),
                                       camera.focal * R.at<double>(1, 2) + camera.ppy * R.at<double>(2, 2));

    Mat dw_dx = (Mat_<double>(1, 3) << R.at<double>(2, 0), R.at<double>(2, 1), R.at<double>(2, 2));

    Mat dfdX(2, 3, CV_64F);

    Mat((uvw_point.z * du_dx - uvw_point.x * dw_dx) / (uvw_point.z * uvw_point.z)).copyTo(dfdX.row(0));
    Mat((uvw_point.z * dv_dx - uvw_point.y * dw_dx) / (uvw_point.z * uvw_point.z)).copyTo(dfdX.row(1));

    return dfdX;
}

Mat LevMarqBA::computedfdq(const Point3f xyz_point,
                           const Point3f uvw_point,
                           const Point3f C,
                           const Mat& R,
                           const CameraParams& camera)
{
    Mat dfdR = computedfdR(xyz_point, uvw_point, C, R, camera);
    Quaternion q = RMatToQuaternion(R);
    Mat dRdq = computedRdq(q);
    Mat dfdq = dfdR * dRdq;
    return dfdq;
}

Mat LevMarqBA::computedfdC(const Point3f xyz_point,
                           const Point3f uvw_point,
                           const Mat& R,
                           const CameraParams& camera)
{
    Mat du_dx = (Mat_<double>(1, 3) << -(camera.focal * R.at<double>(0, 0) + camera.ppx * R.at<double>(2, 0)),
                                       -(camera.focal * R.at<double>(0, 1) + camera.ppx * R.at<double>(2, 1)),
                                       -(camera.focal * R.at<double>(0, 2) + camera.ppx * R.at<double>(2, 2)));

    Mat dv_dx = (Mat_<double>(1, 3) << -(camera.focal * R.at<double>(1, 0) + camera.ppy * R.at<double>(2, 0)),
                                       -(camera.focal * R.at<double>(1, 1) + camera.ppy * R.at<double>(2, 1)),
                                       -(camera.focal * R.at<double>(1, 2) + camera.ppy * R.at<double>(2, 2)));

    Mat dw_dx = (Mat_<double>(1, 3) << -R.at<double>(2, 0), -R.at<double>(2, 1), -R.at<double>(2, 2));

    Mat dfdC(2, 3, CV_64F);

    Mat((uvw_point.z * du_dx - uvw_point.x * dw_dx) / (uvw_point.z * uvw_point.z)).copyTo(dfdC.row(0));
    Mat((uvw_point.z * dv_dx - uvw_point.y * dw_dx) / (uvw_point.z * uvw_point.z)).copyTo(dfdC.row(1));

    return dfdC;
}

Quaternion LevMarqBA::RMatToQuaternion(const Mat R)
{
    std::vector<double> Q_vec(4);
    double trace = R.at<double>(0, 0) + R.at<double>(1, 1) + R.at<double>(2, 2);

    if (trace > 0.0)
    {
        double s = sqrt(trace + 1.0);
        Q_vec[3] = (s * 0.5);
        s = 0.5 / s;
        Q_vec[0] = ((R.at<double>(2, 1) - R.at<double>(1, 2)) * s);
        Q_vec[1] = ((R.at<double>(0, 2) - R.at<double>(2, 0)) * s);
        Q_vec[2] = ((R.at<double>(1, 0) - R.at<double>(0, 1)) * s);
    }
    else
    {
        int i = R.at<double>(0, 0) < R.at<double>(1, 1) ? (R.at<double>(1, 1) < R.at<double>(2, 2) ? 2 : 1) :
                                                          (R.at<double>(0, 0) < R.at<double>(2, 2) ? 2 : 0);
        int j = (i + 1) % 3;
        int k = (i + 2) % 3;

        double s = sqrt(R.at<double>(i, i) - R.at<double>(j, j) - R.at<double>(k, k) + 1.0);
        Q_vec[i] = s * 0.5;
        s = 0.5 / s;

        Q_vec[3] = (R.at<double>(k, j) - R.at<double>(j, k)) * s;
        Q_vec[j] = (R.at<double>(j, i) + R.at<double>(i, j)) * s;
        Q_vec[k] = (R.at<double>(k, i) + R.at<double>(i, k)) * s;
    }
    //std::cout << Q_vec[0] << std::endl;
    return Quaternion(Q_vec);
}

Mat LevMarqBA::computeJacobian(const std::vector<Point3f>& X_3d,
                               const std::vector<std::vector<int> >& visibility_indices,
                               const std::vector<Mat>& R,
                               const std::vector<Mat>& t,
                               const CameraParams& camera)
{
    A = Mat::zeros(2 * n_views * n_points, 7 * n_views, CV_64F);
    B = Mat::zeros(2 * n_views * n_points, 3 * n_points, CV_64F);
    X_hat.resize(n_views);

    for (int i = 0; i < n_views; i++)
    {
        X_hat[i].resize(n_points);

        for (int j = 0; j < n_points; j++)
        {
            if (X[i][j] != Point2f(-1.0, -1.0))
            {
                Point3f C(Mat(-1 * R[i].inv() * t[i]));
                Point3f P = computeProjection(camera.getCameraParamsMat(), R[i], t[i], X_3d[j]);
                X_hat[i][j] = Point2f(P.x / P.z, P.y / P.z);

                // Each observation (view i, point j) fills the same two rows of A and B.
                int row = 2 * (static_cast<int>(n_views) * j + i);

                Mat A_ij;
                hconcat(computedfdq(X_3d[j], P, C, R[i], camera),
                        computedfdC(X_3d[j], P, R[i], camera),
                        A_ij);
                A_ij.copyTo(A(Rect(7 * i, row, A_ij.cols, A_ij.rows)));

                Mat B_ij;
                B_ij = computedfdX(X_3d[j], P, R[i], camera);
                B_ij.copyTo(B(Rect(3 * j, row, B_ij.cols, B_ij.rows)));
            }
        }
    }
    hconcat(A, B, J);
    return J;
    /*
    Point3d C(Mat(-1 * R[0].inv() * t[0]));
    A_x.create(Size(7, X_3d.size()), CV_64F);
    A_y.create(Size(7, X_3d.size()), CV_64F);

    B_x.create(Size(X_3d.size() * 3, X_3d.size()), CV_64F);
    B_y.create(Size(X_3d.size() * 3, X_3d.size()), CV_64F);

    B_x = Mat::zeros(B_x.size(), B_x.type());
    B_y = Mat::zeros(B_y.size(), B_y.type());
    for (int i = 0; i < X.size(); i++)
    {
        Point3d P = computeProjection(camera.getCameraParamsMat(), R[0], t[0], X_3d[i]);
        X_hat[i] = Point2d(P.x / P.z, P.y / P.z);
        Mat A_i;
        hconcat(computedfdq(X_3d[i], P, C, R[0], camera),
                computedfdC(X_3d[i], P, R[0], camera),
                A_i);
        A_i.row(0).copyTo(A_x(Rect(0, i, A_i.cols, 1)));
        A_i.row(1).copyTo(A_y(Rect(0, i, A_i.cols, 1)));

        Mat B_i;
        B_i = computedfdX(X_3d[i], P, R[0], camera);
        B_i.row(0).copyTo(B_x(Rect(i * 3, i, B_i.cols, 1)));
        B_i.row(1).copyTo(B_y(Rect(i * 3, i, B_i.cols, 1)));
    }

    hconcat(A_x, B_x, J_x);
    hconcat(A_y, B_y, J_y);
    */
    /*
    Mat J, A, B;
    A.create(Size(7, X.size() * 2), CV_64F);
    B.create(Size(X.size() * 3, X.size() * 2), CV_64F);
    B = Mat::zeros(B.size(), B.type());
    for (int i = 0; i < X.size(); i++)
    {
        Point3d P = computeProjection(camera[0], R[0], t[0], X[i]);
        Mat A_i;
        hconcat(computedfdq(X[i], P, C, R[0], camera[0], camera_params[0]),
                computedfdC(X[i], P, R[0], camera[0], camera_params[0]),
                A_i);
        A_i.copyTo(A(Rect(0, i * 2, A_i.cols, A_i.rows)));

        Mat B_i;
        B_i = computedfdX(X[i], P, R[0], camera[0], camera_params[0]);
        B_i.copyTo(B(Rect(i * 3, i * 2, B_i.cols, B_i.rows)));
    }

    hconcat(A, B, J);
    return J;
    */
}

// Adds mu to every non-zero element of A (used to damp U and V in findDelta).
Mat LevMarqBA::augmentateNonZeroElements(const Mat& A, const double mu)
{
    std::vector<Point> non_zero_locations;
    findNonZero(A, non_zero_locations);
    Mat A_augm = A.clone();
    for (size_t i = 0; i < non_zero_locations.size(); i++)
    {
        A_augm.at<double>(non_zero_locations[i]) += mu;
    }
    return A_augm;
}

// One damped step of the reduced (Schur-complement) system: solve for the camera
// update delta_a first, then back-substitute for the structure update delta_b.
void LevMarqBA::findDelta(const double mu)
{
    Mat diag_cov = computeDiagCovarMatrix(X);
    Mat diag_cov_inv = diag_cov.inv();

    U = A.t() * diag_cov_inv * A;
    V = B.t() * diag_cov_inv * B;
    W = A.t() * diag_cov_inv * B;

    U_augm = augmentateNonZeroElements(U, mu);
    V_augm = augmentateNonZeroElements(V, mu);

    Mat errs = computeErrors(X, X_hat);

    errors_a = A.t() * diag_cov_inv * errs;
    errors_b = B.t() * diag_cov_inv * errs;

    vconcat(errors_a, errors_b, errors);

    Y = W * V_augm.inv();

    Mat left_side = U_augm - (Y * W.t());
    Mat right_side = errors_a - (Y * errors_b);

    Mat delta_a;
    solve(left_side, right_side, delta_a, DECOMP_SVD);

    Mat delta_b;
    delta_b = Mat(V_augm.inv() * (errors_b - (W.t() * delta_a))).clone();

    std::cout << "delta_a" << delta_a << std::endl;
    std::cout << "delta_b" << delta_b << std::endl;
}
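
Below is a minimal, hypothetical usage sketch for the LevMarqBA class above (not part of the commit). The intrinsics, poses, and point values are placeholders, and it assumes this file is the "sba.h" header that the second file includes; real 2D observations would have to be filled in before the solver step is meaningful.

#include "sba.h"

int main()
{
    // Placeholder intrinsics (fx = fy = 500, principal point at 320x240).
    Mat K = (Mat_<double>(3, 3) << 500, 0, 320,
                                     0, 500, 240,
                                     0,   0,   1);

    // Two cameras at the identity pose and two 3D points, just to exercise the interface.
    std::vector<Mat> Rs(2, Mat::eye(3, 3, CV_64F));
    std::vector<Mat> ts(2, Mat::zeros(3, 1, CV_64F));
    std::vector<Point3f> pts3d;
    pts3d.push_back(Point3f(0.0f, 0.0f, 5.0f));
    pts3d.push_back(Point3f(1.0f, -1.0f, 6.0f));

    // Observations indexed as [view][point]; (-1, -1) marks a missing measurement.
    std::vector<std::vector<Point2f> > pts2d(Rs.size(),
        std::vector<Point2f>(pts3d.size(), Point2f(-1.0f, -1.0f)));
    // ... fill pts2d with the measured projections before running the solver ...

    LevMarqBA ba(pts3d, pts2d, Rs, ts, K);
    ba.computeJacobian(pts3d, std::vector<std::vector<int> >(), Rs, ts, CameraParams(K));
    ba.findDelta(1e-3);   // one damped step; 1e-3 is an arbitrary damping value
    return 0;
}
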
@@ -0,0 +1,506 @@
#include <opencv2/opencv.hpp>
#include <opencv2/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/sfm.hpp>
#include <opencv2/viz.hpp>

#include <iostream>
#include <vector>
#include <algorithm>

#include "sba.h"

using namespace cv;

// Minimal monocular SLAM front end: two-view map initialization followed by
// PnP-based tracking of subsequent frames.
class Vslam
{
public:
    Vslam(Mat camera_matrix, Mat dist_coeffs);
    bool initMap(const Mat& img1, const Mat& img2);
    bool tracking(const Mat& img1, const Mat& img2);
    void localMapping();

protected:
    void detectFeatures(const Mat& img, std::vector<KeyPoint>& features, Mat& descriptors);
    bool matchFeatures(const Mat& descriptor1, const Mat& descriptor2,
                       std::vector<DMatch>& matches,
                       std::vector<int>& matched_ind1, std::vector<int>& matched_ind2,
                       std::vector<int>& unmatched_ind1, std::vector<int>& unmatched_ind2);
    double computeScore(const Mat &M,
                        const std::vector<Point2f>& points1,
                        const std::vector<Point2f>& points2,
                        const double T_M, size_t num_iter);
    Mat chooseModel(const std::vector<Point2f>& points1, const std::vector<Point2f>& points2);
    bool RTFromHomography(const Mat& H,
                          const std::vector<Point2f>& points1, const std::vector<Point2f>& points2,
                          std::vector<Mat>& rotations, std::vector<Mat>& translations);
    bool RTFromFundamental(const Mat& F,
                           const std::vector<Point2f>& points1, const std::vector<Point2f>& points2,
                           std::vector<Mat>& rotations, std::vector<Mat>& translations);

    int motionChooseSolution(const std::vector<Point2f>& points1, const std::vector<Point2f>& points2,
                             const std::vector<Mat>& rotations, const std::vector<Mat>& translations,
                             std::vector<Point3f>& points_3f);
    void visualizeStructureAndMotion(const std::vector<Point3f>& points_3f,
                                     const std::vector<Mat>& rotations,
                                     const std::vector<Mat>& translations) const;
private:
    Mat camera_matrix, dist_coeffs;
    enum tForm { HOMOGRAPHY, FUNDAMENTAL } t_form;
    bool is_map_init = false;
    size_t frames_number = 0;

    std::vector<KeyPoint> prev_features, curr_features;

    std::vector<std::vector<KeyPoint> > unmatched_features;
    std::vector<Mat> unmatched_descriptors;

    Mat prev_descriptors, curr_descriptors;

    std::vector<Mat> R, t;
    std::vector<std::vector<Point3f> > point_3f_frame;
};

Vslam::Vslam(Mat camera_matrix_, Mat dist_coeffs_)
{
    camera_matrix = camera_matrix_.clone();
    dist_coeffs = dist_coeffs_.clone();
}

bool Vslam::initMap(const Mat& img1, const Mat& img2)
{
    if ((!is_map_init) &&
        (!img1.empty()) &&
        (!img2.empty()))
    {
        std::vector<KeyPoint> prev_features_tmp, curr_features_tmp;
        Mat prev_descriptors_tmp, curr_descriptors_tmp;

        detectFeatures(img1, prev_features_tmp, prev_descriptors_tmp);
        detectFeatures(img2, curr_features_tmp, curr_descriptors_tmp);

        std::vector<DMatch> matches;
        std::vector<int> matched_ind1, matched_ind2, unmatched_ind1, unmatched_ind2;

        if (!matchFeatures(prev_descriptors_tmp, curr_descriptors_tmp, matches,
                           matched_ind1, matched_ind2,
                           unmatched_ind1, unmatched_ind2))
        {
            std::cout << "Not enough matches for initialization" << std::endl;
            return false;
        }

        prev_features.resize(matches.size());
        curr_features.resize(matches.size());

        std::vector<Point2f> points1(matches.size());
        std::vector<Point2f> points2(matches.size());

        Mat descriptors2(Size(32, matches.size()), CV_8U);
        for (size_t i = 0; i < matched_ind1.size(); i++)
        {
            int i1 = matched_ind1[i];
            int i2 = matched_ind2[i];

            prev_features[i] = prev_features_tmp[i1];
            curr_features[i] = curr_features_tmp[i2];

            curr_descriptors_tmp.row(i2).copyTo(descriptors2.row(i));

            points1[i] = prev_features[i].pt;
            points2[i] = curr_features[i].pt;
        }
        curr_descriptors = descriptors2.clone();
        /*
        unmatched_features.resize(2);
        unmatched_descriptors.resize(2);

        Mat descriptors1_tmp(Size(32, unmatched_ind1.size()), CV_8U);
        Mat descriptors2_tmp(Size(32, unmatched_ind2.size()), CV_8U);

        for (size_t i = 0; i < unmatched_ind1.size(); i++)
        {
            int i1 = unmatched_ind1[i];
            int i2 = unmatched_ind2[i];
            unmatched_features[0].push_back(prev_features_tmp[i1]);
            unmatched_features[1].push_back(prev_features_tmp[i2]);

            prev_descriptors_tmp.row(i1).copyTo(descriptors1_tmp.row(i));
            curr_descriptors_tmp.row(i2).copyTo(descriptors2_tmp.row(i));
        }
        unmatched_descriptors[0] = descriptors1_tmp.clone();
        unmatched_descriptors[1] = descriptors2_tmp.clone();
        */
        Mat tform = chooseModel(points1, points2);

        std::cout << "t_form is " << t_form << std::endl;

        std::vector<Mat> rotations, translations;
        if (t_form == HOMOGRAPHY)
            RTFromHomography(tform, points1, points2, rotations, translations);
        else
            RTFromFundamental(tform, points1, points2, rotations, translations);

        std::vector<Point3f> points_3f;
        int solution = motionChooseSolution(points1, points2, rotations, translations, points_3f);

        R.push_back(rotations[solution]);
        t.push_back(translations[solution]);
        visualizeStructureAndMotion(points_3f, R, t);
        point_3f_frame.push_back(points_3f);
        frames_number += 2;
        is_map_init = true;
        return true;
    }
    else if (is_map_init)
    {
        std::cout << "Map is already init" << std::endl;
        return false;
    }
    else
    {
        std::cout << "Need two images to start initialization" << std::endl;
        return false;
    }
}

bool Vslam::tracking(const Mat& img1, const Mat& img2)
{
    if ((is_map_init) && (!img2.empty()))
    {
        std::vector<KeyPoint> prev_features_tmp = curr_features;
        prev_descriptors = curr_descriptors.clone();

        std::vector<KeyPoint> curr_features_tmp;
        Mat curr_descriptors_tmp;

        detectFeatures(img2, curr_features_tmp, curr_descriptors_tmp);

        std::vector<DMatch> matches;
        std::vector<int> matched_ind1, matched_ind2, unmatched_ind1, unmatched_ind2;

        if (!matchFeatures(prev_descriptors, curr_descriptors_tmp, matches,
                           matched_ind1, matched_ind2, unmatched_ind1, unmatched_ind2))
        {
            return false;
        }
        //drawing
        Mat imMatches;
        drawMatches(img1, prev_features_tmp, img2, curr_features_tmp, matches, imMatches);
        imshow("matches2", imMatches);
        waitKey();

        prev_features.resize(matches.size());
        curr_features.resize(matches.size());

        std::vector<Point2f> points1(matches.size());
        std::vector<Point2f> points2(matches.size());

        Mat descriptors2(Size(32, matches.size()), CV_8U);

        for (size_t m = 0; m < matches.size(); m++)
        {
            int i1 = matches[m].queryIdx;
            int i2 = matches[m].trainIdx;

            prev_features[m] = prev_features_tmp[i1];
            curr_features[m] = curr_features_tmp[i2];

            curr_descriptors_tmp.row(i2).copyTo(descriptors2.row(m));

            points1[m] = prev_features[m].pt;
            points2[m] = curr_features[m].pt;
        }
        curr_descriptors = descriptors2.clone();

        Mat rvec(Mat::zeros(3, 1, CV_64F));
        Mat tvec(Mat::zeros(3, 1, CV_64F));

        // Estimate the new pose from the most recently reconstructed map points.
        solvePnPRansac(point_3f_frame.back(), points2, camera_matrix, dist_coeffs, rvec, tvec);

        Mat rmatrix;
        Rodrigues(rvec, rmatrix);
        R.push_back(rmatrix);
        t.push_back(tvec);

        frames_number++;

        return true;
    }
    else if (!is_map_init)
    {
        std::cout << "Map is not init" << std::endl;
        return false;
    }
    else
    {
        std::cout << "Empty input image" << std::endl;
        return false;
    }
}

void Vslam::localMapping()
{

}

void Vslam::detectFeatures(const Mat& img, std::vector<KeyPoint>& features, Mat& descriptors)
{
    Mat img_gray;
    cvtColor(img, img_gray, cv::COLOR_BGR2GRAY);

    int max_features = 2000;
    Ptr<Feature2D> orb = ORB::create(max_features);
    orb->detectAndCompute(img_gray, Mat(), features, descriptors);
}

bool Vslam::matchFeatures(const Mat& descriptor1, const Mat& descriptor2,
                          std::vector<DMatch>& matches,
                          std::vector<int>& matched_ind1, std::vector<int>& matched_ind2,
                          std::vector<int>& unmatched_ind1, std::vector<int>& unmatched_ind2)
{
    float good_match_percent = 0.1f;
    Ptr<DescriptorMatcher> matcher = DescriptorMatcher::create("BruteForce-Hamming");
    matcher->match(descriptor1, descriptor2, matches, Mat());
    // Sort by ascending distance so that the best matches come first.
    std::sort(matches.begin(), matches.end(),
              [](DMatch a, DMatch b) { return a.distance < b.distance; });
    std::cout << matches.size() << std::endl;
    const int num_good_matches = matches.size() * good_match_percent;

    matched_ind1.resize(num_good_matches);
    matched_ind2.resize(num_good_matches);
    unmatched_ind1.resize(matches.size() - num_good_matches);
    unmatched_ind2.resize(matches.size() - num_good_matches);
    for (int m = 0; m < num_good_matches; m++)
    {
        matched_ind1[m] = matches[m].queryIdx;
        matched_ind2[m] = matches[m].trainIdx;
    }
    /*
    for (size_t m = num_good_matches; m < matches.size(); m++)
    {
        unmatched_ind1[m - num_good_matches] = matches[m].queryIdx;
        unmatched_ind2[m - num_good_matches] = matches[m].trainIdx;
    }
    */
    /*
    const int min_matches_num = 100;
    if (num_good_matches < min_matches_num)
    {
        matches.clear();
        return false;
    }
    else
    {
        matches.erase(matches.begin() + num_good_matches, matches.end());
        return true;
    }
    */
    matches.erase(matches.begin() + num_good_matches, matches.end());
    return true;
}


// Symmetric transfer score for model selection:
//   S_M = sum( p_M(d^2(m1, M^-1 * m2)) + p_M(d^2(m2, M * m1)) )
//   p_M(d^2) = 5.99 - d^2  if d^2 < T_M, otherwise p_M(d^2) = 0
double Vslam::computeScore(const Mat &M,
                           const std::vector<Point2f>& points1,
                           const std::vector<Point2f>& points2,
                           const double T_M, size_t num_iter)
{
    Mat M_inv = M.inv();

    if (points2.size() < num_iter)
        num_iter = points2.size();
    Mat m2(3, num_iter, CV_64F);

    for (size_t i = 0; i < num_iter; i++)
    {
        m2.at<double>(0, i) = points2[i].x;
        m2.at<double>(1, i) = points2[i].y;
        m2.at<double>(2, i) = 1;
    }

    Mat M_inv_m2_mat = M_inv * m2;
    std::vector<Point2f> M_inv_m2;
    std::vector<double> dist1;
    for (size_t i = 0; i < num_iter; i++)
    {
        M_inv_m2.push_back(Point2f(M_inv_m2_mat.at<double>(0, i) / M_inv_m2_mat.at<double>(2, i),
                                   M_inv_m2_mat.at<double>(1, i) / M_inv_m2_mat.at<double>(2, i)));
        dist1.push_back((M_inv_m2[i].x - points1[i].x) * (M_inv_m2[i].x - points1[i].x) +
                        (M_inv_m2[i].y - points1[i].y) * (M_inv_m2[i].y - points1[i].y));
    }
    //TODO: use convertPointsToHomogeneous() and convertPointsFromHomogeneous()
    Mat m1(3, num_iter, CV_64FC1);
    for (size_t i = 0; i < num_iter; i++)
    {
        m1.at<double>(0, i) = points1[i].x;
        m1.at<double>(1, i) = points1[i].y;
        m1.at<double>(2, i) = 1;
    }
    Mat M_m1_mat = M * m1;
    std::vector<Point2f> M_m1;
    std::vector<double> dist2;
    double S_M = 0;
    for (size_t i = 0; i < num_iter; i++)
    {
        M_m1.push_back(Point2f(M_m1_mat.at<double>(0, i) / M_m1_mat.at<double>(2, i),
                               M_m1_mat.at<double>(1, i) / M_m1_mat.at<double>(2, i)));
        dist2.push_back((M_m1[i].x - points2[i].x) * (M_m1[i].x - points2[i].x) +
                        (M_m1[i].y - points2[i].y) * (M_m1[i].y - points2[i].y));
    }
    double T_H = 5.99;
    for (size_t i = 0; i < num_iter; i++)
    {
        if (dist1[i] < T_M)
            S_M += T_H - dist1[i];
        if (dist2[i] < T_M)
            S_M += T_H - dist2[i];
    }
    std::cout << "S_M = " << S_M << std::endl;
    return S_M;
}

Mat Vslam::chooseModel(const std::vector<Point2f>& points1, const std::vector<Point2f>& points2)
{
    std::vector<uchar> mask;
    Mat h = findHomography(points1, points2, mask, RANSAC, 5.99);

    Mat f = findFundamentalMat(points1, points2, FM_RANSAC, 3.84);

    double score_f = 0.0, score_h = 0.0, R_h;
    if (!f.empty())
        score_f = computeScore(f, points1, points2, 3.84, 8);
    if (!h.empty())
        score_h = computeScore(h, points1, points2, 5.99, 4);

    std::cout << "score_f is " << score_f << std::endl;
    std::cout << "score_h is " << score_h << std::endl;

    R_h = score_h / (score_h + score_f);
    if (R_h > 0.45)
    {
        t_form = HOMOGRAPHY;
        return h;
    }
    else
    {
        t_form = FUNDAMENTAL;
        return f;
    }
}

bool Vslam::RTFromHomography(const Mat& H,
                             const std::vector<Point2f>& points1, const std::vector<Point2f>& points2,
                             std::vector<Mat>& rotations, std::vector<Mat>& translations)
{
    std::vector<Mat> normals;
    int solutions = decomposeHomographyMat(H, camera_matrix, rotations, translations, normals);

    if (solutions == 0)
        return false;

    return true;
}

bool Vslam::RTFromFundamental(const Mat& F,
                              const std::vector<Point2f>& points1, const std::vector<Point2f>& points2,
                              std::vector<Mat>& rotations, std::vector<Mat>& translations)
{
    // Essential matrix from the fundamental matrix: E = K^T * F * K.
    Mat E = camera_matrix.t() * F * camera_matrix;
    Mat rotation1, rotation2;
    Mat mat_translations;
    decomposeEssentialMat(E, rotation1, rotation2, mat_translations);

    rotations.resize(4);
    rotations[0] = rotation1;
    rotations[1] = rotation1;
    rotations[2] = rotation2;
    rotations[3] = rotation2;

    Mat neg_mat_translation(3, 1, CV_64F);
    for (int i = 0; i < 3; i++)
        neg_mat_translation.at<double>(i, 0) = -mat_translations.at<double>(i, 0);

    // Four candidate (R, t) pairs: {R1, R2} x {t, -t}.
    translations.resize(4);
    translations[0] = mat_translations;
    translations[1] = neg_mat_translation;
    translations[2] = mat_translations;
    translations[3] = neg_mat_translation;

    return true;
}

int Vslam::motionChooseSolution(const std::vector<Point2f>& points1, const std::vector<Point2f>& points2,
                                const std::vector<Mat>& rotations, const std::vector<Mat>& translations,
                                std::vector<Point3f>& points_3f)
{
    std::vector<std::vector<Point3f> > points_3f_tmp(rotations.size());
    std::vector<int> count_visible_points(rotations.size());

    for (size_t i = 0; i < rotations.size(); i++)
    {
        Mat rotation_translation1, rotation_translation2;

        Mat identity_mat = Mat::eye(rotations[i].size(), CV_64F);
        Mat z = Mat::zeros(translations[i].size(), CV_64F);
        hconcat(identity_mat, z, rotation_translation1);
        Mat projection1 = camera_matrix * rotation_translation1;

        hconcat(rotations[i], translations[i], rotation_translation2);
        Mat projection2 = camera_matrix * rotation_translation2;

        Mat points_4f;
        triangulatePoints(projection1, projection2, points1, points2, points_4f);

        for (int k = 0; k < points_4f.cols; k++)
        {
            Mat X(3, 1, CV_32F);
            X.at<float>(0, 0) = points_4f.at<float>(0, k) / points_4f.at<float>(3, k);
            X.at<float>(1, 0) = points_4f.at<float>(1, k) / points_4f.at<float>(3, k);
            X.at<float>(2, 0) = points_4f.at<float>(2, k) / points_4f.at<float>(3, k);

            points_3f_tmp[i].push_back(Point3f(X.at<float>(0, 0),
                                               X.at<float>(1, 0),
                                               X.at<float>(2, 0)));

            double d1 = sfm::depth(identity_mat, z, X);
            double d2 = sfm::depth(rotations[i], translations[i], X);

            // Test if the point is in front of both cameras.
            if (d1 > 0 && d2 > 0)
            {
                count_visible_points[i]++;
            }
        }
    }

    int solution_ind = std::distance(count_visible_points.begin(),
                                     std::max_element(count_visible_points.begin(), count_visible_points.end()));
    points_3f = points_3f_tmp[solution_ind];

    return solution_ind;
}

void Vslam::visualizeStructureAndMotion(const std::vector<Point3f>& points_3f,
                                        const std::vector<Mat>& rotations,
                                        const std::vector<Mat>& translations) const
{
    std::vector<Affine3f> cam_poses(rotations.size());
    for (size_t i = 0; i < rotations.size(); i++)
        cam_poses[i] = Affine3d(rotations[i], translations[i]);

    viz::Viz3d window;
    viz::WCloud cloud_wid(points_3f, viz::Color::green());
    cloud_wid.setRenderingProperty(cv::viz::POINT_SIZE, 2);
    window.showWidget("cameras_frames_and_lines", viz::WTrajectory(cam_poses, viz::WTrajectory::BOTH, 0.1, viz::Color::green()));
    window.showWidget("points", cloud_wid);
    window.setViewerPose(cam_poses.back());
    window.spin();
}
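
A minimal, hypothetical driver for the Vslam class above (not part of the commit); the image file names and intrinsic values are placeholders for a real calibrated sequence.

int main()
{
    // Placeholder intrinsics and zero distortion.
    Mat K = (Mat_<double>(3, 3) << 700, 0, 640,
                                     0, 700, 360,
                                     0,   0,   1);
    Mat dist = Mat::zeros(5, 1, CV_64F);

    // Placeholder frame names; any three consecutive frames of a sequence will do.
    Mat img0 = imread("frame0.png");
    Mat img1 = imread("frame1.png");
    Mat img2 = imread("frame2.png");

    Vslam slam(K, dist);
    if (!slam.initMap(img0, img1))      // two-view map initialization
        return 1;
    slam.tracking(img1, img2);          // PnP tracking against the initial map
    return 0;
}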