Fixed the warper selection bug in stitching_detailed (the GPU and CPU warper branches were swapped). Removed aspect ratio estimation from bundle adjustment in the stitching module to avoid stretching of input images. Minor refactoring.

commit 07efb17d12
parent fb2c288627
Author: Alexey Spizhevoy, 14 years ago
Branch: pull/13383/head
Changed files:
  1. modules/stitching/include/opencv2/stitching/detail/motion_estimators.hpp (11)
  2. modules/stitching/src/motion_estimators.cpp (92)
  3. modules/stitching/src/stitcher.cpp (2)
  4. samples/cpp/stitching_detailed.cpp (34)
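
For reference, the caller-side change boils down to the minimal sketch below (the refineCameras() wrapper is illustrative only; BundleAdjusterReproj, its constructor and operator() are the API introduced by this commit, and the include path is taken from the changed-file list above):

    // Caller-side sketch of the renamed adjuster.
    #include <vector>
    #include "opencv2/stitching/detail/motion_estimators.hpp"

    using namespace cv::detail;

    void refineCameras(const std::vector<ImageFeatures> &features,
                       const std::vector<MatchesInfo> &pairwise_matches,
                       std::vector<CameraParams> &cameras,
                       float conf_thresh)
    {
        // Before: BundleAdjuster adjuster(BundleAdjuster::FOCAL_RAY_SPACE, conf_thresh);
        // After:  a single cost function (reprojection error), no cost-space enum,
        //         no aspect-ratio refinement.
        BundleAdjusterReproj adjuster(conf_thresh);
        adjuster(features, pairwise_matches, cameras);  // refines focal, ppx/ppy and R in place
    }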

--- a/modules/stitching/include/opencv2/stitching/detail/motion_estimators.hpp
+++ b/modules/stitching/include/opencv2/stitching/detail/motion_estimators.hpp
@@ -80,15 +80,11 @@ private:
 };
-class CV_EXPORTS BundleAdjuster : public Estimator
+// Minimizes reprojection error
+class CV_EXPORTS BundleAdjusterReproj : public Estimator
 {
 public:
-    enum { NO, RAY_SPACE, FOCAL_RAY_SPACE };
-    BundleAdjuster(int cost_space = FOCAL_RAY_SPACE, float conf_thresh = 1.f)
-        : cost_space_(cost_space), conf_thresh_(conf_thresh) {}
-    Mat K;
+    BundleAdjusterReproj(float conf_thresh = 1.f) : conf_thresh_(conf_thresh) {}
 private:
     void estimate(const std::vector<ImageFeatures> &features, const std::vector<MatchesInfo> &pairwise_matches,
@@ -104,7 +100,6 @@ private:
     Mat cameras_;
     std::vector<std::pair<int,int> > edges_;
-    int cost_space_;
     float conf_thresh_;
     Mat err_, err1_, err2_;
     Mat J_;

--- a/modules/stitching/src/motion_estimators.cpp
+++ b/modules/stitching/src/motion_estimators.cpp
@@ -155,12 +155,10 @@ void HomographyBasedEstimator::estimate(const vector<ImageFeatures> &features, c
 //////////////////////////////////////////////////////////////////////////////
-void BundleAdjuster::estimate(const vector<ImageFeatures> &features, const vector<MatchesInfo> &pairwise_matches,
-                              vector<CameraParams> &cameras)
+void BundleAdjusterReproj::estimate(const vector<ImageFeatures> &features,
+                                    const vector<MatchesInfo> &pairwise_matches,
+                                    vector<CameraParams> &cameras)
 {
-    if (cost_space_ == NO)
-        return;
     LOG("Bundle adjustment");
     int64 t = getTickCount();
@@ -169,14 +167,13 @@ void BundleAdjuster::estimate(const vector<ImageFeatures> &features, const vecto
     pairwise_matches_ = &pairwise_matches[0];
     // Prepare focals and rotations
-    cameras_.create(num_images_ * 7, 1, CV_64F);
+    cameras_.create(num_images_ * 6, 1, CV_64F);
     SVD svd;
     for (int i = 0; i < num_images_; ++i)
     {
-        cameras_.at<double>(i * 7, 0) = cameras[i].focal;
-        cameras_.at<double>(i * 7 + 1, 0) = cameras[i].ppx;
-        cameras_.at<double>(i * 7 + 2, 0) = cameras[i].ppy;
-        cameras_.at<double>(i * 7 + 3, 0) = cameras[i].aspect;
+        cameras_.at<double>(i * 6, 0) = cameras[i].focal;
+        cameras_.at<double>(i * 6 + 1, 0) = cameras[i].ppx;
+        cameras_.at<double>(i * 6 + 2, 0) = cameras[i].ppy;
         svd(cameras[i].R, SVD::FULL_UV);
         Mat R = svd.u * svd.vt;
@@ -185,9 +182,9 @@ void BundleAdjuster::estimate(const vector<ImageFeatures> &features, const vecto
         Mat rvec;
         Rodrigues(R, rvec); CV_Assert(rvec.type() == CV_32F);
-        cameras_.at<double>(i * 7 + 4, 0) = rvec.at<float>(0, 0);
-        cameras_.at<double>(i * 7 + 5, 0) = rvec.at<float>(1, 0);
-        cameras_.at<double>(i * 7 + 6, 0) = rvec.at<float>(2, 0);
+        cameras_.at<double>(i * 6 + 3, 0) = rvec.at<float>(0, 0);
+        cameras_.at<double>(i * 6 + 4, 0) = rvec.at<float>(1, 0);
+        cameras_.at<double>(i * 6 + 5, 0) = rvec.at<float>(2, 0);
     }
     // Select only consistent image pairs for futher adjustment
@@ -207,7 +204,7 @@ void BundleAdjuster::estimate(const vector<ImageFeatures> &features, const vecto
     for (size_t i = 0; i < edges_.size(); ++i)
         total_num_matches_ += static_cast<int>(pairwise_matches[edges_[i].first * num_images_ + edges_[i].second].num_inliers);
-    CvLevMarq solver(num_images_ * 7, total_num_matches_ * 2,
+    CvLevMarq solver(num_images_ * 6, total_num_matches_ * 2,
                      cvTermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 1000, DBL_EPSILON));
     CvMat matParams = cameras_;
@@ -250,14 +247,13 @@ void BundleAdjuster::estimate(const vector<ImageFeatures> &features, const vecto
     // Obtain global motion
     for (int i = 0; i < num_images_; ++i)
     {
-        cameras[i].focal = cameras_.at<double>(i * 7, 0);
-        cameras[i].ppx = cameras_.at<double>(i * 7 + 1, 0);
-        cameras[i].ppy = cameras_.at<double>(i * 7 + 2, 0);
-        cameras[i].aspect = cameras_.at<double>(i * 7 + 3, 0);
+        cameras[i].focal = cameras_.at<double>(i * 6, 0);
+        cameras[i].ppx = cameras_.at<double>(i * 6 + 1, 0);
+        cameras[i].ppy = cameras_.at<double>(i * 6 + 2, 0);
         Mat rvec(3, 1, CV_64F);
-        rvec.at<double>(0, 0) = cameras_.at<double>(i * 7 + 4, 0);
-        rvec.at<double>(1, 0) = cameras_.at<double>(i * 7 + 5, 0);
-        rvec.at<double>(2, 0) = cameras_.at<double>(i * 7 + 6, 0);
+        rvec.at<double>(0, 0) = cameras_.at<double>(i * 6 + 3, 0);
+        rvec.at<double>(1, 0) = cameras_.at<double>(i * 6 + 4, 0);
+        rvec.at<double>(2, 0) = cameras_.at<double>(i * 6 + 5, 0);
         Rodrigues(rvec, cameras[i].R);
         Mat Mf;
         cameras[i].R.convertTo(Mf, CV_32F);
@@ -276,7 +272,7 @@ void BundleAdjuster::estimate(const vector<ImageFeatures> &features, const vecto
 }
-void BundleAdjuster::calcError(Mat &err)
+void BundleAdjusterReproj::calcError(Mat &err)
 {
     err.create(total_num_matches_ * 2, 1, CV_64F);
@@ -285,28 +281,26 @@ void BundleAdjuster::calcError(Mat &err)
     {
         int i = edges_[edge_idx].first;
         int j = edges_[edge_idx].second;
-        double f1 = cameras_.at<double>(i * 7, 0);
-        double f2 = cameras_.at<double>(j * 7, 0);
-        double ppx1 = cameras_.at<double>(i * 7 + 1, 0);
-        double ppx2 = cameras_.at<double>(j * 7 + 1, 0);
-        double ppy1 = cameras_.at<double>(i * 7 + 2, 0);
-        double ppy2 = cameras_.at<double>(j * 7 + 2, 0);
-        double a1 = cameras_.at<double>(i * 7 + 3, 0);
-        double a2 = cameras_.at<double>(j * 7 + 3, 0);
+        double f1 = cameras_.at<double>(i * 6, 0);
+        double f2 = cameras_.at<double>(j * 6, 0);
+        double ppx1 = cameras_.at<double>(i * 6 + 1, 0);
+        double ppx2 = cameras_.at<double>(j * 6 + 1, 0);
+        double ppy1 = cameras_.at<double>(i * 6 + 2, 0);
+        double ppy2 = cameras_.at<double>(j * 6 + 2, 0);
         double R1[9];
         Mat R1_(3, 3, CV_64F, R1);
         Mat rvec(3, 1, CV_64F);
-        rvec.at<double>(0, 0) = cameras_.at<double>(i * 7 + 4, 0);
-        rvec.at<double>(1, 0) = cameras_.at<double>(i * 7 + 5, 0);
-        rvec.at<double>(2, 0) = cameras_.at<double>(i * 7 + 6, 0);
+        rvec.at<double>(0, 0) = cameras_.at<double>(i * 6 + 3, 0);
+        rvec.at<double>(1, 0) = cameras_.at<double>(i * 6 + 4, 0);
+        rvec.at<double>(2, 0) = cameras_.at<double>(i * 6 + 5, 0);
         Rodrigues(rvec, R1_);
         double R2[9];
         Mat R2_(3, 3, CV_64F, R2);
-        rvec.at<double>(0, 0) = cameras_.at<double>(j * 7 + 4, 0);
-        rvec.at<double>(1, 0) = cameras_.at<double>(j * 7 + 5, 0);
-        rvec.at<double>(2, 0) = cameras_.at<double>(j * 7 + 6, 0);
+        rvec.at<double>(0, 0) = cameras_.at<double>(j * 6 + 3, 0);
+        rvec.at<double>(1, 0) = cameras_.at<double>(j * 6 + 4, 0);
+        rvec.at<double>(2, 0) = cameras_.at<double>(j * 6 + 5, 0);
         Rodrigues(rvec, R2_);
         const ImageFeatures& features1 = features_[i];
@@ -315,11 +309,11 @@ void BundleAdjuster::calcError(Mat &err)
         Mat_<double> K1 = Mat::eye(3, 3, CV_64F);
         K1(0,0) = f1; K1(0,2) = ppx1;
-        K1(1,1) = f1*a1; K1(1,2) = ppy1;
+        K1(1,1) = f1; K1(1,2) = ppy1;
         Mat_<double> K2 = Mat::eye(3, 3, CV_64F);
         K2(0,0) = f2; K2(0,2) = ppx2;
-        K2(1,1) = f2*a2; K2(1,2) = ppy2;
+        K2(1,1) = f2; K2(1,2) = ppy2;
         Mat_<double> H = K2 * R2_.inv() * R1_ * K1.inv();
@@ -329,8 +323,8 @@ void BundleAdjuster::calcError(Mat &err)
                 continue;
             const DMatch& m = matches_info.matches[k];
-            Point2d p1 = features1.keypoints[m.queryIdx].pt;
-            Point2d p2 = features2.keypoints[m.trainIdx].pt;
+            Point2f p1 = features1.keypoints[m.queryIdx].pt;
+            Point2f p2 = features2.keypoints[m.trainIdx].pt;
             double x = H(0,0)*p1.x + H(0,1)*p1.y + H(0,2);
             double y = H(1,0)*p1.x + H(1,1)*p1.y + H(1,2);
             double z = H(2,0)*p1.x + H(2,1)*p1.y + H(2,2);
@@ -343,24 +337,24 @@ void BundleAdjuster::calcError(Mat &err)
 }
-void BundleAdjuster::calcJacobian()
+void BundleAdjusterReproj::calcJacobian()
 {
-    J_.create(total_num_matches_ * 2, num_images_ * 7, CV_64F);
+    J_.create(total_num_matches_ * 2, num_images_ * 6, CV_64F);
     double val;
-    const double step = 1e-3;
+    const double step = 1e-4;
     for (int i = 0; i < num_images_; ++i)
     {
-        for (int j = 0; j < 7; ++j)
+        for (int j = 0; j < 6; ++j)
         {
-            val = cameras_.at<double>(i * 7 + j, 0);
-            cameras_.at<double>(i * 7+ j, 0) = val - step;
+            val = cameras_.at<double>(i * 6 + j, 0);
+            cameras_.at<double>(i * 6 + j, 0) = val - step;
             calcError(err1_);
-            cameras_.at<double>(i * 7 + j, 0) = val + step;
+            cameras_.at<double>(i * 6 + j, 0) = val + step;
             calcError(err2_);
-            calcDeriv(err1_, err2_, 2 * step, J_.col(i * 7 + j));
-            cameras_.at<double>(i * 7 + j, 0) = val;
+            calcDeriv(err1_, err2_, 2 * step, J_.col(i * 6 + j));
+            cameras_.at<double>(i * 6 + j, 0) = val;
         }
     }
 }
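
The net effect of the motion_estimators.cpp hunks above: each camera now contributes 6 parameters instead of 7 ([focal, ppx, ppy, rvec_x, rvec_y, rvec_z]), and the intrinsic matrices are rebuilt with fy == fx, so the optimizer can no longer stretch images by drifting the aspect ratio. A standalone sketch of that layout (helper names are hypothetical and not part of the patch; only the packing mirrors the code above):

    #include <opencv2/core/core.hpp>
    #include <opencv2/calib3d/calib3d.hpp>

    // Per-camera slice of the parameter vector used by BundleAdjusterReproj after
    // this commit; params is a (num_images * 6) x 1 CV_64F column vector.
    void packCamera(double focal, double ppx, double ppy, const cv::Mat &R,
                    cv::Mat &params, int i)
    {
        cv::Mat rvec;
        cv::Rodrigues(R, rvec);            // 3x3 rotation -> 3x1 axis-angle vector
        rvec.convertTo(rvec, CV_64F);
        params.at<double>(i * 6,     0) = focal;
        params.at<double>(i * 6 + 1, 0) = ppx;
        params.at<double>(i * 6 + 2, 0) = ppy;
        params.at<double>(i * 6 + 3, 0) = rvec.at<double>(0, 0);
        params.at<double>(i * 6 + 4, 0) = rvec.at<double>(1, 0);
        params.at<double>(i * 6 + 5, 0) = rvec.at<double>(2, 0);
    }

    // Intrinsics rebuilt from the packed vector: fy == fx, since the aspect-ratio
    // slot has been dropped from the refinement.
    cv::Mat_<double> buildK(const cv::Mat &params, int i)
    {
        cv::Mat_<double> K = cv::Mat::eye(3, 3, CV_64F);
        K(0,0) = params.at<double>(i * 6, 0);       // fx
        K(1,1) = params.at<double>(i * 6, 0);       // fy == fx (no aspect term)
        K(0,2) = params.at<double>(i * 6 + 1, 0);   // principal point x
        K(1,2) = params.at<double>(i * 6 + 2, 0);   // principal point y
        return K;
    }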

--- a/modules/stitching/src/stitcher.cpp
+++ b/modules/stitching/src/stitcher.cpp
@@ -189,7 +189,7 @@ Stitcher::Status Stitcher::stitch(InputArray imgs_, OutputArray pano_)
         LOGLN("Initial intrinsic parameters #" << indices[i]+1 << ":\n " << cameras[i].K());
     }
-    detail::BundleAdjuster adjuster(detail::BundleAdjuster::FOCAL_RAY_SPACE, conf_thresh_);
+    detail::BundleAdjusterReproj adjuster(conf_thresh_);
     adjuster(features, pairwise_matches, cameras);
     // Find median focal length

--- a/samples/cpp/stitching_detailed.cpp
+++ b/samples/cpp/stitching_detailed.cpp
@@ -79,8 +79,6 @@ void printUsage()
         "  --conf_thresh <float>\n"
         "      Threshold for two images are from the same panorama confidence.\n"
         "      The default is 1.0.\n"
-        "  --ba (no|ray|focal_ray)\n"
-        "      Bundle adjustment cost function. The default is 'focal_ray'.\n"
         "  --wave_correct (no|yes)\n"
         "      Perform wave effect correction. The default is 'yes'.\n"
         "  --save_graph <file_name>\n"
@@ -115,7 +113,6 @@ bool try_gpu = false;
 double work_megapix = 0.6;
 double seam_megapix = 0.1;
 double compose_megapix = -1;
-int ba_space = BundleAdjuster::FOCAL_RAY_SPACE;
 float conf_thresh = 1.f;
 bool wave_correct = true;
 bool save_graph = false;
@@ -184,21 +181,6 @@ int parseCmdArgs(int argc, char** argv)
         match_conf = static_cast<float>(atof(argv[i + 1]));
         i++;
     }
-    else if (string(argv[i]) == "--ba")
-    {
-        if (string(argv[i + 1]) == "no")
-            ba_space = BundleAdjuster::NO;
-        else if (string(argv[i + 1]) == "ray")
-            ba_space = BundleAdjuster::RAY_SPACE;
-        else if (string(argv[i + 1]) == "focal_ray")
-            ba_space = BundleAdjuster::FOCAL_RAY_SPACE;
-        else
-        {
-            cout << "Bad bundle adjustment space\n";
-            return -1;
-        }
-        i++;
-    }
     else if (string(argv[i]) == "--conf_thresh")
     {
         conf_thresh = static_cast<float>(atof(argv[i + 1]));
@@ -431,14 +413,14 @@ int main(int argc, char* argv[])
         LOGLN("Initial focal length #" << indices[i]+1 << ": " << cameras[i].focal);
     }
-    BundleAdjuster adjuster(ba_space, conf_thresh);
+    BundleAdjusterReproj adjuster(conf_thresh);
     adjuster(features, pairwise_matches, cameras);
     // Find median focal length
     vector<double> focals;
     for (size_t i = 0; i < cameras.size(); ++i)
     {
-        LOGLN("Camera #" << indices[i]+1 << " focal length: " << cameras[i].focal);
+        LOGLN("Camera #" << indices[i]+1 << ":\n" << cameras[i].K());
         focals.push_back(cameras[i].focal);
     }
     nth_element(focals.begin(), focals.begin() + focals.size()/2, focals.end());
@@ -476,16 +458,16 @@ int main(int argc, char* argv[])
 #ifndef ANDROID
     if (try_gpu && gpu::getCudaEnabledDeviceCount() > 0)
     {
-        if (warp_type == "plane") warper_creator = new cv::PlaneWarper();
-        else if (warp_type == "cylindrical") warper_creator = new cv::CylindricalWarper();
-        else if (warp_type == "spherical") warper_creator = new cv::SphericalWarper();
+        if (warp_type == "plane") warper_creator = new cv::PlaneWarperGpu();
+        else if (warp_type == "cylindrical") warper_creator = new cv::CylindricalWarperGpu();
+        else if (warp_type == "spherical") warper_creator = new cv::SphericalWarperGpu();
     }
     else
 #endif
     {
-        if (warp_type == "plane") warper_creator = new cv::PlaneWarperGpu();
-        else if (warp_type == "cylindrical") warper_creator = new cv::CylindricalWarperGpu();
-        else if (warp_type == "spherical") warper_creator = new cv::SphericalWarperGpu();
+        if (warp_type == "plane") warper_creator = new cv::PlaneWarper();
+        else if (warp_type == "cylindrical") warper_creator = new cv::CylindricalWarper();
+        else if (warp_type == "spherical") warper_creator = new cv::SphericalWarper();
     }
     if (warper_creator.empty())
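
The last hunk is the actual warper selection fix: before it, GPU warpers were created on the CPU path and vice versa. In isolation, the corrected logic looks as follows (standalone sketch; the createWarper() helper, the cv::WarperCreator interface and the header paths are assumptions based on the 2.x stitching and gpu modules, not part of the patch):

    #include <string>
    #include "opencv2/stitching/warpers.hpp"
    #ifndef ANDROID
    #  include "opencv2/gpu/gpu.hpp"
    #endif

    // Hypothetical helper mirroring the fixed branch in stitching_detailed.cpp.
    cv::Ptr<cv::WarperCreator> createWarper(const std::string &warp_type, bool try_gpu)
    {
        cv::Ptr<cv::WarperCreator> warper_creator;
    #ifndef ANDROID
        if (try_gpu && cv::gpu::getCudaEnabledDeviceCount() > 0)
        {
            // CUDA device available: use the GPU-accelerated warpers.
            if (warp_type == "plane") warper_creator = new cv::PlaneWarperGpu();
            else if (warp_type == "cylindrical") warper_creator = new cv::CylindricalWarperGpu();
            else if (warp_type == "spherical") warper_creator = new cv::SphericalWarperGpu();
        }
        else
    #endif
        {
            // CPU fallback (and the only path on Android builds).
            if (warp_type == "plane") warper_creator = new cv::PlaneWarper();
            else if (warp_type == "cylindrical") warper_creator = new cv::CylindricalWarper();
            else if (warp_type == "spherical") warper_creator = new cv::SphericalWarper();
        }
        return warper_creator;  // stays empty for an unknown warp_type
    }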
