opencv_stitching refactoring

pull/13383/head
Alexey Spizhevoy 14 years ago
parent f6fc807d49
commit 3467c6f732
  1. modules/stitching/blenders.hpp (6 changed lines)
  2. modules/stitching/focal_estimators.hpp (6 changed lines)
  3. modules/stitching/main.cpp (16 changed lines)
  4. modules/stitching/motion_estimators.cpp (43 changed lines)
  5. modules/stitching/motion_estimators.hpp (6 changed lines)
  6. modules/stitching/seam_finders.hpp (6 changed lines)
  7. modules/stitching/util.hpp (6 changed lines)
  8. modules/stitching/util_inl.hpp (6 changed lines)
  9. modules/stitching/warpers.cpp (2 changed lines)
  10. modules/stitching/warpers.hpp (8 changed lines)
  11. modules/stitching/warpers_inl.hpp (6 changed lines)

--- a/modules/stitching/blenders.hpp
+++ b/modules/stitching/blenders.hpp
@@ -1,5 +1,5 @@
-#ifndef _OPENCV_BLENDERS_HPP_
-#define _OPENCV_BLENDERS_HPP_
+#ifndef __OPENCV_BLENDERS_HPP__
+#define __OPENCV_BLENDERS_HPP__
 #include <vector>
 #include <opencv2/core/core.hpp>
@@ -68,4 +68,4 @@ void createLaplacePyr(const std::vector<cv::Mat>& pyr_gauss, std::vector<cv::Mat
 // Restores source image in-place. Result will be in pyr[0].
 void restoreImageFromLaplacePyr(std::vector<cv::Mat>& pyr);
-#endif // _OPENCV_BLENDERS_HPP_
+#endif // __OPENCV_BLENDERS_HPP__
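A note on the guard spelling: in C and C++, identifiers containing a double underscore are reserved to the implementation, just like identifiers beginning with an underscore and an uppercase letter, so neither the old nor the new form is strictly conforming; the rename is about matching the __OPENCV_..._HPP__ convention used by the other OpenCV 2.x module headers. The same pattern repeats in every header touched below; on a hypothetical header it looks like this:

    // Hypothetical header following the adopted guard convention.
    #ifndef __OPENCV_EXAMPLE_HPP__
    #define __OPENCV_EXAMPLE_HPP__

    #include <opencv2/core/core.hpp>

    // ... declarations ...

    #endif // __OPENCV_EXAMPLE_HPP__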

--- a/modules/stitching/focal_estimators.hpp
+++ b/modules/stitching/focal_estimators.hpp
@@ -1,5 +1,5 @@
-#ifndef _OPENCV_FOCAL_ESTIMATORS_HPP_
-#define _OPENCV_FOCAL_ESTIMATORS_HPP_
+#ifndef __OPENCV_FOCAL_ESTIMATORS_HPP__
+#define __OPENCV_FOCAL_ESTIMATORS_HPP__
 #include <opencv2/core/core.hpp>
@@ -9,4 +9,4 @@ void focalsFromHomography(const cv::Mat &H, double &f0, double &f1, bool &f0_ok,
 bool focalsFromFundamental(const cv::Mat &F, double &f0, double &f1);
-#endif // _OPENCV_FOCAL_ESTIMATORS_HPP_
+#endif // __OPENCV_FOCAL_ESTIMATORS_HPP__

--- a/modules/stitching/main.cpp
+++ b/modules/stitching/main.cpp
@@ -9,10 +9,10 @@
 using namespace std;
 using namespace cv;
-void printHelp()
+void printUsage()
 {
-    cout << "Rotation model stitcher.\n"
-         << "Usage: stitch img1 img2 [...imgN]\n"
+    cout << "Rotation model images stitcher.\n"
+         << "Usage: opencv_stitching img1 img2 [...imgN]\n"
          << "\t[--matchconf <0.0-1.0>]\n"
          << "\t[--ba (ray_space|focal_ray_space)]\n"
          << "\t[--warp (plane|cylindrical|spherical)]\n"
@@ -30,13 +30,13 @@ int main(int argc, char* argv[])
     int ba_space = BundleAdjuster::RAY_SPACE;
     int warp_type = Warper::SPHERICAL;
     bool user_match_conf = false;
-    float match_conf;
+    float match_conf = 0.55f;
     int seam_find_type = SeamFinder::GRAPH_CUT;
     int blend_type = Blender::MULTI_BAND;
     if (argc == 1)
     {
-        printHelp();
+        printUsage();
         return 0;
     }
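The match_conf default is the substantive fix in this hunk: previously the variable was read uninitialized whenever the user did not pass --matchconf. A minimal sketch of the intended pattern, where parseMatchConf is a hypothetical helper rather than code from this patch:

    #include <cstdlib>
    #include <string>

    // Sketch: default-initialize match_conf so it is never read
    // uninitialized, then let an explicit --matchconf override it.
    static float parseMatchConf(int argc, char* argv[], bool& user_match_conf)
    {
        float match_conf = 0.55f;          // default 2-NN ratio confidence
        user_match_conf = false;
        for (int i = 1; i + 1 < argc; ++i)
        {
            if (std::string(argv[i]) == "--matchconf")
            {
                match_conf = static_cast<float>(std::atof(argv[i + 1]));
                user_match_conf = true;    // an explicit value was supplied
            }
        }
        return match_conf;
    }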
@@ -172,7 +172,7 @@ int main(int argc, char* argv[])
         focals.push_back(cameras[i].focal);
     }
     nth_element(focals.begin(), focals.end(), focals.begin() + focals.size() / 2);
-    float camera_focal = focals[focals.size() / 2];
+    float camera_focal = static_cast<float>(focals[focals.size() / 2]);
     vector<Mat> masks(num_images);
     for (int i = 0; i < num_images; ++i)
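For reference, std::nth_element's signature is nth_element(first, nth, last), so the context line above appears to have its last two arguments transposed in this rendering (as written it still compiles, since all three arguments are iterators, but it violates the precondition that nth lies in [first, last]). The median selection that the new cast feeds into conventionally looks like this sketch, with medianFocal a hypothetical helper:

    #include <algorithm>
    #include <vector>

    // Sketch: pick the median of the per-camera focal estimates as the
    // single focal used for warping. nth_element partially sorts the
    // vector so the middle element lands in its sorted position.
    static float medianFocal(std::vector<double> focals)
    {
        std::nth_element(focals.begin(),
                         focals.begin() + focals.size() / 2,
                         focals.end());
        return static_cast<float>(focals[focals.size() / 2]);
    }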
@@ -189,8 +189,8 @@ int main(int argc, char* argv[])
     Ptr<Warper> warper = Warper::createByCameraFocal(camera_focal, warp_type);
     for (int i = 0; i < num_images; ++i)
     {
-        corners[i] = (*warper)(images[i], cameras[i].focal, cameras[i].M, images_warped[i]);
-        (*warper)(masks[i], cameras[i].focal, cameras[i].M, masks_warped[i], INTER_NEAREST, BORDER_CONSTANT);
+        corners[i] = (*warper)(images[i], static_cast<float>(cameras[i].focal), cameras[i].M, images_warped[i]);
+        (*warper)(masks[i], static_cast<float>(cameras[i].focal), cameras[i].M, masks_warped[i], INTER_NEAREST, BORDER_CONSTANT);
     }
     vector<Mat> images_f(num_images);
     for (int i = 0; i < num_images; ++i)

--- a/modules/stitching/motion_estimators.cpp
+++ b/modules/stitching/motion_estimators.cpp
@@ -242,8 +242,8 @@ namespace
         const DMatch& m0 = pair_matches[i][0];
         const DMatch& m1 = pair_matches[i][1];
-        CV_Assert(m0.queryIdx < features1.keypoints.size());
-        CV_Assert(m0.trainIdx < features2.keypoints.size());
+        CV_Assert(m0.queryIdx < static_cast<int>(features1.keypoints.size()));
+        CV_Assert(m0.trainIdx < static_cast<int>(features2.keypoints.size()));
         if (m0.distance < (1.f - match_conf_) * m1.distance)
             matches_info.matches.push_back(m0);
@@ -260,8 +260,8 @@ namespace
         const DMatch& m0 = pair_matches[i][0];
         const DMatch& m1 = pair_matches[i][1];
-        CV_Assert(m0.trainIdx < features1.keypoints.size());
-        CV_Assert(m0.queryIdx < features2.keypoints.size());
+        CV_Assert(m0.trainIdx < static_cast<int>(features1.keypoints.size()));
+        CV_Assert(m0.queryIdx < static_cast<int>(features2.keypoints.size()));
         if (m0.distance < (1.f - match_conf_) * m1.distance)
             matches_info.matches.push_back(DMatch(m0.trainIdx, m0.queryIdx, m0.distance));
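These casts address the usual signed/unsigned mismatch: DMatch::queryIdx and DMatch::trainIdx are int, while std::vector::size() returns an unsigned size_t, so the bare comparison implicitly converts the index to unsigned and triggers -Wsign-compare. A minimal sketch of an explicit range check, with indexInRange a hypothetical helper:

    #include <vector>

    // Sketch: compare a signed index against an unsigned container size.
    // Casting the size to int avoids the implicit signed-to-unsigned
    // conversion and keeps the comparison's intent obvious.
    static bool indexInRange(int idx, const std::vector<int>& v)
    {
        return idx >= 0 && idx < static_cast<int>(v.size());
    }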
@@ -416,7 +416,7 @@ struct CalcRotation
 };
-void HomographyBasedEstimator::estimate(const vector<Mat> &images, const vector<ImageFeatures> &features,
+void HomographyBasedEstimator::estimate(const vector<Mat> &images, const vector<ImageFeatures> &/*features*/,
                                         const vector<MatchesInfo> &pairwise_matches, vector<CameraParams> &cameras)
 {
     const int num_images = static_cast<int>(images.size());
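Commenting out the parameter name is the standard idiom for silencing an unused-parameter warning without changing the signature that callers and overriders see. Illustrated on hypothetical names:

    // Sketch: the commented-out name keeps the signature intact while
    // suppressing -Wunused-parameter in the definition.
    static int scaleWidth(int width, int /*height*/)
    {
        return width * 2;   // only 'width' is actually used
    }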
@@ -514,7 +514,6 @@ void BundleAdjuster::estimate(const vector<Mat> &images, const vector<ImageFeatu
     cvCopy(&matParams, solver.param);
     int count = 0;
-    double last_err = numeric_limits<double>::max();
     for(;;)
     {
         const CvMat* _param = 0;
@@ -580,8 +579,8 @@ void BundleAdjuster::calcError(Mat &err)
 {
     int i = edges_[edge_idx].first;
     int j = edges_[edge_idx].second;
-    float f1 = static_cast<float>(cameras_.at<double>(i * 4, 0));
-    float f2 = static_cast<float>(cameras_.at<double>(j * 4, 0));
+    double f1 = cameras_.at<double>(i * 4, 0);
+    double f2 = cameras_.at<double>(j * 4, 0);
     double R1[9], R2[9];
     Mat R1_(3, 3, CV_64F, R1), R2_(3, 3, CV_64F, R2);
     Mat rvec(3, 1, CV_64F);
@@ -602,25 +601,25 @@ void BundleAdjuster::calcError(Mat &err)
     {
         const DMatch& m = matches_info.matches[k];
-        Point2f kp1 = features1.keypoints[m.queryIdx].pt;
-        kp1.x -= 0.5f * images_[i].cols;
-        kp1.y -= 0.5f * images_[i].rows;
-        Point2f kp2 = features2.keypoints[m.trainIdx].pt;
-        kp2.x -= 0.5f * images_[j].cols;
-        kp2.y -= 0.5f * images_[j].rows;
-        float len1 = sqrt(kp1.x * kp1.x + kp1.y * kp1.y + f1 * f1);
-        float len2 = sqrt(kp2.x * kp2.x + kp2.y * kp2.y + f2 * f2);
-        Point3f p1(kp1.x / len1, kp1.y / len1, f1 / len1);
-        Point3f p2(kp2.x / len2, kp2.y / len2, f2 / len2);
-        Point3f d1(p1.x * R1[0] + p1.y * R1[1] + p1.z * R1[2],
+        Point2d kp1 = features1.keypoints[m.queryIdx].pt;
+        kp1.x -= 0.5 * images_[i].cols;
+        kp1.y -= 0.5 * images_[i].rows;
+        Point2d kp2 = features2.keypoints[m.trainIdx].pt;
+        kp2.x -= 0.5 * images_[j].cols;
+        kp2.y -= 0.5 * images_[j].rows;
+        double len1 = sqrt(kp1.x * kp1.x + kp1.y * kp1.y + f1 * f1);
+        double len2 = sqrt(kp2.x * kp2.x + kp2.y * kp2.y + f2 * f2);
+        Point3d p1(kp1.x / len1, kp1.y / len1, f1 / len1);
+        Point3d p2(kp2.x / len2, kp2.y / len2, f2 / len2);
+        Point3d d1(p1.x * R1[0] + p1.y * R1[1] + p1.z * R1[2],
                    p1.x * R1[3] + p1.y * R1[4] + p1.z * R1[5],
                    p1.x * R1[6] + p1.y * R1[7] + p1.z * R1[8]);
-        Point3f d2(p2.x * R2[0] + p2.y * R2[1] + p2.z * R2[2],
+        Point3d d2(p2.x * R2[0] + p2.y * R2[1] + p2.z * R2[2],
                    p2.x * R2[3] + p2.y * R2[4] + p2.z * R2[5],
                    p2.x * R2[6] + p2.y * R2[7] + p2.z * R2[8]);
-        float mult = 1.f;
+        double mult = 1;
         if (cost_space_ == FOCAL_RAY_SPACE)
             mult = sqrt(f1 * f2);
         err.at<double>(3 * match_idx, 0) = mult * (d1.x - d2.x);
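What this hunk computes: each matched keypoint is shifted to the image center, back-projected onto the unit sphere through (x, y, f), rotated into the common frame, and the residual is the component-wise difference of the two rays, scaled by sqrt(f1 * f2) in FOCAL_RAY_SPACE mode. Switching the intermediates from float to double keeps them in the same precision as the error vector itself (note the err.at<double> accesses). A standalone sketch of the ray construction, with keypointToRay a hypothetical helper:

    #include <cmath>
    #include <opencv2/core/core.hpp>

    // Sketch: back-project a centered keypoint (x, y) with focal f onto
    // the unit sphere, then rotate the ray by R (3x3 row-major rotation).
    static cv::Point3d keypointToRay(cv::Point2d kp, double f, const double R[9])
    {
        double len = std::sqrt(kp.x * kp.x + kp.y * kp.y + f * f);
        cv::Point3d p(kp.x / len, kp.y / len, f / len);   // unit-length ray
        return cv::Point3d(p.x * R[0] + p.y * R[1] + p.z * R[2],
                           p.x * R[3] + p.y * R[4] + p.z * R[5],
                           p.x * R[6] + p.y * R[7] + p.z * R[8]);
    }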

--- a/modules/stitching/motion_estimators.hpp
+++ b/modules/stitching/motion_estimators.hpp
@@ -1,5 +1,5 @@
-#ifndef _OPENCV_MOTION_ESTIMATORS_HPP_
-#define _OPENCV_MOTION_ESTIMATORS_HPP_
+#ifndef __OPENCV_MOTION_ESTIMATORS_HPP__
+#define __OPENCV_MOTION_ESTIMATORS_HPP__
 #include <vector>
 #include <opencv2/core/core.hpp>
@@ -154,4 +154,4 @@ private:
 void findMaxSpanningTree(int num_images, const std::vector<MatchesInfo> &pairwise_matches,
                          Graph &span_tree, std::vector<int> &centers);
-#endif // _OPENCV_MOTION_ESTIMATORS_HPP_
+#endif // __OPENCV_MOTION_ESTIMATORS_HPP__

--- a/modules/stitching/seam_finders.hpp
+++ b/modules/stitching/seam_finders.hpp
@@ -1,5 +1,5 @@
-#ifndef _OPENCV_SEAM_FINDERS_HPP_
-#define _OPENCV_SEAM_FINDERS_HPP_
+#ifndef __OPENCV_SEAM_FINDERS_HPP__
+#define __OPENCV_SEAM_FINDERS_HPP__
 #include <vector>
 #include <opencv2/core/core.hpp>
@@ -65,4 +65,4 @@ private:
     cv::Ptr<Impl> impl_;
 };
-#endif // _OPENCV_SEAM_FINDERS_HPP_
+#endif // __OPENCV_SEAM_FINDERS_HPP__

--- a/modules/stitching/util.hpp
+++ b/modules/stitching/util.hpp
@@ -1,5 +1,5 @@
-#ifndef _OPENCV_STITCHING_UTIL_HPP_
-#define _OPENCV_STITCHING_UTIL_HPP_
+#ifndef __OPENCV_STITCHING_UTIL_HPP__
+#define __OPENCV_STITCHING_UTIL_HPP__
 #include <vector>
 #include <list>
@@ -69,4 +69,4 @@ private:
 #include "util_inl.hpp"
-#endif // _OPENCV_STITCHING_UTIL_HPP_
+#endif // __OPENCV_STITCHING_UTIL_HPP__

--- a/modules/stitching/util_inl.hpp
+++ b/modules/stitching/util_inl.hpp
@@ -1,5 +1,5 @@
-#ifndef _OPENCV_STITCHING_UTIL_INL_HPP_
-#define _OPENCV_STITCHING_UTIL_INL_HPP_
+#ifndef __OPENCV_STITCHING_UTIL_INL_HPP__
+#define __OPENCV_STITCHING_UTIL_INL_HPP__
 #include <queue>
 #include "util.hpp" // Make your IDE see declarations
@@ -71,4 +71,4 @@ T sqr(T x)
     return x * x;
 }
-#endif // _OPENCV_STITCHING_UTIL_INL_HPP_
+#endif // __OPENCV_STITCHING_UTIL_INL_HPP__

--- a/modules/stitching/warpers.cpp
+++ b/modules/stitching/warpers.cpp
@@ -3,7 +3,7 @@
 using namespace std;
 using namespace cv;
-Ptr<Warper> Warper::createByCameraFocal(int focal, int type)
+Ptr<Warper> Warper::createByCameraFocal(float focal, int type)
 {
     if (type == PLANE)
         return new PlaneWarper(focal);
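Together with the header change below, this widens the factory's focal parameter from int to float, so the median focal computed in main.cpp is no longer truncated on the way in. The factory is a plain dispatch over the warp-type enum; a sketch of its likely shape, where only the PLANE branch is taken from the diff and the other branches are assumed by analogy:

    // Sketch: factory dispatching on the warp type. The CylindricalWarper
    // and SphericalWarper branches are assumed, not shown in the diff.
    cv::Ptr<Warper> Warper::createByCameraFocal(float focal, int type)
    {
        if (type == PLANE)
            return new PlaneWarper(focal);
        if (type == CYLINDRICAL)
            return new CylindricalWarper(focal);
        if (type == SPHERICAL)
            return new SphericalWarper(focal);
        CV_Error(CV_StsBadArg, "unsupported warping type");
        return cv::Ptr<Warper>();
    }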

--- a/modules/stitching/warpers.hpp
+++ b/modules/stitching/warpers.hpp
@@ -1,5 +1,5 @@
-#ifndef _OPENCV_WARPERS_HPP_
-#define _OPENCV_WARPERS_HPP_
+#ifndef __OPENCV_WARPERS_HPP__
+#define __OPENCV_WARPERS_HPP__
 #include <opencv2/core/core.hpp>
 #include <opencv2/imgproc/imgproc.hpp>
@@ -9,7 +9,7 @@ class Warper
 public:
     enum { PLANE, CYLINDRICAL, SPHERICAL };
-    static cv::Ptr<Warper> createByCameraFocal(int focal, int type);
+    static cv::Ptr<Warper> createByCameraFocal(float focal, int type);
     virtual ~Warper() {}
@@ -118,4 +118,4 @@ private:
 #include "warpers_inl.hpp"
-#endif // _OPENCV_WARPERS_HPP_
+#endif // __OPENCV_WARPERS_HPP__

--- a/modules/stitching/warpers_inl.hpp
+++ b/modules/stitching/warpers_inl.hpp
@@ -1,5 +1,5 @@
-#ifndef _OPENCV_WARPERS_INL_HPP_
-#define _OPENCV_WARPERS_INL_HPP_
+#ifndef __OPENCV_WARPERS_INL_HPP__
+#define __OPENCV_WARPERS_INL_HPP__
 #include "warpers.hpp" // Make your IDE see declarations
@@ -195,4 +195,4 @@ void CylindricalProjector::mapBackward(float u, float v, float &x, float &y)
     y = focal * y / z + size.height * 0.5f;
 }
-#endif // _OPENCV_WARPERS_INL_HPP_
+#endif // __OPENCV_WARPERS_INL_HPP__
