Merge pull request #18646 from qchateau:wave-auto

* stitching: add WAVE_CORRECT_AUTO

* stitching: use CV_EXPORTS
pull/18662/head
Quentin Chateau 4 years ago committed by GitHub
parent 72dfd4846e
commit 36598677cf
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 12
      modules/stitching/include/opencv2/stitching/detail/motion_estimators.hpp
  2. 45
      modules/stitching/src/motion_estimators.cpp
  3. 1
      modules/stitching/test/test_precomp.hpp
  4. 50
      modules/stitching/test/test_wave_correction.cpp

@ -328,9 +328,19 @@ private:
enum WaveCorrectKind
{
    WAVE_CORRECT_HORIZ,
    WAVE_CORRECT_VERT,
    WAVE_CORRECT_AUTO
};
/** @brief Tries to detect the wave correction kind depending
on whether a panorama spans horizontally or vertically
@param rmats Camera rotation matrices.
@return The correction kind to use for this panorama
*/
CV_EXPORTS
WaveCorrectKind autoDetectWaveCorrectKind(const std::vector<Mat> &rmats);
/** @brief Tries to make panorama more horizontal (or vertical).
@param rmats Camera rotation matrices.

@ -886,6 +886,45 @@ void BundleAdjusterAffinePartial::calcJacobian(Mat &jac)
//////////////////////////////////////////////////////////////////////////////
/** @brief Detects whether a panorama spans horizontally or vertically
and returns the matching wave correction kind.

@param rmats Camera rotation matrices (each a 3x3 CV_32F matrix).
@return WAVE_CORRECT_VERT when the cameras spread mostly along the
vertical axis, WAVE_CORRECT_HORIZ otherwise (including the empty case).
*/
WaveCorrectKind autoDetectWaveCorrectKind(const std::vector<Mat> &rmats)
{
    // An empty camera set carries no orientation information; bail out
    // before std::minmax_element, which must not be dereferenced on an
    // empty range. Horizontal is the library's historical default.
    if (rmats.empty())
        return WAVE_CORRECT_HORIZ;

    std::vector<float> xs, ys;
    xs.reserve(rmats.size());
    ys.reserve(rmats.size());

    // Project a [0, 0, 1, 1] point to the camera image frame
    // Ignore intrinsic parameters and camera translation as they
    // have little influence
    // This also means we can simply use "rmat.col(2)" as the
    // projected point homogeneous coordinate
    for (const Mat& rmat: rmats)
    {
        CV_Assert(rmat.type() == CV_32F);
        xs.push_back(rmat.at<float>(0, 2) / rmat.at<float>(2, 2));
        ys.push_back(rmat.at<float>(1, 2) / rmat.at<float>(2, 2));
    }

    // Calculate the delta between the max and min values for
    // both the X and Y axis
    auto min_max_x = std::minmax_element(xs.begin(), xs.end());
    auto min_max_y = std::minmax_element(ys.begin(), ys.end());
    double delta_x = *min_max_x.second - *min_max_x.first;
    double delta_y = *min_max_y.second - *min_max_y.first;

    // If the Y delta is the biggest, it means the images
    // mostly span along the vertical axis: correct this axis
    if (delta_y > delta_x)
    {
        LOGLN("  using vertical wave correction");
        return WAVE_CORRECT_VERT;
    }
    else
    {
        LOGLN("  using horizontal wave correction");
        return WAVE_CORRECT_HORIZ;
    }
}
void waveCorrect(std::vector<Mat> &rmats, WaveCorrectKind kind)
{
    LOGLN("Wave correcting...");
@ -898,12 +937,18 @@ void waveCorrect(std::vector<Mat> &rmats, WaveCorrectKind kind)
        return;
    }
if (kind == WAVE_CORRECT_AUTO)
{
kind = autoDetectWaveCorrectKind(rmats);
}
    Mat moment = Mat::zeros(3, 3, CV_32F);
    for (size_t i = 0; i < rmats.size(); ++i)
    {
        Mat col = rmats[i].col(0);
        moment += col * col.t();
    }

    Mat eigen_vals, eigen_vecs;
    eigen(moment, eigen_vals, eigen_vecs);

@ -6,6 +6,7 @@
#include "opencv2/ts.hpp"
#include "opencv2/stitching.hpp"
#include "opencv2/stitching/detail/motion_estimators.hpp"
#include "opencv2/stitching/detail/matchers.hpp"
#include "opencv2/stitching/detail/blenders.hpp"
#include "opencv2/stitching/detail/exposure_compensate.hpp"

@ -0,0 +1,50 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#include "test_precomp.hpp"
namespace opencv_test {
namespace {
// Runs the stitching estimation pipeline on the given images, then asks
// the library which wave correction kind it would auto-select for the
// resulting camera rotations.
detail::WaveCorrectKind correctionKind(const std::vector<UMat>& images)
{
    Ptr<Stitcher> stitcher = Stitcher::create(Stitcher::PANORAMA);
    stitcher->estimateTransform(images);

    // Collect the estimated per-camera rotation matrices.
    std::vector<Mat> rotations;
    for (const auto& camera : stitcher->cameras())
        rotations.push_back(camera.R);

    return detail::autoDetectWaveCorrectKind(rotations);
}
TEST(WaveCorrection, AutoWaveCorrection)
{
    // Two overlapping shots that form a horizontal panorama.
    std::vector<UMat> images(2);
    imread(cvtest::TS::ptr()->get_data_path() + "stitching/s1.jpg").copyTo(images[0]);
    imread(cvtest::TS::ptr()->get_data_path() + "stitching/s2.jpg").copyTo(images[1]);

    EXPECT_EQ(detail::WAVE_CORRECT_HORIZ, correctionKind(images));

    // Apply the same rotation to both inputs before re-detecting.
    std::vector<UMat> rotated(2);
    const auto rotateBoth = [&](cv::RotateFlags flag)
    {
        rotate(images[0], rotated[0], flag);
        rotate(images[1], rotated[1], flag);
    };

    // A quarter turn either way makes the panorama span vertically...
    rotateBoth(cv::ROTATE_90_CLOCKWISE);
    EXPECT_EQ(detail::WAVE_CORRECT_VERT, correctionKind(rotated));

    rotateBoth(cv::ROTATE_90_COUNTERCLOCKWISE);
    EXPECT_EQ(detail::WAVE_CORRECT_VERT, correctionKind(rotated));

    // ...while a half turn keeps it horizontal.
    rotateBoth(cv::ROTATE_180);
    EXPECT_EQ(detail::WAVE_CORRECT_HORIZ, correctionKind(rotated));
}
} // namespace
} // namespace opencv_test
Loading…
Cancel
Save