Merge pull request #22329 from chinery:stitching-py-fixes

Fix stitching Python bindings (and one stitching_detailed.cpp bug)
Alexander Smorkalov committed 2 years ago (via GitHub)
commit 5cd07006f6
5 changed files:
  1. modules/stitching/include/opencv2/stitching/detail/matchers.hpp (21 changes)
  2. modules/stitching/include/opencv2/stitching/detail/seam_finders.hpp (2 changes)
  3. modules/stitching/misc/python/test/test_stitching.py (42 changes)
  4. modules/stitching/src/matchers.cpp (8 changes)
  5. modules/stitching/test/test_matchers.cpp (26 changes)

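Taken together, these changes make the whole matching and seam-finding pipeline callable from Python. Below is a minimal sketch of the intended call pattern; it is not part of the patch, and the image paths and corner offsets are illustrative placeholders.

import cv2 as cv
import numpy as np

# Illustrative inputs; any overlapping image sequence works (paths are placeholders).
images = [cv.imread(p) for p in ('a1.png', 'a2.png', 'a3.png')]

orb = cv.ORB_create()
features = [cv.detail.computeImageFeatures2(orb, img) for img in images]

# apply2 is the Python name of FeaturesMatcher::operator(); with this patch it
# delegates to match() and returns the list of MatchesInfo objects.
matcher = cv.detail_BestOf2NearestMatcher()
pairwise_matches = matcher.apply2(features)

# With CV_IN_OUT on the masks parameter, GraphCutSeamFinder.find() updates the
# passed UMats and also returns them, so the computed seam masks reach Python.
finder = cv.detail_GraphCutSeamFinder('COST_COLOR_GRAD')
masks = [cv.UMat(255 * np.ones(img.shape[:2], np.uint8)) for img in images]
corners = [(0, 0), (75, 0), (150, 0)]  # placeholder top-left corners of each warped image
masks = finder.find([img.astype(np.float32) for img in images], corners, masks)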
@@ -138,7 +138,7 @@ public:
     @sa detail::MatchesInfo
     */
     CV_WRAP_AS(apply2) void operator ()(const std::vector<ImageFeatures> &features, CV_OUT std::vector<MatchesInfo> &pairwise_matches,
-                      const cv::UMat &mask = cv::UMat());
+                      const cv::UMat &mask = cv::UMat()) { match(features, pairwise_matches, mask); };
     /** @return True, if it's possible to use the same matcher instance in parallel, false otherwise
     */
@@ -161,6 +161,16 @@ protected:
     virtual void match(const ImageFeatures &features1, const ImageFeatures &features2,
                        MatchesInfo& matches_info) = 0;

+    /** @brief This method implements the logic to match features across an arbitrary number of images.
+    By default every pair of images in the input is checked, but the behaviour can be changed by subclasses.
+
+    @param features vector of image features
+    @param pairwise_matches found matches
+    @param mask (optional) mask indicating which image pairs should be matched
+    */
+    virtual void match(const std::vector<ImageFeatures> &features, std::vector<MatchesInfo> &pairwise_matches,
+                       const cv::UMat &mask = cv::UMat());
+
     bool is_thread_safe_;
 };
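The mask parameter documented above is also reachable from Python through apply2. A hedged sketch, assuming the usual FeaturesMatcher convention of a num_images x num_images 8-bit matrix whose nonzero entries select the pairs to match; the synthetic images exist only to keep the snippet self-contained.

import cv2 as cv
import numpy as np

# Synthetic images so the sketch runs stand-alone; real code would use overlapping photos.
rng = np.random.default_rng(0)
images = [rng.integers(0, 256, (240, 320), np.uint8) for _ in range(3)]

orb = cv.ORB_create()
features = [cv.detail.computeImageFeatures2(orb, img) for img in images]

# Assumed mask convention: num_images x num_images, CV_8U, nonzero marks pairs to match.
mask = np.zeros((3, 3), np.uint8)
mask[0, 1] = mask[1, 2] = 1  # only consider neighbouring pairs

matcher = cv.detail_BestOf2NearestMatcher()
pairwise_matches = matcher.apply2(features, mask=cv.UMat(mask))
print(len(pairwise_matches))  # one MatchesInfo per ordered image pair (num_images * num_images)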
@@ -205,11 +215,12 @@ public:
     CV_WRAP BestOf2NearestRangeMatcher(int range_width = 5, bool try_use_gpu = false, float match_conf = 0.3f,
                                        int num_matches_thresh1 = 6, int num_matches_thresh2 = 6);
-    void operator ()(const std::vector<ImageFeatures> &features, std::vector<MatchesInfo> &pairwise_matches,
-                     const cv::UMat &mask = cv::UMat());

 protected:
+    // indicate that we do not want to hide the base class match method with a different signature
+    using BestOf2NearestMatcher::match;
+    void match(const std::vector<ImageFeatures> &features, std::vector<MatchesInfo> &pairwise_matches,
+               const cv::UMat &mask = cv::UMat()) CV_OVERRIDE;

     int range_width_;
 };

@@ -248,7 +248,7 @@ public:
     ~GraphCutSeamFinder();
     CV_WRAP void find(const std::vector<UMat> &src, const std::vector<Point> &corners,
-                      std::vector<UMat> &masks) CV_OVERRIDE;
+                      CV_IN_OUT std::vector<UMat> &masks) CV_OVERRIDE;
 private:
     // To avoid GCGraph dependency

@@ -1,5 +1,6 @@
 #!/usr/bin/env python
 import cv2 as cv
+import numpy as np
 from tests_common import NewOpenCVTests
@@ -134,6 +135,47 @@ class stitching_matches_info_test(NewOpenCVTests):
         self.assertIsNotNone(matches_info.matches)
         self.assertIsNotNone(matches_info.inliers_mask)

+
+class stitching_range_matcher_test(NewOpenCVTests):
+
+    def test_simple(self):
+        images = [
+            self.get_sample('stitching/a1.png'),
+            self.get_sample('stitching/a2.png'),
+            self.get_sample('stitching/a3.png')
+        ]
+
+        orb = cv.ORB_create()
+        features = [cv.detail.computeImageFeatures2(orb, img) for img in images]
+
+        matcher = cv.detail_BestOf2NearestRangeMatcher(range_width=1)
+        matches = matcher.apply2(features)
+
+        # matches[1] is image 0 and image 1, should have non-zero confidence
+        self.assertNotEqual(matches[1].confidence, 0)
+
+        # matches[2] is image 0 and image 2, should have zero confidence due to range_width=1
+        self.assertEqual(matches[2].confidence, 0)
+
+
+class stitching_seam_finder_graph_cuts(NewOpenCVTests):
+
+    def test_simple(self):
+        images = [
+            self.get_sample('stitching/a1.png'),
+            self.get_sample('stitching/a2.png'),
+            self.get_sample('stitching/a3.png')
+        ]
+
+        images = [cv.resize(img, [100, 100]) for img in images]
+        finder = cv.detail_GraphCutSeamFinder('COST_COLOR_GRAD')
+        masks = [cv.UMat(255 * np.ones((img.shape[0], img.shape[1]), np.uint8)) for img in images]
+        images_f = [img.astype(np.float32) for img in images]
+        masks_warped = finder.find(images_f, [(0, 0), (75, 0), (150, 0)], masks)
+        self.assertIsNotNone(masks_warped)
+
+
 if __name__ == '__main__':
     NewOpenCVTests.bootstrap()
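Both new tests rely on how the returned pairwise matches are laid out: FeaturesMatcher produces one MatchesInfo per ordered image pair, indexed row-major as i * num_images + j, so with three images entry 1 is the pair (0, 1) and entry 2 is (0, 2). A small sketch of that index arithmetic (pure bookkeeping, no OpenCV calls):

num_images = 3

def pair_index(i, j, n=num_images):
    # Row-major index into pairwise_matches: one entry per ordered (i, j) pair.
    return i * n + j

assert pair_index(0, 1) == 1  # adjacent pair -> non-zero confidence expected
assert pair_index(0, 2) == 2  # outside range_width=1 -> confidence stays 0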

@@ -335,8 +335,8 @@ MatchesInfo& MatchesInfo::operator =(const MatchesInfo &other)

 //////////////////////////////////////////////////////////////////////////////

-void FeaturesMatcher::operator ()(const std::vector<ImageFeatures> &features, std::vector<MatchesInfo> &pairwise_matches,
-                                  const UMat &mask)
+void FeaturesMatcher::match(const std::vector<ImageFeatures> &features, std::vector<MatchesInfo> &pairwise_matches,
+                            const UMat &mask)
 {
     const int num_images = static_cast<int>(features.size());
@@ -486,8 +486,8 @@ BestOf2NearestRangeMatcher::BestOf2NearestRangeMatcher(int range_width, bool try
 }

-void BestOf2NearestRangeMatcher::operator ()(const std::vector<ImageFeatures> &features, std::vector<MatchesInfo> &pairwise_matches,
-                                             const UMat &mask)
+void BestOf2NearestRangeMatcher::match(const std::vector<ImageFeatures> &features, std::vector<MatchesInfo> &pairwise_matches,
+                                       const UMat &mask)
 {
     const int num_images = static_cast<int>(features.size());

@@ -114,4 +114,30 @@ TEST(ParallelFeaturesFinder, IsSameWithSerial)
     }
 }

+TEST(RangeMatcher, MatchesRangeOnly)
+{
+    Ptr<Feature2D> finder = ORB::create();
+
+    Mat img0 = imread(string(cvtest::TS::ptr()->get_data_path()) + "stitching/a1.png", IMREAD_GRAYSCALE);
+    Mat img1 = imread(string(cvtest::TS::ptr()->get_data_path()) + "stitching/a2.png", IMREAD_GRAYSCALE);
+    Mat img2 = imread(string(cvtest::TS::ptr()->get_data_path()) + "stitching/a3.png", IMREAD_GRAYSCALE);
+
+    vector<detail::ImageFeatures> features(3);
+    computeImageFeatures(finder, img0, features[0]);
+    computeImageFeatures(finder, img1, features[1]);
+    computeImageFeatures(finder, img2, features[2]);
+
+    vector<detail::MatchesInfo> pairwise_matches;
+    Ptr<detail::FeaturesMatcher> matcher = makePtr<detail::BestOf2NearestRangeMatcher>(1);
+    (*matcher)(features, pairwise_matches);
+
+    // matches[1] will be image 0 and image 1, should have non-zero confidence
+    EXPECT_NE(pairwise_matches[1].confidence, .0);
+
+    // matches[2] will be image 0 and image 2, should have zero confidence due to range_width=1
+    EXPECT_DOUBLE_EQ(pairwise_matches[2].confidence, .0);
+}
+
 }} // namespace
