From 1ba7c728a6f6e6139aede123c2074034fc5c6063 Mon Sep 17 00:00:00 2001 From: Jiri Horner Date: Sat, 10 Nov 2018 17:53:48 +0100 Subject: [PATCH] Merge pull request #12827 from hrnr:stitching_4 [evolution] Stitching for OpenCV 4.0 * stitching: wrap Stitcher::create for bindings * provide method for consistent stitcher usage across languages * samples: add python stitching sample * port cpp stitching sample to python * stitching: consolidate Stitcher create methods * remove Stitcher::createDefault, it returns Stitcher, not Ptr -> inconsistent API * deprecate cv::createStitcher and cv::createStitcherScans in favor of Stitcher::create * stitching: avoid anonymous enum in Stitcher * ORIG_RESOL should be double * add documentatiton * stitching: improve documentation in Stitcher * stitching: expose estimator in Stitcher * remove ABI hack * stitching: drop try_use_gpu flag * OCL will be used automatically through T-API in OCL-enable paths * CUDA won't be used unless user sets CUDA-enabled classes manually * stitching: drop FeaturesFinder * use Feature2D instead of FeaturesFinder * interoperability with features2d module * detach from dependency on xfeatures2d * features2d: fix compute and detect to work with UMat vectors * correctly pass UMats as UMats to allow OCL paths * support vector of UMats as output arg * stitching: use nearest interpolation for resizing masks * fix warnings --- modules/features2d/src/feature2d.cpp | 72 ++-- modules/python/test/test_stitching.py | 2 +- .../stitching/include/opencv2/stitching.hpp | 69 ++-- .../opencv2/stitching/detail/matchers.hpp | 158 +------ .../misc/python/pyopencv_stitching.hpp | 4 +- modules/stitching/perf/opencl/perf_stitch.cpp | 42 +- modules/stitching/perf/perf_estimators.cpp | 8 +- modules/stitching/perf/perf_matchers.cpp | 48 +-- modules/stitching/perf/perf_precomp.hpp | 16 +- modules/stitching/perf/perf_stich.cpp | 38 +- modules/stitching/src/matchers.cpp | 386 ++---------------- modules/stitching/src/precomp.hpp | 4 - modules/stitching/src/stitcher.cpp | 157 ++----- modules/stitching/test/test_matchers.cpp | 39 +- modules/stitching/test/test_precomp.hpp | 4 + samples/cpp/stitching.cpp | 19 +- samples/cpp/stitching_detailed.cpp | 26 +- samples/python/stitching.py | 47 +++ 18 files changed, 329 insertions(+), 810 deletions(-) create mode 100644 samples/python/stitching.py diff --git a/modules/features2d/src/feature2d.cpp b/modules/features2d/src/feature2d.cpp index 0114f87ba7..49fafbaf23 100644 --- a/modules/features2d/src/feature2d.cpp +++ b/modules/features2d/src/feature2d.cpp @@ -71,29 +71,38 @@ void Feature2D::detect( InputArray image, } -void Feature2D::detect( InputArrayOfArrays _images, +void Feature2D::detect( InputArrayOfArrays images, std::vector >& keypoints, - InputArrayOfArrays _masks ) + InputArrayOfArrays masks ) { CV_INSTRUMENT_REGION(); - vector images, masks; + int nimages = (int)images.total(); - _images.getMatVector(images); - size_t i, nimages = images.size(); - - if( !_masks.empty() ) + if (!masks.empty()) { - _masks.getMatVector(masks); - CV_Assert(masks.size() == nimages); + CV_Assert(masks.total() == (size_t)nimages); } keypoints.resize(nimages); - for( i = 0; i < nimages; i++ ) + if (images.isMatVector()) + { + for (int i = 0; i < nimages; i++) + { + detect(images.getMat(i), keypoints[i], masks.empty() ? noArray() : masks.getMat(i)); + } + } + else { - detect(images[i], keypoints[i], masks.empty() ? 
Mat() : masks[i] ); + // assume UMats + for (int i = 0; i < nimages; i++) + { + detect(images.getUMat(i), keypoints[i], masks.empty() ? noArray() : masks.getUMat(i)); + } } + + } /* @@ -116,29 +125,40 @@ void Feature2D::compute( InputArray image, detectAndCompute(image, noArray(), keypoints, descriptors, true); } -void Feature2D::compute( InputArrayOfArrays _images, +void Feature2D::compute( InputArrayOfArrays images, std::vector >& keypoints, - OutputArrayOfArrays _descriptors ) + OutputArrayOfArrays descriptors ) { CV_INSTRUMENT_REGION(); - if( !_descriptors.needed() ) + if( !descriptors.needed() ) return; - vector images; - - _images.getMatVector(images); - size_t i, nimages = images.size(); + int nimages = (int)images.total(); - CV_Assert( keypoints.size() == nimages ); - CV_Assert( _descriptors.kind() == _InputArray::STD_VECTOR_MAT ); - - vector& descriptors = *(vector*)_descriptors.getObj(); - descriptors.resize(nimages); - - for( i = 0; i < nimages; i++ ) + CV_Assert( keypoints.size() == (size_t)nimages ); + // resize descriptors to appropriate size and compute + if (descriptors.isMatVector()) + { + vector& vec = *(vector*)descriptors.getObj(); + vec.resize(nimages); + for (int i = 0; i < nimages; i++) + { + compute(images.getMat(i), keypoints[i], vec[i]); + } + } + else if (descriptors.isUMatVector()) + { + vector& vec = *(vector*)descriptors.getObj(); + vec.resize(nimages); + for (int i = 0; i < nimages; i++) + { + compute(images.getUMat(i), keypoints[i], vec[i]); + } + } + else { - compute(images[i], keypoints[i], descriptors[i]); + CV_Error(Error::StsBadArg, "descriptors must be vector or vector"); } } diff --git a/modules/python/test/test_stitching.py b/modules/python/test/test_stitching.py index 3a5a99a590..0f5a2e4221 100644 --- a/modules/python/test/test_stitching.py +++ b/modules/python/test/test_stitching.py @@ -10,7 +10,7 @@ class stitching_test(NewOpenCVTests): img1 = self.get_sample('stitching/a1.png') img2 = self.get_sample('stitching/a2.png') - stitcher = cv.createStitcher(False) + stitcher = cv.Stitcher.create(cv.Stitcher_PANORAMA) (_result, pano) = stitcher.stitch((img1, img2)) #cv.imshow("pano", pano) diff --git a/modules/stitching/include/opencv2/stitching.hpp b/modules/stitching/include/opencv2/stitching.hpp index 07e1b5f73a..c58360a731 100644 --- a/modules/stitching/include/opencv2/stitching.hpp +++ b/modules/stitching/include/opencv2/stitching.hpp @@ -113,6 +113,10 @@ namespace cv { A basic example on image stitching */ +/** @example samples/python/stitching.py +A basic example on image stitching in Python. +*/ + /** @example samples/cpp/stitching_detailed.cpp A detailed example on image stitching */ @@ -124,15 +128,22 @@ be able to achieve higher stitching stability and quality of the final images at familiar with the theory is recommended. 
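Before the pipeline details, here is a minimal sketch of the high-level API as it looks after this change. It is only an orientation example, not code from the patch: the input and output file names are placeholders, Stitcher::create now returns a Ptr, the try_use_gpu flag is gone, and OpenCL is picked up automatically through the T-API where available.

#include <iostream>
#include <vector>
#include "opencv2/imgcodecs.hpp"
#include "opencv2/stitching.hpp"

int main()
{
    // Placeholder inputs; any set of overlapping photos works.
    std::vector<cv::Mat> imgs{ cv::imread("a1.png"), cv::imread("a2.png"), cv::imread("a3.png") };

    cv::Ptr<cv::Stitcher> stitcher = cv::Stitcher::create(cv::Stitcher::PANORAMA);

    cv::Mat pano;
    cv::Stitcher::Status status = stitcher->stitch(imgs, pano);
    if (status != cv::Stitcher::OK)
    {
        std::cout << "Can't stitch images, error code = " << int(status) << std::endl;
        return 1;
    }
    cv::imwrite("result.jpg", pano);
    return 0;
}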
@note - - A basic example on image stitching can be found at - opencv_source_code/samples/cpp/stitching.cpp - - A detailed example on image stitching can be found at - opencv_source_code/samples/cpp/stitching_detailed.cpp +- A basic example on image stitching can be found at + opencv_source_code/samples/cpp/stitching.cpp +- A basic example on image stitching in Python can be found at + opencv_source_code/samples/python/stitching.py +- A detailed example on image stitching can be found at + opencv_source_code/samples/cpp/stitching_detailed.cpp */ class CV_EXPORTS_W Stitcher { public: - enum { ORIG_RESOL = -1 }; + /** + * When setting a resolution for stitching, this values is a placeholder + * for preserving the original resolution. + */ + static constexpr const double ORIG_RESOL = -1.0; + enum Status { OK = 0, @@ -140,6 +151,7 @@ public: ERR_HOMOGRAPHY_EST_FAIL = 2, ERR_CAMERA_PARAMS_ADJUST_FAIL = 3 }; + enum Mode { /** Mode for creating photo panoramas. Expects images under perspective @@ -157,22 +169,14 @@ public: }; - // Stitcher() {} - /** @brief Creates a stitcher with the default parameters. - - @param try_use_gpu Flag indicating whether GPU should be used whenever it's possible. - @return Stitcher class instance. - */ - static Stitcher createDefault(bool try_use_gpu = false); /** @brief Creates a Stitcher configured in one of the stitching modes. @param mode Scenario for stitcher operation. This is usually determined by source of images to stitch and their transformation. Default parameters will be chosen for operation in given scenario. - @param try_use_gpu Flag indicating whether GPU should be used whenever it's possible. @return Stitcher class instance. */ - static Ptr create(Mode mode = PANORAMA, bool try_use_gpu = false); + CV_WRAP static Ptr create(Mode mode = Stitcher::PANORAMA); CV_WRAP double registrationResol() const { return registr_resol_; } CV_WRAP void setRegistrationResol(double resol_mpx) { registr_resol_ = resol_mpx; } @@ -192,9 +196,9 @@ public: detail::WaveCorrectKind waveCorrectKind() const { return wave_correct_kind_; } void setWaveCorrectKind(detail::WaveCorrectKind kind) { wave_correct_kind_ = kind; } - Ptr featuresFinder() { return features_finder_; } - const Ptr featuresFinder() const { return features_finder_; } - void setFeaturesFinder(Ptr features_finder) + Ptr featuresFinder() { return features_finder_; } + const Ptr featuresFinder() const { return features_finder_; } + void setFeaturesFinder(Ptr features_finder) { features_finder_ = features_finder; } Ptr featuresMatcher() { return features_matcher_; } @@ -214,12 +218,10 @@ public: void setBundleAdjuster(Ptr bundle_adjuster) { bundle_adjuster_ = bundle_adjuster; } - /* TODO OpenCV ABI 4.x Ptr estimator() { return estimator_; } const Ptr estimator() const { return estimator_; } void setEstimator(Ptr estimator) { estimator_ = estimator; } - */ Ptr warper() { return warper_; } const Ptr warper() const { return warper_; } @@ -238,18 +240,16 @@ public: const Ptr blender() const { return blender_; } void setBlender(Ptr b) { blender_ = b; } - /** @overload */ - CV_WRAP Status estimateTransform(InputArrayOfArrays images); /** @brief These functions try to match the given images and to estimate rotations of each camera. @note Use the functions only if you're aware of the stitching pipeline, otherwise use Stitcher::stitch. @param images Input images. - @param rois Region of interest rectangles. + @param masks Masks for each input image specifying where to look for keypoints (optional). @return Status code. 
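To make the replacement of the old rois overload concrete, a hedged sketch of the two-phase pipeline with the new masks parameter follows; the half-image ROI is purely illustrative and nonzero mask pixels mark where keypoints may be searched for.

#include <vector>
#include "opencv2/imgcodecs.hpp"
#include "opencv2/stitching.hpp"

int main()
{
    std::vector<cv::Mat> imgs{ cv::imread("a1.png"), cv::imread("a2.png") };

    // Build one mask per image; here only the left half is searched (illustrative).
    std::vector<cv::Mat> masks;
    for (const cv::Mat& img : imgs)
    {
        cv::Mat m = cv::Mat::zeros(img.size(), CV_8U);
        m(cv::Rect(0, 0, img.cols / 2, img.rows)).setTo(255);
        masks.push_back(m);
    }

    cv::Ptr<cv::Stitcher> stitcher = cv::Stitcher::create();

    cv::Stitcher::Status status = stitcher->estimateTransform(imgs, masks);
    if (status != cv::Stitcher::OK)
        return 1;

    cv::Mat pano;
    status = stitcher->composePanorama(pano);
    return status == cv::Stitcher::OK ? 0 : 1;
}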
*/ - Status estimateTransform(InputArrayOfArrays images, const std::vector > &rois); + CV_WRAP Status estimateTransform(InputArrayOfArrays images, InputArrayOfArrays masks = noArray()); /** @overload */ CV_WRAP Status composePanorama(OutputArray pano); @@ -271,19 +271,17 @@ public: /** @brief These functions try to stitch the given images. @param images Input images. - @param rois Region of interest rectangles. + @param masks Masks for each input image specifying where to look for keypoints (optional). @param pano Final pano. @return Status code. */ - Status stitch(InputArrayOfArrays images, const std::vector > &rois, OutputArray pano); + CV_WRAP Status stitch(InputArrayOfArrays images, InputArrayOfArrays masks, OutputArray pano); std::vector component() const { return indices_; } std::vector cameras() const { return cameras_; } CV_WRAP double workScale() const { return work_scale_; } private: - //Stitcher() {} - Status matchImages(); Status estimateCameraParams(); @@ -291,13 +289,11 @@ private: double seam_est_resol_; double compose_resol_; double conf_thresh_; - Ptr features_finder_; + Ptr features_finder_; Ptr features_matcher_; cv::UMat matching_mask_; Ptr bundle_adjuster_; - /* TODO OpenCV ABI 4.x Ptr estimator_; - */ bool do_wave_correct_; detail::WaveCorrectKind wave_correct_kind_; Ptr warper_; @@ -306,7 +302,7 @@ private: Ptr blender_; std::vector imgs_; - std::vector > rois_; + std::vector masks_; std::vector full_img_sizes_; std::vector features_; std::vector pairwise_matches_; @@ -319,8 +315,15 @@ private: double warped_image_scale_; }; -CV_EXPORTS_W Ptr createStitcher(bool try_use_gpu = false); -CV_EXPORTS_W Ptr createStitcherScans(bool try_use_gpu = false); +/** + * @deprecated use Stitcher::create + */ +CV_DEPRECATED Ptr createStitcher(bool try_use_gpu = false); + +/** + * @deprecated use Stitcher::create + */ +CV_DEPRECATED Ptr createStitcherScans(bool try_use_gpu = false); //! @} stitching diff --git a/modules/stitching/include/opencv2/stitching/detail/matchers.hpp b/modules/stitching/include/opencv2/stitching/detail/matchers.hpp index 5c0123833f..9160f47818 100644 --- a/modules/stitching/include/opencv2/stitching/detail/matchers.hpp +++ b/modules/stitching/include/opencv2/stitching/detail/matchers.hpp @@ -48,10 +48,6 @@ #include "opencv2/opencv_modules.hpp" -#ifdef HAVE_OPENCV_XFEATURES2D -# include "opencv2/xfeatures2d/cuda.hpp" -#endif - namespace cv { namespace detail { @@ -67,149 +63,17 @@ struct CV_EXPORTS ImageFeatures UMat descriptors; }; -/** @brief Feature finders base class */ -class CV_EXPORTS FeaturesFinder -{ -public: - virtual ~FeaturesFinder() {} - /** @overload */ - void operator ()(InputArray image, ImageFeatures &features); - /** @brief Finds features in the given image. - - @param image Source image - @param features Found features - @param rois Regions of interest - - @sa detail::ImageFeatures, Rect_ - */ - void operator ()(InputArray image, ImageFeatures &features, const std::vector &rois); - /** @brief Finds features in the given images in parallel. - - @param images Source images - @param features Found features for each image - @param rois Regions of interest for each image - - @sa detail::ImageFeatures, Rect_ - */ - void operator ()(InputArrayOfArrays images, std::vector &features, - const std::vector > &rois); - /** @overload */ - void operator ()(InputArrayOfArrays images, std::vector &features); - /** @brief Frees unused memory allocated before if there is any. 
*/ - virtual void collectGarbage() {} - - /* TODO OpenCV ABI 4.x - reimplement this as public method similar to FeaturesMatcher and remove private function hack - @return True, if it's possible to use the same finder instance in parallel, false otherwise - bool isThreadSafe() const { return is_thread_safe_; } - */ - -protected: - /** @brief This method must implement features finding logic in order to make the wrappers - detail::FeaturesFinder::operator()_ work. - - @param image Source image - @param features Found features - - @sa detail::ImageFeatures */ - virtual void find(InputArray image, ImageFeatures &features) = 0; - /** @brief uses dynamic_cast to determine thread-safety - @return True, if it's possible to use the same finder instance in parallel, false otherwise - */ - bool isThreadSafe() const; -}; - -/** @brief SURF features finder. - -@sa detail::FeaturesFinder, SURF -*/ -class CV_EXPORTS SurfFeaturesFinder : public FeaturesFinder -{ -public: - SurfFeaturesFinder(double hess_thresh = 300., int num_octaves = 3, int num_layers = 4, - int num_octaves_descr = /*4*/3, int num_layers_descr = /*2*/4); - -private: - void find(InputArray image, ImageFeatures &features) CV_OVERRIDE; - - Ptr detector_; - Ptr extractor_; - Ptr surf; -}; - - -/** @brief SIFT features finder. - -@sa detail::FeaturesFinder, SIFT -*/ -class CV_EXPORTS SiftFeaturesFinder : public FeaturesFinder -{ -public: - SiftFeaturesFinder(); - -private: - void find(InputArray image, ImageFeatures &features) CV_OVERRIDE; - Ptr sift; -}; - -/** @brief ORB features finder. : - -@sa detail::FeaturesFinder, ORB -*/ -class CV_EXPORTS OrbFeaturesFinder : public FeaturesFinder -{ -public: - OrbFeaturesFinder(Size _grid_size = Size(3,1), int nfeatures=1500, float scaleFactor=1.3f, int nlevels=5); - -private: - void find(InputArray image, ImageFeatures &features) CV_OVERRIDE; - - Ptr orb; - Size grid_size; -}; - -/** @brief AKAZE features finder. : - -@sa detail::FeaturesFinder, AKAZE -*/ -class CV_EXPORTS AKAZEFeaturesFinder : public detail::FeaturesFinder -{ -public: - AKAZEFeaturesFinder(AKAZE::DescriptorType descriptor_type = AKAZE::DESCRIPTOR_MLDB, - int descriptor_size = 0, - int descriptor_channels = 3, - float threshold = 0.001f, - int nOctaves = 4, - int nOctaveLayers = 4, - KAZE::DiffusivityType diffusivity = KAZE::DIFF_PM_G2); - -private: - void find(InputArray image, ImageFeatures &features) CV_OVERRIDE; - - Ptr akaze; -}; - -#ifdef HAVE_OPENCV_XFEATURES2D -class CV_EXPORTS SurfFeaturesFinderGpu : public FeaturesFinder -{ -public: - SurfFeaturesFinderGpu(double hess_thresh = 300., int num_octaves = 3, int num_layers = 4, - int num_octaves_descr = 4, int num_layers_descr = 2); - - void collectGarbage() CV_OVERRIDE; - -private: - void find(InputArray image, ImageFeatures &features) CV_OVERRIDE; - - cuda::GpuMat image_; - cuda::GpuMat gray_image_; - cuda::SURF_CUDA surf_; - cuda::GpuMat keypoints_; - cuda::GpuMat descriptors_; - int num_octaves_, num_layers_; - int num_octaves_descr_, num_layers_descr_; -}; -#endif +CV_EXPORTS void computeImageFeatures( + const Ptr &featuresFinder, + InputArrayOfArrays images, + std::vector &features, + InputArrayOfArrays masks = noArray()); + +CV_EXPORTS void computeImageFeatures( + const Ptr &featuresFinder, + InputArray image, + ImageFeatures &features, + InputArray mask = noArray()); /** @brief Structure containing information about matches between two images. 
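With the FeaturesFinder hierarchy removed, these free functions take any cv::Feature2D. A short sketch of the intended call pattern, assuming ORB and placeholder image names (SURF/SIFT now come from xfeatures2d when that module is built):

#include <vector>
#include "opencv2/features2d.hpp"
#include "opencv2/imgcodecs.hpp"
#include "opencv2/stitching/detail/matchers.hpp"

int main()
{
    std::vector<cv::Mat> imgs{ cv::imread("a1.png"), cv::imread("a2.png") };

    // Any cv::Feature2D works now; ORB is just one choice.
    cv::Ptr<cv::Feature2D> finder = cv::ORB::create();

    // Batch overload: one ImageFeatures per input image; masks are optional.
    std::vector<cv::detail::ImageFeatures> features;
    cv::detail::computeImageFeatures(finder, imgs, features);

    // Single-image overload, e.g. for a hand-rolled pipeline.
    cv::detail::ImageFeatures f;
    cv::detail::computeImageFeatures(finder, imgs[0], f);

    return 0;
}

Because the features2d overloads changed in this patch accept vectors of UMat for images and for descriptor outputs, passing UMats here should keep the batch on the OpenCL path instead of downloading to Mat.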
diff --git a/modules/stitching/misc/python/pyopencv_stitching.hpp b/modules/stitching/misc/python/pyopencv_stitching.hpp index 9c438356b8..634e8dd1d5 100644 --- a/modules/stitching/misc/python/pyopencv_stitching.hpp +++ b/modules/stitching/misc/python/pyopencv_stitching.hpp @@ -1,4 +1,6 @@ #ifdef HAVE_OPENCV_STITCHING + typedef Stitcher::Status Status; +typedef Stitcher::Mode Mode; -#endif \ No newline at end of file +#endif diff --git a/modules/stitching/perf/opencl/perf_stitch.cpp b/modules/stitching/perf/opencl/perf_stitch.cpp index 9a84d3edc1..1b6e43304a 100644 --- a/modules/stitching/perf/opencl/perf_stitch.cpp +++ b/modules/stitching/perf/opencl/perf_stitch.cpp @@ -35,7 +35,7 @@ OCL_PERF_TEST_P(stitch, a123, TEST_DETECTORS) _imgs.push_back( imread( getDataPath("stitching/a3.png") ) ); vector imgs = ToUMat(_imgs); - Ptr featuresFinder = getFeatureFinder(GetParam()); + Ptr featuresFinder = getFeatureFinder(GetParam()); Ptr featuresMatcher = GetParam() == "orb" ? makePtr(false, ORB_MATCH_CONFIDENCE) : makePtr(false, SURF_MATCH_CONFIDENCE); @@ -44,14 +44,14 @@ OCL_PERF_TEST_P(stitch, a123, TEST_DETECTORS) while(next()) { - Stitcher stitcher = Stitcher::createDefault(); - stitcher.setFeaturesFinder(featuresFinder); - stitcher.setFeaturesMatcher(featuresMatcher); - stitcher.setWarper(makePtr()); - stitcher.setRegistrationResol(WORK_MEGAPIX); + Ptr stitcher = Stitcher::create(); + stitcher->setFeaturesFinder(featuresFinder); + stitcher->setFeaturesMatcher(featuresMatcher); + stitcher->setWarper(makePtr()); + stitcher->setRegistrationResol(WORK_MEGAPIX); startTimer(); - stitcher.stitch(imgs, pano); + stitcher->stitch(imgs, pano); stopTimer(); } @@ -69,7 +69,7 @@ OCL_PERF_TEST_P(stitch, b12, TEST_DETECTORS) imgs.push_back( imread( getDataPath("stitching/b1.png") ) ); imgs.push_back( imread( getDataPath("stitching/b2.png") ) ); - Ptr featuresFinder = getFeatureFinder(GetParam()); + Ptr featuresFinder = getFeatureFinder(GetParam()); Ptr featuresMatcher = GetParam() == "orb" ? makePtr(false, ORB_MATCH_CONFIDENCE) : makePtr(false, SURF_MATCH_CONFIDENCE); @@ -78,14 +78,14 @@ OCL_PERF_TEST_P(stitch, b12, TEST_DETECTORS) while(next()) { - Stitcher stitcher = Stitcher::createDefault(); - stitcher.setFeaturesFinder(featuresFinder); - stitcher.setFeaturesMatcher(featuresMatcher); - stitcher.setWarper(makePtr()); - stitcher.setRegistrationResol(WORK_MEGAPIX); + Ptr stitcher = Stitcher::create(); + stitcher->setFeaturesFinder(featuresFinder); + stitcher->setFeaturesMatcher(featuresMatcher); + stitcher->setWarper(makePtr()); + stitcher->setRegistrationResol(WORK_MEGAPIX); startTimer(); - stitcher.stitch(imgs, pano); + stitcher->stitch(imgs, pano); stopTimer(); } @@ -116,7 +116,7 @@ OCL_PERF_TEST_P(stitch, boat, TEST_DETECTORS) _imgs.push_back( imread( getDataPath("stitching/boat6.jpg") ) ); vector imgs = ToUMat(_imgs); - Ptr featuresFinder = getFeatureFinder(GetParam()); + Ptr featuresFinder = getFeatureFinder(GetParam()); Ptr featuresMatcher = GetParam() == "orb" ? 
makePtr(false, ORB_MATCH_CONFIDENCE) : makePtr(false, SURF_MATCH_CONFIDENCE); @@ -125,14 +125,14 @@ OCL_PERF_TEST_P(stitch, boat, TEST_DETECTORS) while(next()) { - Stitcher stitcher = Stitcher::createDefault(); - stitcher.setFeaturesFinder(featuresFinder); - stitcher.setFeaturesMatcher(featuresMatcher); - stitcher.setWarper(makePtr()); - stitcher.setRegistrationResol(WORK_MEGAPIX); + Ptr stitcher = Stitcher::create(); + stitcher->setFeaturesFinder(featuresFinder); + stitcher->setFeaturesMatcher(featuresMatcher); + stitcher->setWarper(makePtr()); + stitcher->setRegistrationResol(WORK_MEGAPIX); startTimer(); - stitcher.stitch(imgs, pano); + stitcher->stitch(imgs, pano); stopTimer(); } diff --git a/modules/stitching/perf/perf_estimators.cpp b/modules/stitching/perf/perf_estimators.cpp index 7b2b533927..dd1d44e077 100644 --- a/modules/stitching/perf/perf_estimators.cpp +++ b/modules/stitching/perf/perf_estimators.cpp @@ -28,13 +28,9 @@ PERF_TEST_P(bundleAdjuster, affine, testing::Combine(TEST_DETECTORS, AFFINE_FUNC string detector = get<0>(GetParam()); string affine_fun = get<1>(GetParam()); - Ptr finder; + Ptr finder = getFeatureFinder(detector); Ptr matcher; Ptr bundle_adjuster; - if (detector == "surf") - finder = makePtr(); - else if (detector == "orb") - finder = makePtr(); if (affine_fun == "affinePartial") { matcher = makePtr(false); @@ -54,7 +50,7 @@ PERF_TEST_P(bundleAdjuster, affine, testing::Combine(TEST_DETECTORS, AFFINE_FUNC std::vector cameras; std::vector cameras2; - (*finder)(images, features); + computeImageFeatures(finder, images, features); (*matcher)(features, pairwise_matches); if (!(*estimator)(features, pairwise_matches, cameras)) FAIL() << "estimation failed. this should never happen."; diff --git a/modules/stitching/perf/perf_matchers.cpp b/modules/stitching/perf/perf_matchers.cpp index 28e19c85f3..6065be6ce8 100644 --- a/modules/stitching/perf/perf_matchers.cpp +++ b/modules/stitching/perf/perf_matchers.cpp @@ -29,11 +29,11 @@ PERF_TEST_P(FeaturesFinderVec, ParallelFeaturesFinder, NUMBER_IMAGES) vector imgs(GetParam(), img); vector features(imgs.size()); - Ptr featuresFinder = makePtr(); + Ptr finder = ORB::create(); TEST_CYCLE() { - (*featuresFinder)(imgs, features); + detail::computeImageFeatures(finder, imgs, features); } SANITY_CHECK_NOTHING(); @@ -45,12 +45,12 @@ PERF_TEST_P(FeaturesFinderVec, SerialFeaturesFinder, NUMBER_IMAGES) vector imgs(GetParam(), img); vector features(imgs.size()); - Ptr featuresFinder = makePtr(); + Ptr finder = ORB::create(); TEST_CYCLE() { for (size_t i = 0; i < imgs.size(); ++i) - (*featuresFinder)(imgs[i], features[i]); + detail::computeImageFeatures(finder, imgs[i], features[i]); } SANITY_CHECK_NOTHING(); @@ -65,16 +65,14 @@ PERF_TEST_P( match, bestOf2Nearest, TEST_DETECTORS) resize(img1_full, img1, Size(), scale1, scale1, INTER_LINEAR_EXACT); resize(img2_full, img2, Size(), scale2, scale2, INTER_LINEAR_EXACT); - Ptr finder; + Ptr finder = getFeatureFinder(GetParam()); Ptr matcher; if (GetParam() == "surf") { - finder = makePtr(); matcher = makePtr(false, SURF_MATCH_CONFIDENCE); } else if (GetParam() == "orb") { - finder = makePtr(); matcher = makePtr(false, ORB_MATCH_CONFIDENCE); } else @@ -83,8 +81,8 @@ PERF_TEST_P( match, bestOf2Nearest, TEST_DETECTORS) } detail::ImageFeatures features1, features2; - (*finder)(img1, features1); - (*finder)(img2, features2); + detail::computeImageFeatures(finder, img1, features1); + detail::computeImageFeatures(finder, img2, features2); detail::MatchesInfo pairwise_matches; @@ -118,18 +116,16 @@ 
PERF_TEST_P( matchVector, bestOf2NearestVectorFeatures, testing::Combine( resize(img1_full, img1, Size(), scale1, scale1, INTER_LINEAR_EXACT); resize(img2_full, img2, Size(), scale2, scale2, INTER_LINEAR_EXACT); - Ptr finder; - Ptr matcher; string detectorName = get<0>(GetParam()); int featuresVectorSize = get<1>(GetParam()); + Ptr finder = getFeatureFinder(detectorName); + Ptr matcher; if (detectorName == "surf") { - finder = makePtr(); matcher = makePtr(false, SURF_MATCH_CONFIDENCE); } else if (detectorName == "orb") { - finder = makePtr(); matcher = makePtr(false, ORB_MATCH_CONFIDENCE); } else @@ -138,8 +134,8 @@ PERF_TEST_P( matchVector, bestOf2NearestVectorFeatures, testing::Combine( } detail::ImageFeatures features1, features2; - (*finder)(img1, features1); - (*finder)(img2, features2); + detail::computeImageFeatures(finder, img1, features1); + detail::computeImageFeatures(finder, img2, features2); vector features; vector pairwise_matches; for(int i = 0; i < featuresVectorSize/2; i++) @@ -183,16 +179,14 @@ PERF_TEST_P( match, affineBestOf2Nearest, TEST_DETECTORS) resize(img1_full, img1, Size(), scale1, scale1, INTER_LINEAR_EXACT); resize(img2_full, img2, Size(), scale2, scale2, INTER_LINEAR_EXACT); - Ptr finder; + Ptr finder = getFeatureFinder(GetParam()); Ptr matcher; if (GetParam() == "surf") { - finder = makePtr(); matcher = makePtr(false, false, SURF_MATCH_CONFIDENCE); } else if (GetParam() == "orb") { - finder = makePtr(); matcher = makePtr(false, false, ORB_MATCH_CONFIDENCE); } else @@ -201,8 +195,8 @@ PERF_TEST_P( match, affineBestOf2Nearest, TEST_DETECTORS) } detail::ImageFeatures features1, features2; - (*finder)(img1, features1); - (*finder)(img2, features2); + detail::computeImageFeatures(finder, img1, features1); + detail::computeImageFeatures(finder, img2, features2); detail::MatchesInfo pairwise_matches; @@ -242,18 +236,16 @@ PERF_TEST_P( matchVector, affineBestOf2NearestVectorFeatures, testing::Combine( resize(img1_full, img1, Size(), scale1, scale1, INTER_LINEAR_EXACT); resize(img2_full, img2, Size(), scale2, scale2, INTER_LINEAR_EXACT); - Ptr finder; - Ptr matcher; string detectorName = get<0>(GetParam()); int featuresVectorSize = get<1>(GetParam()); + Ptr finder = getFeatureFinder(detectorName); + Ptr matcher; if (detectorName == "surf") { - finder = makePtr(); matcher = makePtr(false, false, SURF_MATCH_CONFIDENCE); } else if (detectorName == "orb") { - finder = makePtr(); matcher = makePtr(false, false, ORB_MATCH_CONFIDENCE); } else @@ -262,8 +254,8 @@ PERF_TEST_P( matchVector, affineBestOf2NearestVectorFeatures, testing::Combine( } detail::ImageFeatures features1, features2; - (*finder)(img1, features1); - (*finder)(img2, features2); + detail::computeImageFeatures(finder, img1, features1); + detail::computeImageFeatures(finder, img2, features2); vector features; vector pairwise_matches; for(int i = 0; i < featuresVectorSize/2; i++) @@ -288,12 +280,12 @@ PERF_TEST_P( matchVector, affineBestOf2NearestVectorFeatures, testing::Combine( if (pairwise_matches[i].src_img_idx < 0) continue; - EXPECT_TRUE(pairwise_matches[i].matches.size() > 400); + EXPECT_GT(pairwise_matches[i].matches.size(), 200u); EXPECT_FALSE(pairwise_matches[i].H.empty()); ++matches_count; } - EXPECT_TRUE(matches_count > 0); + EXPECT_GT(matches_count, 0u); SANITY_CHECK_NOTHING(); } diff --git a/modules/stitching/perf/perf_precomp.hpp b/modules/stitching/perf/perf_precomp.hpp index fcbdc68c25..cb89f16eef 100644 --- a/modules/stitching/perf/perf_precomp.hpp +++ b/modules/stitching/perf/perf_precomp.hpp 
@@ -4,19 +4,25 @@ #include "opencv2/ts.hpp" #include "opencv2/stitching.hpp" +#ifdef HAVE_OPENCV_XFEATURES2D +#include "opencv2/xfeatures2d/nonfree.hpp" +#endif + namespace cv { -static inline Ptr getFeatureFinder(const std::string& name) +static inline Ptr getFeatureFinder(const std::string& name) { if (name == "orb") - return makePtr(); + return ORB::create(); +#ifdef HAVE_OPENCV_XFEATURES2D else if (name == "surf") - return makePtr(); + return xfeatures2d::SURF::create(); +#endif else if (name == "akaze") - return makePtr(); + return AKAZE::create(); else - return Ptr(); + return Ptr(); } } // namespace cv diff --git a/modules/stitching/perf/perf_stich.cpp b/modules/stitching/perf/perf_stich.cpp index 75fb93fea9..3d2a12846c 100644 --- a/modules/stitching/perf/perf_stich.cpp +++ b/modules/stitching/perf/perf_stich.cpp @@ -31,7 +31,7 @@ PERF_TEST_P(stitch, a123, TEST_DETECTORS) imgs.push_back( imread( getDataPath("stitching/a2.png") ) ); imgs.push_back( imread( getDataPath("stitching/a3.png") ) ); - Ptr featuresFinder = getFeatureFinder(GetParam()); + Ptr featuresFinder = getFeatureFinder(GetParam()); Ptr featuresMatcher = GetParam() == "orb" ? makePtr(false, ORB_MATCH_CONFIDENCE) @@ -41,14 +41,14 @@ PERF_TEST_P(stitch, a123, TEST_DETECTORS) while(next()) { - Stitcher stitcher = Stitcher::createDefault(); - stitcher.setFeaturesFinder(featuresFinder); - stitcher.setFeaturesMatcher(featuresMatcher); - stitcher.setWarper(makePtr()); - stitcher.setRegistrationResol(WORK_MEGAPIX); + Ptr stitcher = Stitcher::create(); + stitcher->setFeaturesFinder(featuresFinder); + stitcher->setFeaturesMatcher(featuresMatcher); + stitcher->setWarper(makePtr()); + stitcher->setRegistrationResol(WORK_MEGAPIX); startTimer(); - stitcher.stitch(imgs, pano); + stitcher->stitch(imgs, pano); stopTimer(); } @@ -66,7 +66,7 @@ PERF_TEST_P(stitch, b12, TEST_DETECTORS) imgs.push_back( imread( getDataPath("stitching/b1.png") ) ); imgs.push_back( imread( getDataPath("stitching/b2.png") ) ); - Ptr featuresFinder = getFeatureFinder(GetParam()); + Ptr featuresFinder = getFeatureFinder(GetParam()); Ptr featuresMatcher = GetParam() == "orb" ? 
makePtr(false, ORB_MATCH_CONFIDENCE) @@ -76,14 +76,14 @@ PERF_TEST_P(stitch, b12, TEST_DETECTORS) while(next()) { - Stitcher stitcher = Stitcher::createDefault(); - stitcher.setFeaturesFinder(featuresFinder); - stitcher.setFeaturesMatcher(featuresMatcher); - stitcher.setWarper(makePtr()); - stitcher.setRegistrationResol(WORK_MEGAPIX); + Ptr stitcher = Stitcher::create(); + stitcher->setFeaturesFinder(featuresFinder); + stitcher->setFeaturesMatcher(featuresMatcher); + stitcher->setWarper(makePtr()); + stitcher->setRegistrationResol(WORK_MEGAPIX); startTimer(); - stitcher.stitch(imgs, pano); + stitcher->stitch(imgs, pano); stopTimer(); } @@ -101,7 +101,7 @@ PERF_TEST_P(stitchDatasets, affine, testing::Combine(AFFINE_DATASETS, TEST_DETEC Mat pano; vector imgs; int width, height, allowed_diff = 20; - Ptr featuresFinder = getFeatureFinder(detector); + Ptr featuresFinder = getFeatureFinder(detector); if(dataset == "budapest") { @@ -116,6 +116,10 @@ PERF_TEST_P(stitchDatasets, affine, testing::Combine(AFFINE_DATASETS, TEST_DETEC // this dataset is big, the results between surf and orb differ slightly, // but both are still good allowed_diff = 50; + // we need to boost ORB number of features to be able to stitch this dataset + // SURF works just fine with default settings + if(detector == "orb") + featuresFinder = ORB::create(1500); } else if (dataset == "newspaper") { @@ -128,7 +132,7 @@ PERF_TEST_P(stitchDatasets, affine, testing::Combine(AFFINE_DATASETS, TEST_DETEC // we need to boost ORB number of features to be able to stitch this dataset // SURF works just fine with default settings if(detector == "orb") - featuresFinder = makePtr(Size(3,1), 3000); + featuresFinder = ORB::create(3000); } else if (dataset == "prague") { @@ -149,7 +153,7 @@ PERF_TEST_P(stitchDatasets, affine, testing::Combine(AFFINE_DATASETS, TEST_DETEC while(next()) { - Ptr stitcher = Stitcher::create(Stitcher::SCANS, false); + Ptr stitcher = Stitcher::create(Stitcher::SCANS); stitcher->setFeaturesFinder(featuresFinder); if (cv::ocl::useOpenCL()) diff --git a/modules/stitching/src/matchers.cpp b/modules/stitching/src/matchers.cpp index 795d51adc8..9a54d1bd38 100644 --- a/modules/stitching/src/matchers.cpp +++ b/modules/stitching/src/matchers.cpp @@ -48,16 +48,6 @@ using namespace cv; using namespace cv::detail; using namespace cv::cuda; -#ifdef HAVE_OPENCV_XFEATURES2D -#include "opencv2/xfeatures2d.hpp" -using xfeatures2d::SURF; -using xfeatures2d::SIFT; -#else -# if defined(_MSC_VER) -# pragma warning(disable:4702) // unreachable code -# endif -#endif - #ifdef HAVE_OPENCV_CUDAIMGPROC # include "opencv2/cudaimgproc.hpp" #endif @@ -121,35 +111,6 @@ private: }; -struct FindFeaturesBody : ParallelLoopBody -{ - FindFeaturesBody(FeaturesFinder &finder, InputArrayOfArrays images, - std::vector &features, const std::vector > *rois) - : finder_(finder), images_(images), features_(features), rois_(rois) {} - - void operator ()(const Range &r) const CV_OVERRIDE - { - for (int i = r.start; i < r.end; ++i) - { - Mat image = images_.getMat(i); - if (rois_) - finder_(image, features_[i], (*rois_)[i]); - else - finder_(image, features_[i]); - } - } - -private: - FeaturesFinder &finder_; - InputArrayOfArrays images_; - std::vector &features_; - const std::vector > *rois_; - - // to cease visual studio warning - void operator =(const FindFeaturesBody&); -}; - - ////////////////////////////////////////////////////////////////////////////// typedef std::set > MatchesSet; @@ -318,339 +279,40 @@ void GpuMatcher::collectGarbage() namespace cv { 
namespace detail { -void FeaturesFinder::operator ()(InputArray image, ImageFeatures &features) +void computeImageFeatures( + const Ptr &featuresFinder, + InputArrayOfArrays images, + std::vector &features, + InputArrayOfArrays masks) { - find(image, features); - features.img_size = image.size(); -} - - -void FeaturesFinder::operator ()(InputArray image, ImageFeatures &features, const std::vector &rois) -{ - std::vector roi_features(rois.size()); - size_t total_kps_count = 0; - int total_descriptors_height = 0; - - for (size_t i = 0; i < rois.size(); ++i) - { - find(image.getUMat()(rois[i]), roi_features[i]); - total_kps_count += roi_features[i].keypoints.size(); - total_descriptors_height += roi_features[i].descriptors.rows; - } - - features.img_size = image.size(); - features.keypoints.resize(total_kps_count); - features.descriptors.create(total_descriptors_height, - roi_features[0].descriptors.cols, - roi_features[0].descriptors.type()); - - int kp_idx = 0; - int descr_offset = 0; - for (size_t i = 0; i < rois.size(); ++i) - { - for (size_t j = 0; j < roi_features[i].keypoints.size(); ++j, ++kp_idx) - { - features.keypoints[kp_idx] = roi_features[i].keypoints[j]; - features.keypoints[kp_idx].pt.x += (float)rois[i].x; - features.keypoints[kp_idx].pt.y += (float)rois[i].y; - } - UMat subdescr = features.descriptors.rowRange( - descr_offset, descr_offset + roi_features[i].descriptors.rows); - roi_features[i].descriptors.copyTo(subdescr); - descr_offset += roi_features[i].descriptors.rows; - } -} + // compute all features + std::vector> keypoints; + std::vector descriptors; + // TODO replace with 1 call to new over load of detectAndCompute + featuresFinder->detect(images, keypoints, masks); + featuresFinder->compute(images, keypoints, descriptors); - -void FeaturesFinder::operator ()(InputArrayOfArrays images, std::vector &features) -{ + // store to ImageFeatures size_t count = images.total(); features.resize(count); - - FindFeaturesBody body(*this, images, features, NULL); - if (isThreadSafe()) - parallel_for_(Range(0, static_cast(count)), body); - else - body(Range(0, static_cast(count))); -} - - -void FeaturesFinder::operator ()(InputArrayOfArrays images, std::vector &features, - const std::vector > &rois) -{ - CV_Assert(rois.size() == images.total()); - size_t count = images.total(); - features.resize(count); - - FindFeaturesBody body(*this, images, features, &rois); - if (isThreadSafe()) - parallel_for_(Range(0, static_cast(count)), body); - else - body(Range(0, static_cast(count))); -} - - -bool FeaturesFinder::isThreadSafe() const -{ -#ifdef HAVE_OPENCL - if (ocl::isOpenCLActivated()) - { - return false; - } -#endif - if (dynamic_cast(this)) - { - return true; - } - else if (dynamic_cast(this)) - { - return true; - } - else - { - return false; - } -} - - -SurfFeaturesFinder::SurfFeaturesFinder(double hess_thresh, int num_octaves, int num_layers, - int num_octaves_descr, int num_layers_descr) -{ -#ifdef HAVE_OPENCV_XFEATURES2D - if (num_octaves_descr == num_octaves && num_layers_descr == num_layers) - { - Ptr surf_ = SURF::create(); - if( !surf_ ) - CV_Error( Error::StsNotImplemented, "OpenCV was built without SURF support" ); - surf_->setHessianThreshold(hess_thresh); - surf_->setNOctaves(num_octaves); - surf_->setNOctaveLayers(num_layers); - surf = surf_; - } - else + CV_Assert(count == keypoints.size() && count == descriptors.size()); + for (size_t i = 0; i < count; ++i) { - Ptr sdetector_ = SURF::create(); - Ptr sextractor_ = SURF::create(); - - if( !sdetector_ || !sextractor_ ) - 
CV_Error( Error::StsNotImplemented, "OpenCV was built without SURF support" ); - - sdetector_->setHessianThreshold(hess_thresh); - sdetector_->setNOctaves(num_octaves); - sdetector_->setNOctaveLayers(num_layers); - - sextractor_->setNOctaves(num_octaves_descr); - sextractor_->setNOctaveLayers(num_layers_descr); - - detector_ = sdetector_; - extractor_ = sextractor_; + features[i].img_size = images.size(int(i)); + features[i].keypoints = std::move(keypoints[i]); + features[i].descriptors = std::move(descriptors[i]); } -#else - CV_UNUSED(hess_thresh); - CV_UNUSED(num_octaves); - CV_UNUSED(num_layers); - CV_UNUSED(num_octaves_descr); - CV_UNUSED(num_layers_descr); - CV_Error( Error::StsNotImplemented, "OpenCV was built without SURF support" ); -#endif -} - -void SurfFeaturesFinder::find(InputArray image, ImageFeatures &features) -{ - UMat gray_image; - CV_Assert((image.type() == CV_8UC3) || (image.type() == CV_8UC1)); - if(image.type() == CV_8UC3) - { - cvtColor(image, gray_image, COLOR_BGR2GRAY); - } - else - { - gray_image = image.getUMat(); - } - if (!surf) - { - detector_->detect(gray_image, features.keypoints); - extractor_->compute(gray_image, features.keypoints, features.descriptors); - } - else - { - UMat descriptors; - surf->detectAndCompute(gray_image, Mat(), features.keypoints, descriptors); - features.descriptors = descriptors.reshape(1, (int)features.keypoints.size()); - } -} - -SiftFeaturesFinder::SiftFeaturesFinder() -{ -#ifdef HAVE_OPENCV_XFEATURES2D - Ptr sift_ = SIFT::create(); - if( !sift_ ) - CV_Error( Error::StsNotImplemented, "OpenCV was built without SIFT support" ); - sift = sift_; -#else - CV_Error( Error::StsNotImplemented, "OpenCV was built without SIFT support" ); -#endif -} - -void SiftFeaturesFinder::find(InputArray image, ImageFeatures &features) -{ - UMat gray_image; - CV_Assert((image.type() == CV_8UC3) || (image.type() == CV_8UC1)); - if(image.type() == CV_8UC3) - { - cvtColor(image, gray_image, COLOR_BGR2GRAY); - } - else - { - gray_image = image.getUMat(); - } - UMat descriptors; - sift->detectAndCompute(gray_image, Mat(), features.keypoints, descriptors); - features.descriptors = descriptors.reshape(1, (int)features.keypoints.size()); -} - -OrbFeaturesFinder::OrbFeaturesFinder(Size _grid_size, int n_features, float scaleFactor, int nlevels) -{ - grid_size = _grid_size; - orb = ORB::create(n_features * (99 + grid_size.area())/100/grid_size.area(), scaleFactor, nlevels); } -void OrbFeaturesFinder::find(InputArray image, ImageFeatures &features) +void computeImageFeatures( + const Ptr &featuresFinder, + InputArray image, + ImageFeatures &features, + InputArray mask) { - UMat gray_image; - - CV_Assert((image.type() == CV_8UC3) || (image.type() == CV_8UC4) || (image.type() == CV_8UC1)); - - if (image.type() == CV_8UC3) { - cvtColor(image, gray_image, COLOR_BGR2GRAY); - } else if (image.type() == CV_8UC4) { - cvtColor(image, gray_image, COLOR_BGRA2GRAY); - } else if (image.type() == CV_8UC1) { - gray_image = image.getUMat(); - } else { - CV_Error(Error::StsUnsupportedFormat, ""); - } - - if (grid_size.area() == 1) - orb->detectAndCompute(gray_image, Mat(), features.keypoints, features.descriptors); - else - { - features.keypoints.clear(); - features.descriptors.release(); - - std::vector points; - Mat _descriptors; - UMat descriptors; - - for (int r = 0; r < grid_size.height; ++r) - for (int c = 0; c < grid_size.width; ++c) - { - int xl = c * gray_image.cols / grid_size.width; - int yl = r * gray_image.rows / grid_size.height; - int xr = (c+1) * gray_image.cols 
/ grid_size.width; - int yr = (r+1) * gray_image.rows / grid_size.height; - - // LOGLN("OrbFeaturesFinder::find: gray_image.empty=" << (gray_image.empty()?"true":"false") << ", " - // << " gray_image.size()=(" << gray_image.size().width << "x" << gray_image.size().height << "), " - // << " yl=" << yl << ", yr=" << yr << ", " - // << " xl=" << xl << ", xr=" << xr << ", gray_image.data=" << ((size_t)gray_image.data) << ", " - // << "gray_image.dims=" << gray_image.dims << "\n"); - - UMat gray_image_part=gray_image(Range(yl, yr), Range(xl, xr)); - // LOGLN("OrbFeaturesFinder::find: gray_image_part.empty=" << (gray_image_part.empty()?"true":"false") << ", " - // << " gray_image_part.size()=(" << gray_image_part.size().width << "x" << gray_image_part.size().height << "), " - // << " gray_image_part.dims=" << gray_image_part.dims << ", " - // << " gray_image_part.data=" << ((size_t)gray_image_part.data) << "\n"); - - orb->detectAndCompute(gray_image_part, UMat(), points, descriptors); - - features.keypoints.reserve(features.keypoints.size() + points.size()); - for (std::vector::iterator kp = points.begin(); kp != points.end(); ++kp) - { - kp->pt.x += xl; - kp->pt.y += yl; - features.keypoints.push_back(*kp); - } - _descriptors.push_back(descriptors.getMat(ACCESS_READ)); - } - - // TODO optimize copyTo() - //features.descriptors = _descriptors.getUMat(ACCESS_READ); - _descriptors.copyTo(features.descriptors); - } -} - -AKAZEFeaturesFinder::AKAZEFeaturesFinder(AKAZE::DescriptorType descriptor_type, - int descriptor_size, - int descriptor_channels, - float threshold, - int nOctaves, - int nOctaveLayers, - KAZE::DiffusivityType diffusivity) -{ - akaze = AKAZE::create(descriptor_type, descriptor_size, descriptor_channels, - threshold, nOctaves, nOctaveLayers, diffusivity); -} - -void AKAZEFeaturesFinder::find(InputArray image, detail::ImageFeatures &features) -{ - CV_Assert((image.type() == CV_8UC3) || (image.type() == CV_8UC1)); - akaze->detectAndCompute(image, noArray(), features.keypoints, features.descriptors); -} - -#ifdef HAVE_OPENCV_XFEATURES2D -SurfFeaturesFinderGpu::SurfFeaturesFinderGpu(double hess_thresh, int num_octaves, int num_layers, - int num_octaves_descr, int num_layers_descr) -{ - surf_.keypointsRatio = 0.1f; - surf_.hessianThreshold = hess_thresh; - surf_.extended = false; - num_octaves_ = num_octaves; - num_layers_ = num_layers; - num_octaves_descr_ = num_octaves_descr; - num_layers_descr_ = num_layers_descr; -} - - -void SurfFeaturesFinderGpu::find(InputArray image, ImageFeatures &features) -{ - CV_Assert(image.depth() == CV_8U); - - ensureSizeIsEnough(image.size(), image.type(), image_); - image_.upload(image); - - ensureSizeIsEnough(image.size(), CV_8UC1, gray_image_); - -#ifdef HAVE_OPENCV_CUDAIMGPROC - cv::cuda::cvtColor(image_, gray_image_, COLOR_BGR2GRAY); -#else - cvtColor(image_, gray_image_, COLOR_BGR2GRAY); -#endif - - surf_.nOctaves = num_octaves_; - surf_.nOctaveLayers = num_layers_; - surf_.upright = false; - surf_(gray_image_, GpuMat(), keypoints_); - - surf_.nOctaves = num_octaves_descr_; - surf_.nOctaveLayers = num_layers_descr_; - surf_.upright = true; - surf_(gray_image_, GpuMat(), keypoints_, descriptors_, true); - surf_.downloadKeypoints(keypoints_, features.keypoints); - - descriptors_.download(features.descriptors); -} - -void SurfFeaturesFinderGpu::collectGarbage() -{ - surf_.releaseMemory(); - image_.release(); - gray_image_.release(); - keypoints_.release(); - descriptors_.release(); + features.img_size = image.size(); + 
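+    // one detectAndCompute pass with the caller-supplied Feature2D; the mask is forwarded as-is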
featuresFinder->detectAndCompute(image, mask, features.keypoints, features.descriptors); } -#endif - ////////////////////////////////////////////////////////////////////////////// diff --git a/modules/stitching/src/precomp.hpp b/modules/stitching/src/precomp.hpp index 535cce5e68..debc0d2088 100644 --- a/modules/stitching/src/precomp.hpp +++ b/modules/stitching/src/precomp.hpp @@ -87,10 +87,6 @@ # include "opencv2/cudalegacy.hpp" #endif -#ifdef HAVE_OPENCV_XFEATURES2D -# include "opencv2/xfeatures2d/cuda.hpp" -#endif - #include "opencv2/core/private.hpp" #include "util_log.hpp" diff --git a/modules/stitching/src/stitcher.cpp b/modules/stitching/src/stitcher.cpp index e328d67ab9..8fa214d8c0 100644 --- a/modules/stitching/src/stitcher.cpp +++ b/modules/stitching/src/stitcher.cpp @@ -44,67 +44,40 @@ namespace cv { -Stitcher Stitcher::createDefault(bool try_use_gpu) +Ptr Stitcher::create(Mode mode) { - Stitcher stitcher; - stitcher.setRegistrationResol(0.6); - stitcher.setSeamEstimationResol(0.1); - stitcher.setCompositingResol(ORIG_RESOL); - stitcher.setPanoConfidenceThresh(1); - stitcher.setWaveCorrection(true); - stitcher.setWaveCorrectKind(detail::WAVE_CORRECT_HORIZ); - stitcher.setFeaturesMatcher(makePtr(try_use_gpu)); - stitcher.setBundleAdjuster(makePtr()); - -#ifdef HAVE_OPENCV_CUDALEGACY - if (try_use_gpu && cuda::getCudaEnabledDeviceCount() > 0) - { -#ifdef HAVE_OPENCV_XFEATURES2D - stitcher.setFeaturesFinder(makePtr()); -#else - stitcher.setFeaturesFinder(makePtr()); -#endif - stitcher.setWarper(makePtr()); - stitcher.setSeamFinder(makePtr()); - } - else -#endif - { -#ifdef HAVE_OPENCV_XFEATURES2D - stitcher.setFeaturesFinder(makePtr()); -#else - stitcher.setFeaturesFinder(makePtr()); -#endif - stitcher.setWarper(makePtr()); - stitcher.setSeamFinder(makePtr(detail::GraphCutSeamFinderBase::COST_COLOR)); - } + Ptr stitcher = makePtr(); - stitcher.setExposureCompensator(makePtr()); - stitcher.setBlender(makePtr(try_use_gpu)); + stitcher->setRegistrationResol(0.6); + stitcher->setSeamEstimationResol(0.1); + stitcher->setCompositingResol(ORIG_RESOL); + stitcher->setPanoConfidenceThresh(1); + stitcher->setSeamFinder(makePtr(detail::GraphCutSeamFinderBase::COST_COLOR)); + stitcher->setBlender(makePtr(false)); + stitcher->setFeaturesFinder(ORB::create()); - stitcher.work_scale_ = 1; - stitcher.seam_scale_ = 1; - stitcher.seam_work_aspect_ = 1; - stitcher.warped_image_scale_ = 1; - - return stitcher; -} - - -Ptr Stitcher::create(Mode mode, bool try_use_gpu) -{ - Stitcher stit = createDefault(try_use_gpu); - Ptr stitcher = makePtr(stit); + stitcher->work_scale_ = 1; + stitcher->seam_scale_ = 1; + stitcher->seam_work_aspect_ = 1; + stitcher->warped_image_scale_ = 1; switch (mode) { case PANORAMA: // PANORAMA is the default - // already setup + // mostly already setup + stitcher->setEstimator(makePtr()); + stitcher->setWaveCorrection(true); + stitcher->setWaveCorrectKind(detail::WAVE_CORRECT_HORIZ); + stitcher->setFeaturesMatcher(makePtr(false)); + stitcher->setBundleAdjuster(makePtr()); + stitcher->setWarper(makePtr()); + stitcher->setExposureCompensator(makePtr()); break; case SCANS: + stitcher->setEstimator(makePtr()); stitcher->setWaveCorrection(false); - stitcher->setFeaturesMatcher(makePtr(false, try_use_gpu)); + stitcher->setFeaturesMatcher(makePtr(false, false)); stitcher->setBundleAdjuster(makePtr()); stitcher->setWarper(makePtr()); stitcher->setExposureCompensator(makePtr()); @@ -119,20 +92,12 @@ Ptr Stitcher::create(Mode mode, bool try_use_gpu) } -Stitcher::Status 
Stitcher::estimateTransform(InputArrayOfArrays images) -{ - CV_INSTRUMENT_REGION(); - - return estimateTransform(images, std::vector >()); -} - - -Stitcher::Status Stitcher::estimateTransform(InputArrayOfArrays images, const std::vector > &rois) +Stitcher::Status Stitcher::estimateTransform(InputArrayOfArrays images, InputArrayOfArrays masks) { CV_INSTRUMENT_REGION(); images.getUMatVector(imgs_); - rois_ = rois; + masks.getUMatVector(masks_); Status status; @@ -407,20 +372,15 @@ Stitcher::Status Stitcher::composePanorama(InputArrayOfArrays images, OutputArra Stitcher::Status Stitcher::stitch(InputArrayOfArrays images, OutputArray pano) { - CV_INSTRUMENT_REGION(); - - Status status = estimateTransform(images); - if (status != OK) - return status; - return composePanorama(pano); + return stitch(images, noArray(), pano); } -Stitcher::Status Stitcher::stitch(InputArrayOfArrays images, const std::vector > &rois, OutputArray pano) +Stitcher::Status Stitcher::stitch(InputArrayOfArrays images, InputArrayOfArrays masks, OutputArray pano) { CV_INSTRUMENT_REGION(); - Status status = estimateTransform(images, rois); + Status status = estimateTransform(images, masks); if (status != OK) return status; return composePanorama(pano); @@ -440,7 +400,6 @@ Stitcher::Status Stitcher::matchImages() seam_scale_ = 1; bool is_work_scale_set = false; bool is_seam_scale_set = false; - UMat full_img, img; features_.resize(imgs_.size()); seam_est_imgs_.resize(imgs_.size()); full_img_sizes_.resize(imgs_.size()); @@ -451,16 +410,14 @@ Stitcher::Status Stitcher::matchImages() #endif std::vector feature_find_imgs(imgs_.size()); - std::vector > feature_find_rois(rois_.size()); + std::vector feature_find_masks(masks_.size()); for (size_t i = 0; i < imgs_.size(); ++i) { - full_img = imgs_[i]; - full_img_sizes_[i] = full_img.size(); - + full_img_sizes_[i] = imgs_[i].size(); if (registr_resol_ < 0) { - img = full_img; + feature_find_imgs[i] = imgs_[i]; work_scale_ = 1; is_work_scale_set = true; } @@ -468,50 +425,34 @@ Stitcher::Status Stitcher::matchImages() { if (!is_work_scale_set) { - work_scale_ = std::min(1.0, std::sqrt(registr_resol_ * 1e6 / full_img.size().area())); + work_scale_ = std::min(1.0, std::sqrt(registr_resol_ * 1e6 / full_img_sizes_[i].area())); is_work_scale_set = true; } - resize(full_img, img, Size(), work_scale_, work_scale_, INTER_LINEAR_EXACT); + resize(imgs_[i], feature_find_imgs[i], Size(), work_scale_, work_scale_, INTER_LINEAR_EXACT); } if (!is_seam_scale_set) { - seam_scale_ = std::min(1.0, std::sqrt(seam_est_resol_ * 1e6 / full_img.size().area())); + seam_scale_ = std::min(1.0, std::sqrt(seam_est_resol_ * 1e6 / full_img_sizes_[i].area())); seam_work_aspect_ = seam_scale_ / work_scale_; is_seam_scale_set = true; } - if (rois_.empty()) - feature_find_imgs[i] = img; - else + if (!masks_.empty()) { - feature_find_rois[i].resize(rois_[i].size()); - for (size_t j = 0; j < rois_[i].size(); ++j) - { - Point tl(cvRound(rois_[i][j].x * work_scale_), cvRound(rois_[i][j].y * work_scale_)); - Point br(cvRound(rois_[i][j].br().x * work_scale_), cvRound(rois_[i][j].br().y * work_scale_)); - feature_find_rois[i][j] = Rect(tl, br); - } - feature_find_imgs[i] = img; + resize(masks_[i], feature_find_masks[i], Size(), work_scale_, work_scale_, INTER_NEAREST); } features_[i].img_idx = (int)i; LOGLN("Features in image #" << i+1 << ": " << features_[i].keypoints.size()); - resize(full_img, img, Size(), seam_scale_, seam_scale_, INTER_LINEAR_EXACT); - seam_est_imgs_[i] = img.clone(); + resize(imgs_[i], seam_est_imgs_[i], 
Size(), seam_scale_, seam_scale_, INTER_LINEAR_EXACT); } // find features possibly in parallel - if (rois_.empty()) - (*features_finder_)(feature_find_imgs, features_); - else - (*features_finder_)(feature_find_imgs, features_, feature_find_rois); + detail::computeImageFeatures(features_finder_, feature_find_imgs, features_, feature_find_masks); // Do it to save memory - features_finder_->collectGarbage(); - full_img.release(); - img.release(); feature_find_imgs.clear(); - feature_find_rois.clear(); + feature_find_masks.clear(); LOGLN("Finding features, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec"); @@ -550,16 +491,8 @@ Stitcher::Status Stitcher::matchImages() Stitcher::Status Stitcher::estimateCameraParams() { - /* TODO OpenCV ABI 4.x - get rid of this dynamic_cast hack and use estimator_ - */ - Ptr estimator; - if (dynamic_cast(features_matcher_.get())) - estimator = makePtr(); - else - estimator = makePtr(); - - if (!(*estimator)(features_, pairwise_matches_, cameras_)) + // estimate homography in global frame + if (!(*estimator_)(features_, pairwise_matches_, cameras_)) return ERR_HOMOGRAPHY_EST_FAIL; for (size_t i = 0; i < cameras_.size(); ++i) @@ -602,17 +535,17 @@ Stitcher::Status Stitcher::estimateCameraParams() } -Ptr createStitcher(bool try_use_gpu) +CV_DEPRECATED Ptr createStitcher(bool /*ignored*/) { CV_INSTRUMENT_REGION(); - return Stitcher::create(Stitcher::PANORAMA, try_use_gpu); + return Stitcher::create(Stitcher::PANORAMA); } -Ptr createStitcherScans(bool try_use_gpu) +CV_DEPRECATED Ptr createStitcherScans(bool /*ignored*/) { CV_INSTRUMENT_REGION(); - return Stitcher::create(Stitcher::SCANS, try_use_gpu); + return Stitcher::create(Stitcher::SCANS); } } // namespace cv diff --git a/modules/stitching/test/test_matchers.cpp b/modules/stitching/test/test_matchers.cpp index a264c10915..251716843f 100644 --- a/modules/stitching/test/test_matchers.cpp +++ b/modules/stitching/test/test_matchers.cpp @@ -48,54 +48,61 @@ namespace opencv_test { namespace { TEST(SurfFeaturesFinder, CanFindInROIs) { - Ptr finder = makePtr(); + Ptr finder = xfeatures2d::SURF::create(); Mat img = imread(string(cvtest::TS::ptr()->get_data_path()) + "cv/shared/lena.png"); vector rois; rois.push_back(Rect(0, 0, img.cols / 2, img.rows / 2)); rois.push_back(Rect(img.cols / 2, img.rows / 2, img.cols - img.cols / 2, img.rows - img.rows / 2)); + + // construct mask + Mat mask = Mat::zeros(img.size(), CV_8U); + for (const Rect &roi : rois) + { + Mat(mask, roi) = 1; + } + detail::ImageFeatures roi_features; - (*finder)(img, roi_features, rois); + detail::computeImageFeatures(finder, img, roi_features, mask); int tl_rect_count = 0, br_rect_count = 0, bad_count = 0; - for (size_t i = 0; i < roi_features.keypoints.size(); ++i) + for (const auto &keypoint : roi_features.keypoints) { - Point2f pt = roi_features.keypoints[i].pt; - if (pt.x >= rois[0].x && pt.y >= rois[0].y && pt.x <= rois[0].br().x && pt.y <= rois[0].br().y) + if (rois[0].contains(keypoint.pt)) tl_rect_count++; - else if (pt.x >= rois[1].x && pt.y >= rois[1].y && pt.x <= rois[1].br().x && pt.y <= rois[1].br().y) + else if (rois[1].contains(keypoint.pt)) br_rect_count++; else bad_count++; } - ASSERT_GT(tl_rect_count, 0); - ASSERT_GT(br_rect_count, 0); - ASSERT_EQ(bad_count, 0); + EXPECT_GT(tl_rect_count, 0); + EXPECT_GT(br_rect_count, 0); + EXPECT_EQ(bad_count, 0); } #endif // HAVE_OPENCV_XFEATURES2D TEST(ParallelFeaturesFinder, IsSameWithSerial) { - Ptr para_finder = makePtr(); - Ptr serial_finder = makePtr(); + Ptr para_finder = 
ORB::create(); + Ptr serial_finder = ORB::create(); Mat img = imread(string(cvtest::TS::ptr()->get_data_path()) + "stitching/a3.png", IMREAD_GRAYSCALE); vector imgs(50, img); detail::ImageFeatures serial_features; vector para_features(imgs.size()); - (*serial_finder)(img, serial_features); - (*para_finder)(imgs, para_features); + detail::computeImageFeatures(serial_finder, img, serial_features); + detail::computeImageFeatures(para_finder, imgs, para_features); // results must be the same for(size_t i = 0; i < para_features.size(); ++i) { Mat diff_descriptors = serial_features.descriptors.getMat(ACCESS_READ) != para_features[i].descriptors.getMat(ACCESS_READ); - ASSERT_EQ(countNonZero(diff_descriptors), 0); - ASSERT_EQ(serial_features.img_size, para_features[i].img_size); - ASSERT_EQ(serial_features.keypoints.size(), para_features[i].keypoints.size()); + EXPECT_EQ(countNonZero(diff_descriptors), 0); + EXPECT_EQ(serial_features.img_size, para_features[i].img_size); + EXPECT_EQ(serial_features.keypoints.size(), para_features[i].keypoints.size()); } } diff --git a/modules/stitching/test/test_precomp.hpp b/modules/stitching/test/test_precomp.hpp index 8dffcdfb9c..f3ebc682c0 100644 --- a/modules/stitching/test/test_precomp.hpp +++ b/modules/stitching/test/test_precomp.hpp @@ -9,4 +9,8 @@ #include "opencv2/stitching/detail/matchers.hpp" #include "opencv2/stitching/detail/blenders.hpp" +#ifdef HAVE_OPENCV_XFEATURES2D +#include "opencv2/xfeatures2d/nonfree.hpp" +#endif + #endif diff --git a/samples/cpp/stitching.cpp b/samples/cpp/stitching.cpp index dd8c151dd3..fca31dc188 100644 --- a/samples/cpp/stitching.cpp +++ b/samples/cpp/stitching.cpp @@ -8,7 +8,6 @@ using namespace std; using namespace cv; -bool try_use_gpu = false; bool divide_images = false; Stitcher::Mode mode = Stitcher::PANORAMA; vector imgs; @@ -24,7 +23,7 @@ int main(int argc, char* argv[]) //![stitching] Mat pano; - Ptr stitcher = Stitcher::create(mode, try_use_gpu); + Ptr stitcher = Stitcher::create(mode); Stitcher::Status status = stitcher->stitch(imgs, pano); if (status != Stitcher::OK) @@ -47,9 +46,6 @@ void printUsage(char** argv) "Flags:\n" " --d3\n" " internally creates three chunks of each image to increase stitching success\n" - " --try_use_gpu (yes|no)\n" - " Try to use GPU. The default value is 'no'. All default values\n" - " are for CPU mode.\n" " --mode (panorama|scans)\n" " Determines configuration of stitcher. The default is 'panorama',\n" " mode suitable for creating photo panoramas. 
Option 'scans' is suitable\n" @@ -75,19 +71,6 @@ int parseCmdArgs(int argc, char** argv) printUsage(argv); return EXIT_FAILURE; } - else if (string(argv[i]) == "--try_use_gpu") - { - if (string(argv[i + 1]) == "no") - try_use_gpu = false; - else if (string(argv[i + 1]) == "yes") - try_use_gpu = true; - else - { - cout << "Bad --try_use_gpu flag value\n"; - return EXIT_FAILURE; - } - i++; - } else if (string(argv[i]) == "--d3") { divide_images = true; diff --git a/samples/cpp/stitching_detailed.cpp b/samples/cpp/stitching_detailed.cpp index 27b6e024f8..7806446141 100644 --- a/samples/cpp/stitching_detailed.cpp +++ b/samples/cpp/stitching_detailed.cpp @@ -17,6 +17,10 @@ #include "opencv2/stitching/detail/warpers.hpp" #include "opencv2/stitching/warpers.hpp" +#ifdef HAVE_OPENCV_XFEATURES2D +#include "opencv2/xfeatures2d/nonfree.hpp" +#endif + #define ENABLE_LOG 1 #define LOG(msg) std::cout << msg #define LOGLN(msg) std::cout << msg << std::endl @@ -374,23 +378,20 @@ int main(int argc, char* argv[]) int64 t = getTickCount(); #endif - Ptr finder; - if (features_type == "surf") + Ptr finder; + if (features_type == "orb") { -#ifdef HAVE_OPENCV_XFEATURES2D - if (try_cuda && cuda::getCudaEnabledDeviceCount() > 0) - finder = makePtr(); - else -#endif - finder = makePtr(); + finder = ORB::create(); } - else if (features_type == "orb") +#ifdef HAVE_OPENCV_XFEATURES2D + else if (features_type == "surf") { - finder = makePtr(); + finder = xfeatures2d::SURF::create(); } else if (features_type == "sift") { - finder = makePtr(); + finder = xfeatures2d::SIFT::create(); } +#endif else { cout << "Unknown 2D features type: '" << features_type << "'.\n"; @@ -435,7 +436,7 @@ int main(int argc, char* argv[]) is_seam_scale_set = true; } - (*finder)(img, features[i]); + computeImageFeatures(finder, img, features[i]); features[i].img_idx = i; LOGLN("Features in image #" << i+1 << ": " << features[i].keypoints.size()); @@ -443,7 +444,6 @@ int main(int argc, char* argv[]) images[i] = img.clone(); } - finder->collectGarbage(); full_img.release(); img.release(); diff --git a/samples/python/stitching.py b/samples/python/stitching.py new file mode 100644 index 0000000000..b08c0160de --- /dev/null +++ b/samples/python/stitching.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python + +''' +Stitching sample +================ + +Show how to use Stitcher API from python in a simple way to stitch panoramas +or scans. +''' + +from __future__ import print_function +import cv2 as cv +import numpy as np +import argparse +import sys + +modes = (cv.Stitcher_PANORAMA, cv.Stitcher_SCANS) + +parser = argparse.ArgumentParser(description='Stitching sample.') +parser.add_argument('--mode', + type = int, choices = modes, default = cv.Stitcher_PANORAMA, + help = 'Determines configuration of stitcher. The default is `PANORAMA` (%d), ' + 'mode suitable for creating photo panoramas. Option `SCANS` (%d) is suitable ' + 'for stitching materials under affine transformation, such as scans.' % modes) +parser.add_argument('--output', default = 'result.jpg', + help = 'Resulting image. 
The default is `result.jpg`.')
+parser.add_argument('img', nargs='+', help = 'input images')
+args = parser.parse_args()
+
+# read input images
+imgs = []
+for img_name in args.img:
+    img = cv.imread(img_name)
+    if img is None:
+        print("can't read image " + img_name)
+        sys.exit(-1)
+    imgs.append(img)
+
+stitcher = cv.Stitcher.create(args.mode)
+status, pano = stitcher.stitch(imgs)
+
+if status != cv.Stitcher_OK:
+    print("Can't stitch images, error code = %d" % status)
+    sys.exit(-1)
+
+cv.imwrite(args.output, pano)
+print("stitching completed successfully. %s saved!" % args.output)
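For existing C++ callers of the now-deprecated factory functions, migration is a one-line change. A hedged sketch, not part of the patch itself: the deprecated wrappers simply ignore try_use_gpu, OpenCL is used automatically through the T-API, and CUDA paths now require setting CUDA-enabled components on the Stitcher explicitly.

#include "opencv2/stitching.hpp"

int main()
{
    // Old: cv::Ptr<cv::Stitcher> s = cv::createStitcherScans(true);   // deprecated
    // New: choose the scenario explicitly.
    cv::Ptr<cv::Stitcher> panorama = cv::Stitcher::create(cv::Stitcher::PANORAMA);
    cv::Ptr<cv::Stitcher> scans    = cv::Stitcher::create(cv::Stitcher::SCANS);

    // SCANS wires up the affine matcher/estimator and disables wave correction,
    // the same configuration the Python sample selects with cv.Stitcher_SCANS.
    (void)panorama;
    (void)scans;
    return 0;
}

The Python test updated in this patch makes the same move, from cv.createStitcher(False) to cv.Stitcher.create(cv.Stitcher_PANORAMA).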