+ Click the Start button to launch the code below.
+ Then click on the image to pick a source point. After that, hover the mouse pointer over the canvas to specify a target point candidate.
+ You can change the code in the <textarea> to investigate further. You can also choose another image (you need to click "Stop" first).
diff --git a/doc/js_tutorials/js_imgproc/js_intelligent_scissors/js_intelligent_scissors.markdown b/doc/js_tutorials/js_imgproc/js_intelligent_scissors/js_intelligent_scissors.markdown
new file mode 100644
index 0000000000..97ffca596d
--- /dev/null
+++ b/doc/js_tutorials/js_imgproc/js_intelligent_scissors/js_intelligent_scissors.markdown
@@ -0,0 +1,14 @@
+Intelligent Scissors Demo {#tutorial_js_intelligent_scissors}
+=========================
+
+Goal
+----
+
+- Here you can check how to use the IntelligentScissors tool for image segmentation tasks.
+- Available methods and parameters: @ref cv::segmentation::IntelligentScissorsMB
+
+\htmlonly
+
+\endhtmlonly
diff --git a/doc/js_tutorials/js_imgproc/js_table_of_contents_imgproc.markdown b/doc/js_tutorials/js_imgproc/js_table_of_contents_imgproc.markdown
index 3bb809be71..b06eb95639 100644
--- a/doc/js_tutorials/js_imgproc/js_table_of_contents_imgproc.markdown
+++ b/doc/js_tutorials/js_imgproc/js_table_of_contents_imgproc.markdown
@@ -77,3 +77,7 @@ Image Processing {#tutorial_js_table_of_contents_imgproc}
- @subpage tutorial_js_imgproc_camera
Learn image processing for video capture.
+
+- @subpage tutorial_js_intelligent_scissors
+
+    Learn how to use the IntelligentScissors tool for image segmentation tasks.
diff --git a/doc/opencv.bib b/doc/opencv.bib
index 6212ea5a55..d3d6f7812f 100644
--- a/doc/opencv.bib
+++ b/doc/opencv.bib
@@ -768,6 +768,13 @@
pages = {432--441},
publisher = {Springer}
}
+@INPROCEEDINGS{Mortensen95intelligentscissors,
+ author = {Eric N. Mortensen and William A. Barrett},
+ title = {Intelligent Scissors for Image Composition},
+ booktitle = {In Computer Graphics, SIGGRAPH Proceedings},
+ year = {1995},
+ pages = {191--198}
+}
@inproceedings{Muja2009,
author = {Muja, Marius and Lowe, David G},
title = {Fast Approximate Nearest Neighbors with Automatic Algorithm Configuration},
diff --git a/modules/imgproc/include/opencv2/imgproc.hpp b/modules/imgproc/include/opencv2/imgproc.hpp
index c607a18b7d..533c2234c5 100644
--- a/modules/imgproc/include/opencv2/imgproc.hpp
+++ b/modules/imgproc/include/opencv2/imgproc.hpp
@@ -185,6 +185,7 @@ location of points on the plane, building special graphs (such as NNG,RNG), and
@defgroup imgproc_motion Motion Analysis and Object Tracking
@defgroup imgproc_feature Feature Detection
@defgroup imgproc_object Object Detection
+ @defgroup imgproc_segmentation Image Segmentation
@defgroup imgproc_c C API
@defgroup imgproc_hal Hardware Acceleration Layer
@{
@@ -3227,6 +3228,9 @@ CV_EXPORTS_AS(EMD) float wrapperEMD( InputArray signature1, InputArray signature
//! @} imgproc_hist
+//! @addtogroup imgproc_segmentation
+//! @{
+
/** @example samples/cpp/watershed.cpp
An example using the watershed algorithm
*/
@@ -3254,11 +3258,11 @@ function.
size as image .
@sa findContours
-
-@ingroup imgproc_misc
*/
CV_EXPORTS_W void watershed( InputArray image, InputOutputArray markers );
+//! @} imgproc_segmentation
+
//! @addtogroup imgproc_filter
//! @{
@@ -3304,7 +3308,7 @@ CV_EXPORTS_W void pyrMeanShiftFiltering( InputArray src, OutputArray dst,
//! @}
-//! @addtogroup imgproc_misc
+//! @addtogroup imgproc_segmentation
//! @{
/** @example samples/cpp/grabcut.cpp
@@ -3334,6 +3338,11 @@ CV_EXPORTS_W void grabCut( InputArray img, InputOutputArray mask, Rect rect,
InputOutputArray bgdModel, InputOutputArray fgdModel,
int iterCount, int mode = GC_EVAL );
+//! @} imgproc_segmentation
+
+//! @addtogroup imgproc_misc
+//! @{
+
/** @example samples/cpp/distrans.cpp
An example on using the distance transform
*/
@@ -4876,4 +4885,8 @@ Point LineIterator::pos() const
} // cv
+
+#include "./imgproc/segmentation.hpp"
+
+
#endif
diff --git a/modules/imgproc/include/opencv2/imgproc/segmentation.hpp b/modules/imgproc/include/opencv2/imgproc/segmentation.hpp
new file mode 100644
index 0000000000..26882f444e
--- /dev/null
+++ b/modules/imgproc/include/opencv2/imgproc/segmentation.hpp
@@ -0,0 +1,141 @@
+// This file is part of OpenCV project.
+// It is subject to the license terms in the LICENSE file found in the top-level directory
+// of this distribution and at http://opencv.org/license.html.
+
+#ifndef OPENCV_IMGPROC_SEGMENTATION_HPP
+#define OPENCV_IMGPROC_SEGMENTATION_HPP
+
+#include "opencv2/imgproc.hpp"
+
+namespace cv {
+
+namespace segmentation {
+
+//! @addtogroup imgproc_segmentation
+//! @{
+
+
+/** @brief Intelligent Scissors image segmentation
+ *
+ * This class is used to find the path (contour) between two points
+ * which can be used for image segmentation.
+ *
+ * Usage example:
+ * @snippet snippets/imgproc_segmentation.cpp usage_example_intelligent_scissors
+ *
+ * Reference: "Intelligent Scissors for Image Composition"
+ * algorithm designed by Eric N. Mortensen and William A. Barrett, Brigham Young University
+ * @cite Mortensen95intelligentscissors
+ */
+class CV_EXPORTS_W_SIMPLE IntelligentScissorsMB
+{
+public:
+ CV_WRAP
+ IntelligentScissorsMB();
+
+ /** @brief Specify weights of feature functions
+ *
+ * Consider keeping the weights normalized (sum of weights equal to 1.0).
+ * The goal of the discrete dynamic programming (DP) is to minimize the cost between pixels.
+ *
+ * @param weight_non_edge Specify cost of non-edge pixels (default: 0.43f)
+ * @param weight_gradient_direction Specify cost of gradient direction function (default: 0.43f)
+ * @param weight_gradient_magnitude Specify cost of gradient magnitude function (default: 0.14f)
+ */
+ CV_WRAP
+ IntelligentScissorsMB& setWeights(float weight_non_edge, float weight_gradient_direction, float weight_gradient_magnitude);
+
+ /** @brief Specify gradient magnitude max value threshold
+ *
+ * A zero limit value disables gradient magnitude thresholding (default behavior, as described in the original article).
+ * Otherwise, pixels with `gradient magnitude >= threshold` have zero cost.
+ *
+ * @note Thresholding should be used for images with irregular regions (to avoid getting stuck on parameters from high-contrast areas, like embedded logos).
+ *
+ * @param gradient_magnitude_threshold_max Specify gradient magnitude max value threshold (default: 0, disabled)
+ */
+ CV_WRAP
+ IntelligentScissorsMB& setGradientMagnitudeMaxLimit(float gradient_magnitude_threshold_max = 0.0f);
+
+ /** @brief Switch to "Laplacian Zero-Crossing" edge feature extractor and specify its parameters
+ *
+ * This feature extractor is used by default, following the original article.
+ *
+ * The implementation has additional filtering for regions with low-amplitude noise.
+ * This filtering is enabled through the minimal gradient amplitude parameter (use a small value such as 4, 8 or 16).
+ *
+ * @note The current implementation of this feature extractor is based on processing of grayscale images (a color image is converted to grayscale first).
+ *
+ * @note The Canny edge detector is a bit slower, but provides better results (especially on color images): use setEdgeFeatureCannyParameters().
+ *
+ * @param gradient_magnitude_min_value Minimal gradient magnitude value for edge pixels (default: 0, check is disabled)
+ */
+ CV_WRAP
+ IntelligentScissorsMB& setEdgeFeatureZeroCrossingParameters(float gradient_magnitude_min_value = 0.0f);
+
+ /** @brief Switch edge feature extractor to use Canny edge detector
+ *
+ * @note The "Laplacian Zero-Crossing" feature extractor is used by default (following the original article)
+ *
+ * @sa Canny
+ */
+ CV_WRAP
+ IntelligentScissorsMB& setEdgeFeatureCannyParameters(
+ double threshold1, double threshold2,
+ int apertureSize = 3, bool L2gradient = false
+ );
+
+ /** @brief Specify input image and extract image features
+ *
+ * @param image input image. Type is #CV_8UC1 / #CV_8UC3
+ */
+ CV_WRAP
+ IntelligentScissorsMB& applyImage(InputArray image);
+
+ /** @brief Specify custom features of the input image
+ *
+ * Customized advanced variant of applyImage() call.
+ *
+ * @param non_edge Specify cost of non-edge pixels. Type is CV_8UC1. Expected values are `{0, 1}`.
+ * @param gradient_direction Specify gradient direction feature. Type is CV_32FC2. Values are expected to be normalized: `x^2 + y^2 == 1`
+ * @param gradient_magnitude Specify cost of gradient magnitude function. Type is CV_32FC1. Values should be in range `[0, 1]`.
+ * @param image **Optional parameter**. Must be specified if a subset of features is specified (non-specified features are calculated internally)
+ */
+ CV_WRAP
+ IntelligentScissorsMB& applyImageFeatures(
+ InputArray non_edge, InputArray gradient_direction, InputArray gradient_magnitude,
+ InputArray image = noArray()
+ );
+
+ /** @brief Prepares a map of optimal paths for the given source point on the image
+ *
+ * @note applyImage() / applyImageFeatures() must be called before this call
+ *
+ * @param sourcePt The source point used to find the paths
+ */
+ CV_WRAP void buildMap(const Point& sourcePt);
+
+ /** @brief Extracts optimal contour for the given target point on the image
+ *
+ * @note buildMap() must be called before this call
+ *
+ * @param targetPt The target point
+ * @param[out] contour The list of pixels which contains optimal path between the source and the target points of the image. Type is CV_32SC2 (compatible with `std::vector`)
+ * @param backward Flag to indicate reverse order of retrieved pixels (use "true" value to fetch points from the target to the source point)
+ */
+ CV_WRAP void getContour(const Point& targetPt, OutputArray contour, bool backward = false) const;
+
+#ifndef CV_DOXYGEN
+ struct Impl;
+ inline Impl* getImpl() const { return impl.get(); }
+protected:
+ std::shared_ptr<Impl> impl;
+#endif
+};
+
+//! @}
+
+} // namespace segmentation
+} // namespace cv
+
+#endif // OPENCV_IMGPROC_SEGMENTATION_HPP
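
The header above defines the full public surface of the tool; the custom-feature path (applyImageFeatures()) is the least obvious part of it. Below is a minimal sketch, not part of this patch, of how the three feature maps could be prepared with the documented types (non-edge: CV_8UC1 with {0, 1} values, gradient direction: CV_32FC2 unit vectors, gradient magnitude: CV_32FC1 cost in [0, 1]); the Canny/Sobel parameters and the source/target points are illustrative assumptions.

```cpp
#include <cfloat>
#include <vector>

#include "opencv2/imgproc.hpp"
#include "opencv2/imgproc/segmentation.hpp"

using namespace cv;

// Sketch: build the three custom feature maps from a CV_8UC1 image
// and pass them to IntelligentScissorsMB::applyImageFeatures().
static void custom_features_sketch(const Mat& gray)
{
    // Non-edge feature: 0 on edge pixels, 1 elsewhere (Canny output is 0/255).
    Mat canny_edges, non_edge;
    Canny(gray, canny_edges, 50, 100);
    threshold(canny_edges, non_edge, 254, 1, THRESH_BINARY_INV);  // CV_8UC1, values {0, 1}

    // Gradient direction (unit vectors) and inverted magnitude cost in [0, 1].
    Mat Ix, Iy, mag;
    Sobel(gray, Ix, CV_32FC1, 1, 0);
    Sobel(gray, Iy, CV_32FC1, 0, 1);
    magnitude(Ix, Iy, mag);

    double max_m = 0;
    minMaxLoc(mag, 0, &max_m);
    Mat gradient_magnitude;
    mag.convertTo(gradient_magnitude, CV_32F, max_m > 0 ? -1.0 / max_m : 0.0, 1.0);  // strong edges -> low cost

    Mat gradient_direction(gray.size(), CV_32FC2, Scalar::all(0));
    for (int y = 0; y < gray.rows; y++)
    {
        for (int x = 0; x < gray.cols; x++)
        {
            const float m = mag.at<float>(y, x);
            if (m > FLT_EPSILON)
                gradient_direction.at<Point2f>(y, x) =
                        Point2f(Ix.at<float>(y, x) / m, Iy.at<float>(y, x) / m);
        }
    }

    segmentation::IntelligentScissorsMB tool;
    tool.applyImageFeatures(non_edge, gradient_direction, gradient_magnitude);

    tool.buildMap(Point(10, 10));               // illustrative source point
    std::vector<Point> contour;
    tool.getContour(Point(50, 50), contour);    // illustrative target point
}
```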
diff --git a/modules/imgproc/src/intelligent_scissors.cpp b/modules/imgproc/src/intelligent_scissors.cpp
new file mode 100644
index 0000000000..38acfd79e3
--- /dev/null
+++ b/modules/imgproc/src/intelligent_scissors.cpp
@@ -0,0 +1,772 @@
+// This file is part of OpenCV project.
+// It is subject to the license terms in the LICENSE file found in the top-level directory
+// of this distribution and at http://opencv.org/license.html.
+//
+// Copyright (C) 2020, Intel Corporation, all rights reserved.
+// Third party copyrights are property of their respective owners.
+
+
+#include "precomp.hpp"
+//#include "opencv2/imgproc/segmentation.hpp"
+
+#include <opencv2/core/utils/logger.hpp>
+
+#include <queue>  // std::priority_queue
+
+namespace cv {
+namespace segmentation {
+
+namespace {
+
+// 0 1 2
+// 3 x 4
+// 5 6 7
+static const int neighbors[8][2] = {
+ { -1, -1 },
+ { 0, -1 },
+ { 1, -1 },
+ { -1, 0 },
+ { 1, 0 },
+ { -1, 1 },
+ { 0, 1 },
+ { 1, 1 },
+};
+
+// encoded reverse direction
+static const int neighbors_encode[8] = {
+ 7+1, 6+1, 5+1,
+ 4+1, 3+1,
+ 2+1, 1+1, 0+1
+};
+
+#define ACOS_TABLE_SIZE 64
+// acos_table[x + ACOS_TABLE_SIZE] = acos(x / ACOS_TABLE_SIZE) / CV_PI (see local_cost)
+// x = [ -ACOS_TABLE_SIZE .. ACOS_TABLE_SIZE ]
+float* getAcosTable()
+{
+ constexpr int N = ACOS_TABLE_SIZE;
+ static bool initialized = false;
+ static float acos_table[2*N + 1] = { 0 };
+ if (!initialized)
+ {
+ const float CV_PI_inv = static_cast<float>(1.0 / CV_PI);
+ for (int i = -N; i <= N; i++)
+ {
+ acos_table[i + N] = acosf(i / (float)N) * CV_PI_inv;
+ }
+ initialized = true;
+ }
+ return acos_table;
+}
+
+} // namespace anon
+
+struct IntelligentScissorsMB::Impl
+{
+ // proposed weights from the article (sum = 1.0)
+ float weight_non_edge = 0.43f;
+ float weight_gradient_direction = 0.43f;
+ float weight_gradient_magnitude = 0.14f;
+
+ enum EdgeFeatureMode {
+ FEATURE_ZERO_CROSSING = 0,
+ FEATURE_CANNY
+ };
+ EdgeFeatureMode edge_mode = FEATURE_ZERO_CROSSING;
+
+ // FEATURE_ZERO_CROSSING
+ float edge_gradient_magnitude_min_value = 0.0f;
+
+ // FEATURE_CANNY
+ double edge_canny_threshold1 = 10;
+ double edge_canny_threshold2 = 100;
+ int edge_canny_apertureSize = 3;
+ bool edge_canny_L2gradient = false;
+
+
+ float gradient_magnitude_threshold_max = 0.0f; // disabled thresholding
+
+ int sobelKernelSize = 3; // 1 or 3
+ int laplacianKernelSize = 3; // 1 or 3
+
+ // image features
+ Mat_<Point2f> gradient_direction; //< I: normalized laplacian x/y components
+ Mat_<float> gradient_magnitude; //< Fg: gradient cost function
+ Mat_<uchar> non_edge_feature; //< Fz: zero-crossing function
+
+ float weight_non_edge_compute = 0.0f;
+
+ // encoded paths map (produced by `buildMap()`)
+ Mat_<uchar> optimalPathsMap;
+
+ void resetFeatures_()
+ {
+ CV_TRACE_FUNCTION();
+
+ gradient_direction.release();
+ gradient_magnitude.release();
+ non_edge_feature.release();
+
+ weight_non_edge_compute = weight_non_edge;
+
+ optimalPathsMap.release();
+ }
+
+ Size src_size;
+ Mat image_;
+ Mat grayscale_;
+ void initImage_(InputArray image)
+ {
+ CV_TRACE_FUNCTION();
+ if (!image_.empty())
+ return;
+ CV_CheckType(image.type(), image.type() == CV_8UC1 || image.type() == CV_8UC3 || image.type() == CV_8UC4, "");
+ src_size = image.size();
+ image_ = image.getMat();
+ }
+ void initGrayscale_(InputArray image)
+ {
+ CV_TRACE_FUNCTION();
+ if (!grayscale_.empty())
+ return;
+ CV_Assert(!image.empty());
+ CV_CheckType(image.type(), image.type() == CV_8UC1 || image.type() == CV_8UC3 || image.type() == CV_8UC4, "");
+ src_size = image.size();
+ if (image.channels() > 1)
+ cvtColor(image, grayscale_, COLOR_BGR2GRAY);
+ else
+ grayscale_ = image.getMat();
+ }
+ Mat Ix_, Iy_;
+ void initImageDerives_(InputArray image)
+ {
+ CV_TRACE_FUNCTION();
+ if (!Ix_.empty())
+ return;
+ initGrayscale_(image);
+ Sobel(grayscale_, Ix_, CV_32FC1, 1, 0, sobelKernelSize);
+ Sobel(grayscale_, Iy_, CV_32FC1, 0, 1, sobelKernelSize);
+ }
+ Mat image_magnitude_;
+ void initImageMagnitude_(InputArray image)
+ {
+ CV_TRACE_FUNCTION();
+ if (!image_magnitude_.empty())
+ return;
+ initImageDerives_(image);
+ magnitude(Ix_, Iy_, image_magnitude_);
+ }
+
+ void cleanupFeaturesTemporaryArrays_()
+ {
+ CV_TRACE_FUNCTION();
+ image_.release();
+ grayscale_.release();
+ Ix_.release();
+ Iy_.release();
+ image_magnitude_.release();
+ }
+
+ Impl()
+ {
+ // nothing
+ CV_TRACE_FUNCTION();
+ }
+
+ void setWeights(float weight_non_edge_, float weight_gradient_direction_, float weight_gradient_magnitude_)
+ {
+ CV_TRACE_FUNCTION();
+
+ CV_CheckGE(weight_non_edge_, 0.0f, "");
+ CV_CheckGE(weight_gradient_direction_, 0.0f, "");
+ CV_CheckGE(weight_gradient_magnitude_, 0.0f, "");
+ CV_CheckGE(weight_non_edge_ + weight_gradient_direction_ + weight_gradient_magnitude_, FLT_EPSILON, "Sum of weights must be greater than zero");
+ weight_non_edge = weight_non_edge_;
+ weight_gradient_direction = weight_gradient_direction_;
+ weight_gradient_magnitude = weight_gradient_magnitude_;
+ resetFeatures_();
+ }
+
+ void setGradientMagnitudeMaxLimit(float gradient_magnitude_threshold_max_)
+ {
+ CV_TRACE_FUNCTION();
+
+ CV_CheckGE(gradient_magnitude_threshold_max_, 0.0f, "");
+ gradient_magnitude_threshold_max = gradient_magnitude_threshold_max_;
+ resetFeatures_();
+ }
+
+ void setEdgeFeatureZeroCrossingParameters(float gradient_magnitude_min_value_)
+ {
+ CV_TRACE_FUNCTION();
+
+ CV_CheckGE(gradient_magnitude_min_value_, 0.0f, "");
+ edge_mode = FEATURE_ZERO_CROSSING;
+ edge_gradient_magnitude_min_value = gradient_magnitude_min_value_;
+ resetFeatures_();
+ }
+
+ void setEdgeFeatureCannyParameters(
+ double threshold1, double threshold2,
+ int apertureSize = 3, bool L2gradient = false
+ )
+ {
+ CV_TRACE_FUNCTION();
+
+ CV_CheckGE(threshold1, 0.0, "");
+ CV_CheckGE(threshold2, 0.0, "");
+ edge_mode = FEATURE_CANNY;
+ edge_canny_threshold1 = threshold1;
+ edge_canny_threshold2 = threshold2;
+ edge_canny_apertureSize = apertureSize;
+ edge_canny_L2gradient = L2gradient;
+ resetFeatures_();
+ }
+
+ void applyImageFeatures(
+ InputArray non_edge, InputArray gradient_direction_, InputArray gradient_magnitude_,
+ InputArray image
+ )
+ {
+ CV_TRACE_FUNCTION();
+
+ resetFeatures_();
+ cleanupFeaturesTemporaryArrays_();
+
+ src_size = Size(0, 0);
+ if (!non_edge.empty())
+ src_size = non_edge.size();
+ if (!gradient_direction_.empty())
+ {
+ Size gradient_direction_size = gradient_direction_.size();
+ if (!src_size.empty())
+ CV_CheckEQ(src_size, gradient_direction_size, "");
+ else
+ src_size = gradient_direction_size;
+ }
+ if (!gradient_magnitude_.empty())
+ {
+ Size gradient_magnitude_size = gradient_magnitude_.size();
+ if (!src_size.empty())
+ CV_CheckEQ(src_size, gradient_magnitude_size, "");
+ else
+ src_size = gradient_magnitude_size;
+ }
+ if (!image.empty())
+ {
+ Size image_size = image.size();
+ if (!src_size.empty())
+ CV_CheckEQ(src_size, image_size, "");
+ else
+ src_size = image_size;
+ }
+ // src_size must be filled
+ CV_Assert(!src_size.empty());
+
+ if (!non_edge.empty())
+ {
+ CV_CheckTypeEQ(non_edge.type(), CV_8UC1, "");
+ non_edge_feature = non_edge.getMat();
+ }
+ else
+ {
+ if (weight_non_edge == 0.0f)
+ {
+ non_edge_feature.create(src_size);
+ non_edge_feature.setTo(0);
+ }
+ else
+ {
+ if (image.empty())
+ CV_Error(Error::StsBadArg, "Non-edge feature parameter is missing. Input image parameter is required to extract this feature");
+ extractEdgeFeature_(image);
+ }
+ }
+
+ if (!gradient_direction_.empty())
+ {
+ CV_CheckTypeEQ(gradient_direction_.type(), CV_32FC2, "");
+ gradient_direction = gradient_direction_.getMat();
+ }
+ else
+ {
+ if (weight_gradient_direction == 0.0f)
+ {
+ gradient_direction.create(src_size);
+ gradient_direction.setTo(Scalar::all(0));
+ }
+ else
+ {
+ if (image.empty())
+ CV_Error(Error::StsBadArg, "Gradient direction feature parameter is missing. Input image parameter is required to extract this feature");
+ extractGradientDirection_(image);
+ }
+ }
+
+ if (!gradient_magnitude_.empty())
+ {
+ CV_CheckTypeEQ(gradient_magnitude_.type(), CV_32FC1, "");
+ gradient_magnitude = gradient_magnitude_.getMat();
+ }
+ else
+ {
+ if (weight_gradient_magnitude == 0.0f)
+ {
+ gradient_magnitude.create(src_size);
+ gradient_magnitude.setTo(Scalar::all(0));
+ }
+ else
+ {
+ if (image.empty())
+ CV_Error(Error::StsBadArg, "Gradient magnitude feature parameter is missing. Input image parameter is required to extract this feature");
+ extractGradientMagnitude_(image);
+ }
+ }
+
+ cleanupFeaturesTemporaryArrays_();
+ }
+
+
+ void extractEdgeFeature_(InputArray image)
+ {
+ CV_TRACE_FUNCTION();
+
+ if (edge_mode == FEATURE_CANNY)
+ {
+ CV_LOG_DEBUG(NULL, "Canny(" << edge_canny_threshold1 << ", " << edge_canny_threshold2 << ")");
+ Mat img_canny;
+ Canny(image, img_canny, edge_canny_threshold1, edge_canny_threshold2, edge_canny_apertureSize, edge_canny_L2gradient);
+#if 0
+ threshold(img_canny, non_edge_feature, 254, 1, THRESH_BINARY_INV);
+#else
+ // Canny result values are 0 or 255
+ bitwise_not(img_canny, non_edge_feature);
+ weight_non_edge_compute = weight_non_edge * (1.0f / 255.0f);
+#endif
+ }
+ else // if (edge_mode == FEATURE_ZERO_CROSSING)
+ {
+ initGrayscale_(image);
+ Mat_<short> laplacian;
+ Laplacian(grayscale_, laplacian, CV_16S, laplacianKernelSize);
+ Mat_<uchar> zero_crossing(src_size, 1);
+
+ const size_t zstep = zero_crossing.step[0];
+ for (int y = 0; y < src_size.height - 1; y++)
+ {
+ const short* row0 = laplacian.ptr<short>(y);
+ const short* row1 = laplacian.ptr<short>(y + 1);
+ uchar* zrow0 = zero_crossing.ptr<uchar>(y);
+ //uchar* zrow1 = zero_crossing.ptr<uchar>(y + 1);
+ for (int x = 0; x < src_size.width - 1; x++)
+ {
+ const int v = row0[x];
+ const int neg_v = -v;
+ // - * 1
+ // 2 3 4
+ const int v1 = row0[x + 1];
+ const int v2 = (x > 0) ? row1[x - 1] : v;
+ const int v3 = row1[x + 0];
+ const int v4 = row1[x + 1];
+ if (v < 0)
+ {
+ if (v1 > 0)
+ {
+ zrow0[x + ((v1 < neg_v) ? 1 : 0)] = 0;
+ }
+ if (v2 > 0)
+ {
+ zrow0[x + ((v2 < neg_v) ? (zstep - 1) : 0)] = 0;
+ }
+ if (v3 > 0)
+ {
+ zrow0[x + ((v3 < neg_v) ? (zstep + 0) : 0)] = 0;
+ }
+ if (v4 > 0)
+ {
+ zrow0[x + ((v4 < neg_v) ? (zstep + 1) : 0)] = 0;
+ }
+ }
+ else
+ {
+ if (v1 < 0)
+ {
+ zrow0[x + ((v1 > neg_v) ? 1 : 0)] = 0;
+ }
+ if (v2 < 0)
+ {
+ zrow0[x + ((v2 > neg_v) ? (zstep - 1) : 0)] = 0;
+ }
+ if (v3 < 0)
+ {
+ zrow0[x + ((v3 > neg_v) ? (zstep + 0) : 0)] = 0;
+ }
+ if (v4 < 0)
+ {
+ zrow0[x + ((v4 > neg_v) ? (zstep + 1) : 0)] = 0;
+ }
+ }
+ }
+ }
+
+ if (edge_gradient_magnitude_min_value > 0)
+ {
+ initImageMagnitude_(image);
+ Mat mask = image_magnitude_ < edge_gradient_magnitude_min_value;
+ zero_crossing.setTo(1, mask); // reset low-amplitude noise
+ }
+
+ non_edge_feature = zero_crossing;
+ }
+ }
+
+
+ void extractGradientDirection_(InputArray image)
+ {
+ CV_TRACE_FUNCTION();
+
+ initImageMagnitude_(image); // calls internally: initImageDerives_(image);
+ gradient_direction.create(src_size);
+ for (int y = 0; y < src_size.height; y++)
+ {
+ const float* magnitude_row = image_magnitude_.ptr<float>(y);
+ const float* Ix_row = Ix_.ptr<float>(y);
+ const float* Iy_row = Iy_.ptr<float>(y);
+ Point2f* gradient_direction_row = gradient_direction.ptr<Point2f>(y);
+ for (int x = 0; x < src_size.width; x++)
+ {
+ const float m = magnitude_row[x];
+ if (m > FLT_EPSILON)
+ {
+ float m_inv = 1.0f / m;
+ gradient_direction_row[x] = Point2f(Ix_row[x] * m_inv, Iy_row[x] * m_inv);
+ }
+ else
+ {
+ gradient_direction_row[x] = Point2f(0, 0);
+ }
+ }
+ }
+ }
+
+ void extractGradientMagnitude_(InputArray image)
+ {
+ CV_TRACE_FUNCTION();
+
+ initImageMagnitude_(image); // calls internally: initImageDerives_(image);
+ Mat m;
+ double max_m = 0;
+ if (gradient_magnitude_threshold_max > 0)
+ {
+ threshold(image_magnitude_, m, gradient_magnitude_threshold_max, 0, THRESH_TRUNC);
+ max_m = gradient_magnitude_threshold_max;
+ }
+ else
+ {
+ m = image_magnitude_;
+ minMaxLoc(m, 0, &max_m);
+ }
+ if (max_m <= FLT_EPSILON)
+ {
+ CV_LOG_INFO(NULL, "IntelligentScissorsMB: input image gradient is almost zero")
+ gradient_magnitude.create(src_size);
+ gradient_magnitude.setTo(0);
+ }
+ else
+ {
+ m.convertTo(gradient_magnitude, CV_32F, -1.0 / max_m, 1.0); // normalize and inverse to range 0..1
+ }
+ }
+
+ void applyImage(InputArray image)
+ {
+ CV_TRACE_FUNCTION();
+
+ CV_CheckType(image.type(), image.type() == CV_8UC1 || image.type() == CV_8UC3 || image.type() == CV_8UC4, "");
+
+ resetFeatures_();
+ cleanupFeaturesTemporaryArrays_();
+ extractEdgeFeature_(image);
+ extractGradientDirection_(image);
+ extractGradientMagnitude_(image);
+ cleanupFeaturesTemporaryArrays_();
+ }
+
+
+ // details: see section 3.1 of the article
+ const float* acos_table = getAcosTable();
+ float local_cost(const Point& p, const Point& q) const
+ {
+ const bool isDiag = (p.x != q.x) && (p.y != q.y);
+
+ float fG = gradient_magnitude.at<float>(q);
+
+ const Point2f diff((float)(q.x - p.x), (float)(q.y - p.y));
+
+ const Point2f Ip = gradient_direction(p);
+ const Point2f Iq = gradient_direction(q);
+
+ const Point2f Dp(Ip.y, -Ip.x); // D(p) - 90 degrees clockwise
+ const Point2f Dq(Iq.y, -Iq.x); // D(q) - 90 degrees clockwise
+
+ float dp = Dp.dot(diff); // dp(p, q)
+ float dq = Dq.dot(diff); // dq(p, q)
+ if (dp < 0)
+ {
+ dp = -dp; // ensure dp >= 0
+ dq = -dq;
+ }
+
+ const float sqrt2_inv = 0.7071067811865475f; // 1.0 / sqrt(2)
+ if (isDiag)
+ {
+ dp *= sqrt2_inv; // normalize length of (q - p)
+ dq *= sqrt2_inv; // normalize length of (q - p)
+ }
+ else
+ {
+ fG *= sqrt2_inv;
+ }
+
+#if 1
+ int dp_i = cvFloor(dp * ACOS_TABLE_SIZE); // dp is in range 0..1
+ dp_i = std::min(ACOS_TABLE_SIZE, std::max(0, dp_i));
+ int dq_i = cvFloor(dq * ACOS_TABLE_SIZE); // dq is in range -1..1
+ dq_i = std::min(ACOS_TABLE_SIZE, std::max(-ACOS_TABLE_SIZE, dq_i));
+ const float fD = acos_table[dp_i + ACOS_TABLE_SIZE] + acos_table[dq_i + ACOS_TABLE_SIZE];
+#else
+ const float CV_PI_inv = static_cast<float>(1.0 / CV_PI);
+ const float fD = (acosf(dp) + acosf(dq)) * CV_PI_inv; // TODO optimize acos calls (through tables)
+#endif
+
+ float cost =
+ weight_non_edge_compute * non_edge_feature.at<uchar>(q) +
+ weight_gradient_direction * fD +
+ weight_gradient_magnitude * fG;
+ return cost;
+ }
+
+ struct Pix
+ {
+ Point pt;
+ float cost; // NOTE: do not remove 'cost' here in favor of a cost_map(pt) lookup
+
+ inline bool operator > (const Pix &b) const
+ {
+ return cost > b.cost;
+ }
+ };
+
+ void buildMap(const Point& start_point)
+ {
+ CV_TRACE_FUNCTION();
+
+ CV_Assert(!src_size.empty());
+ CV_Assert(!gradient_magnitude.empty() && "Features are missing. applyImage() must be called first");
+
+ CV_CheckGE(weight_non_edge + weight_gradient_direction + weight_gradient_magnitude, FLT_EPSILON, "");
+
+#if 0 // debug
+ Rect wholeImage(0, 0, src_size.width, src_size.height);
+ Rect roi = Rect(start_point.x - 5, start_point.y - 5, 11, 11) & wholeImage;
+ std::cout << roi << std::endl;
+ std::cout << gradient_magnitude(roi) << std::endl;
+ std::cout << gradient_direction(roi) << std::endl;
+ std::cout << non_edge_feature(roi) << std::endl;
+#endif
+
+ optimalPathsMap.release();
+ optimalPathsMap.create(src_size);
+ optimalPathsMap.setTo(0); // optimalPathsMap(start_point) = 0;
+
+ //
+ // Section 3.2
+ // Live-Wire 2-D DP graph search.
+ //
+
+ Mat_<float> cost_map(src_size, FLT_MAX); // g(q)
+ Mat_<uchar> processed(src_size, (uchar)0); // e(q)
+
+ // Note: std::vector is faster than std::deque
+ // TODO check std::set
+ std::priority_queue< Pix, std::vector<Pix>, std::greater<Pix> > L;
+
+ cost_map(start_point) = 0;
+ L.emplace(Pix{ start_point, 0/*cost*/ });
+
+ while (!L.empty())
+ {
+ Pix pix = L.top(); L.pop();
+ Point q = pix.pt; // 'q' from the article
+ if (processed(q))
+ continue; // already processed (with lower cost, see note below)
+ processed(q) = 1;
+#if 1
+ const float cost_q = pix.cost;
+#else
+ const float cost_q = cost_map(q);
+ CV_Assert(cost_q == pix.cost);
+#endif
+ for (int n = 0; n < 8; n++) // scan neighbours
+ {
+ Point r(q.x + neighbors[n][0], q.y + neighbors[n][1]); // 'r' from the article
+ if (r.x < 0 || r.x >= src_size.width || r.y < 0 || r.y >= src_size.height)
+ continue; // out of range
+
+#if !defined(__EMSCRIPTEN__) // slower in JS
+ float& cost_r = cost_map(r);
+ if (cost_r < cost_q)
+ continue; // already processed
+#else
+ if (processed(r))
+ continue; // already processed
+
+ float& cost_r = cost_map(r);
+ CV_DbgCheckLE(cost_q, cost_r, "INTERNAL ERROR: sorted queue is corrupted");
+#endif
+
+ float cost = cost_q + local_cost(q, r); // TODO(opt): compute partially until cost < cost_r
+ if (cost < cost_r)
+ {
+#if 0 // avoid compiler warning
+ if (cost_r != FLT_MAX)
+ {
+ // In the article the point 'r' is removed from the queue L
+ // and re-inserted so that it is sorted by the new, optimized cost.
+ // We can do nothing here, because the "new" entry will be placed earlier in the sorted queue.
+ // The stale entry is skipped by the "if (processed(q))" check above, after the new optimal candidate has been processed.
+ //
+ // This approach has some performance impact, however it is much smaller than removing an element from the sorted queue.
+ // So, do nothing.
+ }
+#endif
+ cost_r = cost;
+ L.emplace(Pix{ r, cost });
+ optimalPathsMap(r) = (uchar)neighbors_encode[n];
+ }
+ }
+ }
+ }
+
+ void getContour(const Point& target, OutputArray contour_, bool backward)
+ {
+ CV_TRACE_FUNCTION();
+
+ CV_Assert(!optimalPathsMap.empty() && "buildMap() must be called before getContour()");
+
+ const int cols = optimalPathsMap.cols;
+ const int rows = optimalPathsMap.rows;
+
+ std::vector<Point> result; result.reserve(512);
+
+ size_t loop_check = 4096;
+ Point pt = target;
+ for (size_t i = 0; i < (size_t)rows * cols; i++) // don't hang on invalid maps
+ {
+ CV_CheckLT(pt.x, cols, "");
+ CV_CheckLT(pt.y, rows, "");
+ result.push_back(pt);
+ int direction = (int)optimalPathsMap(pt);
+ if (direction == 0)
+ break; // stop, start point is reached
+ CV_CheckLT(direction, 9, "Map is invalid");
+ Point next(pt.x + neighbors[direction - 1][0], pt.y + neighbors[direction - 1][1]);
+ pt = next;
+
+ if (result.size() == loop_check) // optional sanity check of invalid maps with loops (don't eat huge amount of memory)
+ {
+ loop_check *= 4; // next limit for loop check
+ for (const auto& pt_check : result)
+ {
+ CV_CheckNE(pt_check, pt, "Map is invalid. Contour loop is detected");
+ }
+ }
+ }
+
+ if (backward)
+ {
+ _InputArray(result).copyTo(contour_);
+ }
+ else
+ {
+ const int N = (int)result.size();
+ const int sz[1] = { N };
+ contour_.create(1, sz, CV_32SC2);
+ Mat_<Point> contour = contour_.getMat();
+ for (int i = 0; i < N; i++)
+ {
+ contour.at<Point>(i) = result[N - (i + 1)];
+ }
+ }
+ }
+};
+
+
+
+IntelligentScissorsMB::IntelligentScissorsMB()
+ : impl(std::make_shared<Impl>())
+{
+ // nothing
+}
+
+IntelligentScissorsMB& IntelligentScissorsMB::setWeights(float weight_non_edge, float weight_gradient_direction, float weight_gradient_magnitude)
+{
+ CV_DbgAssert(impl);
+ impl->setWeights(weight_non_edge, weight_gradient_direction, weight_gradient_magnitude);
+ return *this;
+}
+
+IntelligentScissorsMB& IntelligentScissorsMB::setGradientMagnitudeMaxLimit(float gradient_magnitude_threshold_max)
+{
+ CV_DbgAssert(impl);
+ impl->setGradientMagnitudeMaxLimit(gradient_magnitude_threshold_max);
+ return *this;
+}
+
+IntelligentScissorsMB& IntelligentScissorsMB::setEdgeFeatureZeroCrossingParameters(float gradient_magnitude_min_value)
+{
+ CV_DbgAssert(impl);
+ impl->setEdgeFeatureZeroCrossingParameters(gradient_magnitude_min_value);
+ return *this;
+}
+
+IntelligentScissorsMB& IntelligentScissorsMB::setEdgeFeatureCannyParameters(
+ double threshold1, double threshold2,
+ int apertureSize, bool L2gradient
+)
+{
+ CV_DbgAssert(impl);
+ impl->setEdgeFeatureCannyParameters(threshold1, threshold2, apertureSize, L2gradient);
+ return *this;
+}
+
+IntelligentScissorsMB& IntelligentScissorsMB::applyImage(InputArray image)
+{
+ CV_DbgAssert(impl);
+ impl->applyImage(image);
+ return *this;
+}
+
+IntelligentScissorsMB& IntelligentScissorsMB::applyImageFeatures(
+ InputArray non_edge, InputArray gradient_direction, InputArray gradient_magnitude,
+ InputArray image
+)
+{
+ CV_DbgAssert(impl);
+ impl->applyImageFeatures(non_edge, gradient_direction, gradient_magnitude, image);
+ return *this;
+}
+
+void IntelligentScissorsMB::buildMap(const Point& pt)
+{
+ CV_DbgAssert(impl);
+ impl->buildMap(pt);
+}
+
+void IntelligentScissorsMB::getContour(const Point& target, OutputArray contour, bool backward) const
+{
+ CV_DbgAssert(impl);
+ impl->getContour(target, contour, backward);
+}
+
+}} // namespace
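
As a reading aid for local_cost() and buildMap() above: the per-link cost implemented there follows the formulation of @cite Mortensen95intelligentscissors (Section 3.1). A sketch in the paper's notation (the defaults 0.43 / 0.43 / 0.14 correspond to setWeights()):

```latex
% local cost of the link from pixel p to a neighbouring pixel q
l(p, q) = \omega_Z \, f_Z(q) + \omega_D \, f_D(p, q) + \omega_G \, f_G(q)

% f_Z(q): binary non-edge feature (0 on a Laplacian zero-crossing / Canny edge, 1 otherwise)

% f_G(q): gradient magnitude cost (strong edges are cheap);
% scaled by 1/sqrt(2) for horizontal/vertical links to account for the shorter link length
f_G(q) = 1 - \frac{G(q)}{\max(G)}

% f_D(p, q): gradient direction cost, with D(p) the unit vector perpendicular
% to the gradient at p and L(p, q) the normalized bidirectional link vector between p and q
f_D(p, q) = \frac{1}{\pi} \Bigl( \arccos\bigl(d_p(p, q)\bigr) + \arccos\bigl(d_q(p, q)\bigr) \Bigr),
\qquad d_p(p, q) = D(p) \cdot L(p, q), \quad d_q(p, q) = L(p, q) \cdot D(q)
```

buildMap() then runs the Section 3.2 live-wire DP over these link costs with a min-priority queue (Dijkstra-style), and optimalPathsMap stores, for every pixel, the encoded direction of its optimal predecessor, which is exactly what getContour() walks back.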
diff --git a/modules/imgproc/test/test_intelligent_scissors.cpp b/modules/imgproc/test/test_intelligent_scissors.cpp
new file mode 100644
index 0000000000..c6b51fd6b6
--- /dev/null
+++ b/modules/imgproc/test/test_intelligent_scissors.cpp
@@ -0,0 +1,467 @@
+// This file is part of OpenCV project.
+// It is subject to the license terms in the LICENSE file found in the top-level directory
+// of this distribution and at http://opencv.org/license.html.
+
+#include "test_precomp.hpp"
+//#include "opencv2/imgproc/segmentation.hpp"
+
+namespace opencv_test { namespace {
+
+
+Mat getTestImageGray()
+{
+ static Mat m;
+ if (m.empty())
+ {
+ m = imread(findDataFile("shared/lena.png"), IMREAD_GRAYSCALE);
+ }
+ return m.clone();
+}
+
+Mat getTestImageColor()
+{
+ static Mat m;
+ if (m.empty())
+ {
+ m = imread(findDataFile("shared/lena.png"), IMREAD_COLOR);
+ }
+ return m.clone();
+}
+
+Mat getTestImage1()
+{
+ static Mat m;
+ if (m.empty())
+ {
+ m.create(Size(200, 100), CV_8UC1);
+ m.setTo(Scalar::all(128));
+ Rect roi(50, 30, 100, 40);
+ m(roi).setTo(Scalar::all(0));
+#if 0
+ imshow("image", m);
+ waitKey();
+#endif
+ }
+ return m.clone();
+}
+
+Mat getTestImage2()
+{
+ static Mat m;
+ if (m.empty())
+ {
+ m.create(Size(200, 100), CV_8UC1);
+ m.setTo(Scalar::all(128));
+ Rect roi(40, 30, 100, 40);
+ m(roi).setTo(Scalar::all(255));
+#if 0
+ imshow("image", m);
+ waitKey();
+#endif
+ }
+ return m.clone();
+}
+
+Mat getTestImage3()
+{
+ static Mat m;
+ if (m.empty())
+ {
+ m.create(Size(200, 100), CV_8UC1);
+ m.setTo(Scalar::all(128));
+ Scalar color(0,0,0,0);
+ line(m, Point(30, 50), Point(50, 50), color, 1);
+ line(m, Point(50, 50), Point(80, 30), color, 1);
+ line(m, Point(150, 50), Point(80, 30), color, 1);
+ line(m, Point(150, 50), Point(180, 50), color, 1);
+
+ line(m, Point(80, 10), Point(80, 90), Scalar::all(200), 1);
+ line(m, Point(100, 10), Point(100, 90), Scalar::all(200), 1);
+ line(m, Point(120, 10), Point(120, 90), Scalar::all(200), 1);
+#if 0
+ imshow("image", m);
+ waitKey();
+#endif
+ }
+ return m.clone();
+}
+
+Mat getTestImage4()
+{
+ static Mat m;
+ if (m.empty())
+ {
+ m.create(Size(200, 100), CV_8UC1);
+ for (int y = 0; y < m.rows; y++)
+ {
+ for (int x = 0; x < m.cols; x++)
+ {
+ float dx = (float)(x - 100);
+ float dy = (float)(y - 100);
+ float d = sqrtf(dx * dx + dy * dy);
+ m.at<uchar>(y, x) = saturate_cast<uchar>(100 + 100 * sin(d / 10 * CV_PI));
+ }
+ }
+#if 0
+ imshow("image", m);
+ waitKey();
+#endif
+ }
+ return m.clone();
+}
+
+Mat getTestImage5()
+{
+ static Mat m;
+ if (m.empty())
+ {
+ m.create(Size(200, 100), CV_8UC1);
+ for (int y = 0; y < m.rows; y++)
+ {
+ for (int x = 0; x < m.cols; x++)
+ {
+ float dx = (float)(x - 100);
+ float dy = (float)(y - 100);
+ float d = sqrtf(dx * dx + dy * dy);
+ m.at<uchar>(y, x) = saturate_cast<uchar>(x / 2 + 100 * sin(d / 10 * CV_PI));
+ }
+ }
+#if 0
+ imshow("image", m);
+ waitKey();
+#endif
+ }
+ return m.clone();
+}
+
+void show(const Mat& img, const std::vector<Point>& pts)
+{
+ if (cvtest::debugLevel >= 10)
+ {
+ Mat dst = img.clone();
+ std::vector<std::vector<Point>> contours;
+ contours.push_back(pts);
+ polylines(dst, contours, false, Scalar::all(255));
+ imshow("dst", dst);
+ waitKey();
+ }
+}
+
+TEST(Imgproc_IntelligentScissorsMB, rect)
+{
+ segmentation::IntelligentScissorsMB tool;
+
+ tool.applyImage(getTestImage1());
+
+ Point source_point(50, 30);
+ tool.buildMap(source_point);
+
+ Point target_point(100, 30);
+ std::vector<Point> pts;
+ tool.getContour(target_point, pts);
+
+ tool.applyImage(getTestImage2());
+
+ tool.buildMap(source_point);
+
+ std::vector<Point> pts2;
+ tool.getContour(target_point, pts2, true/*backward*/);
+
+ EXPECT_EQ(pts.size(), pts2.size());
+}
+
+TEST(Imgproc_IntelligentScissorsMB, lines)
+{
+ segmentation::IntelligentScissorsMB tool;
+ Mat image = getTestImage3();
+ tool.applyImage(image);
+
+ Point source_point(30, 50);
+ tool.buildMap(source_point);
+
+ Point target_point(150, 50);
+ std::vector<Point> pts;
+ tool.getContour(target_point, pts);
+
+ EXPECT_EQ((size_t)121, pts.size());
+ show(image, pts);
+}
+
+TEST(Imgproc_IntelligentScissorsMB, circles)
+{
+ segmentation::IntelligentScissorsMB tool;
+ tool.setGradientMagnitudeMaxLimit(10);
+
+ Mat image = getTestImage4();
+ tool.applyImage(image);
+
+ Point source_point(50, 50);
+ tool.buildMap(source_point);
+
+ Point target_point(150, 50);
+ std::vector<Point> pts;
+ tool.getContour(target_point, pts);
+
+ EXPECT_EQ((size_t)101, pts.size());
+ show(image, pts);
+}
+
+TEST(Imgproc_IntelligentScissorsMB, circles_gradient)
+{
+ segmentation::IntelligentScissorsMB tool;
+ Mat image = getTestImage5();
+ tool.applyImage(image);
+
+ Point source_point(50, 50);
+ tool.buildMap(source_point);
+
+ Point target_point(150, 50);
+ std::vector<Point> pts;
+ tool.getContour(target_point, pts);
+
+ EXPECT_EQ((size_t)101, pts.size());
+ show(image, pts);
+}
+
+#define PTS_SIZE_EPS 2
+
+TEST(Imgproc_IntelligentScissorsMB, grayscale)
+{
+ segmentation::IntelligentScissorsMB tool;
+
+ Mat image = getTestImageGray();
+ tool.applyImage(image);
+
+ Point source_point(275, 63);
+ tool.buildMap(source_point);
+
+ Point target_point(413, 155);
+ std::vector<Point> pts;
+ tool.getContour(target_point, pts);
+
+ size_t gold = 206;
+ EXPECT_GE(pts.size(), gold - PTS_SIZE_EPS);
+ EXPECT_LE(pts.size(), gold + PTS_SIZE_EPS);
+ show(image, pts);
+}
+
+TEST(Imgproc_IntelligentScissorsMB, check_features_grayscale_1_0_0_zero_crossing_with_limit)
+{
+ segmentation::IntelligentScissorsMB tool;
+ tool.setEdgeFeatureZeroCrossingParameters(64);
+ tool.setWeights(1.0f, 0.0f, 0.0f);
+
+ Mat image = getTestImageGray();
+ tool.applyImage(image);
+
+ Point source_point(275, 63);
+ tool.buildMap(source_point);
+
+ Point target_point(413, 155);
+ std::vector<Point> pts;
+ tool.getContour(target_point, pts);
+
+ size_t gold = 207;
+ EXPECT_GE(pts.size(), gold - PTS_SIZE_EPS);
+ EXPECT_LE(pts.size(), gold + PTS_SIZE_EPS);
+ show(image, pts);
+}
+
+TEST(Imgproc_IntelligentScissorsMB, check_features_grayscale_1_0_0_canny)
+{
+ segmentation::IntelligentScissorsMB tool;
+ tool.setEdgeFeatureCannyParameters(50, 100);
+ tool.setWeights(1.0f, 0.0f, 0.0f);
+
+ Mat image = getTestImageGray();
+ tool.applyImage(image);
+
+ Point source_point(275, 63);
+ tool.buildMap(source_point);
+
+ Point target_point(413, 155);
+ std::vector<Point> pts;
+ tool.getContour(target_point, pts);
+
+ size_t gold = 201;
+ EXPECT_GE(pts.size(), gold - PTS_SIZE_EPS);
+ EXPECT_LE(pts.size(), gold + PTS_SIZE_EPS);
+ show(image, pts);
+}
+
+TEST(Imgproc_IntelligentScissorsMB, check_features_grayscale_0_1_0)
+{
+ segmentation::IntelligentScissorsMB tool;
+ tool.setWeights(0.0f, 1.0f, 0.0f);
+
+ Mat image = getTestImageGray();
+ tool.applyImage(image);
+
+ Point source_point(275, 63);
+ tool.buildMap(source_point);
+
+ Point target_point(413, 155);
+ std::vector<Point> pts;
+ tool.getContour(target_point, pts);
+
+ size_t gold = 166;
+ EXPECT_GE(pts.size(), gold - PTS_SIZE_EPS);
+ EXPECT_LE(pts.size(), gold + PTS_SIZE_EPS);
+ show(image, pts);
+}
+
+TEST(Imgproc_IntelligentScissorsMB, check_features_grayscale_0_0_1)
+{
+ segmentation::IntelligentScissorsMB tool;
+ tool.setWeights(0.0f, 0.0f, 1.0f);
+
+ Mat image = getTestImageGray();
+ tool.applyImage(image);
+
+ Point source_point(275, 63);
+ tool.buildMap(source_point);
+
+ Point target_point(413, 155);
+ std::vector<Point> pts;
+ tool.getContour(target_point, pts);
+
+ size_t gold = 197;
+ EXPECT_GE(pts.size(), gold - PTS_SIZE_EPS);
+ EXPECT_LE(pts.size(), gold + PTS_SIZE_EPS);
+ show(image, pts);
+}
+
+TEST(Imgproc_IntelligentScissorsMB, color)
+{
+ segmentation::IntelligentScissorsMB tool;
+
+ Mat image = getTestImageColor();
+ tool.applyImage(image);
+
+ Point source_point(275, 63);
+ tool.buildMap(source_point);
+
+ Point target_point(413, 155);
+ std::vector<Point> pts;
+ tool.getContour(target_point, pts);
+
+ size_t gold = 205;
+ EXPECT_GE(pts.size(), gold - PTS_SIZE_EPS);
+ EXPECT_LE(pts.size(), gold + PTS_SIZE_EPS);
+ show(image, pts);
+}
+
+TEST(Imgproc_IntelligentScissorsMB, color_canny)
+{
+ segmentation::IntelligentScissorsMB tool;
+ tool.setEdgeFeatureCannyParameters(32, 100);
+
+ Mat image = getTestImageColor();
+ tool.applyImage(image);
+
+ Point source_point(275, 63);
+ tool.buildMap(source_point);
+
+ Point target_point(413, 155);
+ std::vector<Point> pts;
+ tool.getContour(target_point, pts);
+
+ size_t gold = 200;
+ EXPECT_GE(pts.size(), gold - PTS_SIZE_EPS);
+ EXPECT_LE(pts.size(), gold + PTS_SIZE_EPS);
+ show(image, pts);
+}
+
+
+TEST(Imgproc_IntelligentScissorsMB, color_custom_features_invalid)
+{
+ segmentation::IntelligentScissorsMB tool;
+ ASSERT_ANY_THROW(tool.applyImageFeatures(noArray(), noArray(), noArray()));
+}
+
+TEST(Imgproc_IntelligentScissorsMB, color_custom_features_edge)
+{
+ segmentation::IntelligentScissorsMB tool;
+
+ Mat image = getTestImageColor();
+
+ Mat canny_edges;
+ Canny(image, canny_edges, 32, 100, 5);
+ Mat binary_edge_feature;
+ cv::threshold(canny_edges, binary_edge_feature, 254, 1, THRESH_BINARY_INV);
+ tool.applyImageFeatures(binary_edge_feature, noArray(), noArray(), image);
+
+ Point source_point(275, 63);
+ tool.buildMap(source_point);
+
+ Point target_point(413, 155);
+ std::vector<Point> pts;
+ tool.getContour(target_point, pts);
+
+ size_t gold = 201;
+ EXPECT_GE(pts.size(), gold - PTS_SIZE_EPS);
+ EXPECT_LE(pts.size(), gold + PTS_SIZE_EPS);
+ show(image, pts);
+}
+
+TEST(Imgproc_IntelligentScissorsMB, color_custom_features_all)
+{
+ segmentation::IntelligentScissorsMB tool;
+
+ tool.setWeights(0.9f, 0.0f, 0.1f);
+
+ Mat image = getTestImageColor();
+
+ Mat canny_edges;
+ Canny(image, canny_edges, 50, 100, 5);
+ Mat binary_edge_feature; // 0, 1 values
+ cv::threshold(canny_edges, binary_edge_feature, 254, 1, THRESH_BINARY_INV);
+
+ Mat_<Point2f> gradient_direction(image.size(), Point2f(0, 0)); // normalized
+ Mat_<float> gradient_magnitude(image.size(), 0); // cost function
+ tool.applyImageFeatures(binary_edge_feature, gradient_direction, gradient_magnitude);
+
+ Point source_point(275, 63);
+ tool.buildMap(source_point);
+
+ Point target_point(413, 155);
+ std::vector<Point> pts;
+ tool.getContour(target_point, pts);
+
+ size_t gold = 201;
+ EXPECT_GE(pts.size(), gold - PTS_SIZE_EPS);
+ EXPECT_LE(pts.size(), gold + PTS_SIZE_EPS);
+ show(image, pts);
+}
+
+TEST(Imgproc_IntelligentScissorsMB, color_custom_features_edge_magnitude)
+{
+ segmentation::IntelligentScissorsMB tool;
+
+ tool.setWeights(0.9f, 0.0f, 0.1f);
+
+ Mat image = getTestImageColor();
+
+ Mat canny_edges;
+ Canny(image, canny_edges, 50, 100, 5);
+ Mat binary_edge_feature; // 0, 1 values
+ cv::threshold(canny_edges, binary_edge_feature, 254, 1, THRESH_BINARY_INV);
+
+ Mat_<float> gradient_magnitude(image.size(), 0); // cost function
+ tool.applyImageFeatures(binary_edge_feature, noArray(), gradient_magnitude);
+
+ Point source_point(275, 63);
+ tool.buildMap(source_point);
+
+ Point target_point(413, 155);
+ std::vector<Point> pts;
+ tool.getContour(target_point, pts);
+
+ size_t gold = 201;
+ EXPECT_GE(pts.size(), gold - PTS_SIZE_EPS);
+ EXPECT_LE(pts.size(), gold + PTS_SIZE_EPS);
+ show(image, pts);
+}
+
+
+}} // namespace
diff --git a/modules/js/src/core_bindings.cpp b/modules/js/src/core_bindings.cpp
index c660253fa1..4409778e83 100644
--- a/modules/js/src/core_bindings.cpp
+++ b/modules/js/src/core_bindings.cpp
@@ -87,6 +87,8 @@ namespace hal {
using namespace emscripten;
using namespace cv;
+using namespace cv::segmentation; // FIXIT
+
#ifdef HAVE_OPENCV_DNN
using namespace cv::dnn;
#endif
diff --git a/modules/js/test/test_imgproc.js b/modules/js/test/test_imgproc.js
index 9ba5cd4e38..ada315b390 100644
--- a/modules/js/test/test_imgproc.js
+++ b/modules/js/test/test_imgproc.js
@@ -977,3 +977,26 @@ QUnit.test('warpPolar', function(assert) {
96, 83, 64, 45, 32
]);
});
+
+
+QUnit.test('IntelligentScissorsMB', function(assert) {
+ const lines = new cv.Mat(50, 100, cv.CV_8U, new cv.Scalar(0));
+ lines.row(10).setTo(new cv.Scalar(255));
+ assert.ok(lines instanceof cv.Mat);
+
+ let tool = new cv.segmentation_IntelligentScissorsMB();
+ tool.applyImage(lines);
+ assert.ok(lines instanceof cv.Mat);
+ lines.delete();
+
+ tool.buildMap(new cv.Point(10, 10));
+
+ let contour = new cv.Mat();
+ tool.getContour(new cv.Point(50, 10), contour);
+ assert.equal(contour.type(), cv.CV_32SC2);
+ assert.ok(contour.total() == 41, contour.total());
+
+ tool.getContour(new cv.Point(80, 10), contour);
+ assert.equal(contour.type(), cv.CV_32SC2);
+ assert.ok(contour.total() == 71, contour.total());
+});
diff --git a/platforms/js/opencv_js.config.py b/platforms/js/opencv_js.config.py
index 6e4677f78b..2bcc7de03a 100644
--- a/platforms/js/opencv_js.config.py
+++ b/platforms/js/opencv_js.config.py
@@ -18,8 +18,21 @@ imgproc = {'': ['Canny', 'GaussianBlur', 'Laplacian', 'HoughLines', 'HoughLinesP
'matchShapes', 'matchTemplate','medianBlur', 'minAreaRect', 'minEnclosingCircle', 'moments', 'morphologyEx', \
'pointPolygonTest', 'putText','pyrDown','pyrUp','rectangle','remap', 'resize','sepFilter2D','threshold', \
'undistort','warpAffine','warpPerspective','warpPolar','watershed', \
- 'fillPoly', 'fillConvexPoly'],
- 'CLAHE': ['apply', 'collectGarbage', 'getClipLimit', 'getTilesGridSize', 'setClipLimit', 'setTilesGridSize']}
+ 'fillPoly', 'fillConvexPoly', 'polylines',
+ ],
+ 'CLAHE': ['apply', 'collectGarbage', 'getClipLimit', 'getTilesGridSize', 'setClipLimit', 'setTilesGridSize'],
+ 'segmentation_IntelligentScissorsMB': [
+ 'IntelligentScissorsMB',
+ 'setWeights',
+ 'setGradientMagnitudeMaxLimit',
+ 'setEdgeFeatureZeroCrossingParameters',
+ 'setEdgeFeatureCannyParameters',
+ 'applyImage',
+ 'applyImageFeatures',
+ 'buildMap',
+ 'getContour'
+ ],
+}
objdetect = {'': ['groupRectangles'],
'HOGDescriptor': ['load', 'HOGDescriptor', 'getDefaultPeopleDetector', 'getDaimlerPeopleDetector', 'setSVMDetector', 'detectMultiScale'],
diff --git a/samples/cpp/CMakeLists.txt b/samples/cpp/CMakeLists.txt
index 14ab6141df..5f40dba889 100644
--- a/samples/cpp/CMakeLists.txt
+++ b/samples/cpp/CMakeLists.txt
@@ -23,6 +23,9 @@ if(NOT BUILD_EXAMPLES OR NOT OCV_DEPENDENCIES_FOUND)
return()
endif()
+set(DEPS_example_snippet_imgproc_segmentation opencv_core opencv_imgproc)
+set(DEPS_example_cpp_intelligent_scissors opencv_core opencv_imgproc opencv_imgcodecs opencv_highgui)
+
project(cpp_samples)
ocv_include_modules_recurse(${OPENCV_CPP_SAMPLES_REQUIRED_DEPS})
file(GLOB_RECURSE cpp_samples RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} *.cpp)
@@ -32,11 +35,17 @@ endif()
ocv_list_filterout(cpp_samples "real_time_pose_estimation/")
foreach(sample_filename ${cpp_samples})
set(package "cpp")
- if(sample_filename MATCHES "tutorial_code")
+ if(sample_filename MATCHES "tutorial_code/snippet")
+ set(package "snippet")
+ elseif(sample_filename MATCHES "tutorial_code")
set(package "tutorial")
endif()
ocv_define_sample(tgt ${sample_filename} ${package})
- ocv_target_link_libraries(${tgt} PRIVATE ${OPENCV_LINKER_LIBS} ${OPENCV_CPP_SAMPLES_REQUIRED_DEPS})
+ set(deps ${OPENCV_CPP_SAMPLES_REQUIRED_DEPS})
+ if(DEFINED DEPS_${tgt})
+ set(deps ${DEPS_${tgt}})
+ endif()
+ ocv_target_link_libraries(${tgt} PRIVATE ${OPENCV_LINKER_LIBS} ${deps})
 if(sample_filename MATCHES "/gpu/" AND HAVE_opencv_cudaarithm AND HAVE_opencv_cudafilters)
ocv_target_link_libraries(${tgt} PRIVATE opencv_cudaarithm opencv_cudafilters)
endif()
diff --git a/samples/cpp/tutorial_code/snippets/imgproc_segmentation.cpp b/samples/cpp/tutorial_code/snippets/imgproc_segmentation.cpp
new file mode 100644
index 0000000000..b81ba34494
--- /dev/null
+++ b/samples/cpp/tutorial_code/snippets/imgproc_segmentation.cpp
@@ -0,0 +1,35 @@
+#include "opencv2/imgproc.hpp"
+#include "opencv2/imgproc/segmentation.hpp"
+
+using namespace cv;
+
+static
+void usage_example_intelligent_scissors()
+{
+ Mat image(Size(1920, 1080), CV_8UC3, Scalar::all(128));
+
+ //! [usage_example_intelligent_scissors]
+ segmentation::IntelligentScissorsMB tool;
+ tool.setEdgeFeatureCannyParameters(16, 100) // using Canny() as edge feature extractor
+ .setGradientMagnitudeMaxLimit(200);
+
+ // calculate image features
+ tool.applyImage(image);
+
+ // calculate map for specified source point
+ Point source_point(200, 100);
+ tool.buildMap(source_point);
+
+ // fast fetching of contours
+ // for specified target point and the pre-calculated map (stored internally)
+ Point target_point(400, 300);
+ std::vector<Point> pts;
+ tool.getContour(target_point, pts);
+ //! [usage_example_intelligent_scissors]
+}
+
+int main()
+{
+ usage_example_intelligent_scissors();
+ return 0;
+}
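
A small follow-up to the snippet above (illustrative only, not part of the patch): the extracted pts is an ordinary polyline, so it can be visualized the same way the tests do, e.g. via polylines() on a copy of the input image:

```cpp
#include <vector>
#include "opencv2/imgproc.hpp"

// Illustrative helper (assumed name): overlay the extracted path on a copy of the image.
static void draw_contour(const cv::Mat& image, const std::vector<cv::Point>& pts)
{
    cv::Mat canvas = image.clone();
    std::vector<std::vector<cv::Point>> contours(1, pts);
    cv::polylines(canvas, contours, false, cv::Scalar(0, 255, 0));
    // cv::imshow("Intelligent Scissors", canvas);  // requires opencv_highgui
    // cv::waitKey();
}
```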
diff --git a/samples/samples_utils.cmake b/samples/samples_utils.cmake
index 9459099200..2780b479fc 100644
--- a/samples/samples_utils.cmake
+++ b/samples/samples_utils.cmake
@@ -4,6 +4,9 @@
function(ocv_define_sample out_target source sub)
get_filename_component(name "${source}" NAME_WE)
set(the_target "example_${sub}_${name}")
+ if(OPENCV_DUMP_EXAMPLE_TARGET)
+ message(STATUS "Example: ${the_target} (${source})")
+ endif()
add_executable(${the_target} "${source}")
if(TARGET Threads::Threads AND NOT OPENCV_EXAMPLES_DISABLE_THREADS)
target_link_libraries(${the_target} PRIVATE Threads::Threads)