diff --git a/cmake/OpenCVFindIPPIW.cmake b/cmake/OpenCVFindIPPIW.cmake
index bd3b0b8422..5c3d8a0ea2 100644
--- a/cmake/OpenCVFindIPPIW.cmake
+++ b/cmake/OpenCVFindIPPIW.cmake
@@ -29,6 +29,16 @@ macro(ippiw_debugmsg MESSAGE)
     message(STATUS "${MESSAGE}")
   endif()
 endmacro()
+
+macro(ippiw_done)
+  foreach(__file ${IPP_IW_LICENSE_FILES})
+    if(EXISTS "${__file}")
+      ocv_install_3rdparty_licenses(ippiw "${__file}")
+    endif()
+  endforeach()
+  return()
+endmacro()
+
 file(TO_CMAKE_PATH "${IPPROOT}" IPPROOT)

 # This function detects Intel IPP Integration Wrappers version by analyzing .h file
@@ -81,7 +91,7 @@ macro(ippiw_setup PATH BUILD)
       if(EXISTS "${FILE}")
         set(HAVE_IPP_IW_LL 1)
       endif()
-      return()
+      ippiw_done()
     else()
       ippiw_debugmsg("sources\tno")
     endif()
@@ -120,7 +130,7 @@ macro(ippiw_setup PATH BUILD)
       if(EXISTS "${FILE}")
        set(HAVE_IPP_IW_LL 1)
       endif()
-      return()
+      ippiw_done()
     else()
       ippiw_debugmsg("binaries\tno")
     endif()
@@ -147,14 +157,12 @@ if(BUILD_IPP_IW)
   ippiw_setup("${OpenCV_SOURCE_DIR}/3rdparty/ippiw" 1)

   set(IPPIW_ROOT "${IPPROOT}/../iw")
-  ocv_install_3rdparty_licenses(ippiw
-      "${IPPIW_ROOT}/../support.txt"
-      "${IPPIW_ROOT}/../third-party-programs.txt")
-  if(WIN32)
-    ocv_install_3rdparty_licenses(ippiw "${IPPIW_ROOT}/../EULA.rtf")
-  else()
-    ocv_install_3rdparty_licenses(ippiw "${IPPIW_ROOT}/../EULA.txt")
-  endif()
+  set(IPP_IW_LICENSE_FILES ${IPP_IW_LICENSE_FILES_EXTRA}
+      "${IPPIW_ROOT}/../support.txt"
+      "${IPPIW_ROOT}/../third-party-programs.txt"
+      "${IPPIW_ROOT}/../EULA.rtf"
+      "${IPPIW_ROOT}/../EULA.txt"
+  )

   # Package sources
   get_filename_component(__PATH "${IPPROOT}/../iw/" ABSOLUTE)
@@ -167,10 +175,11 @@ if(BUILD_IPP_IW)
     include("${OpenCV_SOURCE_DIR}/3rdparty/ippicv/ippicv.cmake")
     download_ippicv(TEMP_ROOT)
     set(IPPIW_ROOT "${TEMP_ROOT}/iw/")
-    ocv_install_3rdparty_licenses(ippiw
-        "${IPPIW_ROOT}/../EULA.txt"
-        "${IPPIW_ROOT}/../support.txt"
-        "${IPPIW_ROOT}/../third-party-programs.txt")
+    set(IPP_IW_LICENSE_FILES ${IPP_IW_LICENSE_FILES_EXTRA}
+        "${IPPIW_ROOT}/../EULA.txt"
+        "${IPPIW_ROOT}/../support.txt"
+        "${IPPIW_ROOT}/../third-party-programs.txt"
+    )
     ippiw_setup("${IPPIW_ROOT}" 1)
   endif()

diff --git a/modules/core/src/matmul.dispatch.cpp b/modules/core/src/matmul.dispatch.cpp
index 6fcdb4c700..a9b82aee88 100644
--- a/modules/core/src/matmul.dispatch.cpp
+++ b/modules/core/src/matmul.dispatch.cpp
@@ -442,6 +442,12 @@ void transform(InputArray _src, OutputArray _dst, InputArray _mtx)
     _dst.create( src.size(), CV_MAKETYPE(depth, dcn) );
     Mat dst = _dst.getMat();

+    if (src.data == dst.data)  // inplace case
+    {
+        CV_Assert(scn == dcn);
+        src = src.clone();  // TODO Add performance warning
+    }
+
     int mtype = depth == CV_32S || depth == CV_64F ? CV_64F : CV_32F;
     AutoBuffer<double> _mbuf;
     double* mbuf;
diff --git a/modules/dnn/misc/python/test/test_dnn.py b/modules/dnn/misc/python/test/test_dnn.py
index ed1e7a20ee..7230e16c30 100644
--- a/modules/dnn/misc/python/test/test_dnn.py
+++ b/modules/dnn/misc/python/test/test_dnn.py
@@ -215,7 +215,7 @@ class dnn_test(NewOpenCVTests):
                         testScores, testBoxes, 0.5, scoresDiff, iouDiff)

     def test_async(self):
-        timeout = 500*10**6  # in nanoseconds (500ms)
+        timeout = 10*1000*10**6  # in nanoseconds (10 sec)
         testdata_required = bool(os.environ.get('OPENCV_DNN_TEST_REQUIRE_TESTDATA', False))
         proto = self.find_dnn_file('dnn/layers/layer_convolution.prototxt', required=testdata_required)
         model = self.find_dnn_file('dnn/layers/layer_convolution.caffemodel', required=testdata_required)
diff --git a/modules/dnn/test/test_darknet_importer.cpp b/modules/dnn/test/test_darknet_importer.cpp
index e038652e2f..26637ebbe6 100644
--- a/modules/dnn/test/test_darknet_importer.cpp
+++ b/modules/dnn/test/test_darknet_importer.cpp
@@ -329,7 +329,7 @@ TEST_P(Test_Darknet_nets, TinyYoloVoc)
 }

 #ifdef HAVE_INF_ENGINE
-static const std::chrono::milliseconds async_timeout(500);
+static const std::chrono::milliseconds async_timeout(10000);

 typedef testing::TestWithParam > Test_Darknet_nets_async;
 TEST_P(Test_Darknet_nets_async, Accuracy)
diff --git a/modules/dnn/test/test_halide_layers.cpp b/modules/dnn/test/test_halide_layers.cpp
index 6b66722936..6c21be3cde 100644
--- a/modules/dnn/test/test_halide_layers.cpp
+++ b/modules/dnn/test/test_halide_layers.cpp
@@ -758,6 +758,12 @@ TEST_P(Eltwise, Accuracy)
         applyTestTag(CV_TEST_TAG_DNN_SKIP_IE, CV_TEST_TAG_DNN_SKIP_IE_2019R1, CV_TEST_TAG_DNN_SKIP_IE_2019R1_1);
 #endif

+#if defined(INF_ENGINE_RELEASE)
+    if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_OPENCL &&
+        op == "sum" && numConv == 1 && !weighted)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE);
+#endif
+
     Net net;

     std::vector<int> convLayerIds(numConv);
diff --git a/modules/dnn/test/test_misc.cpp b/modules/dnn/test/test_misc.cpp
index 6d45a89a05..9fd06996f9 100644
--- a/modules/dnn/test/test_misc.cpp
+++ b/modules/dnn/test/test_misc.cpp
@@ -363,7 +363,7 @@ TEST(Net, forwardAndRetrieve)
 }

 #ifdef HAVE_INF_ENGINE
-static const std::chrono::milliseconds async_timeout(500);
+static const std::chrono::milliseconds async_timeout(10000);

 // This test runs network in synchronous mode for different inputs and then
 // runs the same model asynchronously for the same inputs.
diff --git a/modules/imgcodecs/src/loadsave.cpp b/modules/imgcodecs/src/loadsave.cpp
index 8b56f0cac5..5dcd241f8c 100644
--- a/modules/imgcodecs/src/loadsave.cpp
+++ b/modules/imgcodecs/src/loadsave.cpp
@@ -40,7 +40,7 @@
 //M*/

 //
-//  Loading and saving IPL images.
+//  Loading and saving images.
 //

 #include "precomp.hpp"

@@ -393,7 +393,6 @@ static void ApplyExifOrientation(const Mat& buf, Mat& img)
 * LOAD_MAT=2
 * }
 * @param[in] mat Reference to C++ Mat object (If LOAD_MAT)
-* @param[in] scale_denom Scale value
 *
 */
 static bool
@@ -459,11 +458,11 @@ imread_( const String& filename, int flags, Mat& mat )
         int type = decoder->type();
         if( (flags & IMREAD_LOAD_GDAL) != IMREAD_LOAD_GDAL && flags != IMREAD_UNCHANGED )
         {
-            if( (flags & CV_LOAD_IMAGE_ANYDEPTH) == 0 )
+            if( (flags & IMREAD_ANYDEPTH) == 0 )
                 type = CV_MAKETYPE(CV_8U, CV_MAT_CN(type));

-            if( (flags & CV_LOAD_IMAGE_COLOR) != 0 ||
-               ((flags & CV_LOAD_IMAGE_ANYCOLOR) != 0 && CV_MAT_CN(type) > 1) )
+            if( (flags & IMREAD_COLOR) != 0 ||
+               ((flags & IMREAD_ANYCOLOR) != 0 && CV_MAT_CN(type) > 1) )
                 type = CV_MAKETYPE(CV_MAT_DEPTH(type), 3);
             else
                 type = CV_MAKETYPE(CV_MAT_DEPTH(type), 1);
@@ -558,11 +557,11 @@ imreadmulti_(const String& filename, int flags, std::vector<Mat>& mats)
         int type = decoder->type();
         if( (flags & IMREAD_LOAD_GDAL) != IMREAD_LOAD_GDAL && flags != IMREAD_UNCHANGED )
         {
-            if ((flags & CV_LOAD_IMAGE_ANYDEPTH) == 0)
+            if ((flags & IMREAD_ANYDEPTH) == 0)
                 type = CV_MAKETYPE(CV_8U, CV_MAT_CN(type));

             if ((flags & CV_LOAD_IMAGE_COLOR) != 0 ||
-               ((flags & CV_LOAD_IMAGE_ANYCOLOR) != 0 && CV_MAT_CN(type) > 1))
+               ((flags & IMREAD_ANYCOLOR) != 0 && CV_MAT_CN(type) > 1))
                 type = CV_MAKETYPE(CV_MAT_DEPTH(type), 3);
             else
                 type = CV_MAKETYPE(CV_MAT_DEPTH(type), 1);
@@ -791,11 +790,11 @@ imdecode_( const Mat& buf, int flags, Mat& mat )
         int type = decoder->type();
         if( (flags & IMREAD_LOAD_GDAL) != IMREAD_LOAD_GDAL && flags != IMREAD_UNCHANGED )
         {
-            if( (flags & CV_LOAD_IMAGE_ANYDEPTH) == 0 )
+            if( (flags & IMREAD_ANYDEPTH) == 0 )
                 type = CV_MAKETYPE(CV_8U, CV_MAT_CN(type));

-            if( (flags & CV_LOAD_IMAGE_COLOR) != 0 ||
-               ((flags & CV_LOAD_IMAGE_ANYCOLOR) != 0 && CV_MAT_CN(type) > 1) )
+            if( (flags & IMREAD_COLOR) != 0 ||
+               ((flags & IMREAD_ANYCOLOR) != 0 && CV_MAT_CN(type) > 1) )
                 type = CV_MAKETYPE(CV_MAT_DEPTH(type), 3);
             else
                 type = CV_MAKETYPE(CV_MAT_DEPTH(type), 1);
diff --git a/modules/imgproc/src/pyramids.cpp b/modules/imgproc/src/pyramids.cpp
index b8bfabf9f3..f6a985578e 100644
--- a/modules/imgproc/src/pyramids.cpp
+++ b/modules/imgproc/src/pyramids.cpp
@@ -741,13 +741,13 @@ pyrDown_( const Mat& _src, Mat& _dst, int borderType )
     CV_Assert( ssize.width > 0 && ssize.height > 0 &&
                std::abs(dsize.width*2 - ssize.width) <= 2 &&
                std::abs(dsize.height*2 - ssize.height) <= 2 );
-    int k, x, sy0 = -PD_SZ/2, sy = sy0, width0 = std::min((ssize.width-PD_SZ/2-1)/2 + 1, dsize.width);
+    int sy0 = -PD_SZ/2, sy = sy0, width0 = std::min((ssize.width-PD_SZ/2-1)/2 + 1, dsize.width);

-    for( x = 0; x <= PD_SZ+1; x++ )
+    for (int x = 0; x <= PD_SZ+1; x++)
     {
         int sx0 = borderInterpolate(x - PD_SZ/2, ssize.width, borderType)*cn;
         int sx1 = borderInterpolate(x + width0*2 - PD_SZ/2, ssize.width, borderType)*cn;
-        for( k = 0; k < cn; k++ )
+        for (int k = 0; k < cn; k++)
         {
             tabL[x*cn + k] = sx0 + k;
             tabR[x*cn + k] = sx1 + k;
@@ -758,10 +758,10 @@ pyrDown_( const Mat& _src, Mat& _dst, int borderType )
     dsize.width *= cn;
     width0 *= cn;

-    for( x = 0; x < dsize.width; x++ )
+    for (int x = 0; x < dsize.width; x++)
         tabM[x] = (x/cn)*2*cn + x % cn;

-    for( int y = 0; y < dsize.height; y++ )
+    for (int y = 0; y < dsize.height; y++)
     {
         T* dst = _dst.ptr<T>(y);
         WT *row0, *row1, *row2, *row3, *row4;
@@ -772,15 +772,13 @@ pyrDown_( const Mat& _src, Mat& _dst, int borderType )
             WT* row = buf + ((sy - sy0) % PD_SZ)*bufstep;
             int _sy = borderInterpolate(sy, ssize.height, borderType);
             const T* src = _src.ptr<T>(_sy);

-            int limit = cn;
-            const int* tab = tabL;
-            for( x = 0;;)
-            {
-                for( ; x < limit; x++ )
+            do {
+                int x = 0;
+                for( ; x < cn; x++ )
                 {
-                    row[x] = src[tab[x+cn*2]]*6 + (src[tab[x+cn]] + src[tab[x+cn*3]])*4 +
-                        src[tab[x]] + src[tab[x+cn*4]];
+                    row[x] = src[tabL[x+cn*2]]*6 + (src[tabL[x+cn]] + src[tabL[x+cn*3]])*4 +
+                        src[tabL[x]] + src[tabL[x+cn*4]];
                 }

                 if( x == dsize.width )
@@ -840,18 +838,22 @@ pyrDown_( const Mat& _src, Mat& _dst, int borderType )
                 }
             }

-            limit = dsize.width;
-            tab = tabR - x;
-        }
+            // tabR
+            for (int x_ = 0; x < dsize.width; x++, x_++)
+            {
+                row[x] = src[tabR[x_+cn*2]]*6 + (src[tabR[x_+cn]] + src[tabR[x_+cn*3]])*4 +
+                    src[tabR[x_]] + src[tabR[x_+cn*4]];
+            }
+            } while (0);
         }

         // do vertical convolution and decimation and write the result to the destination image
-        for( k = 0; k < PD_SZ; k++ )
+        for (int k = 0; k < PD_SZ; k++)
             rows[k] = buf + ((y*2 - PD_SZ/2 + k - sy0) % PD_SZ)*bufstep;
         row0 = rows[0]; row1 = rows[1]; row2 = rows[2]; row3 = rows[3]; row4 = rows[4];

-        x = PyrDownVecV<WT, T>(rows, dst, dsize.width);
-        for( ; x < dsize.width; x++ )
+        int x = PyrDownVecV<WT, T>(rows, dst, dsize.width);
+        for (; x < dsize.width; x++ )
             dst[x] = castOp(row2[x]*6 + (row1[x] + row3[x])*4 + row0[x] + row4[x]);
     }
 }
diff --git a/modules/js/src/embindgen.py b/modules/js/src/embindgen.py
index 098d51e3e5..06e7582eca 100644
--- a/modules/js/src/embindgen.py
+++ b/modules/js/src/embindgen.py
@@ -112,7 +112,7 @@ imgproc = {'': ['Canny', 'GaussianBlur', 'Laplacian', 'HoughLines', 'HoughLinesP
            'goodFeaturesToTrack','grabCut','initUndistortRectifyMap', 'integral','integral2', 'isContourConvex', 'line', \
            'matchShapes', 'matchTemplate','medianBlur', 'minAreaRect', 'minEnclosingCircle', 'moments', 'morphologyEx', \
            'pointPolygonTest', 'putText','pyrDown','pyrUp','rectangle','remap', 'resize','sepFilter2D','threshold', \
-           'undistort','warpAffine','warpPerspective','watershed', \
+           'undistort','warpAffine','warpPerspective','warpPolar','watershed', \
            'fillPoly', 'fillConvexPoly'],
           'CLAHE': ['apply', 'collectGarbage', 'getClipLimit', 'getTilesGridSize', 'setClipLimit', 'setTilesGridSize']}

@@ -171,7 +171,7 @@ aruco = {'': ['detectMarkers', 'drawDetectedMarkers', 'drawAxis', 'estimatePoseS
          'aruco_CharucoBoard': ['create', 'draw'],
         }

-calib3d = {'': ['findHomography','calibrateCameraExtended', 'drawFrameAxes', 'getDefaultNewCameraMatrix', 'initUndistortRectifyMap']}
+calib3d = {'': ['findHomography', 'calibrateCameraExtended', 'drawFrameAxes', 'estimateAffine2D', 'getDefaultNewCameraMatrix', 'initUndistortRectifyMap', 'Rodrigues']}

 def makeWhiteList(module_list):
     wl = {}
diff --git a/modules/js/test/test_calib3d.js b/modules/js/test/test_calib3d.js
index f3fe87e0a3..b0001dbc67 100644
--- a/modules/js/test/test_calib3d.js
+++ b/modules/js/test/test_calib3d.js
@@ -41,3 +41,51 @@ QUnit.test('findHomography', function(assert) {

     assert.ok(mat instanceof cv.Mat);
 });
+
+QUnit.test('Rodrigues', function(assert) {
+    // Converts a rotation matrix to a rotation vector and vice versa;
+    // data64F is the output array.
+    const rvec0 = cv.matFromArray(1, 3, cv.CV_64F, [1, 1, 1]);
+    let rMat0 = new cv.Mat();
+    let rvec1 = new cv.Mat();
+
+    // Args: input Mat, output Mat. The function writes into the output Mat, so it does not return anything.
+    // cv.Rodrigues(InputArray=src, OutputArray=dst, jacobian=0)
+    // https://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html#void%20Rodrigues(InputArray%20src,%20OutputArray%20dst,%20OutputArray%20jacobian)
+    // Vector to matrix: the input vector is 3 elements long and each element is 1.
+    cv.Rodrigues(rvec0, rMat0);
+
+    assert.ok(rMat0.data64F.length == 9);
+    assert.ok(rMat0.data64F[0] > 0.22 && rMat0.data64F[0] < 0.23);
+
+    // Convert the matrix back to a vector; it should match what we started with:
+    // 3 elements long, each element close to 1.
+    cv.Rodrigues(rMat0, rvec1);
+
+    assert.ok(rvec1.data64F.length == 3);
+    assert.ok(rvec1.data64F[0] > 0.9 && rvec1.data64F[0] < 1.01);
+    // Answer should be around 1: 0.9999999999999999
+});
+
+QUnit.test('estimateAffine2D', function(assert) {
+    const inputs = cv.matFromArray(4, 1, cv.CV_32FC2, [
+        1, 1,
+        80, 0,
+        0, 80,
+        80, 80
+    ]);
+    const outputs = cv.matFromArray(4, 1, cv.CV_32FC2, [
+        21, 51,
+        70, 77,
+        40, 40,
+        10, 70
+    ]);
+    const M = cv.estimateAffine2D(inputs, outputs);
+    assert.ok(M instanceof cv.Mat);
+    assert.deepEqual(Array.from(M.data), [
+        23, 55, 97, 126, 87, 139, 227, 63, 0, 0,
+        0, 0, 0, 0, 232, 191, 71, 246, 12, 68,
+        165, 35, 53, 64, 99, 56, 27, 66, 14, 254,
+        212, 63, 103, 102, 102, 102, 102, 102, 182, 191,
+        195, 252, 174, 22, 55, 97, 73, 64
+    ]);
+});
diff --git a/modules/js/test/test_imgproc.js b/modules/js/test/test_imgproc.js
index 14977f5516..9ba5cd4e38 100644
--- a/modules/js/test/test_imgproc.js
+++ b/modules/js/test/test_imgproc.js
@@ -960,3 +960,20 @@ QUnit.test('test_filter', function(assert) {
         src.delete();
     }
 });
+
+QUnit.test('warpPolar', function(assert) {
+    const lines = new cv.Mat(255, 255, cv.CV_8U, new cv.Scalar(0));
+    for (let r = 0; r < lines.rows; r++) {
+        lines.row(r).setTo(new cv.Scalar(r));
+    }
+    cv.warpPolar(lines, lines, { width: 5, height: 5 }, new cv.Point(2, 2), 3,
+                 cv.INTER_CUBIC | cv.WARP_FILL_OUTLIERS | cv.WARP_INVERSE_MAP);
+    assert.ok(lines instanceof cv.Mat);
+    assert.deepEqual(Array.from(lines.data), [
+        159, 172, 191, 210, 223,
+        146, 159, 191, 223, 236,
+        128, 128,   0,   0,   0,
+        109,  96,  64,  32,  19,
+         96,  83,  64,  45,  32
+    ]);
+});
diff --git a/modules/objdetect/src/qrcode.cpp b/modules/objdetect/src/qrcode.cpp
index bde26d4b55..3cfff4a141 100644
--- a/modules/objdetect/src/qrcode.cpp
+++ b/modules/objdetect/src/qrcode.cpp
@@ -40,9 +40,10 @@ protected:
     bool testBypassRoute(vector<Point2f> hull, int start, int finish);
     inline double getCosVectors(Point2f a, Point2f b, Point2f c);

-    Mat barcode, bin_barcode, straight_barcode;
+    Mat barcode, bin_barcode, resized_barcode, resized_bin_barcode, straight_barcode;
     vector<Point2f> localization_points, transformation_points;
     double eps_vertical, eps_horizontal, coeff_expansion;
+    enum resize_direction { ZOOMING, SHRINKING, UNCHANGED } purpose;
 };

@@ -50,24 +51,36 @@ void QRDetect::init(const Mat& src, double eps_vertical_, double eps_horizontal_
 {
     CV_TRACE_FUNCTION();
     CV_Assert(!src.empty());
+    barcode = src.clone();
     const double min_side = std::min(src.size().width, src.size().height);
     if (min_side < 512.0)
     {
+        purpose = ZOOMING;
         coeff_expansion = 512.0 / min_side;
         const int width  = cvRound(src.size().width  * coeff_expansion);
         const int height = cvRound(src.size().height * coeff_expansion);
         Size new_size(width, height);
         resize(src, barcode, new_size, 0, 0, INTER_LINEAR);
     }
+    else if (min_side > 512.0)
+    {
+        purpose = SHRINKING;
+        coeff_expansion = min_side / 512.0;
+        const int width  = cvRound(src.size().width  / coeff_expansion);
+        const int height = cvRound(src.size().height / coeff_expansion);
+        Size new_size(width, height);
+        resize(src, resized_barcode, new_size, 0, 0, INTER_AREA);
+    }
     else
     {
+        purpose = UNCHANGED;
         coeff_expansion = 1.0;
-        barcode = src;
     }
     eps_vertical   = eps_vertical_;
     eps_horizontal = eps_horizontal_;

     adaptiveThreshold(barcode, bin_barcode, 255, ADAPTIVE_THRESH_GAUSSIAN_C, THRESH_BINARY, 83, 2);
+    adaptiveThreshold(resized_barcode, resized_bin_barcode, 255, ADAPTIVE_THRESH_GAUSSIAN_C, THRESH_BINARY, 83, 2);
 }

@@ -140,78 +153,96 @@ vector<Point2f> QRDetect::separateVerticalLines(const vector<Vec3d> &list_lines)
 {
     CV_TRACE_FUNCTION();
     vector<Vec3d> result;
-    int temp_length = 0;
+    int temp_length;
+    vector<Point2f> point2f_result;
     uint8_t next_pixel;
     vector<double> test_lines;

-    for (size_t pnt = 0; pnt < list_lines.size(); pnt++)
+    for (int coeff_epsilon = 1; coeff_epsilon < 10; coeff_epsilon++)
     {
-        const int x = cvRound(list_lines[pnt][0] + list_lines[pnt][2] * 0.5);
-        const int y = cvRound(list_lines[pnt][1]);
+        result.clear();
+        temp_length = 0;
+        point2f_result.clear();

-        // --------------- Search vertical up-lines --------------- //
+        for (size_t pnt = 0; pnt < list_lines.size(); pnt++)
+        {
+            const int x = cvRound(list_lines[pnt][0] + list_lines[pnt][2] * 0.5);
+            const int y = cvRound(list_lines[pnt][1]);

-        test_lines.clear();
-        uint8_t future_pixel_up = 255;
+            // --------------- Search vertical up-lines --------------- //

-        for (int j = y; j < bin_barcode.rows - 1; j++)
-        {
-            next_pixel = bin_barcode.ptr<uint8_t>(j + 1)[x];
-            temp_length++;
-            if (next_pixel == future_pixel_up)
+            test_lines.clear();
+            uint8_t future_pixel_up = 255;
+
+            for (int j = y; j < bin_barcode.rows - 1; j++)
             {
-                future_pixel_up = 255 - future_pixel_up;
-                test_lines.push_back(temp_length);
-                temp_length = 0;
-                if (test_lines.size() == 3) { break; }
+                next_pixel = bin_barcode.ptr<uint8_t>(j + 1)[x];
+                temp_length++;
+                if (next_pixel == future_pixel_up)
+                {
+                    future_pixel_up = 255 - future_pixel_up;
+                    test_lines.push_back(temp_length);
+                    temp_length = 0;
+                    if (test_lines.size() == 3) { break; }
+                }
             }
-        }

             // --------------- Search vertical down-lines --------------- //

-        uint8_t future_pixel_down = 255;
-        for (int j = y; j >= 1; j--)
-        {
-            next_pixel = bin_barcode.ptr<uint8_t>(j - 1)[x];
-            temp_length++;
-            if (next_pixel == future_pixel_down)
+            uint8_t future_pixel_down = 255;
+            for (int j = y; j >= 1; j--)
             {
-                future_pixel_down = 255 - future_pixel_down;
-                test_lines.push_back(temp_length);
-                temp_length = 0;
-                if (test_lines.size() == 6) { break; }
+                next_pixel = bin_barcode.ptr<uint8_t>(j - 1)[x];
+                temp_length++;
+                if (next_pixel == future_pixel_down)
+                {
+                    future_pixel_down = 255 - future_pixel_down;
+                    test_lines.push_back(temp_length);
+                    temp_length = 0;
+                    if (test_lines.size() == 6) { break; }
+                }
             }
-        }

             // --------------- Compute vertical lines --------------- //

-        if (test_lines.size() == 6)
-        {
-            double length = 0.0, weight = 0.0;
+            if (test_lines.size() == 6)
+            {
+                double length = 0.0, weight = 0.0;

-            for (size_t i = 0; i < test_lines.size(); i++) { length += test_lines[i]; }
+                for (size_t i = 0; i < test_lines.size(); i++) { length += test_lines[i]; }

-            CV_Assert(length > 0);
-            for (size_t i = 0; i < test_lines.size(); i++)
-            {
-                if (i % 3 != 0) { weight += fabs((test_lines[i] / length) - 1.0/ 7.0); }
-                else            { weight += fabs((test_lines[i] / length) - 3.0/14.0); }
-            }
+                CV_Assert(length > 0);
+                for (size_t i = 0; i < test_lines.size(); i++)
+                {
+                    if (i % 3 != 0) { weight += fabs((test_lines[i] / length) - 1.0/ 7.0); }
+                    else            { weight += fabs((test_lines[i] / length) - 3.0/14.0); }
+                }

-            if(weight < eps_horizontal)
-            {
-                result.push_back(list_lines[pnt]);
+                if (weight < eps_horizontal * coeff_epsilon)
+                {
+                    result.push_back(list_lines[pnt]);
+                }
             }
         }
-    }
+        if (result.size() > 2)
+        {
+            for (size_t i = 0; i < result.size(); i++)
+            {
+                point2f_result.push_back(
+                      Point2f(static_cast<float>(result[i][0] + result[i][2] * 0.5),
+                              static_cast<float>(result[i][1])));
+            }

-    vector<Point2f> point2f_result;
-    for (size_t i = 0; i < result.size(); i++)
-    {
-        point2f_result.push_back(
-              Point2f(static_cast<float>(result[i][0] + result[i][2] * 0.5),
-                      static_cast<float>(result[i][1])));
+            vector<Point2f> centers;
+            Mat labels;
+            double compactness;
+            compactness = kmeans(point2f_result, 3, labels,
+                 TermCriteria( TermCriteria::EPS + TermCriteria::COUNT, 10, 0.1),
+                 3, KMEANS_PP_CENTERS, centers);
+            if (compactness == 0) { continue; }
+            if (compactness > 0) { break; }
+        }
     }
     return point2f_result;
 }
@@ -316,7 +347,7 @@ bool QRDetect::localization()
     vector<Vec3d> list_lines_x = searchHorizontalLines();
     if( list_lines_x.empty() ) { return false; }
     vector<Point2f> list_lines_y = separateVerticalLines(list_lines_x);
-    if( list_lines_y.size() < 3 ) { return false; }
+    if( list_lines_y.empty() ) { return false; }

     vector<Point2f> centers;
     Mat labels;
@@ -325,9 +356,56 @@ bool QRDetect::localization()
            3, KMEANS_PP_CENTERS, localization_points);

     fixationPoints(localization_points);
-    if (localization_points.size() != 3) { return false; }

-    if (coeff_expansion > 1.0)
+    bool square_flag = false, local_points_flag = false;
+    double triangle_sides[3];
+    triangle_sides[0] = norm(localization_points[0] - localization_points[1]);
+    triangle_sides[1] = norm(localization_points[1] - localization_points[2]);
+    triangle_sides[2] = norm(localization_points[2] - localization_points[0]);
+
+    double triangle_perim = (triangle_sides[0] + triangle_sides[1] + triangle_sides[2]) / 2;
+
+    double square_area = sqrt((triangle_perim * (triangle_perim - triangle_sides[0])
+                                              * (triangle_perim - triangle_sides[1])
+                                              * (triangle_perim - triangle_sides[2]))) * 2;
+    double img_square_area = bin_barcode.cols * bin_barcode.rows;
+
+    if (square_area > (img_square_area * 0.2))
+    {
+        square_flag = true;
+    }
+    if (localization_points.size() != 3)
+    {
+        local_points_flag = true;
+    }
+    if ((square_flag || local_points_flag) && purpose == SHRINKING)
+    {
+        localization_points.clear();
+        bin_barcode = resized_bin_barcode.clone();
+        list_lines_x = searchHorizontalLines();
+        if( list_lines_x.empty() ) { return false; }
+        list_lines_y = separateVerticalLines(list_lines_x);
+        if( list_lines_y.empty() ) { return false; }
+
+        kmeans(list_lines_y, 3, labels,
+               TermCriteria( TermCriteria::EPS + TermCriteria::COUNT, 10, 0.1),
+               3, KMEANS_PP_CENTERS, localization_points);
+
+        fixationPoints(localization_points);
+        if (localization_points.size() != 3) { return false; }
+
+        const int width  = cvRound(bin_barcode.size().width  * coeff_expansion);
+        const int height = cvRound(bin_barcode.size().height * coeff_expansion);
+        Size new_size(width, height);
+        Mat intermediate;
+        resize(bin_barcode, intermediate, new_size, 0, 0, INTER_LINEAR);
+        bin_barcode = intermediate.clone();
+        for (size_t i = 0; i < localization_points.size(); i++)
+        {
+            localization_points[i] *= coeff_expansion;
+        }
+    }
+    if (purpose == ZOOMING)
     {
         const int width = cvRound(bin_barcode.size().width / coeff_expansion);
         const int height = cvRound(bin_barcode.size().height / coeff_expansion);
@@ -475,6 +553,13 @@ bool QRDetect::computeTransformationPoints()

     vector<Point2f> quadrilateral = getQuadrilateral(transformation_points);
     transformation_points = quadrilateral;

+    int width = bin_barcode.size().width;
+    int height = bin_barcode.size().height;
+    for (size_t i = 0; i < transformation_points.size(); i++)
+    {
+        if ((cvRound(transformation_points[i].x) > width) ||
+            (cvRound(transformation_points[i].y) > height)) { return false; }
+    }
     return true;
 }

@@ -826,9 +911,27 @@ protected:
 void QRDecode::init(const Mat &src, const vector<Point2f> &points)
 {
     CV_TRACE_FUNCTION();
-    original = src.clone();
-    intermediate = Mat::zeros(src.size(), CV_8UC1);
-    original_points = points;
+    vector<Point2f> bbox = points;
+    double coeff_expansion;
+    const int min_side = std::min(src.size().width, src.size().height);
+    if (min_side > 512)
+    {
+        coeff_expansion = min_side / 512;
+        const int width  = cvRound(src.size().width  / coeff_expansion);
+        const int height = cvRound(src.size().height / coeff_expansion);
+        Size new_size(width, height);
+        resize(src, original, new_size, 0, 0, INTER_AREA);
+        for (size_t i = 0; i < bbox.size(); i++)
+        {
+            bbox[i] /= static_cast<float>(coeff_expansion);
+        }
+    }
+    else
+    {
+        original = src.clone();
+    }
+    intermediate = Mat::zeros(original.size(), CV_8UC1);
+    original_points = bbox;
     version = 0;
     version_size = 0;
     test_perspective_size = 251;
diff --git a/modules/objdetect/test/test_qrcode.cpp b/modules/objdetect/test/test_qrcode.cpp
index 15474b05aa..8c02f3db0c 100644
--- a/modules/objdetect/test/test_qrcode.cpp
+++ b/modules/objdetect/test/test_qrcode.cpp
@@ -15,6 +15,12 @@ std::string qrcode_images_name[] = {
   "russian.jpg", "kanji.jpg", "link_github_ocv.jpg",
   "link_ocv.jpg", "link_wiki_cv.jpg"
 };
+std::string qrcode_images_close[] = {
+  "close_1.png", "close_2.png", "close_3.png", "close_4.png", "close_5.png"
+};
+std::string qrcode_images_monitor[] = {
+  "monitor_1.png", "monitor_2.png", "monitor_3.png", "monitor_4.png", "monitor_5.png"
+};
 // #define UPDATE_QRCODE_TEST_DATA
 #ifdef UPDATE_QRCODE_TEST_DATA

@@ -51,6 +57,83 @@ TEST(Objdetect_QRCode, generate_test_data)
     file_config.release();
 }

+TEST(Objdetect_QRCode_Close, generate_test_data)
+{
+    const std::string root = "qrcode/close/";
+    const std::string dataset_config = findDataFile(root + "dataset_config.json");
+    FileStorage file_config(dataset_config, FileStorage::WRITE);
+
+    file_config << "close_images" << "[";
+    size_t close_count = sizeof(qrcode_images_close) / sizeof(qrcode_images_close[0]);
+    for (size_t i = 0; i < close_count; i++)
+    {
+        file_config << "{:" << "image_name" << qrcode_images_close[i];
+        std::string image_path = findDataFile(root + qrcode_images_close[i]);
+        std::vector<Point> corners;
+        Mat src = imread(image_path, IMREAD_GRAYSCALE), barcode, straight_barcode;
+        std::string decoded_info;
+        ASSERT_FALSE(src.empty()) << "Can't read image: " << image_path;
+        const double min_side = std::min(src.size().width, src.size().height);
+        double coeff_expansion = 1024.0 / min_side;
+        const int width = cvRound(src.size().width * coeff_expansion);
+        const int height = cvRound(src.size().height * coeff_expansion);
+        Size new_size(width, height);
+        resize(src, barcode, new_size, 0, 0, INTER_LINEAR);
+        EXPECT_TRUE(detectQRCode(barcode, corners));
+#ifdef HAVE_QUIRC
+        EXPECT_TRUE(decodeQRCode(barcode, corners, decoded_info, straight_barcode));
+#endif
+        file_config << "x" << "[:";
+        for (size_t j = 0; j < corners.size(); j++) { file_config << corners[j].x; }
+        file_config << "]";
+        file_config << "y" << "[:";
+        for (size_t j = 0; j < corners.size(); j++) { file_config << corners[j].y; }
+        file_config << "]";
+        file_config << "info" << decoded_info;
+        file_config << "}";
+    }
+    file_config << "]";
+    file_config.release();
+}
+TEST(Objdetect_QRCode_Monitor, generate_test_data)
+{
+    const std::string root = "qrcode/monitor/";
+    const std::string dataset_config = findDataFile(root + "dataset_config.json");
+    FileStorage file_config(dataset_config, FileStorage::WRITE);
+
+    file_config << "monitor_images" << "[";
+    size_t monitor_count = sizeof(qrcode_images_monitor) / sizeof(qrcode_images_monitor[0]);
+    for (size_t i = 0; i < monitor_count; i++)
+    {
+        file_config << "{:" << "image_name" << qrcode_images_monitor[i];
+        std::string image_path = findDataFile(root + qrcode_images_monitor[i]);
+        std::vector<Point> corners;
+        Mat src = imread(image_path, IMREAD_GRAYSCALE), barcode, straight_barcode;
+        std::string decoded_info;
+        ASSERT_FALSE(src.empty()) << "Can't read image: " << image_path;
+        const double min_side = std::min(src.size().width, src.size().height);
+        double coeff_expansion = 1024.0 / min_side;
+        const int width = cvRound(src.size().width * coeff_expansion);
+        const int height = cvRound(src.size().height * coeff_expansion);
+        Size new_size(width, height);
+        resize(src, barcode, new_size, 0, 0, INTER_LINEAR);
+        EXPECT_TRUE(detectQRCode(barcode, corners));
+#ifdef HAVE_QUIRC
+        EXPECT_TRUE(decodeQRCode(barcode, corners, decoded_info, straight_barcode));
+#endif
+        file_config << "x" << "[:";
+        for (size_t j = 0; j < corners.size(); j++) { file_config << corners[j].x; }
+        file_config << "]";
+        file_config << "y" << "[:";
+        for (size_t j = 0; j < corners.size(); j++) { file_config << corners[j].y; }
+        file_config << "]";
+        file_config << "info" << decoded_info;
+        file_config << "}";
+    }
+    file_config << "]";
+    file_config.release();
+}
+
 #else

 typedef testing::TestWithParam< std::string > Objdetect_QRCode;
@@ -113,9 +196,139 @@ TEST_P(Objdetect_QRCode, regression)
     }
 }

-INSTANTIATE_TEST_CASE_P(/**/, Objdetect_QRCode, testing::ValuesIn(qrcode_images_name));
+typedef testing::TestWithParam< std::string > Objdetect_QRCode_Close;
+TEST_P(Objdetect_QRCode_Close, regression)
+{
+    const std::string name_current_image = GetParam();
+    const std::string root = "qrcode/close/";
+    const int pixels_error = 3;
+
+    std::string image_path = findDataFile(root + name_current_image);
+    Mat src = imread(image_path, IMREAD_GRAYSCALE), barcode, straight_barcode;
+    ASSERT_FALSE(src.empty()) << "Can't read image: " << image_path;
+    const double min_side = std::min(src.size().width, src.size().height);
+    double coeff_expansion = 1024.0 / min_side;
+    const int width = cvRound(src.size().width * coeff_expansion);
+    const int height = cvRound(src.size().height * coeff_expansion);
+    Size new_size(width, height);
+    resize(src, barcode, new_size, 0, 0, INTER_LINEAR);
+    std::vector<Point> corners;
+    std::string decoded_info;
+    QRCodeDetector qrcode;
+#ifdef HAVE_QUIRC
+    decoded_info = qrcode.detectAndDecode(barcode, corners, straight_barcode);
+    ASSERT_FALSE(corners.empty());
+    ASSERT_FALSE(decoded_info.empty());
+#else
+    ASSERT_TRUE(qrcode.detect(src, corners));
+#endif
+    const std::string dataset_config = findDataFile(root + "dataset_config.json");
+    FileStorage file_config(dataset_config, FileStorage::READ);
+    ASSERT_TRUE(file_config.isOpened()) << "Can't read validation data: " << dataset_config;
+    {
+        FileNode images_list = file_config["close_images"];
+        size_t images_count = static_cast<size_t>(images_list.size());
+        ASSERT_GT(images_count, 0u) << "Can't find validation data entries in 'test_images': " << dataset_config;
+        for (size_t index = 0; index < images_count; index++)
+        {
+            FileNode config = images_list[(int)index];
+            std::string name_test_image = config["image_name"];
+            if (name_test_image == name_current_image)
+            {
+                for (int i = 0; i < 4; i++)
+                {
+                    int x = config["x"][i];
+                    int y = config["y"][i];
+                    EXPECT_NEAR(x, corners[i].x, pixels_error);
+                    EXPECT_NEAR(y, corners[i].y, pixels_error);
+                }
+
+#ifdef HAVE_QUIRC
+                std::string original_info = config["info"];
+                EXPECT_EQ(decoded_info, original_info);
+#endif
+
+                return; // done
+            }
+        }
+        std::cerr
+            << "Not found results for '" << name_current_image
+            << "' image in config file:" << dataset_config << std::endl
+            << "Re-run tests with enabled UPDATE_QRCODE_TEST_DATA macro to update test data."
+            << std::endl;
+    }
+}
+
+typedef testing::TestWithParam< std::string > Objdetect_QRCode_Monitor;
+TEST_P(Objdetect_QRCode_Monitor, regression)
+{
+    const std::string name_current_image = GetParam();
+    const std::string root = "qrcode/monitor/";
+    const int pixels_error = 3;
+
+    std::string image_path = findDataFile(root + name_current_image);
+    Mat src = imread(image_path, IMREAD_GRAYSCALE), barcode, straight_barcode;
+    ASSERT_FALSE(src.empty()) << "Can't read image: " << image_path;
+    const double min_side = std::min(src.size().width, src.size().height);
+    double coeff_expansion = 1024.0 / min_side;
+    const int width = cvRound(src.size().width * coeff_expansion);
+    const int height = cvRound(src.size().height * coeff_expansion);
+    Size new_size(width, height);
+    resize(src, barcode, new_size, 0, 0, INTER_LINEAR);
+    std::vector<Point> corners;
+    std::string decoded_info;
+    QRCodeDetector qrcode;
+#ifdef HAVE_QUIRC
+    decoded_info = qrcode.detectAndDecode(barcode, corners, straight_barcode);
+    ASSERT_FALSE(corners.empty());
+    ASSERT_FALSE(decoded_info.empty());
+#else
+    ASSERT_TRUE(qrcode.detect(src, corners));
+#endif
+
+    const std::string dataset_config = findDataFile(root + "dataset_config.json");
+    FileStorage file_config(dataset_config, FileStorage::READ);
+    ASSERT_TRUE(file_config.isOpened()) << "Can't read validation data: " << dataset_config;
+    {
+        FileNode images_list = file_config["monitor_images"];
+        size_t images_count = static_cast<size_t>(images_list.size());
+        ASSERT_GT(images_count, 0u) << "Can't find validation data entries in 'test_images': " << dataset_config;
+
+        for (size_t index = 0; index < images_count; index++)
+        {
+            FileNode config = images_list[(int)index];
+            std::string name_test_image = config["image_name"];
+            if (name_test_image == name_current_image)
+            {
+                for (int i = 0; i < 4; i++)
+                {
+                    int x = config["x"][i];
+                    int y = config["y"][i];
+                    EXPECT_NEAR(x, corners[i].x, pixels_error);
+                    EXPECT_NEAR(y, corners[i].y, pixels_error);
+                }
+
+#ifdef HAVE_QUIRC
+                std::string original_info = config["info"];
+                EXPECT_EQ(decoded_info, original_info);
+#endif
+
+                return; // done
+            }
+        }
+        std::cerr
+            << "Not found results for '" << name_current_image
+            << "' image in config file:" << dataset_config << std::endl
+            << "Re-run tests with enabled UPDATE_QRCODE_TEST_DATA macro to update test data."
+            << std::endl;
+    }
+}
+
+INSTANTIATE_TEST_CASE_P(/**/, Objdetect_QRCode, testing::ValuesIn(qrcode_images_name));
+INSTANTIATE_TEST_CASE_P(/**/, Objdetect_QRCode_Close, testing::ValuesIn(qrcode_images_close));
+INSTANTIATE_TEST_CASE_P(/**/, Objdetect_QRCode_Monitor, testing::ValuesIn(qrcode_images_monitor));

 TEST(Objdetect_QRCode_basic, not_found_qrcode)
 {
diff --git a/modules/videoio/src/cap_dshow.cpp b/modules/videoio/src/cap_dshow.cpp
index a09e1018c4..96ee479106 100644
--- a/modules/videoio/src/cap_dshow.cpp
+++ b/modules/videoio/src/cap_dshow.cpp
@@ -2946,6 +2946,22 @@ int videoInput::start(int deviceID, videoDevice *VD){
     DebugPrintOut("SETUP: Device is setup and ready to capture.\n\n");
     VD->readyToCapture = true;

+    // optionally save the DirectShow graph to a file
+    const char* graph_filename = getenv("OPENCV_DSHOW_SAVEGRAPH_FILENAME");
+    if (graph_filename) {
+        size_t filename_len = strlen(graph_filename);
+        std::vector<wchar_t> wfilename(filename_len + 1);
+        size_t len = mbstowcs(&wfilename[0], graph_filename, filename_len + 1);
+        CV_Assert(len == filename_len);
+
+        HRESULT res = SaveGraphFile(VD->pGraph, &wfilename[0]);
+        if (SUCCEEDED(res)) {
+            DebugPrintOut("Saved DSHOW graph to %s\n", graph_filename);
+        } else {
+            DebugPrintOut("Failed to save DSHOW graph to %s\n", graph_filename);
+        }
+    }
+
     //Release filters - seen someone else do this
     //looks like it solved the freezes