Merge remote-tracking branch 'upstream/3.4' into merge-3.4

pull/14758/head
Alexander Alekhin 6 years ago
commit ddcf388270
  1. CMakeLists.txt (2)
  2. cmake/checks/cpu_avx512skx.cpp (15)
  3. doc/CMakeLists.txt (2)
  4. modules/calib3d/include/opencv2/calib3d.hpp (23)
  5. modules/calib3d/src/fundam.cpp (2)
  6. modules/calib3d/src/ippe.hpp (8)
  7. modules/calib3d/test/test_cameracalibration_artificial.cpp (2)
  8. modules/core/include/opencv2/core.hpp (6)
  9. modules/core/include/opencv2/core/core_c.h (2)
  10. modules/core/include/opencv2/core/cvdef.h (6)
  11. modules/core/include/opencv2/core/operations.hpp (2)
  12. modules/core/include/opencv2/core/optim.hpp (4)
  13. modules/core/src/datastructs.cpp (2)
  14. modules/core/src/mathfuncs_core.simd.hpp (2)
  15. modules/features2d/include/opencv2/features2d.hpp (2)
  16. modules/features2d/misc/java/gen_dict.json (3)
  17. modules/features2d/misc/java/test/SIMPLEBLOBFeatureDetectorTest.java (91)
  18. modules/imgcodecs/src/exif.cpp (2)
  19. modules/imgcodecs/src/rgbe.cpp (2)
  20. modules/java/generator/gen_java.py (204)
  21. modules/java/generator/templates/java_class.prolog (3)
  22. modules/java/generator/templates/java_class_inherited.prolog (3)
  23. modules/java/generator/templates/java_module.prolog (3)
  24. modules/java/jar/CMakeLists.txt (2)
  25. modules/java/jar/build.xml.in (25)
  26. modules/ml/include/opencv2/ml.hpp (2)
  27. modules/objdetect/misc/java/test/QRCodeDetectorTest.java (2)
  28. modules/objdetect/misc/python/test/test_qrcode_detect.py (18)
  29. modules/objdetect/src/qrcode.cpp (1)
  30. modules/photo/src/inpaint.cpp (8)
  31. modules/photo/test/test_inpaint.cpp (18)
  32. modules/python/common.cmake (1)
  33. modules/python/src2/cv2.cpp (8)
  34. modules/python/src2/hdr_parser.py (2)
  35. modules/ts/src/ts_gtest.cpp (4)
  36. modules/video/src/ecc.cpp (2)
  37. modules/video/src/lkpyramid.cpp (4)
  38. modules/videoio/include/opencv2/videoio.hpp (4)
  39. platforms/android/build_sdk.py (37)
  40. samples/cpp/videocapture_gstreamer_pipeline.cpp (5)
  41. samples/cpp/videocapture_openni.cpp (2)
  42. samples/python/tutorial_code/imgProc/BasicGeometricDrawing/basic_geometric_drawing.py (18)
  43. samples/python/tutorial_code/imgProc/morph_lines_detection/morph_lines_detection.py (4)

@ -32,6 +32,8 @@ endif()
option(ENABLE_PIC "Generate position independent code (necessary for shared libraries)" TRUE)
set(CMAKE_POSITION_INDEPENDENT_CODE ${ENABLE_PIC})
set(OPENCV_MATHJAX_RELPATH "https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.0" CACHE STRING "URI to a MathJax installation")
# Following block can break build in case of cross-compiling
# but CMAKE_CROSSCOMPILING variable will be set only on project(OpenCV) command
# so we will try to detect cross-compiling by the presence of CMAKE_TOOLCHAIN_FILE

@ -1,5 +1,17 @@
#if defined __AVX512__ || defined __AVX512F__
#include <immintrin.h>
// Workaround for problem with GCC 5-6 in -O0 mode
struct v_uint32x16
{
__m512i val;
explicit v_uint32x16(__m512i v) : val(v) {}
};
inline v_uint32x16 operator << (const v_uint32x16& a, int imm)
{
return v_uint32x16(_mm512_slli_epi32(a.val, imm));
}
void test()
{
__m512i zmm = _mm512_setzero_si512();
@ -7,10 +19,13 @@ void test()
__m256i b = _mm256_abs_epi64(a); // VL
__m512i c = _mm512_abs_epi8(zmm); // BW
__m512i d = _mm512_broadcast_i32x8(b); // DQ
v_uint32x16 e(d); e = e << 10;
__m512i f = _mm512_packus_epi32(d,d);
#if defined __GNUC__ && defined __x86_64__
asm volatile ("" : : : "zmm16", "zmm17", "zmm18", "zmm19");
#endif
}
#else
#error "AVX512-SKX is not supported"
#endif

@ -19,8 +19,6 @@ if(DOXYGEN_FOUND)
unset(CMAKE_DOXYGEN_TUTORIAL_CONTRIB_ROOT)
unset(CMAKE_DOXYGEN_TUTORIAL_JS_ROOT)
set(OPENCV_MATHJAX_RELPATH "https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.0" CACHE STRING "URI to a MathJax installation")
set(OPENCV_DOCS_EXCLUDE_CUDA ON)
if(";${OPENCV_MODULES_EXTRA};" MATCHES ";cudev;")
set(OPENCV_DOCS_EXCLUDE_CUDA OFF)

@ -546,15 +546,14 @@ CV_EXPORTS_W void matMulDeriv( InputArray A, InputArray B, OutputArray dABdA, Ou
@param tvec2 Second translation vector.
@param rvec3 Output rotation vector of the superposition.
@param tvec3 Output translation vector of the superposition.
@param dr3dr1
@param dr3dt1
@param dr3dr2
@param dr3dt2
@param dt3dr1
@param dt3dt1
@param dt3dr2
@param dt3dt2 Optional output derivatives of rvec3 or tvec3 with regard to rvec1, rvec2, tvec1 and
tvec2, respectively.
@param dr3dr1 Optional output derivative of rvec3 with regard to rvec1
@param dr3dt1 Optional output derivative of rvec3 with regard to tvec1
@param dr3dr2 Optional output derivative of rvec3 with regard to rvec2
@param dr3dt2 Optional output derivative of rvec3 with regard to tvec2
@param dt3dr1 Optional output derivative of tvec3 with regard to rvec1
@param dt3dt1 Optional output derivative of tvec3 with regard to tvec1
@param dt3dr2 Optional output derivative of tvec3 with regard to rvec2
@param dt3dt2 Optional output derivative of tvec3 with regard to tvec2
The functions compute:
@ -3129,7 +3128,7 @@ namespace fisheye
@param D Input vector of distortion coefficients \f$(k_1, k_2, k_3, k_4)\f$.
@param Knew Camera matrix of the distorted image. By default, it is the identity matrix but you
may additionally scale and shift the result by using a different matrix.
@param new_size
@param new_size the new size
The function transforms an image to compensate radial and tangential lens distortion.
@ -3155,14 +3154,14 @@ namespace fisheye
/** @brief Estimates new camera matrix for undistortion or rectification.
@param K Camera matrix \f$K = \vecthreethree{f_x}{0}{c_x}{0}{f_y}{c_y}{0}{0}{1}\f$.
@param image_size
@param image_size Size of the image
@param D Input vector of distortion coefficients \f$(k_1, k_2, k_3, k_4)\f$.
@param R Rectification transformation in the object space: 3x3 1-channel, or vector: 3x1/1x3
1-channel or 1x1 3-channel
@param P New camera matrix (3x3) or new projection matrix (3x4)
@param balance Sets the new focal length in range between the min focal length and the max focal
length. Balance is in range of [0, 1].
@param new_size
@param new_size the new size
@param fov_scale Divisor for new focal length.
*/
CV_EXPORTS_W void estimateNewCameraMatrixForUndistortRectify(InputArray K, InputArray D, const Size &image_size, InputArray R,
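The derivative parameters documented above for composeRT are optional outputs. A minimal Python sketch of how they come back from the standard cv2 binding (the input vectors here are made up for illustration):

    import numpy as np
    import cv2 as cv

    # Two hypothetical rotation/translation pairs to compose.
    rvec1, tvec1 = np.zeros((3, 1)), np.array([[1.0], [0.0], [0.0]])
    rvec2, tvec2 = np.array([[0.0], [0.0], [0.1]]), np.zeros((3, 1))

    out = cv.composeRT(rvec1, tvec1, rvec2, tvec2)
    rvec3, tvec3 = out[0], out[1]
    # The remaining entries of `out` are the Jacobians listed above
    # (dr3dr1, dr3dt1, dr3dr2, dr3dt2, dt3dr1, dt3dt1, dt3dr2, dt3dt2).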

@ -72,7 +72,7 @@ public:
// are geometrically consistent. We check if every 3 correspondences sets
// fulfills the constraint.
//
// The usefullness of this constraint is explained in the paper:
// The usefulness of this constraint is explained in the paper:
//
// "Speeding-up homography estimation in mobile devices"
// Journal of Real-Time Image Processing. 2013. DOI: 10.1007/s11554-012-0314-1

@ -122,10 +122,10 @@ private:
* For highest accuracy the Jacobian should be computed at the centroid of the point correspondences (see the IPPE paper for the explanation of this).
* For a point (ux,uy) on the object plane, suppose the homography H maps (ux,uy) to a point (p,q) in the image (in normalized pixel coordinates).
* The Jacobian matrix [J00, J01; J10,J11] is the Jacobian of the mapping evaluated at (ux,uy).
* @param j00 Homography jacobian coefficent at (ux,uy)
* @param j01 Homography jacobian coefficent at (ux,uy)
* @param j10 Homography jacobian coefficent at (ux,uy)
* @param j11 Homography jacobian coefficent at (ux,uy)
* @param j00 Homography jacobian coefficient at (ux,uy)
* @param j01 Homography jacobian coefficient at (ux,uy)
* @param j10 Homography jacobian coefficient at (ux,uy)
* @param j11 Homography jacobian coefficient at (ux,uy)
* @param p The x coordinate of point (ux,uy) mapped into the image (undistorted and normalized position)
* @param q The y coordinate of point (ux,uy) mapped into the image (undistorted and normalized position)
*/

@ -145,7 +145,7 @@ protected:
if (fail)
{
// commented according to vp123's recomendation. TODO - improve accuaracy
// commented according to vp123's recommendation. TODO - improve accuracy
//ts->set_failed_test_info(cvtest::TS::FAIL_BAD_ACCURACY); ss
}
ts->printf( cvtest::TS::LOG, "%d) DistCoeff exp=(%.2f, %.2f, %.4f, %.4f %.2f)\n", r, k1, k2, p1, p2, k3);

@ -291,9 +291,9 @@ if src was not a ROI, use borderType | #BORDER_ISOLATED.
@param src Source image.
@param dst Destination image of the same type as src and the size Size(src.cols+left+right,
src.rows+top+bottom) .
@param top
@param bottom
@param left
@param top the top pixels
@param bottom the bottom pixels
@param left the left pixels
@param right Parameter specifying how many pixels in each direction from the source image rectangle
to extrapolate. For example, top=1, bottom=1, left=1, right=1 mean that 1 pixel-wide border needs
to be built.
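For reference, the top/bottom/left/right parameters map straight onto the Python binding. A small sketch, assuming cv2 and an arbitrary 3x3 input:

    import numpy as np
    import cv2 as cv

    src = np.zeros((3, 3), np.uint8)
    # top=1, bottom=1, left=2, right=2 -> result is 5x7, padded with the constant value 255
    dst = cv.copyMakeBorder(src, 1, 1, 2, 2, cv.BORDER_CONSTANT, value=255)
    print(dst.shape)  # (5, 7)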

@ -579,7 +579,7 @@ CvNArrayIterator;
#define CV_NO_CN_CHECK 2
#define CV_NO_SIZE_CHECK 4
/** initializes iterator that traverses through several arrays simulteneously
/** initializes iterator that traverses through several arrays simultaneously
(the function together with cvNextArraySlice is used for
N-ari element-wise operations) */
CVAPI(int) cvInitNArrayIterator( int count, CvArr** arrs,

@ -357,10 +357,10 @@ Cv64suf;
# define OPENCV_DISABLE_DEPRECATED_COMPATIBILITY
#endif
#ifdef CVAPI_EXPORTS
# if (defined _WIN32 || defined WINCE || defined __CYGWIN__)
#ifndef CV_EXPORTS
# if (defined _WIN32 || defined WINCE || defined __CYGWIN__) && defined(CVAPI_EXPORTS)
# define CV_EXPORTS __declspec(dllexport)
# elif defined __GNUC__ && __GNUC__ >= 4
# elif defined __GNUC__ && __GNUC__ >= 4 && (defined(CVAPI_EXPORTS) || defined(__APPLE__))
# define CV_EXPORTS __attribute__ ((visibility ("default")))
# endif
#endif

@ -398,7 +398,7 @@ inline unsigned RNG::next()
return (unsigned)state;
}
//! returns the next unifomly-distributed random number of the specified type
//! returns the next uniformly-distributed random number of the specified type
template<typename _Tp> static inline _Tp randu()
{
return (_Tp)theRNG();

@ -219,10 +219,10 @@ converge to it. Another obvious restriction is that it should be possible to com
a function at any point, thus it is preferable to have analytic expression for gradient and
computational burden should be born by the user.
The latter responsibility is accompilished via the getGradient method of a
The latter responsibility is accomplished via the getGradient method of a
MinProblemSolver::Function interface (which represents function being optimized). This method takes
point a point in *n*-dimensional space (first argument represents the array of coordinates of that
point) and comput its gradient (it should be stored in the second argument as an array).
point) and compute its gradient (it should be stored in the second argument as an array).
@note class ConjGradSolver thus does not add any new methods to the basic MinProblemSolver interface.

@ -3368,7 +3368,7 @@ cvTreeToNodeSeq( const void* first, int header_size, CvMemStorage* storage )
typedef struct CvTreeNode
{
int flags; /* micsellaneous flags */
int flags; /* miscellaneous flags */
int header_size; /* size of sequence header */
struct CvTreeNode* h_prev; /* previous sequence */
struct CvTreeNode* h_next; /* next sequence */

@ -422,7 +422,7 @@ void log64f(const double *src, double *dst, int n)
#define EXPPOLY_32F_A0 .9670371139572337719125840413672004409288e-2
// the code below uses _mm_cast* intrinsics, which are not avialable on VS2005
// the code below uses _mm_cast* intrinsics, which are not available on VS2005
#if (defined _MSC_VER && _MSC_VER < 1500) || \
(!defined __APPLE__ && defined __GNUC__ && __GNUC__*100 + __GNUC_MINOR__ < 402)
#undef CV_SSE2

@ -341,7 +341,7 @@ public:
but it is a little faster to compute.
@param patchSize size of the patch used by the oriented BRIEF descriptor. Of course, on smaller
pyramid layers the perceived image area covered by a feature will be larger.
@param fastThreshold
@param fastThreshold the fast threshold
*/
CV_WRAP static Ptr<ORB> create(int nfeatures=500, float scaleFactor=1.2f, int nlevels=8, int edgeThreshold=31,
int firstLevel=0, int WTA_K=2, ORB::ScoreType scoreType=ORB::HARRIS_SCORE, int patchSize=31, int fastThreshold=20);
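A quick Python sketch of how fastThreshold is passed to the factory (assuming cv2; the image here is synthetic):

    import numpy as np
    import cv2 as cv

    img = np.random.randint(0, 256, (256, 256), np.uint8)
    # Same defaults as the C++ declaration above; only fastThreshold is overridden.
    orb = cv.ORB_create(nfeatures=500, fastThreshold=10)
    keypoints = orb.detect(img, None)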

@ -1,7 +1,4 @@
{
"class_ignore_list" : [
"SimpleBlobDetector"
],
"type_dict" : {
"Feature2D": {
"j_type": "Feature2D",

@ -1,11 +1,62 @@
package org.opencv.test.features2d;
import java.util.Arrays;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.KeyPoint;
import org.opencv.test.OpenCVTestCase;
import org.opencv.test.OpenCVTestRunner;
import org.opencv.imgproc.Imgproc;
import org.opencv.features2d.Feature2D;
import org.opencv.features2d.SimpleBlobDetector;
public class SIMPLEBLOBFeatureDetectorTest extends OpenCVTestCase {
Feature2D detector;
int matSize;
KeyPoint[] truth;
private Mat getMaskImg() {
Mat mask = new Mat(matSize, matSize, CvType.CV_8U, new Scalar(255));
Mat right = mask.submat(0, matSize, matSize / 2, matSize);
right.setTo(new Scalar(0));
return mask;
}
private Mat getTestImg() {
int center = matSize / 2;
int offset = 40;
Mat img = new Mat(matSize, matSize, CvType.CV_8U, new Scalar(255));
Imgproc.circle(img, new Point(center - offset, center), 24, new Scalar(0), -1);
Imgproc.circle(img, new Point(center + offset, center), 20, new Scalar(50), -1);
Imgproc.circle(img, new Point(center, center - offset), 18, new Scalar(100), -1);
Imgproc.circle(img, new Point(center, center + offset), 14, new Scalar(150), -1);
Imgproc.circle(img, new Point(center, center), 10, new Scalar(200), -1);
return img;
}
@Override
protected void setUp() throws Exception {
super.setUp();
detector = SimpleBlobDetector.create();
matSize = 200;
truth = new KeyPoint[] {
new KeyPoint( 140, 100, 41.036568f, -1, 0, 0, -1),
new KeyPoint( 60, 100, 48.538486f, -1, 0, 0, -1),
new KeyPoint(100, 60, 36.769554f, -1, 0, 0, -1),
new KeyPoint(100, 140, 28.635643f, -1, 0, 0, -1),
new KeyPoint(100, 100, 20.880613f, -1, 0, 0, -1)
};
}
public void testCreate() {
fail("Not yet implemented");
assertNotNull(detector);
}
public void testDetectListOfMatListOfListOfKeyPoint() {
@ -17,23 +68,51 @@ public class SIMPLEBLOBFeatureDetectorTest extends OpenCVTestCase {
}
public void testDetectMatListOfKeyPoint() {
fail("Not yet implemented");
Mat img = getTestImg();
MatOfKeyPoint keypoints = new MatOfKeyPoint();
detector.detect(img, keypoints);
assertListKeyPointEquals(Arrays.asList(truth), keypoints.toList(), EPS);
}
public void testDetectMatListOfKeyPointMat() {
fail("Not yet implemented");
Mat img = getTestImg();
Mat mask = getMaskImg();
MatOfKeyPoint keypoints = new MatOfKeyPoint();
detector.detect(img, keypoints, mask);
assertListKeyPointEquals(Arrays.asList(truth[1]), keypoints.toList(), EPS);
}
public void testEmpty() {
// assertFalse(detector.empty());
fail("Not yet implemented");
}
public void testRead() {
fail("Not yet implemented");
Mat img = getTestImg();
MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
detector.detect(img, keypoints1);
String filename = OpenCVTestRunner.getTempFileName("yml");
writeFile(filename, "%YAML:1.0\nthresholdStep: 10\nminThreshold: 50\nmaxThreshold: 220\nminRepeatability: 2\nfilterByArea: true\nminArea: 800\nmaxArea: 5000\n");
detector.read(filename);
MatOfKeyPoint keypoints2 = new MatOfKeyPoint();
detector.detect(img, keypoints2);
assertTrue(keypoints2.total() <= keypoints1.total());
}
public void testWrite() {
fail("Not yet implemented");
}
String filename = OpenCVTestRunner.getTempFileName("xml");
detector.write(filename);
String truth = "<?xml version=\"1.0\"?>\n<opencv_storage>\n<format>3</format>\n<thresholdStep>10.</thresholdStep>\n<minThreshold>50.</minThreshold>\n<maxThreshold>220.</maxThreshold>\n<minRepeatability>2</minRepeatability>\n<minDistBetweenBlobs>10.</minDistBetweenBlobs>\n<filterByColor>1</filterByColor>\n<blobColor>0</blobColor>\n<filterByArea>1</filterByArea>\n<minArea>25.</minArea>\n<maxArea>5000.</maxArea>\n<filterByCircularity>0</filterByCircularity>\n<minCircularity>8.0000001192092896e-01</minCircularity>\n<maxCircularity>3.4028234663852886e+38</maxCircularity>\n<filterByInertia>1</filterByInertia>\n<minInertiaRatio>1.0000000149011612e-01</minInertiaRatio>\n<maxInertiaRatio>3.4028234663852886e+38</maxInertiaRatio>\n<filterByConvexity>1</filterByConvexity>\n<minConvexity>9.4999998807907104e-01</minConvexity>\n<maxConvexity>3.4028234663852886e+38</maxConvexity>\n</opencv_storage>\n";
assertEquals(truth, readFile(filename));
}
}
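The scenario the Java test covers translates almost one-to-one to Python; a rough sketch, assuming cv2 and an image built like getTestImg() above:

    import numpy as np
    import cv2 as cv

    img = np.full((200, 200), 255, np.uint8)
    cv.circle(img, (60, 100), 24, 0, -1)   # one of the dark blobs drawn in getTestImg()

    detector = cv.SimpleBlobDetector_create()
    keypoints = detector.detect(img)
    # read()/write() round-trip the detector parameters through a FileStorage file,
    # which is what testRead/testWrite above exercise on the Java side.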

@ -215,7 +215,7 @@ size_t ExifReader::getFieldSize ()
* @brief Filling m_exif member with exif directory elements
* This is internal function and is not exposed to client
*
* @return The function doesn't return any value. In case of unsiccessful parsing
* @return The function doesn't return any value. In case of unsuccessful parsing
* the m_exif member is not filled up
*/
void ExifReader::parseExif()

@ -51,7 +51,7 @@
// developed by Greg Ward. It handles the conversions between rgbe and
// pixels consisting of floats. The data is assumed to be an array of floats.
// By default there are three floats per pixel in the order red, green, blue.
// (RGBE_DATA_??? values control this.) Only the mimimal header reading and
// (RGBE_DATA_??? values control this.) Only the minimal header reading and
// writing is implemented. Each routine does error checking and will return
// a status value as defined below. This code is intended as a skeleton so
// feel free to modify it to suit your needs.

@ -3,6 +3,7 @@
import sys, re, os.path, errno, fnmatch
import json
import logging
import codecs
from shutil import copyfile
from pprint import pformat
from string import Template
@ -10,7 +11,12 @@ from string import Template
if sys.version_info[0] >= 3:
from io import StringIO
else:
from cStringIO import StringIO
import io
class StringIO(io.StringIO):
def write(self, s):
if isinstance(s, str):
s = unicode(s) # noqa: F821
return super(StringIO, self).write(s)
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
@ -114,14 +120,19 @@ class GeneralInfo():
self.params={}
self.annotation=[]
if type == "class":
docstring="// C++: class " + self.name + "\n//javadoc: " + self.name
docstring="// C++: class " + self.name + "\n"
else:
docstring=""
if len(decl)>5 and decl[5]:
#logging.info('docstring: %s', decl[5])
if re.search("(@|\\\\)deprecated", decl[5]):
doc = decl[5]
#logging.info('docstring: %s', doc)
if re.search("(@|\\\\)deprecated", doc):
self.annotation.append("@Deprecated")
docstring += sanitize_java_documentation_string(doc, type)
self.docstring = docstring
def parseName(self, name, namespaces):
@ -272,7 +283,7 @@ class ClassInfo(GeneralInfo):
def initCodeStreams(self, Module):
self.j_code = StringIO()
self.jn_code = StringIO()
self.cpp_code = StringIO();
self.cpp_code = StringIO()
if self.base:
self.j_code.write(T_JAVA_START_INHERITED)
else:
@ -302,7 +313,7 @@ class ClassInfo(GeneralInfo):
jname = self.jname,
imports = "\n".join(self.getAllImports(M)),
docs = self.docstring,
annotation = "\n".join(self.annotation),
annotation = "\n" + "\n".join(self.annotation) if self.annotation else "",
base = self.base)
def generateCppCode(self):
@ -490,7 +501,7 @@ class JavaWrapperGenerator(object):
content = f.read()
if content == buf:
return
with open(path, "wt") as f:
with codecs.open(path, "w", "utf-8") as f:
f.write(buf)
updated_files += 1
@ -504,7 +515,7 @@ class JavaWrapperGenerator(object):
self.add_class( ['class ' + self.Module, '', [], []] ) # [ 'class/struct cname', ':bases', [modlist] [props] ]
# scan the headers and build more descriptive maps of classes, consts, functions
includes = [];
includes = []
for hdr in common_headers:
logging.info("\n===== Common header : %s =====", hdr)
includes.append('#include "' + hdr + '"')
@ -723,7 +734,7 @@ class JavaWrapperGenerator(object):
fi.jname + "(" + ", ".join(j_args) + ")"
logging.info("java: " + j_signature)
if(j_signature in j_signatures):
if j_signature in j_signatures:
if args:
args.pop()
continue
@ -739,20 +750,51 @@ class JavaWrapperGenerator(object):
type = type_dict[fi.ctype].get("jn_type", "double[]"),
name = fi.jname + '_' + str(suffix_counter),
args = ", ".join(["%s %s" % (type_dict[a.ctype]["jn_type"], normalize_field_name(a.name)) for a in jn_args])
) );
) )
# java part:
#java doc comment
f_name = fi.jname
if fi.classname:
f_name = fi.classname + "::" + fi.jname
java_doc = "//javadoc: " + f_name + "(%s)" % ", ".join([a.name for a in args if a.ctype])
j_code.write(" "*4 + java_doc + "\n")
if fi.docstring:
lines = StringIO(fi.docstring)
for line in lines:
lines = fi.docstring.splitlines()
returnTag = False
javadocParams = []
toWrite = []
inCode = False
for index, line in enumerate(lines):
p0 = line.find("@param")
if p0 != -1:
p0 += 7
p1 = line.find(' ', p0)
p1 = len(line) if p1 == -1 else p1
name = line[p0:p1]
javadocParams.append(name)
for arg in j_args:
if arg.endswith(" " + name):
toWrite.append(line);
break
else:
if "<code>" in line:
inCode = True
if "</code>" in line:
inCode = False
if "@return " in line:
returnTag = True
if (not inCode and toWrite and not toWrite[-1] and
line and not line.startswith("\\") and not line.startswith("<ul>") and not line.startswith("@param")):
toWrite.append("<p>");
if index == len(lines) - 1:
for arg in j_args:
name = arg[arg.rfind(' ') + 1:]
if not name in javadocParams:
toWrite.append(" * @param " + name + " automatically generated");
if type_dict[fi.ctype]["j_type"] and not returnTag and fi.ctype != "void":
toWrite.append(" * @return automatically generated");
toWrite.append(line);
for line in toWrite:
j_code.write(" "*4 + line + "\n")
if fi.annotation:
j_code.write(" "*4 + "\n".join(fi.annotation) + "\n")
@ -779,7 +821,7 @@ class JavaWrapperGenerator(object):
j_epilogue.append('Converters.Mat_to_' + ret_type + '(retValMat, retVal);')
ret = "return retVal;"
elif ret_type.startswith("Ptr_"):
constructor = type_dict[ret_type]["j_type"] + ".__fromPtr__(";
constructor = type_dict[ret_type]["j_type"] + ".__fromPtr__("
if j_epilogue:
ret_val = type_dict[fi.ctype]["j_type"] + " retVal = " + constructor
else:
@ -796,14 +838,14 @@ class JavaWrapperGenerator(object):
ret_val = "nativeObj = "
ret = ""
elif self.isWrapped(ret_type): # wrapped class
constructor = self.getClass(ret_type).jname + "(";
constructor = self.getClass(ret_type).jname + "("
if j_epilogue:
ret_val = type_dict[ret_type]["j_type"] + " retVal = new " + constructor
else:
ret_val = "return new " + constructor
tail = ")"
elif "jn_type" not in type_dict[ret_type]:
constructor = type_dict[ret_type]["j_type"] + "(";
constructor = type_dict[ret_type]["j_type"] + "("
if j_epilogue:
ret_val = type_dict[fi.ctype]["j_type"] + " retVal = new " + constructor
else:
@ -815,7 +857,7 @@ class JavaWrapperGenerator(object):
static = fi.static
j_code.write( Template(
""" public $static$j_type $j_name($j_args) {$prologue
""" public $static$j_type$j_name($j_args) {$prologue
$ret_val$jn_name($jn_args_call)$tail;$epilogue$ret
}
@ -827,7 +869,7 @@ class JavaWrapperGenerator(object):
prologue = "\n " + "\n ".join(j_prologue) if j_prologue else "",
epilogue = "\n " + "\n ".join(j_epilogue) if j_epilogue else "",
static = static + " " if static else "",
j_type=type_dict[fi.ctype]["j_type"],
j_type=type_dict[fi.ctype]["j_type"] + " " if type_dict[fi.ctype]["j_type"] else "",
j_name=fi.jname,
j_args=", ".join(j_args),
jn_name=fi.jname + '_' + str(suffix_counter),
@ -1140,6 +1182,122 @@ def copy_java_files(java_files_dir, java_base_path, default_package_path='org/op
copyfile(src, dest)
updated_files += 1
def sanitize_java_documentation_string(doc, type):
if type == "class":
doc = doc.replace("@param ", "")
doc = re.sub(re.compile('\\\\f\\$(.*?)\\\\f\\$', re.DOTALL), '\\(' + r'\1' + '\\)', doc)
doc = re.sub(re.compile('\\\\f\\[(.*?)\\\\f\\]', re.DOTALL), '\\(' + r'\1' + '\\)', doc)
doc = re.sub(re.compile('\\\\f\\{(.*?)\\\\f\\}', re.DOTALL), '\\(' + r'\1' + '\\)', doc)
doc = doc.replace("&", "&amp;") \
.replace("\\<", "&lt;") \
.replace("\\>", "&gt;") \
.replace("<", "&lt;") \
.replace(">", "&gt;") \
.replace("$", "$$") \
.replace("@anchor", "") \
.replace("@brief ", "").replace("\\brief ", "") \
.replace("@cite", "CITE:") \
.replace("@code{.cpp}", "<code>") \
.replace("@code{.txt}", "<code>") \
.replace("@code", "<code>") \
.replace("@copydoc", "") \
.replace("@copybrief", "") \
.replace("@date", "") \
.replace("@defgroup", "") \
.replace("@details ", "") \
.replace("@endcode", "</code>") \
.replace("@endinternal", "") \
.replace("@file", "") \
.replace("@include", "INCLUDE:") \
.replace("@ingroup", "") \
.replace("@internal", "") \
.replace("@overload", "") \
.replace("@param[in]", "@param") \
.replace("@param[out]", "@param") \
.replace("@ref", "REF:") \
.replace("@returns", "@return") \
.replace("@sa", "SEE:") \
.replace("@see", "SEE:") \
.replace("@snippet", "SNIPPET:") \
.replace("@todo", "TODO:") \
.replace("@warning ", "WARNING: ")
doc = re.sub(re.compile('\\*\\*([^\\*]+?)\\*\\*', re.DOTALL), '<b>' + r'\1' + '</b>', doc)
lines = doc.splitlines()
lines = list(map(lambda x: x[x.find('*'):].strip() if x.lstrip().startswith("*") else x, lines))
listInd = [];
indexDiff = 0;
for index, line in enumerate(lines[:]):
if line.strip().startswith("-"):
i = line.find("-")
if not listInd or i > listInd[-1]:
lines.insert(index + indexDiff, " "*len(listInd) + "<ul>")
indexDiff += 1
listInd.append(i);
lines.insert(index + indexDiff, " "*len(listInd) + "<li>")
indexDiff += 1
elif i == listInd[-1]:
lines.insert(index + indexDiff, " "*len(listInd) + "</li>")
indexDiff += 1
lines.insert(index + indexDiff, " "*len(listInd) + "<li>")
indexDiff += 1
elif len(listInd) > 1 and i == listInd[-2]:
lines.insert(index + indexDiff, " "*len(listInd) + "</li>")
indexDiff += 1
del listInd[-1]
lines.insert(index + indexDiff, " "*len(listInd) + "</ul>")
indexDiff += 1
lines.insert(index + indexDiff, " "*len(listInd) + "<li>")
indexDiff += 1
else:
lines.insert(index + indexDiff, " "*len(listInd) + "</li>")
indexDiff += 1
del listInd[-1]
lines.insert(index + indexDiff, " "*len(listInd) + "</ul>")
indexDiff += 1
lines.insert(index + indexDiff, " "*len(listInd) + "<ul>")
indexDiff += 1
listInd.append(i);
lines.insert(index + indexDiff, " "*len(listInd) + "<li>")
indexDiff += 1
lines[index + indexDiff] = lines[index + indexDiff][0:i] + lines[index + indexDiff][i + 1:]
else:
if listInd and (not line or line == "*" or line.startswith("@note")):
lines.insert(index + indexDiff, " "*len(listInd) + "</li>")
indexDiff += 1
del listInd[-1]
lines.insert(index + indexDiff, " "*len(listInd) + "</ul>")
indexDiff += 1
i = len(listInd) - 1
for value in enumerate(listInd):
lines.append(" "*i + " </li>")
lines.append(" "*i + "</ul>")
i -= 1;
lines = list(map(lambda x: "* " + x[1:].strip() if x.startswith("*") and x != "*" else x, lines))
lines = list(map(lambda x: x if x.startswith("*") else "* " + x if x and x != "*" else "*", lines))
lines = list(map(lambda x: x
.replace("@note", "<b>Note:</b>")
, lines))
lines = list(map(lambda x: re.sub('@b ([\\w:]+?)\\b', '<b>' + r'\1' + '</b>', x), lines))
lines = list(map(lambda x: re.sub('@c ([\\w:]+?)\\b', '<tt>' + r'\1' + '</tt>', x), lines))
lines = list(map(lambda x: re.sub('`(.*?)`', "{@code " + r'\1' + '}', x), lines))
lines = list(map(lambda x: re.sub('@p ([\\w:]+?)\\b', '{@code ' + r'\1' + '}', x), lines))
hasValues = False
for line in lines:
if line != "*":
hasValues = True
break
return "/**\n " + "\n ".join(lines) + "\n */" if hasValues else ""
if __name__ == "__main__":
# initialize logger
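A hedged sketch of how the new helper might be exercised in isolation (the input docstring below is made up; the output shape follows the rules implemented above):

    # Inside gen_java.py, after the definitions above:
    doc = "@brief Detects keypoints in an image.\n@param image the input image"
    print(sanitize_java_documentation_string(doc, "method"))
    # Roughly: a "/** ... */" block with "@brief " stripped, the @param line kept,
    # and doxygen markup (@code, @ref, backticks, ...) rewritten for javadoc.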

@ -5,8 +5,7 @@ package org.opencv.$module;
$imports
$docs
$annotation
$docs$annotation
public class $jname {
protected final long nativeObj;

@ -5,8 +5,7 @@ package org.opencv.$module;
$imports
$docs
$annotation
$docs$annotation
public class $jname extends $base {
protected $jname(long addr) { super(addr); }

@ -5,6 +5,5 @@ package org.opencv.$module;
$imports
$docs
$annotation
$docs$annotation
public class $jname {

@ -25,6 +25,8 @@ if(OPENCV_JAVA_TARGET_VERSION)
set(OPENCV_ANT_JAVAC_EXTRA_ATTRS "${OPENCV_ANT_JAVAC_EXTRA_ATTRS} target=\"${OPENCV_JAVA_TARGET_VERSION}\"")
endif()
set(OPENCV_JAVADOC_DESTINATION "${OpenCV_BINARY_DIR}/doc/doxygen/html/javadoc" CACHE STRING "")
configure_file("${CMAKE_CURRENT_SOURCE_DIR}/build.xml.in" "${OPENCV_JAVA_DIR}/build.xml" @ONLY)
list(APPEND depends "${OPENCV_JAVA_DIR}/build.xml")

@ -29,14 +29,35 @@
</target>
<target name="javadoc">
<copy file="@OpenCV_SOURCE_DIR@/doc/mymath.js"
todir="@OPENCV_JAVADOC_DESTINATION@" />
<!-- synchronize with platforms\android\build_sdk.py -->
<javadoc
packagenames="org.opencv.*"
sourcepath="java"
destdir="@OpenCV_BINARY_DIR@/doc/doxygen/html/javadoc"
destdir="@OPENCV_JAVADOC_DESTINATION@"
Windowtitle="OpenCV @OPENCV_VERSION_PLAIN@ Java documentation"
Doctitle="OpenCV Java documentation (@OPENCV_VERSION@)"
bottom="Generated on ${timestamp} / OpenCV @OPENCV_VCSVERSION@"
/>
failonerror="true"
encoding="UTF-8" charset="UTF-8" docencoding="UTF-8"
link="https://docs.oracle.com/javase/6/docs/api/"
additionalparam="--allow-script-in-comments"
>
<Header>
<![CDATA[
<script>
var url = window.location.href;
var pos = url.lastIndexOf('/javadoc/');
url = pos >= 0 ? (url.substring(0, pos) + '/javadoc/mymath.js') : (window.location.origin + '/mymath.js');
var script = document.createElement('script');
script.src = '@OPENCV_MATHJAX_RELPATH@/MathJax.js?config=TeX-AMS-MML_HTMLorMML,' + url;
document.getElementsByTagName('head')[0].appendChild(script);
</script>
]]>
</Header>
</javadoc>
</target>
</project>

@ -1004,7 +1004,7 @@ public:
@param samples Samples from which the Gaussian mixture model will be estimated. It should be a
one-channel matrix, each row of which is a sample. If the matrix does not have CV_64F type
it will be converted to the inner matrix of such type for the further computing.
@param probs0
@param probs0 the probabilities
@param logLikelihoods The optional output matrix that contains a likelihood logarithm value for
each sample. It has \f$nsamples \times 1\f$ size and CV_64FC1 type.
@param labels The optional output "class label" for each sample:

@ -3,9 +3,7 @@ package org.opencv.test.objdetect;
import org.opencv.core.Mat;
import org.opencv.objdetect.QRCodeDetector;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.objdetect.Objdetect;
import org.opencv.test.OpenCVTestCase;
import org.opencv.test.OpenCVTestRunner;
public class QRCodeDetectorTest extends OpenCVTestCase {

@ -0,0 +1,18 @@
#!/usr/bin/env python
'''
===============================================================================
QR code detect and decode pipeline.
===============================================================================
'''
import numpy as np
import cv2 as cv
from tests_common import NewOpenCVTests
class qrcode_detector_test(NewOpenCVTests):
def test_detect_and_decode(self):
img = cv.imread(self.extraTestDataPath + '/cv/qrcode/link_ocv.jpg')
detector = cv.QRCodeDetector()
retval, points, straight_qrcode = detector.detectAndDecode(img)
self.assertEqual(retval, "https://opencv.org/");

@ -1068,7 +1068,6 @@ std::string QRCodeDetector::decode(InputArray in, InputArray points,
inarr = gray;
}
CV_Assert(points.isVector());
vector<Point2f> src_points;
points.copyTo(src_points);
CV_Assert(src_points.size() == 4);

@ -299,7 +299,7 @@ icvTeleaInpaintFMM(const CvMat *f, CvMat *t, CvMat *out, int range, CvPriorityQu
else if(q==1) {i=ii; j=jj-1;}
else if(q==2) {i=ii+1; j=jj;}
else if(q==3) {i=ii; j=jj+1;}
if ((i<=1)||(j<=1)||(i>t->rows-1)||(j>t->cols-1)) continue;
if ((i<=0)||(j<=0)||(i>t->rows-1)||(j>t->cols-1)) continue;
if (CV_MAT_ELEM(*f,uchar,i,j)==INSIDE) {
dist = min4(FastMarching_solve(i-1,j,i,j-1,f,t),
@ -411,7 +411,7 @@ icvTeleaInpaintFMM(const CvMat *f, CvMat *t, CvMat *out, int range, CvPriorityQu
else if(q==1) {i=ii; j=jj-1;}
else if(q==2) {i=ii+1; j=jj;}
else if(q==3) {i=ii; j=jj+1;}
if ((i<=1)||(j<=1)||(i>t->rows-1)||(j>t->cols-1)) continue;
if ((i<=0)||(j<=0)||(i>t->rows-1)||(j>t->cols-1)) continue;
if (CV_MAT_ELEM(*f,uchar,i,j)==INSIDE) {
dist = min4(FastMarching_solve(i-1,j,i,j-1,f,t),
@ -531,7 +531,7 @@ icvNSInpaintFMM(const CvMat *f, CvMat *t, CvMat *out, int range, CvPriorityQueue
else if(q==1) {i=ii; j=jj-1;}
else if(q==2) {i=ii+1; j=jj;}
else if(q==3) {i=ii; j=jj+1;}
if ((i<=1)||(j<=1)||(i>t->rows-1)||(j>t->cols-1)) continue;
if ((i<=0)||(j<=0)||(i>t->rows-1)||(j>t->cols-1)) continue;
if (CV_MAT_ELEM(*f,uchar,i,j)==INSIDE) {
dist = min4(FastMarching_solve(i-1,j,i,j-1,f,t),
@ -619,7 +619,7 @@ icvNSInpaintFMM(const CvMat *f, CvMat *t, CvMat *out, int range, CvPriorityQueue
else if(q==1) {i=ii; j=jj-1;}
else if(q==2) {i=ii+1; j=jj;}
else if(q==3) {i=ii; j=jj+1;}
if ((i<=1)||(j<=1)||(i>t->rows-1)||(j>t->cols-1)) continue;
if ((i<=0)||(j<=0)||(i>t->rows-1)||(j>t->cols-1)) continue;
if (CV_MAT_ELEM(*f,uchar,i,j)==INSIDE) {
dist = min4(FastMarching_solve(i-1,j,i,j-1,f,t),

@ -139,4 +139,22 @@ TEST_P(formats, 1c)
INSTANTIATE_TEST_CASE_P(Photo_Inpaint, formats, testing::Values(CV_32F, CV_16U, CV_8U));
TEST(Photo_InpaintBorders, regression)
{
Mat img(64, 64, CV_8U);
img = 128;
img(Rect(0, 0, 16, 64)) = 0;
Mat mask(64, 64, CV_8U);
mask = 0;
mask(Rect(0, 0, 16, 64)) = 255;
Mat inpainted;
inpaint(img, mask, inpainted, 1, INPAINT_TELEA);
Mat diff;
cv::absdiff(inpainted, 128*Mat::ones(inpainted.size(), inpainted.type()), diff);
ASSERT_TRUE(countNonZero(diff) == 0);
}
}} // namespace
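The regression test above has a direct Python analogue; a minimal sketch (assuming cv2) of the same border case that the i<=0 / j<=0 change fixes:

    import numpy as np
    import cv2 as cv

    img = np.full((64, 64), 128, np.uint8)
    img[:, :16] = 0                       # damaged strip touching the left border
    mask = np.zeros((64, 64), np.uint8)
    mask[:, :16] = 255

    restored = cv.inpaint(img, mask, 1, cv.INPAINT_TELEA)
    # With the boundary fix, the masked strip is filled from its neighbours,
    # so the result should come back (close to) uniform 128 everywhere.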

@ -61,7 +61,6 @@ ocv_update(OPENCV_PYTHON_EXTENSION_BUILD_PATH "${LIBRARY_OUTPUT_PATH}/${MODULE_I
set_target_properties(${the_module} PROPERTIES
LIBRARY_OUTPUT_DIRECTORY "${OPENCV_PYTHON_EXTENSION_BUILD_PATH}"
ARCHIVE_OUTPUT_NAME ${the_module} # prevent name conflict for python2/3 outputs
DEFINE_SYMBOL CVAPI_EXPORTS
PREFIX ""
OUTPUT_NAME cv2
SUFFIX ${CVPY_SUFFIX})

@ -1797,8 +1797,12 @@ static int to_ok(PyTypeObject *to)
}
#if defined(__GNUC__)
#pragma GCC visibility push(default)
#endif
#if PY_MAJOR_VERSION >= 3
extern "C" CV_EXPORTS PyObject* PyInit_cv2();
PyMODINIT_FUNC PyInit_cv2();
static struct PyModuleDef cv2_moduledef =
{
PyModuleDef_HEAD_INIT,
@ -1811,7 +1815,7 @@ static struct PyModuleDef cv2_moduledef =
PyObject* PyInit_cv2()
#else
extern "C" CV_EXPORTS void initcv2();
PyMODINIT_FUNC initcv2();
void initcv2()
#endif

@ -827,7 +827,7 @@ class CppHeaderParser(object):
if state == DOCSTRING:
pos = l.find("*/")
if pos < 0:
docstring += l + "\n"
docstring += l0
continue
docstring += l[:pos] + "\n"
l = l[pos+2:]

@ -8284,8 +8284,8 @@ DeathTest::TestRole WindowsDeathTest::AssumeRole() {
GTEST_DEATH_TEST_CHECK_(::CreateProcessA(
executable_path,
const_cast<char*>(command_line.c_str()),
NULL, // Retuned process handle is not inheritable.
NULL, // Retuned thread handle is not inheritable.
NULL, // Returned process handle is not inheritable.
NULL, // Returned thread handle is not inheritable.
TRUE, // Child inherits all inheritable handles (for write_handle_).
0x0, // Default creation flags.
NULL, // Inherit the parent's environment.

@ -550,7 +550,7 @@ double cv::findTransformECC(InputArray templateImage,
const double correlation = templateZM.dot(imageWarped);
// calculate enhanced correlation coefficiont (ECC)->rho
// calculate enhanced correlation coefficient (ECC)->rho
last_rho = rho;
rho = correlation/(imgNorm*tmpNorm);
if (cvIsNaN(rho)) {

@ -1281,7 +1281,7 @@ void SparsePyrLKOpticalFlowImpl::calc( InputArray _prevImg, InputArray _nextImg,
levels1 /= 2;
}
// ensure that pyramid has reqired padding
// ensure that pyramid has required padding
if(levels1 > 0)
{
Size fullSize;
@ -1309,7 +1309,7 @@ void SparsePyrLKOpticalFlowImpl::calc( InputArray _prevImg, InputArray _nextImg,
levels2 /= 2;
}
// ensure that pyramid has reqired padding
// ensure that pyramid has required padding
if(levels2 > 0)
{
Size fullSize;

@ -766,8 +766,8 @@ public:
@note Reading / writing properties involves many layers. Some unexpected result might happens
along this chain.
@code {.txt}
`VideoCapture -> API Backend -> Operating System -> Device Driver -> Device Hardware`
@code{.txt}
VideoCapture -> API Backend -> Operating System -> Device Driver -> Device Hardware
@endcode
The returned value might be different from what really used by the device or it could be encoded
using device dependent rules (eg. steps or percentage). Effective behaviour depends from device
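The layered behaviour described above is why a property read may not echo back exactly what was set. A small Python sketch, assuming cv2 and a camera on index 0:

    import cv2 as cv

    cap = cv.VideoCapture(0)
    cap.set(cv.CAP_PROP_FRAME_WIDTH, 1280)
    # The reported value travels VideoCapture -> backend -> OS -> driver -> hardware,
    # so it may be rounded, clamped, or re-encoded by the device.
    print(cap.get(cv.CAP_PROP_FRAME_WIDTH))
    cap.release()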

@ -1,6 +1,13 @@
#!/usr/bin/env python
import os, sys, subprocess, argparse, shutil, glob, re
import os, sys
import argparse
import glob
import re
import shutil
import subprocess
import time
import logging as log
import xml.etree.ElementTree as ET
@ -245,14 +252,34 @@ class Builder:
for f in files:
if f == "android.jar" or f == "annotations.jar":
classpaths.append(os.path.join(dir, f))
srcdir = os.path.join(self.resultdest, 'sdk', 'java', 'src')
dstdir = self.docdest
# synchronize with modules/java/jar/build.xml.in
shutil.copy2(os.path.join(SCRIPT_DIR, '../../doc/mymath.js'), dstdir)
cmd = [
"javadoc",
"-header", "OpenCV %s" % self.opencv_version,
'-windowtitle', 'OpenCV %s Java documentation' % self.opencv_version,
'-doctitle', 'OpenCV Java documentation (%s)' % self.opencv_version,
"-nodeprecated",
"-footer", '<a href="http://docs.opencv.org">OpenCV %s Documentation</a>' % self.opencv_version,
"-public",
'-sourcepath', os.path.join(self.resultdest, 'sdk', 'java', 'src'),
"-d", self.docdest,
'-sourcepath', srcdir,
'-encoding', 'UTF-8',
'-charset', 'UTF-8',
'-docencoding', 'UTF-8',
'--allow-script-in-comments',
'-header',
'''
<script>
var url = window.location.href;
var pos = url.lastIndexOf('/javadoc/');
url = pos >= 0 ? (url.substring(0, pos) + '/javadoc/mymath.js') : (window.location.origin + '/mymath.js');
var script = document.createElement('script');
script.src = '%s/MathJax.js?config=TeX-AMS-MML_HTMLorMML,' + url;
document.getElementsByTagName('head')[0].appendChild(script);
</script>
''' % 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.0',
'-bottom', 'Generated on %s / OpenCV %s' % (time.strftime("%Y-%m-%d %H:%M:%S"), self.opencv_version),
"-d", dstdir,
"-classpath", ":".join(classpaths),
'-subpackages', 'org.opencv',
]

@ -266,7 +266,10 @@ int main(int argc, char *argv[])
cout << "Unsupported mode: " << mode << endl;
return -1;
}
file_name = samples::findFile(file_name);
if (mode == "decode")
{
file_name = samples::findFile(file_name);
}
cout << "Mode: " << mode << ", Backend: " << backend << ", File: " << file_name << ", Codec: " << codec << endl;
TickMeter total;

@ -177,7 +177,7 @@ int main( int argc, char* argv[] )
else
capture.set(CAP_OPENNI_IR_GENERATOR_PRESENT, false);
// Print some avalible device settings.
// Print some available device settings.
if (capture.get(CAP_OPENNI_DEPTH_GENERATOR_PRESENT))
{
cout << "\nDepth generator output mode:" << endl <<

@ -8,8 +8,8 @@ def my_ellipse(img, angle):
line_type = 8
cv.ellipse(img,
(W / 2, W / 2),
(W / 4, W / 16),
(W // 2, W // 2),
(W // 4, W // 16),
angle,
0,
360,
@ -24,7 +24,7 @@ def my_filled_circle(img, center):
cv.circle(img,
center,
W / 32,
W // 32,
(0, 0, 255),
thickness,
line_type)
@ -82,7 +82,7 @@ my_ellipse(atom_image, 45)
my_ellipse(atom_image, -45)
# 1.b. Creating circles
my_filled_circle(atom_image, (W / 2, W / 2))
my_filled_circle(atom_image, (W // 2, W // 2))
## [draw_atom]
## [draw_rook]
@ -93,7 +93,7 @@ my_polygon(rook_image)
## [rectangle]
# 2.b. Creating rectangles
cv.rectangle(rook_image,
(0, 7 * W / 8),
(0, 7 * W // 8),
(W, W),
(0, 255, 255),
-1,
@ -101,10 +101,10 @@ cv.rectangle(rook_image,
## [rectangle]
# 2.c. Create a few lines
my_line(rook_image, (0, 15 * W / 16), (W, 15 * W / 16))
my_line(rook_image, (W / 4, 7 * W / 8), (W / 4, W))
my_line(rook_image, (W / 2, 7 * W / 8), (W / 2, W))
my_line(rook_image, (3 * W / 4, 7 * W / 8), (3 * W / 4, W))
my_line(rook_image, (0, 15 * W // 16), (W, 15 * W // 16))
my_line(rook_image, (W // 4, 7 * W // 8), (W // 4, W))
my_line(rook_image, (W // 2, 7 * W // 8), (W // 2, W))
my_line(rook_image, (3 * W // 4, 7 * W // 8), (3 * W // 4, W))
## [draw_rook]
cv.imshow(atom_window, atom_image)
cv.moveWindow(atom_window, 0, 200)

@ -63,7 +63,7 @@ def main(argv):
# [horiz]
# Specify size on horizontal axis
cols = horizontal.shape[1]
horizontal_size = cols / 30
horizontal_size = cols // 30
# Create structure element for extracting horizontal lines through morphology operations
horizontalStructure = cv.getStructuringElement(cv.MORPH_RECT, (horizontal_size, 1))
@ -79,7 +79,7 @@ def main(argv):
# [vert]
# Specify size on vertical axis
rows = vertical.shape[0]
verticalsize = rows / 30
verticalsize = rows // 30
# Create structure element for extracting vertical lines through morphology operations
verticalStructure = cv.getStructuringElement(cv.MORPH_RECT, (1, verticalsize))
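Both tutorial edits above follow the same rule; a one-line sketch of why the samples switch from / to // (plain Python 3 behaviour):

    cols = 300
    print(cols / 30)    # 10.0 -> float; rejected where OpenCV expects integer sizes or coordinates
    print(cols // 30)   # 10   -> int; what getStructuringElement, circle, line, etc. need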
