diff --git a/modules/java/generator/gen_java.py b/modules/java/generator/gen_java.py
index d4560a972e..f8d3f7d0a5 100755
--- a/modules/java/generator/gen_java.py
+++ b/modules/java/generator/gen_java.py
@@ -991,12 +991,12 @@ class JavaWrapperGenerator(object):
         if classinfo.base:
             classinfo.addImports(classinfo.base)
-        type_dict["Ptr_"+name] = \
-            { "j_type" : name,
-              "jn_type" : "long", "jn_args" : (("__int64", ".nativeObj"),),
-              "jni_name" : "Ptr<"+name+">(("+name+"*)%(n)s_nativeObj)", "jni_type" : "jlong",
-              "suffix" : "J" }
-        logging.info('ok: %s', classinfo)
+        type_dict["Ptr_"+name] = \
+            { "j_type" : name,
+              "jn_type" : "long", "jn_args" : (("__int64", ".nativeObj"),),
+              "jni_name" : "Ptr<"+name+">(("+classinfo.fullName(isCPP=True)+"*)%(n)s_nativeObj)", "jni_type" : "jlong",
+              "suffix" : "J" }
+        logging.info('ok: class %s, name: %s, base: %s', classinfo, name, classinfo.base)
 
     def add_const(self, decl): # [ "const cname", val, [], [] ]
         constinfo = ConstInfo(decl, namespaces=self.namespaces)
@@ -1347,7 +1347,7 @@ class JavaWrapperGenerator(object):
                 ret = "return (jlong) new %s(_retval_);" % self.fullTypeName(fi.ctype)
             elif fi.ctype.startswith('Ptr_'):
                 c_prologue.append("typedef Ptr<%s> %s;" % (self.fullTypeName(fi.ctype[4:]), fi.ctype))
-                ret = "return (jlong)(new %(ctype)s(_retval_));" % { 'ctype':fi.ctype }
+                ret = "%(ctype)s* curval = new %(ctype)s(_retval_);return (jlong)curval->get();" % { 'ctype':fi.ctype }
             elif self.isWrapped(ret_type): # pointer to wrapped class:
                 ret = "return (jlong) _retval_;"
             elif type_dict[fi.ctype]["jni_type"] == "jdoubleArray":
diff --git a/modules/ml/include/opencv2/ml.hpp b/modules/ml/include/opencv2/ml.hpp
index cea8aec48c..d016810874 100644
--- a/modules/ml/include/opencv2/ml.hpp
+++ b/modules/ml/include/opencv2/ml.hpp
@@ -285,7 +285,7 @@ public:
         <number_of_variables_in_responses>`, containing types of each input and output variable. See
         ml::VariableTypes.
      */
-    CV_WRAP static Ptr<TrainData> create(InputArray samples, int layout, InputArray responses,
+    CV_WRAP static Ptr<TrainData> create(InputArray samples, int layout, InputArray responses,
                                  InputArray varIdx=noArray(), InputArray sampleIdx=noArray(),
                                  InputArray sampleWeights=noArray(), InputArray varType=noArray());
 };
@@ -320,7 +320,7 @@ public:
     @param flags optional flags, depending on the model. Some of the models can be updated with the
        new training samples, not completely overwritten (such as NormalBayesClassifier or ANN_MLP).
      */
-    CV_WRAP virtual bool train( const Ptr<TrainData>& trainData, int flags=0 );
+    CV_WRAP virtual bool train( const Ptr<TrainData>& trainData, int flags=0 );
 
     /** @brief Trains the statistical model
@@ -343,7 +343,7 @@
     The method uses StatModel::predict to compute the error. For regression models the error is
     computed as RMS, for classifiers - as a percent of missclassified samples (0%-100%).
      */
-    CV_WRAP virtual float calcError( const Ptr<TrainData>& data, bool test, OutputArray resp ) const;
+    CV_WRAP virtual float calcError( const Ptr<TrainData>& data, bool test, OutputArray resp ) const;
 
     /** @brief Predicts response(s) for the provided sample(s)
@@ -357,7 +357,7 @@
     The class must implement static `create()` method with no parameters or with all default parameter
     values */
-    template <typename _Tp> static Ptr<_Tp> train(const Ptr<TrainData>& data, int flags=0)
+    template <typename _Tp> static Ptr<_Tp> train(const Ptr<TrainData>& data, int flags=0)
     {
         Ptr<_Tp> model = _Tp::create();
         return !model.empty() && model->train(data, flags) ? model : Ptr<_Tp>();
     }
@@ -667,7 +667,7 @@ public:
        regression (SVM::EPS_SVR or SVM::NU_SVR). If it is SVM::ONE_CLASS, no optimization is made and
        the usual %SVM with parameters specified in params is executed.
     */
-    virtual bool trainAuto( const Ptr<TrainData>& data, int kFold = 10,
+    virtual bool trainAuto( const Ptr<TrainData>& data, int kFold = 10,
                     ParamGrid Cgrid = SVM::getDefaultGrid(SVM::C),
                     ParamGrid gammaGrid  = SVM::getDefaultGrid(SVM::GAMMA),
                     ParamGrid pGrid      = SVM::getDefaultGrid(SVM::P),
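
Note on the gen_java.py hunks: before this patch the two generator templates disagreed about what the Java-side `nativeObj` field holds for `Ptr`-wrapped types. The return template stored a pointer to a heap-allocated `Ptr<T>` (`new Ptr_T(_retval_)`), while the `jni_name` template cast `nativeObj` back as a raw `T*`. The patch makes both sides use the raw object pointer, deliberately leaking the heap-allocated `Ptr` so its reference count keeps the object alive. A minimal C++ sketch of the stub the patched generator would emit for a method returning `Ptr<TrainData>` (the function name and signature here are illustrative, not the generator's verbatim output):

```cpp
#include <opencv2/ml.hpp>

// Emitted into the generated JNI file via c_prologue:
typedef cv::Ptr<cv::ml::TrainData> Ptr_TrainData;

// Stand-in for a generated stub returning Ptr<TrainData>; jlong is spelled
// long long here so the sketch compiles without <jni.h>.
long long wrapped_TrainData_create(const cv::Mat& samples, const cv::Mat& responses)
{
    Ptr_TrainData _retval_ = cv::ml::TrainData::create(samples, cv::ml::ROW_SAMPLE, responses);
    // Old template: return (jlong)(new Ptr_TrainData(_retval_));  -- stored a Ptr_TrainData*
    // New template: store the raw object pointer instead; curval is never
    // deleted, which pins the reference count for the Java peer's lifetime.
    Ptr_TrainData* curval = new Ptr_TrainData(_retval_);
    return (long long)curval->get();
}
```

On the consuming side, the patched `jni_name` rebuilds the smart pointer as `Ptr<TrainData>((cv::ml::TrainData*)trainData_nativeObj)`; the switch to `classinfo.fullName(isCPP=True)` presumably makes that cast compile in generated files where the unqualified class name is not in scope.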
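For reference, the declarations touched in ml.hpp are used from C++ as below; a toy sketch with made-up data, mirroring only the documented API:

```cpp
#include <iostream>
#include <opencv2/ml.hpp>

using namespace cv;
using namespace cv::ml;

int main()
{
    // Four 2-D samples in ROW_SAMPLE layout (one sample per row) and their
    // class labels: the classic XOR toy problem.
    Mat samples = (Mat_<float>(4, 2) << 1, 1,  1, -1,  -1, 1,  -1, -1);
    Mat responses = (Mat_<int>(4, 1) << 1, -1, -1, 1);

    Ptr<TrainData> data = TrainData::create(samples, ROW_SAMPLE, responses);

    Ptr<SVM> svm = SVM::create();
    svm->train(data);  // StatModel::train(const Ptr<TrainData>&, int flags = 0)

    // Training-set error; for a classifier this is the percentage of
    // misclassified samples (0-100).
    float err = svm->calcError(data, false, noArray());
    std::cout << "training error: " << err << "%" << std::endl;
    return 0;
}
```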