Merge pull request #23594 from fdivitto:fdivitto-traincascade-patch

fix: traincascade, use C++ persistence API #23594

This pull request allows the traincascade application to be compiled with OpenCV 4.6. The changes use the new C++ persistence API in place of the legacy C one.
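The pattern of the change is the same across every file: calls into the legacy C persistence API (cvOpenFileStorage, cvWriteInt, cvStartWriteStruct, cvReadIntByName, CvSeqReader, ...) are replaced by their cv::FileStorage / cv::FileNode counterparts. A minimal before/after sketch of that pattern follows; the file name and field names are illustrative only, not identifiers from the traincascade sources.

```cpp
#include <opencv2/core.hpp>

// Writing and reading a few fields with the C++ persistence API, as done
// throughout this patch. "model.yml", "ntrees", "trees" and "weight" are
// hypothetical names chosen for the sketch.
static void write_example(int ntree_count)
{
    cv::FileStorage fs("model.yml", cv::FileStorage::WRITE);

    fs.writeComment("example model");                 // was: cvWriteComment(fs, ..., 0)
    fs.write("ntrees", ntree_count);                  // was: cvWriteInt(fs, "ntrees", ...)

    fs.startWriteStruct("trees", cv::FileNode::SEQ);  // was: cvStartWriteStruct(fs, "trees", CV_NODE_SEQ)
    for (int i = 0; i < ntree_count; i++)
    {
        fs.startWriteStruct("", cv::FileNode::MAP);   // anonymous map entry inside the sequence
        fs.write("weight", 1.0);
        fs.endWriteStruct();
    }
    fs.endWriteStruct();                              // was: cvEndWriteStruct(fs)
}

static int read_example()
{
    cv::FileStorage fs("model.yml", cv::FileStorage::READ);

    cv::FileNode n = fs["ntrees"];                    // was: cvGetFileNodeByName(fs, 0, "ntrees")
    return n.empty() ? -1 : (int)n;                   // was: cvReadIntByName(fs, node, "ntrees", -1)
}
```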
Fabrizio Di Vittorio 2 years ago committed by GitHub
parent e3c5c0906b
commit 044a322519
Changed files:
  1. apps/traincascade/boost.cpp (22 changed lines)
  2. apps/traincascade/cascadeclassifier.cpp (5 changed lines)
  3. apps/traincascade/old_ml.hpp (68 changed lines)
  4. apps/traincascade/old_ml_boost.cpp (110 changed lines)
  5. apps/traincascade/old_ml_inner_functions.cpp (32 changed lines)
  6. apps/traincascade/old_ml_precomp.hpp (6 changed lines)
  7. apps/traincascade/old_ml_tree.cpp (317 changed lines)
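A second recurring pattern in the hunks below is sequence traversal: CvSeqReader / CV_NEXT_SEQ_ELEM loops become cv::FileNodeIterator loops, and backward iterator walks become indexed access into the cv::FileNode. A small sketch of both new-style traversals, reusing the hypothetical names from the sketch above:

```cpp
#include <opencv2/core.hpp>

// Two equivalent ways of walking a stored sequence with the C++ API.
// "trees" and "weight" are the illustrative names from the previous sketch.
static void traverse_example(const cv::FileStorage& fs)
{
    cv::FileNode trees = fs["trees"];
    if (trees.empty() || !trees.isSeq())
        return;

    // 1) Iterator-based traversal (replaces CvSeqReader / CV_NEXT_SEQ_ELEM).
    for (cv::FileNodeIterator it = trees.begin(); it != trees.end(); ++it)
    {
        double w = (double)(*it)["weight"];
        (void)w;  // use the value here
    }

    // 2) Indexed access (used where the old code walked an iterator backwards).
    for (int i = (int)trees.size() - 1; i >= 0; --i)
    {
        double w = (double)trees[i]["weight"];
        (void)w;
    }
}
```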

@@ -1005,7 +1005,7 @@ void CvCascadeBoostTree::read( const FileNode &node, CvBoost* _ensemble,
int step = 3 + ( maxCatCount>0 ? subsetN : 1 );
queue<CvDTreeNode*> internalNodesQueue;
FileNodeIterator internalNodesIt, leafValsuesIt;
int internalNodesIdx, leafValsuesIdx;
CvDTreeNode* prntNode, *cldNode;
clear();
@@ -1015,9 +1015,9 @@ void CvCascadeBoostTree::read( const FileNode &node, CvBoost* _ensemble,
// read tree nodes
FileNode rnode = node[CC_INTERNAL_NODES];
internalNodesIt = rnode.end();
leafValsuesIt = node[CC_LEAF_VALUES].end();
internalNodesIt--; leafValsuesIt--;
internalNodesIdx = (int) rnode.size() - 1;
FileNode lnode = node[CC_LEAF_VALUES];
leafValsuesIdx = (int) lnode.size() - 1;
for( size_t i = 0; i < rnode.size()/step; i++ )
{
prntNode = data->new_node( 0, 0, 0, 0 );
@@ -1026,23 +1026,23 @@ void CvCascadeBoostTree::read( const FileNode &node, CvBoost* _ensemble,
prntNode->split = data->new_split_cat( 0, 0 );
for( int j = subsetN-1; j>=0; j--)
{
*internalNodesIt >> prntNode->split->subset[j]; internalNodesIt--;
rnode[internalNodesIdx] >> prntNode->split->subset[j]; --internalNodesIdx;
}
}
else
{
float split_value;
*internalNodesIt >> split_value; internalNodesIt--;
rnode[internalNodesIdx] >> split_value; --internalNodesIdx;
prntNode->split = data->new_split_ord( 0, split_value, 0, 0, 0);
}
*internalNodesIt >> prntNode->split->var_idx; internalNodesIt--;
rnode[internalNodesIdx] >> prntNode->split->var_idx; --internalNodesIdx;
int ridx, lidx;
*internalNodesIt >> ridx; internalNodesIt--;
*internalNodesIt >> lidx;internalNodesIt--;
rnode[internalNodesIdx] >> ridx; --internalNodesIdx;
rnode[internalNodesIdx] >> lidx; --internalNodesIdx;
if ( ridx <= 0)
{
prntNode->right = cldNode = data->new_node( 0, 0, 0, 0 );
*leafValsuesIt >> cldNode->value; leafValsuesIt--;
lnode[leafValsuesIdx] >> cldNode->value; --leafValsuesIdx;
cldNode->parent = prntNode;
}
else
@@ -1055,7 +1055,7 @@ void CvCascadeBoostTree::read( const FileNode &node, CvBoost* _ensemble,
if ( lidx <= 0)
{
prntNode->left = cldNode = data->new_node( 0, 0, 0, 0 );
*leafValsuesIt >> cldNode->value; leafValsuesIt--;
lnode[leafValsuesIdx] >> cldNode->value; --leafValsuesIdx;
cldNode->parent = prntNode;
}
else

@@ -370,7 +370,7 @@ void CvCascadeClassifier::writeStages( FileStorage &fs, const Mat& featureMap )
it != stageClassifiers.end();++it, ++i )
{
snprintf( cmnt, sizeof(cmnt), "stage %d", i );
cvWriteComment( fs.fs, cmnt, 0 );
fs.writeComment(cmnt);
fs << "{";
(*it)->write( fs, featureMap );
fs << "}";
@@ -466,7 +466,7 @@ void CvCascadeClassifier::save( const string filename, bool baseFormat )
fs << ICV_HAAR_TREES_NAME << "[";
for( int wi = 0; wi < weak->total; wi++ )
{
int inner_node_idx = -1, total_inner_node_idx = -1;
int total_inner_node_idx = -1;
queue<const CvDTreeNode*> inner_nodes_queue;
CvCascadeBoostTree* tree = *((CvCascadeBoostTree**) cvGetSeqElem( weak, wi ));
@@ -482,7 +482,6 @@ void CvCascadeClassifier::save( const string filename, bool baseFormat )
while (!inner_nodes_queue.empty())
{
tempNode = inner_nodes_queue.front();
inner_node_idx++;
fs << "{";
fs << ICV_HAAR_FEATURE_NAME << "{";

@@ -800,13 +800,12 @@ public:
virtual const CvMat* get_var_importance();
CV_WRAP virtual void clear();
virtual void read( CvFileStorage* fs, CvFileNode* node );
virtual void write( CvFileStorage* fs, const char* name ) const;
virtual void read( const cv::FileNode& node );
virtual void write( cv::FileStorage& fs, const char* name ) const;
// special read & write methods for trees in the tree ensembles
virtual void read( CvFileStorage* fs, CvFileNode* node,
CvDTreeTrainData* data );
virtual void write( CvFileStorage* fs ) const;
virtual void read( const cv::FileNode& node, CvDTreeTrainData* data );
virtual void write( cv::FileStorage& fs ) const;
const CvDTreeNode* get_root() const;
int get_pruned_tree_idx() const;
@@ -843,12 +842,12 @@ protected:
virtual void free_prune_data(bool cut_tree);
virtual void free_tree();
virtual void write_node( CvFileStorage* fs, CvDTreeNode* node ) const;
virtual void write_split( CvFileStorage* fs, CvDTreeSplit* split ) const;
virtual CvDTreeNode* read_node( CvFileStorage* fs, CvFileNode* node, CvDTreeNode* parent );
virtual CvDTreeSplit* read_split( CvFileStorage* fs, CvFileNode* node );
virtual void write_tree_nodes( CvFileStorage* fs ) const;
virtual void read_tree_nodes( CvFileStorage* fs, CvFileNode* node );
virtual void write_node( cv::FileStorage& fs, CvDTreeNode* node ) const;
virtual void write_split( cv::FileStorage& fs, CvDTreeSplit* split ) const;
virtual CvDTreeNode* read_node( const cv::FileNode& node, CvDTreeNode* parent );
virtual CvDTreeSplit* read_split( const cv::FileNode& node );
virtual void write_tree_nodes( cv::FileStorage& fs ) const;
virtual void read_tree_nodes( const cv::FileNode& node );
CvDTreeNode* root;
CvMat* var_importance;
@@ -876,7 +875,7 @@ public:
virtual bool train( CvDTreeTrainData* trainData, const CvMat* _subsample_idx, CvRTrees* forest );
virtual int get_var_count() const {return data ? data->var_count : 0;}
virtual void read( CvFileStorage* fs, CvFileNode* node, CvRTrees* forest, CvDTreeTrainData* _data );
virtual void read( cv::FileStorage& fs, cv::FileNode& node, CvRTrees* forest, CvDTreeTrainData* _data );
/* dummy methods to avoid warnings: BEGIN */
virtual bool train( const CvMat* trainData, int tflag,
@@ -886,8 +885,8 @@ public:
CvDTreeParams params=CvDTreeParams() );
virtual bool train( CvDTreeTrainData* trainData, const CvMat* _subsample_idx );
virtual void read( CvFileStorage* fs, CvFileNode* node );
virtual void read( CvFileStorage* fs, CvFileNode* node,
virtual void read( cv::FileStorage& fs, cv::FileNode& node );
virtual void read( cv::FileStorage& fs, cv::FileNode& node,
CvDTreeTrainData* data );
/* dummy methods to avoid warnings: END */
@@ -949,8 +948,8 @@ public:
virtual float get_train_error();
virtual void read( CvFileStorage* fs, CvFileNode* node );
virtual void write( CvFileStorage* fs, const char* name ) const;
virtual void read( cv::FileStorage& fs, cv::FileNode& node );
virtual void write( cv::FileStorage& fs, const char* name ) const;
CvMat* get_active_var_mask();
CvRNG* get_rng();
@@ -1067,7 +1066,7 @@ public:
const CvMat* subsample_idx, CvBoost* ensemble );
virtual void scale( double s );
virtual void read( CvFileStorage* fs, CvFileNode* node,
virtual void read( const cv::FileNode& node,
CvBoost* ensemble, CvDTreeTrainData* _data );
virtual void clear();
@@ -1079,9 +1078,8 @@ public:
CvDTreeParams params=CvDTreeParams() );
virtual bool train( CvDTreeTrainData* trainData, const CvMat* _subsample_idx );
virtual void read( CvFileStorage* fs, CvFileNode* node );
virtual void read( CvFileStorage* fs, CvFileNode* node,
CvDTreeTrainData* data );
virtual void read( cv::FileNode& node );
virtual void read( cv::FileNode& node, CvDTreeTrainData* data );
/* dummy methods to avoid warnings: END */
protected:
@@ -1160,8 +1158,8 @@ public:
CV_WRAP virtual void clear();
virtual void write( CvFileStorage* storage, const char* name ) const;
virtual void read( CvFileStorage* storage, CvFileNode* node );
virtual void write( cv::FileStorage& storage, const char* name ) const;
virtual void read( cv::FileNode& node );
virtual const CvMat* get_active_vars(bool absolute_idx=true);
CvSeq* get_weak_predictors();
@@ -1177,8 +1175,8 @@ protected:
virtual bool set_params( const CvBoostParams& params );
virtual void update_weights( CvBoostTree* tree );
virtual void trim_weights();
virtual void write_params( CvFileStorage* fs ) const;
virtual void read_params( CvFileStorage* fs, CvFileNode* node );
virtual void write_params( cv::FileStorage & fs ) const;
virtual void read_params( cv::FileNode& node );
virtual void initialize_weights(double (&p)[2]);
@@ -1548,7 +1546,7 @@ public:
// Write parameters of the gtb model and data. Write learned model.
//
// API
// virtual void write( CvFileStorage* fs, const char* name ) const;
// virtual void write( cv::FileStorage& fs, const char* name ) const;
//
// INPUT
// fs - file storage to read parameters from.
@@ -1556,7 +1554,7 @@ public:
// OUTPUT
// RESULT
*/
virtual void write( CvFileStorage* fs, const char* name ) const;
virtual void write( cv::FileStorage& fs, const char* name ) const;
/*
@@ -1564,7 +1562,7 @@ public:
// Read parameters of the gtb model and data. Read learned model.
//
// API
// virtual void read( CvFileStorage* fs, CvFileNode* node );
// virtual void read( cv::FileStorage& fs, cv::FileNode& node );
//
// INPUT
// fs - file storage to read parameters from.
@@ -1572,7 +1570,7 @@ public:
// OUTPUT
// RESULT
*/
virtual void read( CvFileStorage* fs, CvFileNode* node );
virtual void read( cv::FileStorage& fs, cv::FileNode& node );
// new-style C++ interface
@@ -1723,14 +1721,14 @@ protected:
// Write parameters of the gtb model.
//
// API
// virtual void write_params( CvFileStorage* fs ) const;
// virtual void write_params( cv::FileStorage& fs ) const;
//
// INPUT
// fs - file storage to write parameters to.
// OUTPUT
// RESULT
*/
virtual void write_params( CvFileStorage* fs ) const;
virtual void write_params( cv::FileStorage& fs ) const;
/*
@@ -1750,7 +1748,7 @@ protected:
// class_labels - output class labels map.
// RESULT
*/
virtual void read_params( CvFileStorage* fs, CvFileNode* fnode );
virtual void read_params( cv::FileStorage& fs, cv::FileNode& fnode );
int get_len(const CvMat* mat) const;
@@ -1846,8 +1844,8 @@ public:
// available training flags
enum { UPDATE_WEIGHTS = 1, NO_INPUT_SCALE = 2, NO_OUTPUT_SCALE = 4 };
virtual void read( CvFileStorage* fs, CvFileNode* node );
virtual void write( CvFileStorage* storage, const char* name ) const;
virtual void read( cv::FileStorage& fs, cv::FileNode& node );
virtual void write( cv::FileStorage& storage, const char* name ) const;
int get_layer_count() { return layer_sizes ? layer_sizes->cols : 0; }
const CvMat* get_layer_sizes() { return layer_sizes; }
@@ -1880,8 +1878,8 @@ protected:
virtual void calc_input_scale( const CvVectors* vecs, int flags );
virtual void calc_output_scale( const CvVectors* vecs, int flags );
virtual void write_params( CvFileStorage* fs ) const;
virtual void read_params( CvFileStorage* fs, CvFileNode* node );
virtual void write_params( cv::FileStorage& fs ) const;
virtual void read_params( cv::FileStorage& fs, cv::FileNode& node );
CvMat* layer_sizes;
CvMat* wbuf;

@@ -876,21 +876,21 @@ CvBoostTree::calc_node_value( CvDTreeNode* node )
}
void CvBoostTree::read( CvFileStorage* fs, CvFileNode* fnode, CvBoost* _ensemble, CvDTreeTrainData* _data )
void CvBoostTree::read( const cv::FileNode& fnode, CvBoost* _ensemble, CvDTreeTrainData* _data )
{
CvDTree::read( fs, fnode, _data );
CvDTree::read( fnode, _data );
ensemble = _ensemble;
}
void CvBoostTree::read( CvFileStorage*, CvFileNode* )
void CvBoostTree::read( cv::FileNode& )
{
assert(0);
}
void CvBoostTree::read( CvFileStorage* _fs, CvFileNode* _node,
void CvBoostTree::read( cv::FileNode& _node,
CvDTreeTrainData* _data )
{
CvDTree::read( _fs, _node, _data );
CvDTree::read( _node, _data );
}
@@ -1884,7 +1884,7 @@ float CvBoost::calc_error( CvMLData* _data, int type, std::vector<float> *resp )
return err;
}
void CvBoost::write_params( CvFileStorage* fs ) const
void CvBoost::write_params( cv::FileStorage& fs ) const
{
const char* boost_type_str =
params.boost_type == DISCRETE ? "DiscreteAdaboost" :
@@ -1899,35 +1899,33 @@ void CvBoost::write_params( CvFileStorage* fs ) const
params.boost_type == SQERR ? "SquaredErr" : 0;
if( boost_type_str )
cvWriteString( fs, "boosting_type", boost_type_str );
fs.write( "boosting_type", boost_type_str );
else
cvWriteInt( fs, "boosting_type", params.boost_type );
fs.write( "boosting_type", params.boost_type );
if( split_crit_str )
cvWriteString( fs, "splitting_criteria", split_crit_str );
fs.write( "splitting_criteria", split_crit_str );
else
cvWriteInt( fs, "splitting_criteria", params.split_criteria );
fs.write( "splitting_criteria", params.split_criteria );
cvWriteInt( fs, "ntrees", weak->total );
cvWriteReal( fs, "weight_trimming_rate", params.weight_trim_rate );
fs.write( "ntrees", weak->total );
fs.write( "weight_trimming_rate", params.weight_trim_rate );
data->write_params( fs );
}
void CvBoost::read_params( CvFileStorage* fs, CvFileNode* fnode )
void CvBoost::read_params( cv::FileNode& fnode )
{
CV_FUNCNAME( "CvBoost::read_params" );
__BEGIN__;
CvFileNode* temp;
if( !fnode || !CV_NODE_IS_MAP(fnode->tag) )
if( fnode.empty() || !fnode.isMap() )
return;
data = new CvDTreeTrainData();
CV_CALL( data->read_params(fs, fnode));
data->read_params( fnode );
data->shared = true;
params.max_depth = data->params.max_depth;
@@ -1937,41 +1935,41 @@ void CvBoost::read_params( CvFileStorage* fs, CvFileNode* fnode )
params.regression_accuracy = data->params.regression_accuracy;
params.use_surrogates = data->params.use_surrogates;
temp = cvGetFileNodeByName( fs, fnode, "boosting_type" );
if( !temp )
cv::FileNode temp = fnode[ "boosting_type" ];
if( temp.empty() )
return;
if( temp && CV_NODE_IS_STRING(temp->tag) )
if ( temp.isString() )
{
const char* boost_type_str = cvReadString( temp, "" );
params.boost_type = strcmp( boost_type_str, "DiscreteAdaboost" ) == 0 ? DISCRETE :
strcmp( boost_type_str, "RealAdaboost" ) == 0 ? REAL :
strcmp( boost_type_str, "LogitBoost" ) == 0 ? LOGIT :
strcmp( boost_type_str, "GentleAdaboost" ) == 0 ? GENTLE : -1;
std::string boost_type_str = temp;
params.boost_type = (boost_type_str == "DiscreteAdaboost") ? DISCRETE :
(boost_type_str == "RealAdaboost") ? REAL :
(boost_type_str == "LogitBoost") ? LOGIT :
(boost_type_str == "GentleAdaboost") ? GENTLE : -1;
}
else
params.boost_type = cvReadInt( temp, -1 );
params.boost_type = temp.empty() ? -1 : (int)temp;
if( params.boost_type < DISCRETE || params.boost_type > GENTLE )
CV_ERROR( CV_StsBadArg, "Unknown boosting type" );
temp = cvGetFileNodeByName( fs, fnode, "splitting_criteria" );
if( temp && CV_NODE_IS_STRING(temp->tag) )
temp = fnode[ "splitting_criteria" ];
if( !temp.empty() && temp.isString() )
{
const char* split_crit_str = cvReadString( temp, "" );
params.split_criteria = strcmp( split_crit_str, "Default" ) == 0 ? DEFAULT :
strcmp( split_crit_str, "Gini" ) == 0 ? GINI :
strcmp( split_crit_str, "Misclassification" ) == 0 ? MISCLASS :
strcmp( split_crit_str, "SquaredErr" ) == 0 ? SQERR : -1;
std::string split_crit_str = temp;
params.split_criteria = ( split_crit_str == "Default" ) ? DEFAULT :
( split_crit_str == "Gini" ) ? GINI :
( split_crit_str == "Misclassification" ) ? MISCLASS :
( split_crit_str == "SquaredErr" ) ? SQERR : -1;
}
else
params.split_criteria = cvReadInt( temp, -1 );
params.split_criteria = temp.empty() ? -1 : (int) temp;
if( params.split_criteria < DEFAULT || params.boost_type > SQERR )
CV_ERROR( CV_StsBadArg, "Unknown boosting type" );
params.weak_count = cvReadIntByName( fs, fnode, "ntrees" );
params.weight_trim_rate = cvReadRealByName( fs, fnode, "weight_trimming_rate", 0. );
params.weak_count = (int) fnode[ "ntrees" ];
params.weight_trim_rate = (double)fnode["weight_trimming_rate"];
__END__;
}
@@ -1979,29 +1977,29 @@ void CvBoost::read_params( CvFileStorage* fs, CvFileNode* fnode )
void
CvBoost::read( CvFileStorage* fs, CvFileNode* node )
CvBoost::read( cv::FileNode& node )
{
CV_FUNCNAME( "CvBoost::read" );
__BEGIN__;
CvSeqReader reader;
CvFileNode* trees_fnode;
cv::FileNodeIterator reader;
cv::FileNode trees_fnode;
CvMemStorage* storage;
int i, ntrees;
int ntrees;
clear();
read_params( fs, node );
read_params( node );
if( !data )
EXIT;
trees_fnode = cvGetFileNodeByName( fs, node, "trees" );
if( !trees_fnode || !CV_NODE_IS_SEQ(trees_fnode->tag) )
trees_fnode = node[ "trees" ];
if( trees_fnode.empty() || !trees_fnode.isSeq() )
CV_ERROR( CV_StsParseError, "<trees> tag is missing" );
cvStartReadSeq( trees_fnode->data.seq, &reader );
ntrees = trees_fnode->data.seq->total;
reader = trees_fnode.begin();
ntrees = (int) trees_fnode.size();
if( ntrees != params.weak_count )
CV_ERROR( CV_StsUnmatchedSizes,
@@ -2010,11 +2008,11 @@ CvBoost::read( CvFileStorage* fs, CvFileNode* node )
CV_CALL( storage = cvCreateMemStorage() );
weak = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvBoostTree*), storage );
for( i = 0; i < ntrees; i++ )
for( int i = 0; i < ntrees; i++ )
{
CvBoostTree* tree = new CvBoostTree();
CV_CALL(tree->read( fs, (CvFileNode*)reader.ptr, this, data ));
CV_NEXT_SEQ_ELEM( reader.seq->elem_size, reader );
tree->read( *reader, this, data );
reader++;
cvSeqPush( weak, &tree );
}
get_active_vars();
@@ -2024,7 +2022,7 @@ CvBoost::read( CvFileStorage* fs, CvFileNode* node )
void
CvBoost::write( CvFileStorage* fs, const char* name ) const
CvBoost::write( cv::FileStorage& fs, const char* name ) const
{
CV_FUNCNAME( "CvBoost::write" );
@@ -2033,27 +2031,27 @@ CvBoost::write( CvFileStorage* fs, const char* name ) const
CvSeqReader reader;
int i;
cvStartWriteStruct( fs, name, CV_NODE_MAP, CV_TYPE_NAME_ML_BOOSTING );
fs.startWriteStruct( name, cv::FileNode::MAP, CV_TYPE_NAME_ML_BOOSTING );
if( !weak )
CV_ERROR( CV_StsBadArg, "The classifier has not been trained yet" );
write_params( fs );
cvStartWriteStruct( fs, "trees", CV_NODE_SEQ );
fs.startWriteStruct( "trees", cv::FileNode::SEQ );
cvStartReadSeq( weak, &reader );
cvStartReadSeq(weak, &reader);
for( i = 0; i < weak->total; i++ )
{
CvBoostTree* tree;
CV_READ_SEQ_ELEM( tree, reader );
cvStartWriteStruct( fs, 0, CV_NODE_MAP );
fs.startWriteStruct( 0, cv::FileNode::MAP );
tree->write( fs );
cvEndWriteStruct( fs );
fs.endWriteStruct();
}
cvEndWriteStruct( fs );
cvEndWriteStruct( fs );
fs.endWriteStruct();
fs.endWriteStruct();
__END__;
}

@@ -60,61 +60,57 @@ void CvStatModel::clear()
void CvStatModel::save( const char* filename, const char* name ) const
{
CvFileStorage* fs = 0;
cv::FileStorage fs;
CV_FUNCNAME( "CvStatModel::save" );
__BEGIN__;
CV_CALL( fs = cvOpenFileStorage( filename, 0, CV_STORAGE_WRITE ));
if( !fs )
if( !fs.open( filename, cv::FileStorage::WRITE ))
CV_ERROR( CV_StsError, "Could not open the file storage. Check the path and permissions" );
write( fs, name ? name : default_model_name );
__END__;
cvReleaseFileStorage( &fs );
}
void CvStatModel::load( const char* filename, const char* name )
{
CvFileStorage* fs = 0;
cv::FileStorage fs;
CV_FUNCNAME( "CvAlgorithm::load" );
CV_FUNCNAME( "CvStatModel::load" );
__BEGIN__;
CvFileNode* model_node = 0;
cv::FileNode model_node;
CV_CALL( fs = cvOpenFileStorage( filename, 0, CV_STORAGE_READ ));
if( !fs )
EXIT;
if( !fs.open(filename, cv::FileStorage::READ) )
CV_ERROR( CV_StsError, "Could not open the file storage. Check the path and permissions" );
if( name )
model_node = cvGetFileNodeByName( fs, 0, name );
model_node = fs[ name ];
else
{
CvFileNode* root = cvGetRootFileNode( fs );
if( root->data.seq->total > 0 )
model_node = (CvFileNode*)cvGetSeqElem( root->data.seq, 0 );
auto root = fs.root();
if ( root.size() > 0 )
model_node = fs[0];
}
read( fs, model_node );
read( model_node );
__END__;
cvReleaseFileStorage( &fs );
}
void CvStatModel::write( CvFileStorage*, const char* ) const
void CvStatModel::write( cv::FileStorage&, const char* ) const
{
OPENCV_ERROR( CV_StsNotImplemented, "CvStatModel::write", "" );
}
void CvStatModel::read( CvFileStorage*, CvFileNode* )
void CvStatModel::read( const cv::FileNode& )
{
OPENCV_ERROR( CV_StsNotImplemented, "CvStatModel::read", "" );
}

@@ -321,9 +321,9 @@ cvWritebackLabels( const CvMat* labels, CvMat* dst_labels,
#define cvWritebackResponses cvWritebackLabels
#define XML_FIELD_NAME "_name"
CvFileNode* icvFileNodeGetChild(CvFileNode* father, const char* name);
CvFileNode* icvFileNodeGetChildArrayElem(CvFileNode* father, const char* name,int index);
CvFileNode* icvFileNodeGetNext(CvFileNode* n, const char* name);
cv::FileNode icvFileNodeGetChild( cv::FileNode& father, const char* name );
cv::FileNode icvFileNodeGetChildArrayElem( cv::FileNode& father, const char* name,int index );
cv::FileNode icvFileNodeGetNext( cv::FileNode& n, const char* name );
void cvCheckTrainData( const CvMat* train_data, int tflag,

@@ -1287,7 +1287,7 @@ int CvDTreeTrainData::get_child_buf_idx( CvDTreeNode* n )
}
void CvDTreeTrainData::write_params( CvFileStorage* fs ) const
void CvDTreeTrainData::write_params( cv::FileStorage& fs ) const
{
CV_FUNCNAME( "CvDTreeTrainData::write_params" );
@@ -1295,113 +1295,118 @@ void CvDTreeTrainData::write_params( CvFileStorage* fs ) const
int vi, vcount = var_count;
cvWriteInt( fs, "is_classifier", is_classifier ? 1 : 0 );
cvWriteInt( fs, "var_all", var_all );
cvWriteInt( fs, "var_count", var_count );
cvWriteInt( fs, "ord_var_count", ord_var_count );
cvWriteInt( fs, "cat_var_count", cat_var_count );
fs.write( "is_classifier", is_classifier ? 1 : 0 );
fs.write( "var_all", var_all );
fs.write( "var_count", var_count );
fs.write( "ord_var_count", ord_var_count );
fs.write( "cat_var_count", cat_var_count );
cvStartWriteStruct( fs, "training_params", CV_NODE_MAP );
cvWriteInt( fs, "use_surrogates", params.use_surrogates ? 1 : 0 );
fs.startWriteStruct( "training_params", FileNode::MAP );
fs.write( "use_surrogates", params.use_surrogates ? 1 : 0 );
if( is_classifier )
{
cvWriteInt( fs, "max_categories", params.max_categories );
fs.write( "max_categories", params.max_categories );
}
else
{
cvWriteReal( fs, "regression_accuracy", params.regression_accuracy );
fs.write( "regression_accuracy", params.regression_accuracy );
}
cvWriteInt( fs, "max_depth", params.max_depth );
cvWriteInt( fs, "min_sample_count", params.min_sample_count );
cvWriteInt( fs, "cross_validation_folds", params.cv_folds );
fs.write( "max_depth", params.max_depth );
fs.write( "min_sample_count", params.min_sample_count );
fs.write( "cross_validation_folds", params.cv_folds );
if( params.cv_folds > 1 )
{
cvWriteInt( fs, "use_1se_rule", params.use_1se_rule ? 1 : 0 );
cvWriteInt( fs, "truncate_pruned_tree", params.truncate_pruned_tree ? 1 : 0 );
fs.write( "use_1se_rule", params.use_1se_rule ? 1 : 0 );
fs.write( "truncate_pruned_tree", params.truncate_pruned_tree ? 1 : 0 );
}
if( priors )
cvWrite( fs, "priors", priors );
fs.write( "priors", cvarrToMat(priors) );
cvEndWriteStruct( fs );
fs.endWriteStruct();
if( var_idx )
cvWrite( fs, "var_idx", var_idx );
fs.write( "var_idx", cvarrToMat(var_idx) );
cvStartWriteStruct( fs, "var_type", CV_NODE_SEQ+CV_NODE_FLOW );
fs.startWriteStruct("var_type", FileNode::SEQ + FileNode::FLOW );
for( vi = 0; vi < vcount; vi++ )
cvWriteInt( fs, 0, var_type->data.i[vi] >= 0 );
fs.write( 0, var_type->data.i[vi] >= 0 );
cvEndWriteStruct( fs );
fs.endWriteStruct();
if( cat_count && (cat_var_count > 0 || is_classifier) )
{
CV_ASSERT( cat_count != 0 );
cvWrite( fs, "cat_count", cat_count );
cvWrite( fs, "cat_map", cat_map );
fs.write( "cat_count", cvarrToMat(cat_count) );
fs.write( "cat_map", cvarrToMat(cat_map) );
}
__END__;
}
void CvDTreeTrainData::read_params( CvFileStorage* fs, CvFileNode* node )
void CvDTreeTrainData::read_params( const cv::FileNode& node )
{
CV_FUNCNAME( "CvDTreeTrainData::read_params" );
__BEGIN__;
CvFileNode *tparams_node, *vartype_node;
CvSeqReader reader;
cv::FileNode tparams_node, vartype_node;
FileNodeIterator reader;
int vi, max_split_size, tree_block_size;
is_classifier = (cvReadIntByName( fs, node, "is_classifier" ) != 0);
var_all = cvReadIntByName( fs, node, "var_all" );
var_count = cvReadIntByName( fs, node, "var_count", var_all );
cat_var_count = cvReadIntByName( fs, node, "cat_var_count" );
ord_var_count = cvReadIntByName( fs, node, "ord_var_count" );
is_classifier = (int) node[ "is_classifier" ] != 0;
var_all = (int) node[ "var_all" ];
var_count = node[ "var_count" ].empty() ? var_all : (int)node[ "var_count" ];
cat_var_count = (int) node[ "cat_var_count" ];
ord_var_count = (int) node[ "ord_var_count" ];
tparams_node = cvGetFileNodeByName( fs, node, "training_params" );
tparams_node = node[ "training_params" ];
if( tparams_node ) // training parameters are not necessary
if( !tparams_node.empty() ) // training parameters are not necessary
{
params.use_surrogates = cvReadIntByName( fs, tparams_node, "use_surrogates", 1 ) != 0;
params.use_surrogates = (tparams_node[ "use_surrogates" ].empty() ? 1 : (int)tparams_node[ "use_surrogates" ] ) != 0;
if( is_classifier )
{
params.max_categories = cvReadIntByName( fs, tparams_node, "max_categories" );
params.max_categories = (int) tparams_node[ "max_categories" ];
}
else
{
params.regression_accuracy =
(float)cvReadRealByName( fs, tparams_node, "regression_accuracy" );
params.regression_accuracy = (float) tparams_node[ "regression_accuracy" ];
}
params.max_depth = cvReadIntByName( fs, tparams_node, "max_depth" );
params.min_sample_count = cvReadIntByName( fs, tparams_node, "min_sample_count" );
params.cv_folds = cvReadIntByName( fs, tparams_node, "cross_validation_folds" );
params.max_depth = (int) tparams_node[ "max_depth" ];
params.min_sample_count = (int) tparams_node[ "min_sample_count" ];
params.cv_folds = (int) tparams_node[ "cross_validation_folds" ];
if( params.cv_folds > 1 )
{
params.use_1se_rule = cvReadIntByName( fs, tparams_node, "use_1se_rule" ) != 0;
params.truncate_pruned_tree =
cvReadIntByName( fs, tparams_node, "truncate_pruned_tree" ) != 0;
params.use_1se_rule = (int)tparams_node[ "use_1se_rule" ] != 0;
params.truncate_pruned_tree = (int) tparams_node[ "truncate_pruned_tree" ] != 0;
}
priors = (CvMat*)cvReadByName( fs, tparams_node, "priors" );
if( priors )
priors = nullptr;
if(!tparams_node[ "priors" ].empty())
{
auto tmat = cvMat( tparams_node[ "priors" ].mat() );
priors = cvCloneMat( &tmat );
if( !CV_IS_MAT(priors) )
CV_ERROR( CV_StsParseError, "priors must stored as a matrix" );
priors_mult = cvCloneMat( priors );
}
}
CV_CALL( var_idx = (CvMat*)cvReadByName( fs, node, "var_idx" ));
var_idx = nullptr;
if (!node[ "var_idx" ].empty())
{
auto tmat = cvMat( tparams_node[ "var_idx" ].mat() );
var_idx = cvCloneMat( &tmat );
}
if( var_idx )
{
if( !CV_IS_MAT(var_idx) ||
@@ -1421,25 +1426,25 @@ void CvDTreeTrainData::read_params( CvFileStorage* fs, CvFileNode* node )
cat_var_count = 0;
ord_var_count = -1;
vartype_node = cvGetFileNodeByName( fs, node, "var_type" );
vartype_node = node[ "var_type" ];
if( vartype_node && CV_NODE_TYPE(vartype_node->tag) == CV_NODE_INT && var_count == 1 )
var_type->data.i[0] = vartype_node->data.i ? cat_var_count++ : ord_var_count--;
if( !vartype_node.empty() && vartype_node.isInt() && var_count == 1 )
var_type->data.i[0] = (int)vartype_node ? cat_var_count++ : ord_var_count--;
else
{
if( !vartype_node || CV_NODE_TYPE(vartype_node->tag) != CV_NODE_SEQ ||
vartype_node->data.seq->total != var_count )
if( vartype_node.empty() || !vartype_node.isSeq() ||
vartype_node.size() != (size_t) var_count )
CV_ERROR( CV_StsParseError, "var_type must exist and be a sequence of 0's and 1's" );
cvStartReadSeq( vartype_node->data.seq, &reader );
reader = vartype_node.begin();
for( vi = 0; vi < var_count; vi++ )
{
CvFileNode* n = (CvFileNode*)reader.ptr;
if( CV_NODE_TYPE(n->tag) != CV_NODE_INT || (n->data.i & ~1) )
cv::FileNode n = *reader;
if( !n.isInt() || ((int) n & ~1) )
CV_ERROR( CV_StsParseError, "var_type must exist and be a sequence of 0's and 1's" );
var_type->data.i[vi] = n->data.i ? cat_var_count++ : ord_var_count--;
CV_NEXT_SEQ_ELEM( reader.seq->elem_size, reader );
var_type->data.i[vi] = (int) n ? cat_var_count++ : ord_var_count--;
reader++;
}
}
var_type->data.i[var_count] = cat_var_count;
@@ -1450,8 +1455,12 @@ void CvDTreeTrainData::read_params( CvFileStorage* fs, CvFileNode* node )
if( cat_var_count > 0 || is_classifier )
{
int ccount, total_c_count = 0;
CV_CALL( cat_count = (CvMat*)cvReadByName( fs, node, "cat_count" ));
CV_CALL( cat_map = (CvMat*)cvReadByName( fs, node, "cat_map" ));
auto cat_count_m = cvMat( node["cat_count"].mat() );
cat_count = cvCloneMat( &cat_count_m );
auto cat_map_m = cvMat( node[ "cat_map" ].mat() );
cat_map = cvCloneMat( &cat_map_m );
if( !CV_IS_MAT(cat_count) || !CV_IS_MAT(cat_map) ||
(cat_count->cols != 1 && cat_count->rows != 1) ||
@@ -3690,10 +3699,8 @@ CvDTreeNode* CvDTree::predict( const CvMat* _sample,
CV_Error( CV_StsBadArg,
"one of input categorical variable is not an integer" );
int sh = 0;
while( a < b )
{
sh++;
c = (a + b) >> 1;
if( ival < cmap[c] )
b = c;
@@ -3779,13 +3786,13 @@ const CvMat* CvDTree::get_var_importance()
}
void CvDTree::write_split( CvFileStorage* fs, CvDTreeSplit* split ) const
void CvDTree::write_split( cv::FileStorage& fs, CvDTreeSplit* split ) const
{
int ci;
cvStartWriteStruct( fs, 0, CV_NODE_MAP + CV_NODE_FLOW );
cvWriteInt( fs, "var", split->var_idx );
cvWriteReal( fs, "quality", split->quality );
fs.startWriteStruct( 0, FileNode::MAP + FileNode::FLOW );
fs.write( "var", split->var_idx );
fs.write( "quality", split->quality );
ci = data->get_var_type(split->var_idx);
if( ci >= 0 ) // split on a categorical var
@@ -3798,59 +3805,57 @@ void CvDTree::write_split( CvFileStorage* fs, CvDTreeSplit* split ) const
// to achieve more compact and clear representation
default_dir = to_right <= 1 || to_right <= MIN(3, n/2) || to_right <= n/3 ? -1 : 1;
cvStartWriteStruct( fs, default_dir*(split->inversed ? -1 : 1) > 0 ?
"in" : "not_in", CV_NODE_SEQ+CV_NODE_FLOW );
fs.startWriteStruct( default_dir*(split->inversed ? -1 : 1) > 0 ?
"in" : "not_in", FileNode::SEQ+FileNode::FLOW );
for( i = 0; i < n; i++ )
{
int dir = CV_DTREE_CAT_DIR(i,split->subset);
if( dir*default_dir < 0 )
cvWriteInt( fs, 0, i );
fs.write( 0, i );
}
cvEndWriteStruct( fs );
fs.endWriteStruct();
}
else
cvWriteReal( fs, !split->inversed ? "le" : "gt", split->ord.c );
fs.write( !split->inversed ? "le" : "gt", split->ord.c );
cvEndWriteStruct( fs );
fs.endWriteStruct();
}
void CvDTree::write_node( CvFileStorage* fs, CvDTreeNode* node ) const
void CvDTree::write_node( cv::FileStorage& fs, CvDTreeNode* node ) const
{
CvDTreeSplit* split;
cvStartWriteStruct( fs, 0, CV_NODE_MAP );
fs.startWriteStruct( 0, FileNode::MAP );
cvWriteInt( fs, "depth", node->depth );
cvWriteInt( fs, "sample_count", node->sample_count );
cvWriteReal( fs, "value", node->value );
fs.write( "depth", node->depth );
fs.write( "sample_count", node->sample_count );
fs.write( "value", node->value );
if( data->is_classifier )
cvWriteInt( fs, "norm_class_idx", node->class_idx );
fs.write( "norm_class_idx", node->class_idx );
cvWriteInt( fs, "Tn", node->Tn );
cvWriteInt( fs, "complexity", node->complexity );
cvWriteReal( fs, "alpha", node->alpha );
cvWriteReal( fs, "node_risk", node->node_risk );
cvWriteReal( fs, "tree_risk", node->tree_risk );
cvWriteReal( fs, "tree_error", node->tree_error );
fs.write( "Tn", node->Tn );
fs.write( "complexity", node->complexity );
fs.write( "alpha", node->alpha );
fs.write( "node_risk", node->node_risk );
fs.write( "tree_risk", node->tree_risk );
fs.write( "tree_error", node->tree_error );
if( node->left )
{
cvStartWriteStruct( fs, "splits", CV_NODE_SEQ );
fs.startWriteStruct( "splits", FileNode::SEQ );
for( split = node->split; split != 0; split = split->next )
for( CvDTreeSplit* split = node->split; split != 0; split = split->next )
write_split( fs, split );
cvEndWriteStruct( fs );
fs.endWriteStruct();
}
cvEndWriteStruct( fs );
fs.endWriteStruct();
}
void CvDTree::write_tree_nodes( CvFileStorage* fs ) const
void CvDTree::write_tree_nodes( cv::FileStorage& fs ) const
{
//CV_FUNCNAME( "CvDTree::write_tree_nodes" );
@@ -3884,13 +3889,13 @@ void CvDTree::write_tree_nodes( CvFileStorage* fs ) const
}
void CvDTree::write( CvFileStorage* fs, const char* name ) const
void CvDTree::write( cv::FileStorage& fs, const char* name ) const
{
//CV_FUNCNAME( "CvDTree::write" );
__BEGIN__;
cvStartWriteStruct( fs, name, CV_NODE_MAP, CV_TYPE_NAME_ML_TREE );
fs.startWriteStruct( name, FileNode::MAP, CV_TYPE_NAME_ML_TREE );
//get_var_importance();
data->write_params( fs );
@@ -3898,29 +3903,29 @@ void CvDTree::write( CvFileStorage* fs, const char* name ) const
//cvWrite( fs, "var_importance", var_importance );
write( fs );
cvEndWriteStruct( fs );
fs.endWriteStruct();
__END__;
}
void CvDTree::write( CvFileStorage* fs ) const
void CvDTree::write( cv::FileStorage& fs ) const
{
//CV_FUNCNAME( "CvDTree::write" );
__BEGIN__;
cvWriteInt( fs, "best_tree_idx", pruned_tree_idx );
fs.write( "best_tree_idx", pruned_tree_idx );
cvStartWriteStruct( fs, "nodes", CV_NODE_SEQ );
fs.startWriteStruct( "nodes", FileNode::SEQ );
write_tree_nodes( fs );
cvEndWriteStruct( fs );
fs.endWriteStruct();
__END__;
}
CvDTreeSplit* CvDTree::read_split( CvFileStorage* fs, CvFileNode* fnode )
CvDTreeSplit* CvDTree::read_split( const cv::FileNode& fnode )
{
CvDTreeSplit* split = 0;
@@ -3930,10 +3935,10 @@ CvDTreeSplit* CvDTree::read_split( CvFileStorage* fs, CvFileNode* fnode )
int vi, ci;
if( !fnode || CV_NODE_TYPE(fnode->tag) != CV_NODE_MAP )
if( fnode.empty() || !fnode.isMap() )
CV_ERROR( CV_StsParseError, "some of the splits are not stored properly" );
vi = cvReadIntByName( fs, fnode, "var", -1 );
vi = fnode[ "var" ].empty() ? -1 : (int) fnode[ "var" ];
if( (unsigned)vi >= (unsigned)data->var_count )
CV_ERROR( CV_StsOutOfRange, "Split variable index is out of range" );
@@ -3941,23 +3946,23 @@ CvDTreeSplit* CvDTree::read_split( CvFileStorage* fs, CvFileNode* fnode )
if( ci >= 0 ) // split on categorical var
{
int i, n = data->cat_count->data.i[ci], inversed = 0, val;
CvSeqReader reader;
CvFileNode* inseq;
FileNodeIterator reader;
cv::FileNode inseq;
split = data->new_split_cat( vi, 0 );
inseq = cvGetFileNodeByName( fs, fnode, "in" );
if( !inseq )
inseq = fnode[ "in" ];
if( inseq.empty() )
{
inseq = cvGetFileNodeByName( fs, fnode, "not_in" );
inseq = fnode[ "not_in" ];
inversed = 1;
}
if( !inseq ||
(CV_NODE_TYPE(inseq->tag) != CV_NODE_SEQ && CV_NODE_TYPE(inseq->tag) != CV_NODE_INT))
if( inseq.empty() ||
(!inseq.isSeq() && !inseq.isInt()))
CV_ERROR( CV_StsParseError,
"Either 'in' or 'not_in' tags should be inside a categorical split data" );
if( CV_NODE_TYPE(inseq->tag) == CV_NODE_INT )
if( inseq.isInt() )
{
val = inseq->data.i;
val = (int) inseq;
if( (unsigned)val >= (unsigned)n )
CV_ERROR( CV_StsOutOfRange, "some of in/not_in elements are out of range" );
@@ -3965,17 +3970,17 @@ CvDTreeSplit* CvDTree::read_split( CvFileStorage* fs, CvFileNode* fnode )
}
else
{
cvStartReadSeq( inseq->data.seq, &reader );
reader = inseq.begin();
for( i = 0; i < reader.seq->total; i++ )
for( i = 0; i < (int) (*reader).size(); i++ )
{
CvFileNode* inode = (CvFileNode*)reader.ptr;
val = inode->data.i;
if( CV_NODE_TYPE(inode->tag) != CV_NODE_INT || (unsigned)val >= (unsigned)n )
cv::FileNode inode = *reader;
val = (int) inode;
if( !inode.isInt() || (unsigned)val >= (unsigned)n )
CV_ERROR( CV_StsOutOfRange, "some of in/not_in elements are out of range" );
split->subset[val >> 5] |= 1 << (val & 31);
CV_NEXT_SEQ_ELEM( reader.seq->elem_size, reader );
reader++;
}
}
@@ -3987,20 +3992,20 @@ CvDTreeSplit* CvDTree::read_split( CvFileStorage* fs, CvFileNode* fnode )
}
else
{
CvFileNode* cmp_node;
cv::FileNode cmp_node;
split = data->new_split_ord( vi, 0, 0, 0, 0 );
cmp_node = cvGetFileNodeByName( fs, fnode, "le" );
if( !cmp_node )
cmp_node = fnode[ "le" ];
if( cmp_node.empty() )
{
cmp_node = cvGetFileNodeByName( fs, fnode, "gt" );
cmp_node = fnode[ "gt" ];
split->inversed = 1;
}
split->ord.c = (float)cvReadReal( cmp_node );
split->ord.c = (float) cmp_node;
}
split->quality = (float)cvReadRealByName( fs, fnode, "quality" );
split->quality = (float) fnode[ "quality" ];
__END__;
@@ -4008,7 +4013,7 @@ CvDTreeSplit* CvDTree::read_split( CvFileStorage* fs, CvFileNode* fnode )
}
CvDTreeNode* CvDTree::read_node( CvFileStorage* fs, CvFileNode* fnode, CvDTreeNode* parent )
CvDTreeNode* CvDTree::read_node( const cv::FileNode& fnode, CvDTreeNode* parent )
{
CvDTreeNode* node = 0;
@@ -4016,49 +4021,49 @@ CvDTreeNode* CvDTree::read_node( CvFileStorage* fs, CvFileNode* fnode, CvDTreeNo
__BEGIN__;
CvFileNode* splits;
cv::FileNode splits;
int i, depth;
if( !fnode || CV_NODE_TYPE(fnode->tag) != CV_NODE_MAP )
if( fnode.empty() || !fnode.isMap() )
CV_ERROR( CV_StsParseError, "some of the tree elements are not stored properly" );
CV_CALL( node = data->new_node( parent, 0, 0, 0 ));
depth = cvReadIntByName( fs, fnode, "depth", -1 );
depth = fnode[ "depth" ].empty() ? -1 : (int) fnode[ "depth" ];
if( depth != node->depth )
CV_ERROR( CV_StsParseError, "incorrect node depth" );
node->sample_count = cvReadIntByName( fs, fnode, "sample_count" );
node->value = cvReadRealByName( fs, fnode, "value" );
node->sample_count = (int) fnode[ "sample_count" ];
node->value = (double) fnode[ "value" ];
if( data->is_classifier )
node->class_idx = cvReadIntByName( fs, fnode, "norm_class_idx" );
node->class_idx = (int) fnode[ "norm_class_idx" ];
node->Tn = cvReadIntByName( fs, fnode, "Tn" );
node->complexity = cvReadIntByName( fs, fnode, "complexity" );
node->alpha = cvReadRealByName( fs, fnode, "alpha" );
node->node_risk = cvReadRealByName( fs, fnode, "node_risk" );
node->tree_risk = cvReadRealByName( fs, fnode, "tree_risk" );
node->tree_error = cvReadRealByName( fs, fnode, "tree_error" );
node->Tn = (int) fnode[ "Tn" ];
node->complexity = (int) fnode[ "complexity" ];
node->alpha = (double) fnode[ "alpha" ];
node->node_risk = (double) fnode[ "node_risk" ];
node->tree_risk = (double) fnode[ "tree_risk" ];
node->tree_error = (double) fnode[ "tree_error" ];
splits = cvGetFileNodeByName( fs, fnode, "splits" );
if( splits )
splits = fnode[ "splits" ];
if( !splits.empty() )
{
CvSeqReader reader;
FileNodeIterator reader;
CvDTreeSplit* last_split = 0;
if( CV_NODE_TYPE(splits->tag) != CV_NODE_SEQ )
if( !splits.isSeq() )
CV_ERROR( CV_StsParseError, "splits tag must stored as a sequence" );
cvStartReadSeq( splits->data.seq, &reader );
for( i = 0; i < reader.seq->total; i++ )
reader = splits.begin();
for( i = 0; i < (int) (*reader).size(); i++ )
{
CvDTreeSplit* split;
CV_CALL( split = read_split( fs, (CvFileNode*)reader.ptr ));
CV_CALL( split = read_split( *reader ));
if( !last_split )
node->split = last_split = split;
else
last_split = last_split->next = split;
CV_NEXT_SEQ_ELEM( reader.seq->elem_size, reader );
reader++;
}
}
@@ -4068,25 +4073,25 @@ CvDTreeNode* CvDTree::read_node( CvFileStorage* fs, CvFileNode* fnode, CvDTreeNo
}
void CvDTree::read_tree_nodes( CvFileStorage* fs, CvFileNode* fnode )
void CvDTree::read_tree_nodes( const cv::FileNode& fnode )
{
CV_FUNCNAME( "CvDTree::read_tree_nodes" );
__BEGIN__;
CvSeqReader reader;
FileNodeIterator reader;
CvDTreeNode _root;
CvDTreeNode* parent = &_root;
int i;
parent->left = parent->right = parent->parent = 0;
cvStartReadSeq( fnode->data.seq, &reader );
reader = fnode.begin();
for( i = 0; i < reader.seq->total; i++ )
for( i = 0; i < (int) (*reader).size(); i++ )
{
CvDTreeNode* node;
CV_CALL( node = read_node( fs, (CvFileNode*)reader.ptr, parent != &_root ? parent : 0 ));
CV_CALL( node = read_node( *reader, parent != &_root ? parent : 0 ));
if( !parent->left )
parent->left = node;
else
@@ -4099,7 +4104,7 @@ void CvDTree::read_tree_nodes( CvFileStorage* fs, CvFileNode* fnode )
parent = parent->parent;
}
CV_NEXT_SEQ_ELEM( reader.seq->elem_size, reader );
reader++;
}
root = _root.left;
@@ -4108,34 +4113,34 @@ void CvDTree::read( CvFileStorage* fs, CvFileNode* fnode )
}
void CvDTree::read( CvFileStorage* fs, CvFileNode* fnode )
void CvDTree::read( const cv::FileNode& fnode )
{
CvDTreeTrainData* _data = new CvDTreeTrainData();
_data->read_params( fs, fnode );
_data->read_params( fnode );
read( fs, fnode, _data );
read( fnode, _data );
get_var_importance();
}
// a special entry point for reading weak decision trees from the tree ensembles
void CvDTree::read( CvFileStorage* fs, CvFileNode* node, CvDTreeTrainData* _data )
void CvDTree::read( const cv::FileNode& node, CvDTreeTrainData* _data )
{
CV_FUNCNAME( "CvDTree::read" );
__BEGIN__;
CvFileNode* tree_nodes;
cv::FileNode tree_nodes;
clear();
data = _data;
tree_nodes = cvGetFileNodeByName( fs, node, "nodes" );
if( !tree_nodes || CV_NODE_TYPE(tree_nodes->tag) != CV_NODE_SEQ )
tree_nodes = node[ "nodes" ];
if( tree_nodes.empty() || !tree_nodes.isSeq() )
CV_ERROR( CV_StsParseError, "nodes tag is missing" );
pruned_tree_idx = cvReadIntByName( fs, node, "best_tree_idx", -1 );
read_tree_nodes( fs, tree_nodes );
pruned_tree_idx = node[ "best_tree_idx" ].empty() ? -1 : node[ "best_tree_idx" ];
read_tree_nodes( tree_nodes );
__END__;
}
