made random generators of MLL classes depend on default rng (theRNG) (#205).

pull/13383/head
Maria Dimashova 14 years ago
parent 7860d52e14
commit c104cdce96
  1. 10
      modules/ml/include/opencv2/ml/ml.hpp
  2. 12
      modules/ml/src/ann_mlp.cpp
  3. 6
      modules/ml/src/data.cpp
  4. 8
      modules/ml/src/em.cpp
  5. 25
      modules/ml/src/ertrees.cpp
  6. 6
      modules/ml/src/gbt.cpp
  7. 12
      modules/ml/src/rtrees.cpp
  8. 6
      modules/ml/src/svm.cpp
  9. 21
      modules/ml/src/tree.cpp
  10. 2
      modules/traincascade/boost.cpp

@@ -913,7 +913,7 @@ struct CV_EXPORTS CvDTreeTrainData
CvSet* cv_heap;
CvSet* nv_heap;
CvRNG rng;
cv::RNG* rng;
};
class CvDTree;
@@ -1147,7 +1147,7 @@ protected:
CvMat* var_importance;
int nsamples;
CvRNG rng;
cv::RNG* rng;
CvMat* active_var_mask;
};
@@ -1908,7 +1908,7 @@ protected:
CvMat* missing;
CvMat* class_labels;
CvRNG rng;
cv::RNG* rng;
int class_count;
float delta;
@@ -2034,7 +2034,7 @@ protected:
int activ_func;
int max_count, max_buf_sz;
CvANN_MLP_TrainParams params;
CvRNG rng;
cv::RNG* rng;
};
/****************************************************************************************\
@@ -2177,7 +2177,7 @@ protected:
CvMat* test_sample_idx;
int* sample_idx; // data of train_sample_idx and test_sample_idx
CvRNG rng;
cv::RNG* rng;
};

@@ -95,7 +95,7 @@ CvANN_MLP::CvANN_MLP()
layer_sizes = wbuf = 0;
min_val = max_val = min_val1 = max_val1 = 0.;
weights = 0;
rng = cvRNG(-1);
rng = &cv::theRNG();
default_model_name = "my_nn";
clear();
}
@@ -108,7 +108,7 @@ CvANN_MLP::CvANN_MLP( const CvMat* _layer_sizes,
layer_sizes = wbuf = 0;
min_val = max_val = min_val1 = max_val1 = 0.;
weights = 0;
rng = cvRNG(-1);
rng = &cv::theRNG();
default_model_name = "my_nn";
create( _layer_sizes, _activ_func, _f_param1, _f_param2 );
}
@@ -190,7 +190,7 @@ void CvANN_MLP::init_weights()
double s = 0;
for( k = 0; k <= n1; k++ )
{
val = cvRandReal(&rng)*2-1.;
val = rng->uniform(0., 1.)*2-1.;
w[k*n2 + j] = val;
s += fabs(val);
}
@@ -928,8 +928,8 @@ int CvANN_MLP::train_backprop( CvVectors x0, CvVectors u, const double* sw )
for( i = 0; i < count; i++ )
{
int tt;
j = (unsigned)cvRandInt(&rng) % count;
k = (unsigned)cvRandInt(&rng) % count;
j = (*rng)(count);
k = (*rng)(count);
CV_SWAP( _idx->data.i[j], _idx->data.i[k], tt );
}
}
@@ -1507,7 +1507,7 @@ CvANN_MLP::CvANN_MLP( const Mat& _layer_sizes, int _activ_func,
layer_sizes = wbuf = 0;
min_val = max_val = min_val1 = max_val1 = 0.;
weights = 0;
rng = cvRNG(-1);
rng = &cv::theRNG();
default_model_name = "my_nn";
create( _layer_sizes, _activ_func, _f_param1, _f_param2 );
}

@@ -84,7 +84,7 @@ CvMLData :: CvMLData()
//flt_separator = '.';
class_map = new std::map<std::string, int>();
rng = cvRNG( -cvGetTickCount() );
rng = &cv::theRNG();
}
CvMLData :: ~CvMLData()
@@ -608,8 +608,8 @@ void CvMLData :: mix_train_and_test_idx()
int n = values->rows;
for (int i = 0; i < n; i++)
{
int a = cvRandInt( &rng ) % n;
int b = cvRandInt( &rng ) % n;
int a = (*rng)(n);
int b = (*rng)(n);
int t;
CV_SWAP( sample_idx[a], sample_idx[b], t );
}

@@ -581,7 +581,7 @@ void CvEM::kmeans( const CvVectors& train_data, int nclusters, CvMat* labels,
__BEGIN__;
CvRNG rng = cvRNG(-1);
cv::RNG* rng = &cv::theRNG();
int i, j, k, nsamples, dims;
int iter = 0;
double max_dist = DBL_MAX;
@@ -605,7 +605,7 @@ void CvEM::kmeans( const CvVectors& train_data, int nclusters, CvMat* labels,
{
for( i = 0; i < nsamples; i++ )
labels->data.i[i] = i*nclusters/nsamples;
cvRandShuffle( labels, &rng );
cvRandShuffle( labels, &rng->state );
}
for( ;; )
@@ -702,7 +702,7 @@ void CvEM::kmeans( const CvVectors& train_data, int nclusters, CvMat* labels,
const float* s;
for( j = 0; j < 10; j++ )
{
i = cvRandInt( &rng ) % nsamples;
i = (*rng)(nsamples);
if( counters->data.i[labels->data.i[i]] > 1 )
break;
}
@@ -738,7 +738,7 @@ void CvEM::kmeans( const CvVectors& train_data, int nclusters, CvMat* labels,
if( counters->data.i[k] == 0 )
for(;;)
{
i = cvRandInt(&rng) % nsamples;
i = (*rng)(nsamples);
j = labels->data.i[i];
if( counters->data.i[j] > 1 )
{

@@ -91,7 +91,7 @@ void CvERTreeTrainData::set_data( const CvMat* _train_data, int _tflag,
clear();
var_all = 0;
rng = cvRNG(-1);
rng = &cv::theRNG();
CV_CALL( set_params( _params ));
@@ -444,7 +444,6 @@ void CvERTreeTrainData::set_data( const CvMat* _train_data, int _tflag,
{
unsigned short* udst = 0;
int* idst = 0;
CvRNG* r = &rng;
if (is_buf_16u)
{
@@ -457,8 +456,8 @@ void CvERTreeTrainData::set_data( const CvMat* _train_data, int _tflag,
for( i = 0; i < sample_count; i++ )
{
int a = cvRandInt(r) % sample_count;
int b = cvRandInt(r) % sample_count;
int a = (*rng)(sample_count);
int b = (*rng)(sample_count);
unsigned short unsh = (unsigned short)vi;
CV_SWAP( udst[a], udst[b], unsh );
}
@@ -474,8 +473,8 @@ void CvERTreeTrainData::set_data( const CvMat* _train_data, int _tflag,
for( i = 0; i < sample_count; i++ )
{
int a = cvRandInt(r) % sample_count;
int b = cvRandInt(r) % sample_count;
int a = (*rng)(sample_count);
int b = (*rng)(sample_count);
CV_SWAP( idst[a], idst[b], vi );
}
}
@@ -894,8 +893,8 @@ CvDTreeSplit* CvForestERTree::find_split_ord_class( CvDTreeNode* node, int vi, f
if (fdiff > epsilon)
{
is_find_split = true;
CvRNG* rng = &data->rng;
split_val = pmin + cvRandReal(rng) * fdiff ;
cv::RNG* rng = data->rng;
split_val = pmin + rng->uniform(0.f, 1.f) * fdiff ;
if (split_val - pmin <= FLT_EPSILON)
split_val = pmin + split_delta;
if (pmax - split_val <= FLT_EPSILON)
@@ -1047,7 +1046,7 @@ CvDTreeSplit* CvForestERTree::find_split_cat_class( CvDTreeNode* node, int vi, f
for (int i = 0; i < valid_ccount; i++)
{
uchar temp;
int i1 = cvRandInt( rng ) % valid_ccount;
int i1 = cvRandInt( rng ) % valid_ccount;
int i2 = cvRandInt( rng ) % valid_ccount;
CV_SWAP( var_class_mask->data.ptr[i1], var_class_mask->data.ptr[i2], temp );
}
@@ -1189,8 +1188,8 @@ CvDTreeSplit* CvForestERTree::find_split_ord_reg( CvDTreeNode* node, int vi, flo
if (fdiff > epsilon)
{
is_find_split = true;
CvRNG* rng = &data->rng;
split_val = pmin + cvRandReal(rng) * fdiff ;
cv::RNG* rng = data->rng;
split_val = pmin + rng->uniform(0.f, 1.f) * fdiff ;
if (split_val - pmin <= FLT_EPSILON)
split_val = pmin + split_delta;
if (pmax - split_val <= FLT_EPSILON)
@@ -1745,8 +1744,8 @@ bool CvERTrees::grow_forest( const CvTermCriteria term_crit )
int i1, i2;
float temp;
i1 = cvRandInt( &rng ) % nsamples;
i2 = cvRandInt( &rng ) % nsamples;
i1 = (*rng)(nsamples);
i2 = (*rng)(nsamples);
CV_SWAP( mth_var_ptr[i1*dims], mth_var_ptr[i2*dims], temp );
// turn values of (m-1)-th variable, that were permuted

@@ -268,7 +268,7 @@ CvGBTrees::train( const CvMat* _train_data, int _tflag,
}
// subsample params and data
rng = CvRNG(time(0));
rng = &cv::theRNG();
int samples_count = get_len(sample_idx);
@@ -698,8 +698,8 @@ void CvGBTrees::do_subsample()
if (subsample_test)
for (int i = 0; i < n; i++)
{
int a = cvRandInt( &rng ) % n;
int b = cvRandInt( &rng ) % n;
int a = (*rng)(n);
int b = (*rng)(n);
int t;
CV_SWAP( idx[a], idx[b], t );
}

@@ -200,7 +200,7 @@ CvRTrees::CvRTrees()
data = NULL;
active_var_mask = NULL;
var_importance = NULL;
rng = cvRNG(0xffffffff);
rng = &cv::theRNG();
default_model_name = "my_random_trees";
}
@@ -235,7 +235,7 @@ CvMat* CvRTrees::get_active_var_mask()
CvRNG* CvRTrees::get_rng()
{
return &rng;
return &rng->state;
}
bool CvRTrees::train( const CvMat* _train_data, int _tflag,
@@ -375,7 +375,7 @@ bool CvRTrees::grow_forest( const CvTermCriteria term_crit )
cvZero( sample_idx_mask_for_tree );
for(i = 0; i < nsamples; i++ ) //form sample for creation one tree
{
int idx = cvRandInt( &rng ) % nsamples;
int idx = (*rng)(nsamples);
sample_idx_for_tree->data.i[i] = idx;
sample_idx_mask_for_tree->data.ptr[idx] = 0xFF;
}
@@ -458,8 +458,8 @@ bool CvRTrees::grow_forest( const CvTermCriteria term_crit )
if( sample_idx_mask_for_tree->data.ptr[i] ) //the sample is not OOB
continue;
i1 = cvRandInt( &rng ) % nsamples;
i2 = cvRandInt( &rng ) % nsamples;
i1 = (*rng)(nsamples);
i2 = (*rng)(nsamples);
CV_SWAP( mth_var_ptr[i1*dims], mth_var_ptr[i2*dims], temp );
// turn values of (m-1)-th variable, that were permuted
@@ -762,7 +762,7 @@ void CvRTrees::read( CvFileStorage* fs, CvFileNode* fnode )
CV_Error( CV_StsParseError, "Some <nclasses>, <nsamples>, <var_count>, "
"<nactive_vars>, <oob_error>, <ntrees> of tags are missing" );
rng = CvRNG( -1 );
rng = &cv::theRNG();
trees = (CvForestTree**)cvAlloc( sizeof(trees[0])*ntrees );
memset( trees, 0, sizeof(trees[0])*ntrees );

@@ -1612,7 +1612,7 @@ bool CvSVM::train_auto( const CvMat* _train_data, const CvMat* _responses,
int block_size = 1 << 16;
double* alpha;
int i, k;
CvRNG rng = cvRNG(-1);
RNG* rng = &theRNG();
// all steps are logarithmic and must be > 1
double degree_step = 10, g_step = 10, coef_step = 10, C_step = 10, nu_step = 10, p_step = 10;
@@ -1745,8 +1745,8 @@ bool CvSVM::train_auto( const CvMat* _train_data, const CvMat* _responses,
// randomly permute samples and responses
for( i = 0; i < sample_count; i++ )
{
int i1 = cvRandInt( &rng ) % sample_count;
int i2 = cvRandInt( &rng ) % sample_count;
int i1 = (*rng)(sample_count);
int i2 = (*rng)(sample_count);
const float* temp;
float t;
int y;

@@ -196,7 +196,7 @@ void CvDTreeTrainData::set_data( const CvMat* _train_data, int _tflag,
clear();
var_all = 0;
rng = cvRNG(-1);
rng = &cv::theRNG();
CV_CALL( set_params( _params ));
@@ -566,7 +566,6 @@ void CvDTreeTrainData::set_data( const CvMat* _train_data, int _tflag,
{
unsigned short* udst = 0;
int* idst = 0;
CvRNG* r = &rng;
if (is_buf_16u)
{
@@ -579,8 +578,8 @@ void CvDTreeTrainData::set_data( const CvMat* _train_data, int _tflag,
for( i = 0; i < sample_count; i++ )
{
int a = cvRandInt(r) % sample_count;
int b = cvRandInt(r) % sample_count;
int a = (*rng)(sample_count);
int b = (*rng)(sample_count);
unsigned short unsh = (unsigned short)vi;
CV_SWAP( udst[a], udst[b], unsh );
}
@@ -596,8 +595,8 @@ void CvDTreeTrainData::set_data( const CvMat* _train_data, int _tflag,
for( i = 0; i < sample_count; i++ )
{
int a = cvRandInt(r) % sample_count;
int b = cvRandInt(r) % sample_count;
int a = (*rng)(sample_count);
int b = (*rng)(sample_count);
CV_SWAP( idst[a], idst[b], vi );
}
}
@@ -1134,7 +1133,7 @@ void CvDTreeTrainData::clear()
data_root = 0;
rng = cvRNG(-1);
rng = &cv::theRNG();
}
@@ -2040,14 +2039,14 @@ void CvDTree::cluster_categories( const int* vectors, int n, int m,
double* buf = (double*)cvStackAlloc( (n + k)*sizeof(buf[0]) );
double *v_weights = buf, *c_weights = buf + n;
bool modified = true;
CvRNG* r = &data->rng;
RNG* r = data->rng;
// assign labels randomly
for( i = 0; i < n; i++ )
{
int sum = 0;
const int* v = vectors + i*m;
labels[i] = i < k ? i : (cvRandInt(r) % k);
labels[i] = i < k ? i : (*r)(k);
// compute weight of each vector
for( j = 0; j < m; j++ )
@@ -2057,8 +2056,8 @@ void CvDTree::cluster_categories( const int* vectors, int n, int m,
for( i = 0; i < n; i++ )
{
int i1 = cvRandInt(r) % n;
int i2 = cvRandInt(r) % n;
int i1 = (*r)(n);
int i2 = (*r)(n);
CV_SWAP( labels[i1], labels[i2], j );
}

@@ -218,7 +218,7 @@ void CvCascadeBoostTrainData::setData( const CvFeatureEvaluator* _featureEvaluat
have_priors = false;
is_classifier = true;
rng = cvRNG(-1);
rng = &cv::theRNG();
set_params( _params );

Loading…
Cancel
Save