Merge pull request #10141 from LaurentBerger:MLP_ReLU

pull/10190/head
Vadim Pisarevsky 7 years ago
commit cfd845ac07
  modules/ml/include/opencv2/ml.hpp (8 lines changed)
  modules/ml/src/ann_mlp.cpp (76 lines changed)
  modules/ml/test/test_mltests2.cpp (72 lines changed)

@@ -1503,14 +1503,18 @@ public:
enum ActivationFunctions {
/** Identity function: \f$f(x)=x\f$ */
IDENTITY = 0,
/** Symmetrical sigmoid: \f$f(x)=\beta*(1-e^{-\alpha x})/(1+e^{-\alpha x}\f$
/** Symmetrical sigmoid: \f$f(x)=\beta*(1-e^{-\alpha x})/(1+e^{-\alpha x})\f$
@note
If you are using the default sigmoid activation function with the default parameter values
fparam1=0 and fparam2=0 then the function used is y = 1.7159\*tanh(2/3 \* x), so the output
will range from [-1.7159, 1.7159], instead of [0,1].*/
SIGMOID_SYM = 1,
/** Gaussian function: \f$f(x)=\beta e^{-\alpha x*x}\f$ */
GAUSSIAN = 2
GAUSSIAN = 2,
/** ReLU function: \f$f(x)=max(0,x)\f$ */
RELU = 3,
/** Leaky ReLU function: for x>0 \f$f(x)=x \f$ and x<=0 \f$f(x)=\alpha x \f$*/
LEAKYRELU= 4
};
/** Train options */

@@ -135,7 +135,7 @@ public:
void setActivationFunction(int _activ_func, double _f_param1, double _f_param2 )
{
if( _activ_func < 0 || _activ_func > GAUSSIAN )
if( _activ_func < 0 || _activ_func > LEAKYRELU)
CV_Error( CV_StsOutOfRange, "Unknown activation function" );
activ_func = _activ_func;
@@ -158,6 +158,18 @@ public:
if (fabs(_f_param2) < FLT_EPSILON)
_f_param2 = 1.;
break;
case RELU:
if (fabs(_f_param1) < FLT_EPSILON)
_f_param1 = 1;
min_val = max_val = min_val1 = max_val1 = 0.;
_f_param2 = 0.;
break;
case LEAKYRELU:
if (fabs(_f_param1) < FLT_EPSILON)
_f_param1 = 0.01;
min_val = max_val = min_val1 = max_val1 = 0.;
_f_param2 = 0.;
break;
default:
min_val = max_val = min_val1 = max_val1 = 0.;
_f_param1 = 1.;
@@ -385,6 +397,12 @@ public:
case GAUSSIAN:
scale = -f_param1*f_param1;
break;
case RELU:
scale = 1;
break;
case LEAKYRELU:
scale = 1;
break;
default:
;
}
@@ -397,10 +415,18 @@ public:
{
double* data = sums.ptr<double>(i);
for (j = 0; j < cols; j++)
{
data[j] = (data[j] + bias[j])*scale;
if (activ_func == RELU)
if (data[j] < 0)
data[j] = 0;
if (activ_func == LEAKYRELU)
if (data[j] < 0)
data[j] *= f_param1;
}
}
if( activ_func == IDENTITY )
if (activ_func == IDENTITY || activ_func == RELU || activ_func == LEAKYRELU)
return;
}
else
@@ -478,6 +504,46 @@ public:
}
}
}
else if (activ_func == RELU)
{
for (i = 0; i < n; i++)
{
double* xf = _xf.ptr<double>(i);
double* df = _df.ptr<double>(i);
for (j = 0; j < cols; j++)
{
xf[j] += bias[j];
if (xf[j] < 0)
{
xf[j] = 0;
df[j] = 0;
}
else
df[j] = 1;
}
}
}
else if (activ_func == LEAKYRELU)
{
for (i = 0; i < n; i++)
{
double* xf = _xf.ptr<double>(i);
double* df = _df.ptr<double>(i);
for (j = 0; j < cols; j++)
{
xf[j] += bias[j];
if (xf[j] < 0)
{
xf[j] = f_param1*xf[j];
df[j] = f_param1;
}
else
df[j] = 1;
}
}
}
else if (activ_func == GAUSSIAN)
{
double scale = -f_param1*f_param1;
@@ -1110,7 +1176,9 @@ public:
{
const char* activ_func_name = activ_func == IDENTITY ? "IDENTITY" :
activ_func == SIGMOID_SYM ? "SIGMOID_SYM" :
activ_func == GAUSSIAN ? "GAUSSIAN" : 0;
activ_func == GAUSSIAN ? "GAUSSIAN" :
activ_func == RELU ? "RELU" :
activ_func == LEAKYRELU ? "LEAKYRELU" : 0;
if( activ_func_name )
fs << "activation_function" << activ_func_name;
@@ -1191,6 +1259,8 @@ public:
{
activ_func = activ_func_name == "SIGMOID_SYM" ? SIGMOID_SYM :
activ_func_name == "IDENTITY" ? IDENTITY :
activ_func_name == "RELU" ? RELU :
activ_func_name == "LEAKYRELU" ? LEAKYRELU :
activ_func_name == "GAUSSIAN" ? GAUSSIAN : -1;
CV_Assert( activ_func >= 0 );
}

@@ -85,6 +85,22 @@ int str_to_ann_train_method( String& str )
return -1;
}
// Maps an activation-function name (as it appears in serialized model files /
// test configs) to the corresponding cv::ml::ANN_MLP::ActivationFunctions value.
// Recognized names: IDENTITY, SIGMOID_SYM, GAUSSIAN, RELU, LEAKYRELU.
// Raises CV_StsBadArg for any other string; the trailing return is never
// reached (CV_Error throws) but keeps the compiler's return-path check happy.
int str_to_ann_activation_function(String& str)
{
    // Direct equality is clearer and less error-prone than !str.compare(...),
    // whose zero-means-equal convention inverts the reader's expectation.
    if (str == "IDENTITY")
        return ANN_MLP::IDENTITY;
    if (str == "SIGMOID_SYM")
        return ANN_MLP::SIGMOID_SYM;
    if (str == "GAUSSIAN")
        return ANN_MLP::GAUSSIAN;
    if (str == "RELU")
        return ANN_MLP::RELU;
    if (str == "LEAKYRELU")
        return ANN_MLP::LEAKYRELU;
    CV_Error(CV_StsBadArg, "incorrect ann activation function string");
    return -1;
}
void ann_check_data( Ptr<TrainData> _data )
{
CV_TRACE_FUNCTION();
@@ -177,6 +193,62 @@ float ann_calc_error( Ptr<StatModel> ann, Ptr<TrainData> _data, map<int, int>& c
return err;
}
// Regression test: trains one MLP per supported activation function on the
// "waveform" dataset and checks that its predictions exactly match those of a
// previously stored reference model (or, with GENERATE_TESTDATA defined,
// regenerates the reference .yml files instead of comparing).
TEST(ML_ANN, ActivationFunction)
{
String folder = string(cvtest::TS::ptr()->get_data_path());
String original_path = folder + "waveform.data";
String dataname = folder + "waveform";
Ptr<TrainData> tdata = TrainData::loadFromCSV(original_path, 0);
ASSERT_FALSE(tdata.empty()) << "Could not find test data file : " << original_path;
// Fix the global RNG state so weight initialization — and therefore the
// trained network — is reproducible across runs.
RNG& rng = theRNG();
rng.state = 1027401484159173092;
tdata->setTrainTestSplit(500);
// Parallel lists: enum value to train with, and the matching suffix of the
// reference .yml file. Keep these two vectors in the same order.
vector<int> activationType;
activationType.push_back(ml::ANN_MLP::IDENTITY);
activationType.push_back(ml::ANN_MLP::SIGMOID_SYM);
activationType.push_back(ml::ANN_MLP::GAUSSIAN);
activationType.push_back(ml::ANN_MLP::RELU);
activationType.push_back(ml::ANN_MLP::LEAKYRELU);
vector<String> activationName;
activationName.push_back("_identity");
activationName.push_back("_sigmoid_sym");
activationName.push_back("_gaussian");
activationName.push_back("_relu");
activationName.push_back("_leakyrelu");
for (size_t i = 0; i < activationType.size(); i++)
{
Ptr<ml::ANN_MLP> x = ml::ANN_MLP::create();
// Topology: input layer sized to the data, two hidden layers of 100
// neurons, output layer sized to the response columns.
Mat_<int> layerSizes(1, 4);
layerSizes(0, 0) = tdata->getNVars();
layerSizes(0, 1) = 100;
layerSizes(0, 2) = 100;
layerSizes(0, 3) = tdata->getResponses().cols;
x->setLayerSizes(layerSizes);
x->setActivationFunction(activationType[i]);
x->setTrainMethod(ml::ANN_MLP::RPROP, 0.01, 0.1);
x->setTermCriteria(TermCriteria(TermCriteria::COUNT, 300, 0.01));
x->train(tdata, ml::ANN_MLP::NO_OUTPUT_SCALE);
ASSERT_TRUE(x->isTrained()) << "Could not train networks with " << activationName[i];
#ifdef GENERATE_TESTDATA
// Regeneration mode: persist the freshly trained model as the new reference.
x->save(dataname + activationName[i] + ".yml");
#else
// Comparison mode: reload the stored reference model and require the two
// networks to predict identically (to within float round-off) on the test split.
Ptr<ml::ANN_MLP> y = Algorithm::load<ANN_MLP>(dataname + activationName[i] + ".yml");
ASSERT_TRUE(y != NULL) << "Could not load " << dataname + activationName[i] + ".yml";
Mat testSamples = tdata->getTestSamples();
Mat rx, ry, dst;
x->predict(testSamples, rx);
y->predict(testSamples, ry);
absdiff(rx, ry, dst);
double minVal, maxVal;
minMaxLoc(dst, &minVal, &maxVal);
ASSERT_TRUE(maxVal<FLT_EPSILON) << "Predict are not equal for " << dataname + activationName[i] + ".yml and " << activationName[i];
#endif
}
}
// 6. dtree
// 7. boost
int str_to_boost_type( String& str )

Loading…
Cancel
Save