add relu as activation option in darknet

add relu option

add relu as activation option in darknet

simplify the setParams if-else ladder

add relu as activation option in darknet

correct activation_param type

format

format

add relu as activation option in darknet

spacing

spacing

add relu as activation option in darknet
pull/18017/head
danielenricocahall 4 years ago
parent f162c08cda
commit 8457e471fd
  1. 24
      modules/dnn/src/darknet/darknet_io.cpp
  2. 5
      modules/dnn/test/test_darknet_importer.cpp

@@ -221,6 +221,10 @@ namespace cv {
{
cv::dnn::LayerParams activation_param;
if (type == "relu")
{
activation_param.type = "ReLU";
}
else if (type == "leaky")
{
activation_param.set<float>("negative_slope", 0.1f);
activation_param.type = "ReLU";
@@ -862,24 +866,8 @@ namespace cv {
}
std::string activation = getParam<std::string>(layer_params, "activation", "linear");
if (activation == "leaky")
{
setParams.setActivation("relu");
}
else if (activation == "swish")
{
setParams.setActivation("swish");
}
else if (activation == "mish")
{
setParams.setActivation("mish");
}
else if (activation == "logistic")
{
setParams.setActivation("logistic");
}
else if (activation != "linear")
CV_Error(cv::Error::StsParseError, "Unsupported activation: " + activation);
if (activation != "linear")
setParams.setActivation(activation);
net->out_channels_vec[layers_counter] = tensor_shape[0];
}

@@ -753,6 +753,11 @@ TEST_P(Test_Darknet_layers, connected)
testDarknetLayer("connected", true);
}
// Regression test for the newly added "relu" activation option in the darknet
// importer: loads the "relu" layer fixture and checks the imported network's
// output against the reference (parameterized over DNN backends/targets).
TEST_P(Test_Darknet_layers, relu)
{
testDarknetLayer("relu");
}
INSTANTIATE_TEST_CASE_P(/**/, Test_Darknet_layers, dnnBackendsAndTargets());
}} // namespace

Loading…
Cancel
Save