From 8574a757f9190e3e43be73f2a87313c6967cd563 Mon Sep 17 00:00:00 2001
From: Dmitry Kurtaev
Date: Sun, 22 Mar 2020 18:50:15 +0300
Subject: [PATCH] Case sensitive dnn layers types

---
 modules/dnn/src/dnn.cpp                  | 13 +++++--------
 modules/dnn/src/init.cpp                 |  1 +
 modules/dnn/src/onnx/onnx_importer.cpp   | 13 +++++++++++--
 modules/dnn/src/torch/torch_importer.cpp | 11 +++--------
 modules/dnn/test/test_halide_layers.cpp  |  2 +-
 modules/dnn/test/test_onnx_importer.cpp  |  2 +-
 6 files changed, 22 insertions(+), 20 deletions(-)

diff --git a/modules/dnn/src/dnn.cpp b/modules/dnn/src/dnn.cpp
index 0f8dacad35..5538932bb0 100644
--- a/modules/dnn/src/dnn.cpp
+++ b/modules/dnn/src/dnn.cpp
@@ -4626,16 +4626,15 @@ void LayerFactory::registerLayer(const String &type, Constructor constructor)
     CV_TRACE_ARG_VALUE(type, "type", type.c_str());
 
     cv::AutoLock lock(getLayerFactoryMutex());
-    String type_ = type.toLowerCase();
-    LayerFactory_Impl::iterator it = getLayerFactoryImpl().find(type_);
+    LayerFactory_Impl::iterator it = getLayerFactoryImpl().find(type);
 
     if (it != getLayerFactoryImpl().end())
     {
         if (it->second.back() == constructor)
-            CV_Error(cv::Error::StsBadArg, "Layer \"" + type_ + "\" already was registered");
+            CV_Error(cv::Error::StsBadArg, "Layer \"" + type + "\" already was registered");
         it->second.push_back(constructor);
     }
-    getLayerFactoryImpl().insert(std::make_pair(type_, std::vector<Constructor>(1, constructor)));
+    getLayerFactoryImpl().insert(std::make_pair(type, std::vector<Constructor>(1, constructor)));
 }
 
 void LayerFactory::unregisterLayer(const String &type)
@@ -4644,9 +4643,8 @@ void LayerFactory::unregisterLayer(const String &type)
     CV_TRACE_ARG_VALUE(type, "type", type.c_str());
 
     cv::AutoLock lock(getLayerFactoryMutex());
-    String type_ = type.toLowerCase();
 
-    LayerFactory_Impl::iterator it = getLayerFactoryImpl().find(type_);
+    LayerFactory_Impl::iterator it = getLayerFactoryImpl().find(type);
     if (it != getLayerFactoryImpl().end())
     {
         if (it->second.size() > 1)
@@ -4662,8 +4660,7 @@ Ptr<Layer> LayerFactory::createLayerInstance(const String &type, LayerParams& pa
     CV_TRACE_ARG_VALUE(type, "type", type.c_str());
 
     cv::AutoLock lock(getLayerFactoryMutex());
-    String type_ = type.toLowerCase();
-    LayerFactory_Impl::const_iterator it = getLayerFactoryImpl().find(type_);
+    LayerFactory_Impl::const_iterator it = getLayerFactoryImpl().find(type);
 
     if (it != getLayerFactoryImpl().end())
     {
diff --git a/modules/dnn/src/init.cpp b/modules/dnn/src/init.cpp
index 07db56eb0b..df3a716f1d 100644
--- a/modules/dnn/src/init.cpp
+++ b/modules/dnn/src/init.cpp
@@ -95,6 +95,7 @@ void initializeLayerFactory()
     CV_DNN_REGISTER_LAYER_CLASS(LRN,            LRNLayer);
     CV_DNN_REGISTER_LAYER_CLASS(InnerProduct,   InnerProductLayer);
     CV_DNN_REGISTER_LAYER_CLASS(Softmax,        SoftmaxLayer);
+    CV_DNN_REGISTER_LAYER_CLASS(SoftMax,        SoftmaxLayer);  // For compatibility. See https://github.com/opencv/opencv/issues/16877
     CV_DNN_REGISTER_LAYER_CLASS(MVN,            MVNLayer);
 
     CV_DNN_REGISTER_LAYER_CLASS(ReLU,           ReLULayer);
diff --git a/modules/dnn/src/onnx/onnx_importer.cpp b/modules/dnn/src/onnx/onnx_importer.cpp
index 7913fa729d..f40fffbf8d 100644
--- a/modules/dnn/src/onnx/onnx_importer.cpp
+++ b/modules/dnn/src/onnx/onnx_importer.cpp
@@ -615,6 +615,15 @@ void ONNXImporter::populateNet(Net dstNet)
             layerParams.type = "ReLU";
             replaceLayerParam(layerParams, "alpha", "negative_slope");
         }
+        else if (layer_type == "Relu")
+        {
+            layerParams.type = "ReLU";
+        }
+        else if (layer_type == "PRelu")
+        {
+            layerParams.type = "PReLU";
+            layerParams.blobs.push_back(getBlob(node_proto, constBlobs, 1));
+        }
         else if (layer_type == "LRN")
         {
             replaceLayerParam(layerParams, "size", "local_size");
@@ -1133,10 +1142,10 @@ void ONNXImporter::populateNet(Net dstNet)
                 layerParams.set("zoom_factor_x", scales.at(3));
             }
         }
-        else if (layer_type == "LogSoftmax")
+        else if (layer_type == "SoftMax" || layer_type == "LogSoftmax")
         {
             layerParams.type = "Softmax";
-            layerParams.set("log_softmax", true);
+            layerParams.set("log_softmax", layer_type == "LogSoftmax");
         }
         else
         {
diff --git a/modules/dnn/src/torch/torch_importer.cpp b/modules/dnn/src/torch/torch_importer.cpp
index c498a034a3..5dd9e3e290 100644
--- a/modules/dnn/src/torch/torch_importer.cpp
+++ b/modules/dnn/src/torch/torch_importer.cpp
@@ -865,15 +865,10 @@ struct TorchImporter
            layerParams.set("indices_blob_id", tensorParams["indices"].first);
            curModule->modules.push_back(newModule);
        }
-       else if (nnName == "SoftMax")
+       else if (nnName == "LogSoftMax" || nnName == "SoftMax")
        {
-           newModule->apiType = "SoftMax";
-           curModule->modules.push_back(newModule);
-       }
-       else if (nnName == "LogSoftMax")
-       {
-           newModule->apiType = "SoftMax";
-           layerParams.set("log_softmax", true);
+           newModule->apiType = "Softmax";
+           layerParams.set("log_softmax", nnName == "LogSoftMax");
            curModule->modules.push_back(newModule);
        }
        else if (nnName == "SpatialCrossMapLRN")
diff --git a/modules/dnn/test/test_halide_layers.cpp b/modules/dnn/test/test_halide_layers.cpp
index c926f5b7e3..3b43d33c71 100644
--- a/modules/dnn/test/test_halide_layers.cpp
+++ b/modules/dnn/test/test_halide_layers.cpp
@@ -431,7 +431,7 @@ TEST_P(SoftMax, Accuracy)
     Backend backendId = get<0>(get<1>(GetParam()));
     Target targetId = get<1>(get<1>(GetParam()));
     LayerParams lp;
-    lp.type = "SoftMax";
+    lp.type = "Softmax";
     lp.name = "testLayer";
 
     int sz[] = {1, inChannels, 1, 1};
diff --git a/modules/dnn/test/test_onnx_importer.cpp b/modules/dnn/test/test_onnx_importer.cpp
index 40110d2542..f936b08abb 100644
--- a/modules/dnn/test/test_onnx_importer.cpp
+++ b/modules/dnn/test/test_onnx_importer.cpp
@@ -70,7 +70,7 @@ public:
         {
             LayerParams lp;
             Net netSoftmax;
-            netSoftmax.addLayerToPrev("softmaxLayer", "SoftMax", lp);
+            netSoftmax.addLayerToPrev("softmaxLayer", "Softmax", lp);
             netSoftmax.setPreferableBackend(DNN_BACKEND_OPENCV);
 
             netSoftmax.setInput(out);
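Note (not part of the patch): a minimal sketch of the behaviour this change produces,
assuming an OpenCV build that includes it. Net, LayerParams, and addLayerToPrev() are
the public dnn API used in the tests above; the layer name "probs" and the input blob
shape are illustrative.

    #include <opencv2/dnn.hpp>

    int main()
    {
        cv::dnn::LayerParams lp;
        cv::dnn::Net net;

        // "Softmax" is the canonical registered type and resolves as before.
        net.addLayerToPrev("probs", "Softmax", lp);

        // "SoftMax" also resolves, but only because init.cpp registers it as a
        // compatibility alias. Any other casing, e.g. "softmax", is no longer
        // found, since the factory lookup no longer lowercases keys.

        // Running the net forces layer instantiation; with an unregistered
        // casing, createLayerInstance() would fail here and forward() throws.
        net.setInput(cv::Mat::ones(1, 5, CV_32F));
        cv::Mat out = net.forward();
        return 0;
    }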