From 8d6b8b45b6450747146e6c9c7a11e2102903b146 Mon Sep 17 00:00:00 2001
From: Aleksandr Rybnikov
Date: Tue, 1 Aug 2017 16:58:34 +0300
Subject: [PATCH] Added ELU and test for it

---
 .../dnn/include/opencv2/dnn/all_layers.hpp    |  6 +++
 modules/dnn/src/init.cpp                      |  1 +
 modules/dnn/src/layers/elementwise_layers.cpp | 37 +++++++++++++++++++
 modules/dnn/src/tensorflow/tf_importer.cpp    |  7 ++++
 modules/dnn/test/test_layers.cpp              | 18 +++++++++
 5 files changed, 69 insertions(+)

diff --git a/modules/dnn/include/opencv2/dnn/all_layers.hpp b/modules/dnn/include/opencv2/dnn/all_layers.hpp
index 4f01227573..333656a3cf 100644
--- a/modules/dnn/include/opencv2/dnn/all_layers.hpp
+++ b/modules/dnn/include/opencv2/dnn/all_layers.hpp
@@ -349,6 +349,12 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
         static Ptr<ChannelsPReLULayer> create(const LayerParams& params);
     };
 
+    class CV_EXPORTS ELULayer : public ActivationLayer
+    {
+    public:
+        static Ptr<ELULayer> create(const LayerParams &params);
+    };
+
     class CV_EXPORTS TanHLayer : public ActivationLayer
     {
     public:
diff --git a/modules/dnn/src/init.cpp b/modules/dnn/src/init.cpp
index 97ea169f81..32ff69ecec 100644
--- a/modules/dnn/src/init.cpp
+++ b/modules/dnn/src/init.cpp
@@ -96,6 +96,7 @@ void initializeLayerFactory()
     CV_DNN_REGISTER_LAYER_CLASS(ChannelsPReLU, ChannelsPReLULayer);
     CV_DNN_REGISTER_LAYER_CLASS(Sigmoid, SigmoidLayer);
     CV_DNN_REGISTER_LAYER_CLASS(TanH, TanHLayer);
+    CV_DNN_REGISTER_LAYER_CLASS(ELU, ELULayer);
     CV_DNN_REGISTER_LAYER_CLASS(BNLL, BNLLLayer);
     CV_DNN_REGISTER_LAYER_CLASS(AbsVal, AbsLayer);
     CV_DNN_REGISTER_LAYER_CLASS(Power, PowerLayer);
diff --git a/modules/dnn/src/layers/elementwise_layers.cpp b/modules/dnn/src/layers/elementwise_layers.cpp
index 95afd96580..9c929a0098 100644
--- a/modules/dnn/src/layers/elementwise_layers.cpp
+++ b/modules/dnn/src/layers/elementwise_layers.cpp
@@ -302,6 +302,35 @@ struct SigmoidFunctor
     int64 getFLOPSPerElement() const { return 3; }
 };
 
+struct ELUFunctor
+{
+    typedef ELULayer Layer;
+
+    explicit ELUFunctor() {}
+
+    void apply(const float* srcptr, float* dstptr, int len, size_t planeSize, int cn0, int cn1) const
+    {
+        for( int cn = cn0; cn < cn1; cn++, srcptr += planeSize, dstptr += planeSize )
+        {
+            for(int i = 0; i < len; i++ )
+            {
+                float x = srcptr[i];
+                dstptr[i] = x >= 0.f ? x : exp(x) - 1;
+            }
+        }
+    }
+
+#ifdef HAVE_HALIDE
+    void attachHalide(const Halide::Expr& input, Halide::Func& top)
+    {
+        Halide::Var x("x"), y("y"), c("c"), n("n");
+        top(x, y, c, n) = select(input >= 0.0f, input, exp(input) - 1);
+    }
+#endif  // HAVE_HALIDE
+
+    int64 getFLOPSPerElement() const { return 2; }
+};
+
 struct AbsValFunctor
 {
     typedef AbsLayer Layer;
@@ -504,6 +533,14 @@ Ptr<SigmoidLayer> SigmoidLayer::create(const LayerParams& params)
     return l;
 }
 
+Ptr<ELULayer> ELULayer::create(const LayerParams& params)
+{
+    Ptr<ELULayer> l(new ElementWiseLayer<ELUFunctor>(ELUFunctor()));
+    l->setParamsFrom(params);
+
+    return l;
+}
+
 Ptr<AbsLayer> AbsLayer::create(const LayerParams& params)
 {
     Ptr<AbsLayer> l(new ElementWiseLayer<AbsValFunctor>());
diff --git a/modules/dnn/src/tensorflow/tf_importer.cpp b/modules/dnn/src/tensorflow/tf_importer.cpp
index 7797fa7664..603b8367bc 100644
--- a/modules/dnn/src/tensorflow/tf_importer.cpp
+++ b/modules/dnn/src/tensorflow/tf_importer.cpp
@@ -677,6 +677,13 @@ void TFImporter::populateNet(Net dstNet)
 
             connectToAllBlobs(layer_id, dstNet, parsePin(layer.input(0)), id, layer.input_size());
         }
+        else if (type == "Elu")
+        {
+            int id = dstNet.addLayer(name, "ELU", layerParams);
+            layer_id[name] = id;
+
+            connectToAllBlobs(layer_id, dstNet, parsePin(layer.input(0)), id, layer.input_size());
+        }
         else if (type == "MaxPool")
         {
             layerParams.set("pool", "max");
diff --git a/modules/dnn/test/test_layers.cpp b/modules/dnn/test/test_layers.cpp
index 14c984f189..4ca06ef3d9 100644
--- a/modules/dnn/test/test_layers.cpp
+++ b/modules/dnn/test/test_layers.cpp
@@ -268,11 +268,29 @@ static void test_Reshape_Split_Slice_layers()
     normAssert(input, output);
 }
 
+
 TEST(Layer_Test_Reshape_Split_Slice, Accuracy)
 {
     test_Reshape_Split_Slice_layers();
 }
 
+TEST(Layer_Conv_Elu, Accuracy)
+{
+    Net net;
+    {
+        Ptr<Importer> importer = createTensorflowImporter(_tf("layer_elu_model.pb"));
+        ASSERT_TRUE(importer != NULL);
+        importer->populateNet(net);
+    }
+    Mat inp = blobFromNPY(_tf("layer_elu_in.npy"));
+    Mat ref = blobFromNPY(_tf("layer_elu_out.npy"));
+
+    net.setInput(inp, "input");
+    Mat out = net.forward();
+
+    normAssert(ref, out);
+}
+
 class Layer_LSTM_Test : public ::testing::Test
 {
 public:
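
Note (not part of the patch): ELUFunctor::apply() above implements f(x) = x for
x >= 0 and f(x) = exp(x) - 1 otherwise, i.e. the standard ELU with alpha fixed
at 1. A minimal standalone C++ sketch of that scalar math, handy for
cross-checking the functor's loop; the sample values are arbitrary:

    #include <cmath>
    #include <cstdio>

    // Scalar reference for the ELU computed by the patch:
    //   f(x) = x            if x >= 0
    //   f(x) = exp(x) - 1   otherwise (alpha == 1, as in ELUFunctor)
    static float elu(float x)
    {
        return x >= 0.f ? x : std::exp(x) - 1.f;
    }

    int main()
    {
        const float samples[] = { -2.f, -0.5f, 0.f, 0.5f, 2.f };
        for (int i = 0; i < 5; i++)
            std::printf("elu(%+.2f) = %+.6f\n", samples[i], elu(samples[i]));
        return 0;
    }

Once registered in init.cpp, the layer is reachable through the layer factory
under the type name "ELU", and the TensorFlow importer maps graph nodes of type
"Elu" onto it, which is exactly what the Layer_Conv_Elu test exercises.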