From 96947c30c058fbc56d39f61b41c5cede06c43c55 Mon Sep 17 00:00:00 2001
From: SamFC10
Date: Sat, 20 Feb 2021 22:16:00 +0530
Subject: [PATCH] Added exp layer

backport of commit: 6111935835dfd557c6e5ada81f2acfd8a6ae32f9
partial backport of commit: dd5976162b58cc9f78a3abac42da283c0778162f
---
 .../dnn/include/opencv2/dnn/all_layers.hpp    |   8 ++
 modules/dnn/src/init.cpp                      |   1 +
 modules/dnn/src/layers/elementwise_layers.cpp | 113 ++++++++++++++++++
 modules/dnn/src/opencl/activations.cl         |  11 ++
 modules/dnn/src/tensorflow/tf_importer.cpp    |   2 +-
 modules/dnn/test/test_halide_layers.cpp       |  25 ++++
 modules/dnn/test/test_layers.cpp              |   8 +-
 modules/dnn/test/test_onnx_importer.cpp       |   7 ++
 8 files changed, 173 insertions(+), 2 deletions(-)

diff --git a/modules/dnn/include/opencv2/dnn/all_layers.hpp b/modules/dnn/include/opencv2/dnn/all_layers.hpp
index ffc2568a89..98d7671fdf 100644
--- a/modules/dnn/include/opencv2/dnn/all_layers.hpp
+++ b/modules/dnn/include/opencv2/dnn/all_layers.hpp
@@ -499,6 +499,14 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
         static Ptr<PowerLayer> create(const LayerParams &params);
     };
 
+    class CV_EXPORTS ExpLayer : public ActivationLayer
+    {
+    public:
+        float base, scale, shift;
+
+        static Ptr<ExpLayer> create(const LayerParams &params);
+    };
+
     /* Layers used in semantic segmentation */
 
     class CV_EXPORTS CropLayer : public Layer
diff --git a/modules/dnn/src/init.cpp b/modules/dnn/src/init.cpp
index be4e115793..4c804dccd6 100644
--- a/modules/dnn/src/init.cpp
+++ b/modules/dnn/src/init.cpp
@@ -110,6 +110,7 @@ void initializeLayerFactory()
     CV_DNN_REGISTER_LAYER_CLASS(BNLL,           BNLLLayer);
     CV_DNN_REGISTER_LAYER_CLASS(AbsVal,         AbsLayer);
     CV_DNN_REGISTER_LAYER_CLASS(Power,          PowerLayer);
+    CV_DNN_REGISTER_LAYER_CLASS(Exp,            ExpLayer);
     CV_DNN_REGISTER_LAYER_CLASS(BatchNorm,      BatchNormLayer);
     CV_DNN_REGISTER_LAYER_CLASS(MaxUnpool,      MaxUnpoolLayer);
     CV_DNN_REGISTER_LAYER_CLASS(Dropout,        BlankLayer);
diff --git a/modules/dnn/src/layers/elementwise_layers.cpp b/modules/dnn/src/layers/elementwise_layers.cpp
index d53393c3e0..d47e08886c 100644
--- a/modules/dnn/src/layers/elementwise_layers.cpp
+++ b/modules/dnn/src/layers/elementwise_layers.cpp
@@ -1202,6 +1202,105 @@ struct PowerFunctor : public BaseFunctor
     int64 getFLOPSPerElement() const { return power == 1 ? 2 : 10; }
 };
 
+struct ExpFunctor : public BaseFunctor
+{
+    typedef ExpLayer Layer;
+    float base, scale, shift;
+    float normScale, normShift;
+
+    ExpFunctor(float base_ = -1.f, float scale_ = 1.f, float shift_ = 0.f)
+        : base(base_), scale(scale_), shift(shift_)
+    {
+        // For base > 0 :
+        // y = base^(scale * input + shift)
+        // ln(y) = ln(base)*(scale * input + shift)
+        // y = exp((ln(base)*scale) * input + (ln(base)*shift))
+        // y = exp(normalized_scale * input + normalized_shift)
+        CV_Check(base, base == -1.f || base > 0.f, "Unsupported 'base' value");
+        const float ln_base = (base == -1.f) ? 1.f : log(base);
+        normScale = scale * ln_base;
+        normShift = shift * ln_base;
+    }
+
+    bool supportBackend(int backendId, int targetId)
+    {
+        return backendId == DNN_BACKEND_OPENCV ||
+               backendId == DNN_BACKEND_HALIDE || backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;
+    }
+
+    void apply(const float* srcptr, float* dstptr, int len, size_t planeSize, int cn0, int cn1) const
+    {
+        float a = normScale, b = normShift;
+        for( int cn = cn0; cn < cn1; cn++, srcptr += planeSize, dstptr += planeSize )
+        {
+            for( int i = 0; i < len; i++ )
+            {
+                float x = srcptr[i];
+                dstptr[i] = exp(a*x + b);
+            }
+        }
+    }
+
+#ifdef HAVE_OPENCL
+    bool applyOCL(InputArrayOfArrays inps, OutputArrayOfArrays outs, OutputArrayOfArrays internals)
+    {
+        std::vector<UMat> inputs;
+        std::vector<UMat> outputs;
+
+        inps.getUMatVector(inputs);
+        outs.getUMatVector(outputs);
+        String buildopt = oclGetTMacro(inputs[0]);
+
+        for (size_t i = 0; i < inputs.size(); i++)
+        {
+            UMat& src = inputs[i];
+            UMat& dst = outputs[i];
+
+            ocl::Kernel kernel("ExpForward", ocl::dnn::activations_oclsrc, buildopt);
+            kernel.set(0, (int)src.total());
+            kernel.set(1, ocl::KernelArg::PtrReadOnly(src));
+            kernel.set(2, ocl::KernelArg::PtrWriteOnly(dst));
+            kernel.set(3, (float)normScale);
+            kernel.set(4, (float)normShift);
+
+            size_t gSize = src.total();
+            CV_Assert(kernel.run(1, &gSize, NULL, false));
+        }
+        return true;
+    }
+#endif
+
+#ifdef HAVE_HALIDE
+    void attachHalide(const Halide::Expr& input, Halide::Func& top)
+    {
+        Halide::Var x("x"), y("y"), c("c"), n("n");
+        top(x, y, c, n) = exp(normScale * input + normShift);
+    }
+#endif  // HAVE_HALIDE
+
+#ifdef HAVE_DNN_IE_NN_BUILDER_2019
+    InferenceEngine::Builder::Layer initInfEngineBuilderAPI()
+    {
+        CV_Error(Error::StsNotImplemented, "");
+    }
+#endif  // HAVE_DNN_IE_NN_BUILDER_2019
+
+#ifdef HAVE_DNN_NGRAPH
+    std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
+    {
+        auto scale_node = std::make_shared<ngraph::op::Constant>(ngraph::element::f32,
+                                                                 ngraph::Shape{1}, &normScale);
+        auto shift_node = std::make_shared<ngraph::op::Constant>(ngraph::element::f32,
+                                                                 ngraph::Shape{1}, &normShift);
+        auto mul = std::make_shared<ngraph::op::v1::Multiply>(scale_node, node, ngraph::op::AutoBroadcastType::NUMPY);
+        auto scale_shift = std::make_shared<ngraph::op::v1::Add>(mul, shift_node, ngraph::op::AutoBroadcastType::NUMPY);
+        return std::make_shared<ngraph::op::Exp>(scale_shift);
+    }
+#endif  // HAVE_DNN_NGRAPH
+
+    int64 getFLOPSPerElement() const { return 3; }
+};
+
 struct ChannelsPReLUFunctor : public BaseFunctor
 {
     typedef ChannelsPReLULayer Layer;
@@ -1419,6 +1518,20 @@ Ptr<PowerLayer> PowerLayer::create(const LayerParams& params)
     return l;
 }
 
+Ptr<ExpLayer> ExpLayer::create(const LayerParams& params)
+{
+    float base = params.get<float>("base", -1.0f);
+    float scale = params.get<float>("scale", 1.0f);
+    float shift = params.get<float>("shift", 0.0f);
+    Ptr<ExpLayer> l(new ElementWiseLayer<ExpFunctor>(ExpFunctor(base, scale, shift)));
+    l->setParamsFrom(params);
+    l->base = base;
+    l->scale = scale;
+    l->shift = shift;
+
+    return l;
+}
+
 Ptr<ChannelsPReLULayer> ChannelsPReLULayer::create(const LayerParams& params)
 {
     CV_Assert(params.blobs.size() == 1);
diff --git a/modules/dnn/src/opencl/activations.cl b/modules/dnn/src/opencl/activations.cl
index b900e6add6..68f0dd7268 100644
--- a/modules/dnn/src/opencl/activations.cl
+++ b/modules/dnn/src/opencl/activations.cl
@@ -140,3 +140,14 @@ __kernel void ELUForward(const int n, __global const T* in, __global T* out)
         out[index] = (src >= 0.f) ? src : exp(src) - 1;
     }
 }
+
+__kernel void ExpForward(const int n, __global const T* in, __global T* out,
+                         const KERNEL_ARG_DTYPE normScale,
+                         const KERNEL_ARG_DTYPE normShift)
+{
+    int index = get_global_id(0);
+    if (index < n)
+    {
+        out[index] = exp(normShift + normScale * in[index]);
+    }
+}
diff --git a/modules/dnn/src/tensorflow/tf_importer.cpp b/modules/dnn/src/tensorflow/tf_importer.cpp
index cbe6418b77..c03ac8a943 100644
--- a/modules/dnn/src/tensorflow/tf_importer.cpp
+++ b/modules/dnn/src/tensorflow/tf_importer.cpp
@@ -2425,7 +2425,7 @@ void TFImporter::parseNode(const tensorflow::NodeDef& layer_)
         connectToAllBlobs(layer_id, dstNet, parsePin(layer.input(0)), id, num_inputs);
     }
     else if (type == "Abs" || type == "Tanh" || type == "Sigmoid" ||
-             type == "Relu" || type == "Elu" ||
+             type == "Relu" || type == "Elu" || type == "Exp" ||
              type == "Identity" || type == "Relu6")
     {
         CV_CheckGT(num_inputs, 0, "");
diff --git a/modules/dnn/test/test_halide_layers.cpp b/modules/dnn/test/test_halide_layers.cpp
index 295d93071e..a1b13b7f63 100644
--- a/modules/dnn/test/test_halide_layers.cpp
+++ b/modules/dnn/test/test_halide_layers.cpp
@@ -621,6 +621,31 @@ INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, Power, Combine(
     dnnBackendsAndTargetsWithHalide()
 ));
 
+typedef TestWithParam<tuple<Vec3f, tuple<Backend, Target> > > Exp;
+TEST_P(Exp, Accuracy)
+{
+    float base = get<0>(GetParam())[0];
+    float scale = get<0>(GetParam())[1];
+    float shift = get<0>(GetParam())[2];
+    Backend backendId = get<0>(get<1>(GetParam()));
+    Target targetId = get<1>(get<1>(GetParam()));
+
+    LayerParams lp;
+    lp.set("base", base);
+    lp.set("scale", scale);
+    lp.set("shift", shift);
+    lp.type = "Exp";
+    lp.name = "testLayer";
+    testInPlaceActivation(lp, backendId, targetId);
+}
+
+INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, Exp, Combine(
+/*base, scale, shift*/ Values(Vec3f(0.9f, -1.0f, 1.1f), Vec3f(0.9f, 1.1f, -1.0f),
+                              Vec3f(-1.0f, 0.9f, 1.1f), Vec3f(-1.0f, 1.1f, 0.9f),
+                              Vec3f(1.1f, 0.9f, -1.0f), Vec3f(1.1f, -1.0f, 0.9f)),
+               dnnBackendsAndTargetsWithHalide()
+));
+
 TEST_P(Test_Halide_layers, ChannelsPReLU)
 {
     LayerParams lp;
diff --git a/modules/dnn/test/test_layers.cpp b/modules/dnn/test/test_layers.cpp
index 327f3e9abd..73491fcf25 100644
--- a/modules/dnn/test/test_layers.cpp
+++ b/modules/dnn/test/test_layers.cpp
@@ -2107,6 +2107,12 @@ public:
             randu(scales, -1.0f, 1.0f);
             activationParams.blobs.push_back(scales);
         }
+        else if (activationParams.type == "Exp")
+        {
+            activationParams.set("base", -1.0f);
+            activationParams.set("scale", 0.3f);
+            activationParams.set("shift", 0.6f);
+        }
     }
 
     static void makeDefaultTestEltwiseLayer(LayerParams& eltwiseParams, const std::string& op, bool withCoefficients)
@@ -2178,7 +2184,7 @@ public:
     static testing::internal::ParamGenerator<std::string> activationLayersList()
     {
         // TODO: automate list generation
-        return Values("ReLU", "ReLU6", "ChannelsPReLU", "TanH", "Swish", "Mish", "Sigmoid", "ELU", "AbsVal", "BNLL", "Power");
+        return Values("ReLU", "ReLU6", "ChannelsPReLU", "TanH", "Swish", "Mish", "Sigmoid", "ELU", "AbsVal", "BNLL", "Power", "Exp");
     }
 
     static testing::internal::ParamGenerator<tuple<Backend, Target> > dnnBackendsAndTargetsForFusionTests()
diff --git a/modules/dnn/test/test_onnx_importer.cpp b/modules/dnn/test/test_onnx_importer.cpp
index 9a1f28cdea..eb63aa085a 100644
--- a/modules/dnn/test/test_onnx_importer.cpp
+++ b/modules/dnn/test/test_onnx_importer.cpp
@@ -307,6 +307,13 @@ TEST_P(Test_ONNX_layers, Power)
     testONNXModels("pow2", npy, 0, 0, false, false);
 }
 
+TEST_P(Test_ONNX_layers, Exp)
+{
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
+    testONNXModels("exp");
+}
+
 TEST_P(Test_ONNX_layers, Concatenation)
 {
     if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
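
Reviewer note, not part of the patch: below is a minimal sketch of how the new layer
could be exercised from user code once this is merged, following the same one-layer
Net pattern the tests in test_halide_layers.cpp use. The layer name "exp1" and the
sample values are illustrative only; per ExpFunctor above, the default base = -1
selects the natural base e, so the layer then computes exp(scale*x + shift).

    #include <opencv2/dnn.hpp>
    #include <iostream>

    int main()
    {
        // y = 2^(0.5*x + 1): base = 2, scale = 0.5, shift = 1
        cv::dnn::LayerParams lp;
        lp.set("base", 2.0f);
        lp.set("scale", 0.5f);
        lp.set("shift", 1.0f);
        lp.type = "Exp";
        lp.name = "exp1";

        // Single-layer network: the Exp layer is connected straight to the input.
        cv::dnn::Net net;
        net.addLayerToPrev(lp.name, lp.type, lp);
        net.setPreferableBackend(cv::dnn::DNN_BACKEND_OPENCV);

        // 1x1x1x3 NCHW blob with inputs 0, 2, 4 -> expected outputs 2, 4, 8.
        int sz[] = {1, 1, 1, 3};
        cv::Mat input(4, sz, CV_32F);
        input.ptr<float>()[0] = 0.f;
        input.ptr<float>()[1] = 2.f;
        input.ptr<float>()[2] = 4.f;

        net.setInput(input);
        cv::Mat out = net.forward();
        for (int i = 0; i < 3; i++)
            std::cout << out.ptr<float>()[i] << " ";  // prints: 2 4 8
        std::cout << std::endl;
        return 0;
    }

The same "base"/"scale"/"shift" keys are what the importers populate: the TF path
maps a bare tf.exp to the defaults, and the ONNX test above exercises the "exp" model.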