diff --git a/modules/dnn/src/layers/softmax_layer.cpp b/modules/dnn/src/layers/softmax_layer.cpp
index 75e31006de..d7ffef0bbf 100644
--- a/modules/dnn/src/layers/softmax_layer.cpp
+++ b/modules/dnn/src/layers/softmax_layer.cpp
@@ -92,7 +92,8 @@ public:
     {
         return backendId == DNN_BACKEND_OPENCV ||
                (backendId == DNN_BACKEND_HALIDE && haveHalide() && axisRaw == 1) ||
-               ((backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 || backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH) && haveInfEngine() && !logSoftMax);
+               backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH ||
+               (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && haveInfEngine() && !logSoftMax);
     }
 
 #ifdef HAVE_OPENCL
@@ -330,6 +331,9 @@ public:
         auto& ieInpNode = nodes[0].dynamicCast<InfEngineNgraphNode>()->node;
         int axis = clamp(axisRaw, ieInpNode->get_shape().size());
         auto softmax = std::make_shared<ngraph::op::v1::Softmax>(ieInpNode, axis);
+        if (logSoftMax)  // LogSoftmax is expressed as Log applied to the Softmax node
+            return Ptr<BackendNode>(new InfEngineNgraphNode(std::make_shared<ngraph::op::v0::Log>(softmax)));
+
         return Ptr<BackendNode>(new InfEngineNgraphNode(softmax));
     }
 #endif  // HAVE_DNN_NGRAPH