diff --git a/modules/dnn/src/layers/einsum_layer.cpp b/modules/dnn/src/layers/einsum_layer.cpp
index 7f8eb47112..4e3eca2378 100644
--- a/modules/dnn/src/layers/einsum_layer.cpp
+++ b/modules/dnn/src/layers/einsum_layer.cpp
@@ -5,6 +5,7 @@
 #include <opencv2/dnn/shape_utils.hpp>
 #include <iostream>
 #include "../precomp.hpp"
+#include "../ie_ngraph.hpp"
 #include "layers_common.hpp"
 #include "cpu_kernels/fast_gemm.hpp"
@@ -304,7 +305,7 @@ public:
     MatShape einsumOutDims; // vector to store output dimentions
 
     // These hold equation subring, left hand side and right it of
-    String lhs_eq, rhs_eq;
+    String lhs_eq, rhs_eq, equation;
 
     // Holds token from left hand side of the equation
     std::vector<String> lhs_eq_tokens;
@@ -378,7 +379,7 @@ public:
     LayerEinsumImpl(const LayerParams& params)
     {
         setParamsFrom(params);
-        String equation = params.get<String>("equation");
+        equation = params.get<String>("equation");
         int outputSize = params.get<int>("outputSize");
         numInputs = params.get<int>("inputSize");
 
@@ -423,6 +424,11 @@ public:
         calculateOutputShape();
     }
 
+    virtual bool supportBackend(int backendId) CV_OVERRIDE {
+        return backendId == DNN_BACKEND_OPENCV ||
+               backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;
+    }
+
     // getMeoryShapes
     bool getMemoryShapes(const std::vector<MatShape> &inputs,
                          const int requiredOutputs,
@@ -553,6 +559,19 @@ public:
         result = result.reshape(1, einsumOutDims.size(), einsumOutDims.data());
         result.copyTo(outputs[0]);
     } // forward
+
+#ifdef HAVE_DNN_NGRAPH
+    virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >&,
+                                        const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE {
+        ov::OutputVector inputs(nodes.size());
+        for (size_t i = 0; i < nodes.size(); ++i) {
+            inputs[i] = nodes[i].dynamicCast<InfEngineNgraphNode>()->node;
+        }
+        auto einsum = std::make_shared<ov::op::v7::Einsum>(inputs, equation);
+        return new InfEngineNgraphNode(einsum);
+    }
+#endif // HAVE_DNN_NGRAPH
+
 }; // EinsumClass
 
 Mat LayerEinsumImpl::reduceSum(Mat& src, MatShape& reduceAxis)
diff --git a/modules/dnn/test/test_onnx_conformance_layer_filter__openvino.inl.hpp b/modules/dnn/test/test_onnx_conformance_layer_filter__openvino.inl.hpp
index 291ea30e92..509cf6007d 100644
--- a/modules/dnn/test/test_onnx_conformance_layer_filter__openvino.inl.hpp
+++ b/modules/dnn/test/test_onnx_conformance_layer_filter__openvino.inl.hpp
@@ -597,7 +597,7 @@ CASE(test_dynamicquantizelinear_min_adjusted_expanded)
 CASE(test_edge_pad)
     // no filter
 CASE(test_einsum_batch_diagonal)
-    // no filter
+    SKIP;
 CASE(test_einsum_batch_matmul)
     // no filter
 CASE(test_einsum_inner_prod)
diff --git a/modules/dnn/test/test_onnx_importer.cpp b/modules/dnn/test/test_onnx_importer.cpp
index 859757d17a..4d475857e5 100644
--- a/modules/dnn/test/test_onnx_importer.cpp
+++ b/modules/dnn/test/test_onnx_importer.cpp
@@ -1471,6 +1471,8 @@ TEST_P(Test_ONNX_layers, Einsum_2D)
 
 TEST_P(Test_ONNX_layers, Einsum_2D_Ellipses)
 {
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
     testONNXModels("einsum_2d_ellipses", npy, 0, 0, false, false, 2);
 }
 
@@ -1501,6 +1503,8 @@ TEST_P(Test_ONNX_layers, DISABLED_Einsum_HadamardProduct)
 
 TEST_P(Test_ONNX_layers, Einsum_Batch_Diagonal)
 {
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
     testONNXModels("einsum_batch_diagonal", npy, 0, 0, false, false, 1);
 }
 