Einsum OpenVINO backend

Branch: pull/25291/head
Author: Dmitry Kurtaev (1 year ago)
Parent: f87e1efd2a
Commit: cfa42e4338

Changed files:
  1. modules/dnn/src/layers/einsum_layer.cpp (23 changed lines)
  2. modules/dnn/test/test_onnx_conformance_layer_filter__openvino.inl.hpp (2 changed lines)
  3. modules/dnn/test/test_onnx_importer.cpp (4 changed lines)
@@ -5,6 +5,7 @@
 #include <inttypes.h>
 #include <opencv2/dnn/shape_utils.hpp>
 #include "../precomp.hpp"
+#include "../ie_ngraph.hpp"
 #include "layers_common.hpp"
 #include "cpu_kernels/fast_gemm.hpp"
@@ -304,7 +305,7 @@ public:
     MatShape einsumOutDims; // vector to store output dimensions
     // These hold the equation substrings: left hand side and right hand side
-    String lhs_eq, rhs_eq;
+    String lhs_eq, rhs_eq, equation;
     // Holds tokens from the left hand side of the equation
     std::vector<String> lhs_eq_tokens;
@@ -378,7 +379,7 @@ public:
     LayerEinsumImpl(const LayerParams& params)
     {
         setParamsFrom(params);
-        String equation = params.get<String>("equation");
+        equation = params.get<String>("equation");
         int outputSize = params.get<int>("outputSize");
         numInputs = params.get<int>("inputSize");
@@ -423,6 +424,11 @@ public:
         calculateOutputShape();
     }
+    virtual bool supportBackend(int backendId) CV_OVERRIDE {
+        return backendId == DNN_BACKEND_OPENCV ||
+               backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;
+    }
+
     // getMemoryShapes
     bool getMemoryShapes(const std::vector<MatShape> &inputs,
                          const int requiredOutputs,
@@ -553,6 +559,19 @@ public:
         result = result.reshape(1, einsumOutDims.size(), einsumOutDims.data());
         result.copyTo(outputs[0]);
     } // forward

+#ifdef HAVE_DNN_NGRAPH
+    virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >&,
+                                        const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE {
+        ov::OutputVector inputs(nodes.size());
+        for (size_t i = 0; i < nodes.size(); ++i) {
+            inputs[i] = nodes[i].dynamicCast<InfEngineNgraphNode>()->node;
+        }
+        auto einsum = std::make_shared<ov::op::v7::Einsum>(inputs, equation);
+        return new InfEngineNgraphNode(einsum);
+    }
+#endif  // HAVE_DNN_NGRAPH
+
 }; // EinsumClass

 Mat LayerEinsumImpl::reduceSum(Mat& src, MatShape& reduceAxis)
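
Note: the new initNgraph maps the layer directly onto OpenVINO's Einsum-7 op, forwarding the stored equation string unchanged. For context, a minimal standalone sketch of how that op is constructed with the OpenVINO 2.0 API is shown below; the helper name, shapes, and equation are illustrative assumptions, not part of this patch.

// Sketch only: build an Einsum-7 node and wrap it in an ov::Model.
// Assumes OpenVINO 2022+ headers are available.
#include <openvino/openvino.hpp>
#include <openvino/op/einsum.hpp>

std::shared_ptr<ov::Model> makeEinsumMatMulModel()  // hypothetical helper
{
    auto a = std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{2, 3});
    auto b = std::make_shared<ov::op::v0::Parameter>(ov::element::f32, ov::Shape{3, 4});
    // Same constructor the patch uses: (OutputVector inputs, std::string equation).
    auto einsum = std::make_shared<ov::op::v7::Einsum>(ov::OutputVector{a, b}, "ij,jk->ik");
    return std::make_shared<ov::Model>(ov::OutputVector{einsum}, ov::ParameterVector{a, b});
}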

@@ -597,7 +597,7 @@ CASE(test_dynamicquantizelinear_min_adjusted_expanded)
 CASE(test_edge_pad)
     // no filter
 CASE(test_einsum_batch_diagonal)
-    // no filter
+    SKIP;
 CASE(test_einsum_batch_matmul)
     // no filter
 CASE(test_einsum_inner_prod)

@@ -1471,6 +1471,8 @@ TEST_P(Test_ONNX_layers, Einsum_2D)
 TEST_P(Test_ONNX_layers, Einsum_2D_Ellipses)
 {
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
     testONNXModels("einsum_2d_ellipses", npy, 0, 0, false, false, 2);
 }
@@ -1501,6 +1503,8 @@ TEST_P(Test_ONNX_layers, DISABLED_Einsum_HadamardProduct)
 TEST_P(Test_ONNX_layers, Einsum_Batch_Diagonal)
 {
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
     testONNXModels("einsum_batch_diagonal", npy, 0, 0, false, false, 1);
 }
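
The guards above skip the ellipses and batch-diagonal einsum cases on the nGraph backend, while the remaining einsum tests now exercise the OpenVINO path. For reference, a sketch of how user code would trigger that path once this patch is merged; the model file and input names are hypothetical, and error handling is omitted.

// Sketch only: force the OpenVINO backend for an einsum ONNX model.
#include <opencv2/core.hpp>
#include <opencv2/dnn.hpp>

int main()
{
    cv::dnn::Net net = cv::dnn::readNetFromONNX("einsum_matmul.onnx");   // hypothetical file
    net.setPreferableBackend(cv::dnn::DNN_BACKEND_INFERENCE_ENGINE);     // routes to the nGraph/OpenVINO path
    cv::Mat A(2, 3, CV_32F), B(3, 4, CV_32F);
    cv::randu(A, cv::Scalar(-1), cv::Scalar(1));
    cv::randu(B, cv::Scalar(-1), cv::Scalar(1));
    net.setInput(A, "A");  // hypothetical input names
    net.setInput(B, "B");
    cv::Mat out = net.forward();
    return 0;
}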
