Merge pull request #16694 from alalek:dnn_disable_nn_builder_api

Alexander Alekhin 5 years ago
commit 6271192a32
32 changed files:

  1. modules/dnn/CMakeLists.txt (6 changes)
  2. modules/dnn/src/dnn.cpp (38 changes)
  3. modules/dnn/src/ie_ngraph.cpp (144 changes)
  4. modules/dnn/src/layers/batch_norm_layer.cpp (4 changes)
  5. modules/dnn/src/layers/blank_layer.cpp (4 changes)
  6. modules/dnn/src/layers/concat_layer.cpp (4 changes)
  7. modules/dnn/src/layers/const_layer.cpp (6 changes)
  8. modules/dnn/src/layers/convolution_layer.cpp (13 changes)
  9. modules/dnn/src/layers/detection_output_layer.cpp (4 changes)
  10. modules/dnn/src/layers/elementwise_layers.cpp (52 changes)
  11. modules/dnn/src/layers/eltwise_layer.cpp (4 changes)
  12. modules/dnn/src/layers/flatten_layer.cpp (5 changes)
  13. modules/dnn/src/layers/fully_connected_layer.cpp (4 changes)
  14. modules/dnn/src/layers/lrn_layer.cpp (4 changes)
  15. modules/dnn/src/layers/mvn_layer.cpp (15 changes)
  16. modules/dnn/src/layers/normalize_bbox_layer.cpp (4 changes)
  17. modules/dnn/src/layers/padding_layer.cpp (2 changes)
  18. modules/dnn/src/layers/permute_layer.cpp (4 changes)
  19. modules/dnn/src/layers/pooling_layer.cpp (18 changes)
  20. modules/dnn/src/layers/prior_box_layer.cpp (4 changes)
  21. modules/dnn/src/layers/proposal_layer.cpp (4 changes)
  22. modules/dnn/src/layers/reorg_layer.cpp (4 changes)
  23. modules/dnn/src/layers/reshape_layer.cpp (4 changes)
  24. modules/dnn/src/layers/resize_layer.cpp (8 changes)
  25. modules/dnn/src/layers/scale_layer.cpp (4 changes)
  26. modules/dnn/src/layers/slice_layer.cpp (19 changes)
  27. modules/dnn/src/layers/softmax_layer.cpp (4 changes)
  28. modules/dnn/src/op_inf_engine.cpp (70 changes)
  29. modules/dnn/src/op_inf_engine.hpp (16 changes)
  30. modules/dnn/test/test_common.impl.hpp (5 changes)
  31. modules/dnn/test/test_misc.cpp (4 changes)
  32. modules/ts/src/ts_tags.cpp (2 changes)

@@ -113,9 +113,15 @@ endif()
set(dnn_runtime_libs "")
if(INF_ENGINE_TARGET)
ocv_option(OPENCV_DNN_IE_NN_BUILDER_2019 "Build with Inference Engine NN Builder API support" ON)
if(OPENCV_DNN_IE_NN_BUILDER_2019)
message(STATUS "DNN: Enabling Inference Engine NN Builder API support")
add_definitions(-DHAVE_DNN_IE_NN_BUILDER_2019=1)
endif()
list(APPEND dnn_runtime_libs ${INF_ENGINE_TARGET})
endif()
if(HAVE_NGRAPH)
message(STATUS "DNN: Enabling Inference Engine nGraph API support")
add_definitions(-DHAVE_DNN_NGRAPH)
list(APPEND dnn_runtime_libs ngraph::ngraph)
endif()
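
The CMake hunk above introduces OPENCV_DNN_IE_NN_BUILDER_2019 (default ON), so the deprecated NN Builder integration can be compiled out while the nGraph path stays available. A minimal sketch of how an application can observe the effect at runtime, assuming only a build with the dnn module (getAvailableBackends() reports the registered pairs):

#include <iostream>
#include <opencv2/dnn.hpp>

int main()
{
    // Pairs with DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 show up only
    // when the build had OPENCV_DNN_IE_NN_BUILDER_2019=ON; nGraph pairs
    // require HAVE_NGRAPH.
    for (const auto& bt : cv::dnn::getAvailableBackends())
        std::cout << "backend=" << bt.first << " target=" << bt.second << std::endl;
    return 0;
}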

@@ -185,30 +185,40 @@ private:
#ifdef HAVE_INF_ENGINE
if (checkIETarget(DNN_TARGET_CPU)) {
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019, DNN_TARGET_CPU));
#endif
#ifdef HAVE_DNN_NGRAPH
backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, DNN_TARGET_CPU));
#endif
}
if (checkIETarget(DNN_TARGET_MYRIAD)) {
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019, DNN_TARGET_MYRIAD));
#endif
#ifdef HAVE_DNN_NGRAPH
backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, DNN_TARGET_MYRIAD));
#endif
}
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
if (checkIETarget(DNN_TARGET_FPGA))
backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019, DNN_TARGET_FPGA));
#endif
#ifdef HAVE_OPENCL
if (cv::ocl::useOpenCL() && ocl::Device::getDefault().isIntel())
{
if (checkIETarget(DNN_TARGET_OPENCL)) {
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019, DNN_TARGET_OPENCL));
#endif
#ifdef HAVE_DNN_NGRAPH
backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, DNN_TARGET_OPENCL));
#endif
}
if (checkIETarget(DNN_TARGET_OPENCL_FP16)) {
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019, DNN_TARGET_OPENCL_FP16));
#endif
#ifdef HAVE_DNN_NGRAPH
backends.push_back(std::make_pair(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH, DNN_TARGET_OPENCL_FP16));
#endif
@@ -797,7 +807,7 @@ struct DataLayer : public Layer
}
}
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >&) CV_OVERRIDE
{
CV_CheckEQ(inputsData.size(), (size_t)1, "");
@@ -829,7 +839,7 @@ struct DataLayer : public Layer
addConstantData("biases", biases, ieLayer);
return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
std::vector<String> outNames;
std::vector<MatShape> shapes;
@@ -1087,10 +1097,10 @@ static Ptr<BackendWrapper> wrapMat(int backendId, int targetId, cv::Mat& m)
}
else if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
{
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
return Ptr<BackendWrapper>(new InfEngineBackendWrapper(targetId, m));
#else
CV_Error(Error::StsNotImplemented, "This OpenCV version is built without Inference Engine API support");
CV_Error(Error::StsNotImplemented, "This OpenCV version is built without Inference Engine NN Builder API support");
#endif
}
else if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
@@ -1587,10 +1597,10 @@ struct Net::Impl
initHalideBackend();
else if (preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
{
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
initInfEngineBackend(blobsToKeep_);
#else
CV_Assert(false && "This OpenCV version is built without Inference Engine API support");
CV_Assert(false && "This OpenCV version is built without Inference Engine NN Builder API support");
#endif
}
else if (preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
@@ -1664,7 +1674,7 @@ struct Net::Impl
}
}
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
// Before launching Inference Engine graph we need to specify output blobs.
// This function requests output blobs based on inputs references of
// layers from default backend or layers from different graphs.
@@ -1969,7 +1979,7 @@ struct Net::Impl
}
}
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
@@ -3377,8 +3387,12 @@ struct Net::Impl
CV_Assert(preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 || preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
if (preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019) {
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
Ptr<InfEngineBackendWrapper> wrapper = ld.outputBlobsWrappers[pin.oid].dynamicCast<InfEngineBackendWrapper>();
return std::move(wrapper->futureMat);
#else
CV_Error(Error::StsNotImplemented, "This OpenCV version is built without Inference Engine NN Builder API support");
#endif
}
else if (preferableBackend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
{
@@ -3470,9 +3484,13 @@ Net Net::Impl::createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNe
else
#endif
{
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
Ptr<InfEngineBackendNode> backendNodeNN(new InfEngineBackendNode(InferenceEngine::Builder::Layer("")));
backendNodeNN->net = Ptr<InfEngineBackendNet>(new InfEngineBackendNet(ieNet));
backendNode = backendNodeNN;
#else
CV_Error(Error::StsNotImplemented, "This OpenCV version is built without Inference Engine NN Builder API support");
#endif
}
for (auto& it : ieNet.getOutputsInfo())
@@ -3499,6 +3517,7 @@ Net Net::Impl::createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNe
else
#endif
{
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
Ptr<Layer> cvLayer(new InfEngineBackendLayer(ieNet));
InferenceEngine::CNNLayerPtr ieLayer = ieNet.getLayerByName(it.first.c_str());
@@ -3509,6 +3528,9 @@ Net Net::Impl::createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNe
ld.layerInstance = cvLayer;
ld.backendNodes[DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019] = backendNode;
#else
CV_Error(Error::StsNotImplemented, "This OpenCV version is built without Inference Engine NN Builder API support");
#endif
}
for (int i = 0; i < inputsNames.size(); ++i)
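
The dnn.cpp hunks above follow one rule: every NN Builder entry point is guarded by HAVE_DNN_IE_NN_BUILDER_2019 instead of the broad HAVE_INF_ENGINE, and fails loudly with StsNotImplemented when compiled out. On such builds callers should request the nGraph flavour explicitly; a hedged sketch (the model file names are placeholders):

#include <opencv2/dnn.hpp>

int main()
{
    cv::dnn::Net net = cv::dnn::readNet("model.xml", "model.bin");
    // Requesting DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 here on a
    // build with OPENCV_DNN_IE_NN_BUILDER_2019=OFF would hit the
    // StsNotImplemented errors added above; nGraph is the supported path.
    net.setPreferableBackend(cv::dnn::DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
    net.setPreferableTarget(cv::dnn::DNN_TARGET_CPU);
    return 0;
}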

@@ -25,8 +25,8 @@ namespace cv { namespace dnn {
// For networks with input layer which has an empty name, IE generates a name id[some_number].
// OpenCV lets users use an empty input name and to prevent unexpected naming,
// we can use some predefined name.
static std::string kDefaultInpLayerName = "empty_inp_layer_name";
static constexpr const char* kOpenCVLayersType = "OpenCVLayer";
static std::string kDefaultInpLayerName = "opencv_ngraph_empty_inp_layer_name";
static constexpr const char* kOpenCVLayersType = "opencv_ngraph_layer";
static std::string shapesToStr(const std::vector<Mat>& mats)
{
@@ -77,7 +77,6 @@ public:
return type_info;
}
NgraphCustomOp() {};
NgraphCustomOp(const ngraph::NodeVector& inputs,
const std::map<std::string, InferenceEngine::Parameter>& params = {}):
Op(inputs), params(params)
@@ -85,6 +84,11 @@ public:
constructor_validate_and_infer_types();
}
~NgraphCustomOp()
{
// nothing
}
void validate_and_infer_types() override
{
std::vector<std::vector<size_t> > shapes;
@@ -116,6 +120,136 @@ private:
std::map<std::string, InferenceEngine::Parameter> params;
};
class InfEngineNgraphCustomLayer : public InferenceEngine::ILayerExecImpl
{
public:
explicit InfEngineNgraphCustomLayer(const InferenceEngine::CNNLayer& layer) : cnnLayer(layer)
{
std::istringstream iss(layer.GetParamAsString("impl"));
size_t ptr;
iss >> ptr;
cvLayer = (Layer*)ptr;
std::vector<std::vector<size_t> > shapes;
strToShapes(layer.GetParamAsString("internals"), shapes);
internals.resize(shapes.size());
for (int i = 0; i < shapes.size(); ++i)
internals[i].create(std::vector<int>(shapes[i].begin(), shapes[i].end()), CV_32F);
}
~InfEngineNgraphCustomLayer()
{
// nothing
}
virtual InferenceEngine::StatusCode execute(std::vector<InferenceEngine::Blob::Ptr>& inputs,
std::vector<InferenceEngine::Blob::Ptr>& outputs,
InferenceEngine::ResponseDesc *resp) noexcept
{
std::vector<Mat> inpMats, outMats;
infEngineBlobsToMats(inputs, inpMats);
infEngineBlobsToMats(outputs, outMats);
try
{
cvLayer->forward(inpMats, outMats, internals);
return InferenceEngine::StatusCode::OK;
}
catch (...)
{
return InferenceEngine::StatusCode::GENERAL_ERROR;
}
}
virtual InferenceEngine::StatusCode
getSupportedConfigurations(std::vector<InferenceEngine::LayerConfig>& conf,
InferenceEngine::ResponseDesc* resp) noexcept
{
std::vector<InferenceEngine::DataConfig> inDataConfig;
std::vector<InferenceEngine::DataConfig> outDataConfig;
for (auto& it : cnnLayer.insData)
{
InferenceEngine::DataConfig conf;
conf.desc = it.lock()->getTensorDesc();
inDataConfig.push_back(conf);
}
for (auto& it : cnnLayer.outData)
{
InferenceEngine::DataConfig conf;
conf.desc = it->getTensorDesc();
outDataConfig.push_back(conf);
}
InferenceEngine::LayerConfig layerConfig;
layerConfig.inConfs = inDataConfig;
layerConfig.outConfs = outDataConfig;
conf.push_back(layerConfig);
return InferenceEngine::StatusCode::OK;
}
InferenceEngine::StatusCode init(InferenceEngine::LayerConfig& config,
InferenceEngine::ResponseDesc *resp) noexcept
{
return InferenceEngine::StatusCode::OK;
}
private:
InferenceEngine::CNNLayer cnnLayer;
dnn::Layer* cvLayer;
std::vector<Mat> internals;
};
class InfEngineNgraphCustomLayerFactory : public InferenceEngine::ILayerImplFactory {
public:
explicit InfEngineNgraphCustomLayerFactory(const InferenceEngine::CNNLayer* layer) : cnnLayer(*layer)
{
// nothing
}
InferenceEngine::StatusCode
getImplementations(std::vector<InferenceEngine::ILayerImpl::Ptr>& impls,
InferenceEngine::ResponseDesc* resp) noexcept override
{
impls.push_back(std::make_shared<InfEngineNgraphCustomLayer>(cnnLayer));
return InferenceEngine::StatusCode::OK;
}
private:
InferenceEngine::CNNLayer cnnLayer;
};
class InfEngineNgraphExtension : public InferenceEngine::IExtension
{
public:
virtual void SetLogCallback(InferenceEngine::IErrorListener&) noexcept {}
virtual void Unload() noexcept {}
virtual void Release() noexcept {}
virtual void GetVersion(const InferenceEngine::Version*&) const noexcept {}
virtual InferenceEngine::StatusCode getPrimitiveTypes(char**&, unsigned int&,
InferenceEngine::ResponseDesc*) noexcept
{
return InferenceEngine::StatusCode::OK;
}
InferenceEngine::StatusCode getFactoryFor(InferenceEngine::ILayerImplFactory*& factory,
const InferenceEngine::CNNLayer* cnnLayer,
InferenceEngine::ResponseDesc* resp) noexcept
{
if (cnnLayer->type != kOpenCVLayersType)
return InferenceEngine::StatusCode::NOT_IMPLEMENTED;
factory = new InfEngineNgraphCustomLayerFactory(cnnLayer);
return InferenceEngine::StatusCode::OK;
}
};
InfEngineNgraphNode::InfEngineNgraphNode(std::shared_ptr<ngraph::Node>&& _node)
: BackendNode(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH), node(std::move(_node)) {}
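
The classes added above (InfEngineNgraphCustomLayer, InfEngineNgraphCustomLayerFactory, InfEngineNgraphExtension) give the nGraph path its own fallback mechanism for layers Inference Engine cannot run: the cv::dnn::Layer pointer travels through the CNNLayer's "impl" string parameter and is parsed back before forward() runs on Mats aliasing the IE blobs. A standalone sketch of just that pointer round-trip, with a stand-in Layer type:

#include <cassert>
#include <sstream>
#include <string>

struct Layer { int id; };  // stand-in for cv::dnn::Layer

int main()
{
    Layer layer = {42};

    // Producer side: store the pointer as a decimal string, as done when
    // the custom op's "impl" parameter is filled in.
    std::ostringstream oss;
    oss << (size_t)&layer;
    std::string implParam = oss.str();

    // Consumer side: recover the pointer, mirroring the
    // InfEngineNgraphCustomLayer constructor's GetParamAsString("impl").
    std::istringstream iss(implParam);
    size_t ptr = 0;
    iss >> ptr;
    Layer* recovered = (Layer*)ptr;

    assert(recovered->id == 42);
    return 0;
}
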
@@ -423,11 +557,11 @@ void InfEngineNgraphNet::initPlugin(InferenceEngine::CNNNetwork& net)
// OpenCV fallbacks as extensions.
try
{
ie.AddExtension(std::make_shared<InfEngineExtension>(), "CPU");
ie.AddExtension(std::make_shared<InfEngineNgraphExtension>(), "CPU");
}
catch(const std::exception& e)
{
CV_LOG_INFO(NULL, "DNN-IE: Can't register OpenCV custom layers extension: " << e.what());
CV_LOG_INFO(NULL, "DNN-IE: Can't register OpenCV custom layers nGraph extension: " << e.what());
}
#ifndef _WIN32
// Limit the number of CPU threads.

@@ -373,7 +373,7 @@ public:
}
#endif // HAVE_HALIDE
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >&) CV_OVERRIDE
{
InferenceEngine::Builder::Layer ieLayer = InferenceEngine::Builder::ScaleShiftLayer(name);
@@ -382,7 +382,7 @@ public:
addConstantData("biases", wrapToInfEngineBlob(bias_, {numChannels}, InferenceEngine::Layout::C), ieLayer);
return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inputs, const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
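
batch_norm_layer.cpp shows the change that repeats through the remaining layer files: initInfEngine() and its closing #endif comment move from HAVE_INF_ENGINE to the narrower HAVE_DNN_IE_NN_BUILDER_2019, so Builder-API code is compiled only when that API actually exists. A minimal compilable illustration of the guard pattern (the class and method here are illustrative, not OpenCV code):

#include <memory>

struct BackendNode { virtual ~BackendNode() {} };

struct MyLayer  // hypothetical layer class
{
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
    // Compiled only when the NN Builder API exists; on other builds the
    // dispatch code fails earlier with StsNotImplemented.
    std::shared_ptr<BackendNode> initInfEngine()
    {
        return std::make_shared<BackendNode>();
    }
#endif  // HAVE_DNN_IE_NN_BUILDER_2019
};

int main()
{
    MyLayer layer;
    (void)layer;
    return 0;
}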

@@ -127,7 +127,7 @@ public:
}
#endif
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >& inputs) CV_OVERRIDE
{
InferenceEngine::DataPtr input = infEngineDataNode(inputs[0]);
@@ -150,7 +150,7 @@ public:
ieLayer.setOutputPorts(std::vector<InferenceEngine::Port>(1));
return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH

@@ -335,7 +335,7 @@ public:
return Ptr<BackendNode>();
}
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >& inputs) CV_OVERRIDE
{
InferenceEngine::DataPtr input = infEngineDataNode(inputs[0]);
@@ -345,7 +345,7 @@ public:
ieLayer.setInputPorts(std::vector<InferenceEngine::Port>(inputs.size()));
return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH

@@ -75,14 +75,14 @@ public:
blobs[0].copyTo(outputs[0]);
}
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >&) CV_OVERRIDE
{
InferenceEngine::Builder::ConstLayer ieLayer(name);
ieLayer.setData(wrapToInfEngineBlob(blobs[0]));
return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inputs,
@@ -93,7 +93,7 @@ public:
blobs[0].data);
return Ptr<BackendNode>(new InfEngineNgraphNode(node));
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_CUDA
Ptr<BackendNode> initCUDA(

@@ -622,7 +622,7 @@ public:
return Ptr<BackendNode>();
}
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> > &inputs) CV_OVERRIDE
{
InferenceEngine::DataPtr input = infEngineDataNode(inputs[0]);
@@ -683,7 +683,7 @@ public:
return Ptr<BackendNode>(new InfEngineBackendNode(l));
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> > &inputs,
@@ -1558,6 +1558,7 @@ public:
return group == 1;
}
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
{
if (kernel_size.size() == 3 && preferableTarget != DNN_TARGET_CPU) {
@@ -1601,10 +1602,12 @@ public:
return std::accumulate(dilations.begin(), dilations.end(), 1, std::multiplies<size_t>()) == 1;
return true;
}
else
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#endif // HAVE_INF_ENGINE
{
return backendId == DNN_BACKEND_CUDA ||
(kernel_size.size() == 2 && (backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_HALIDE));
}
}
bool getMemoryShapes(const std::vector<MatShape> &inputs,
@@ -2244,7 +2247,7 @@ public:
return Ptr<BackendNode>();
}
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> > &) CV_OVERRIDE
{
InferenceEngine::Layout layout = blobs[0].dims == 5? InferenceEngine::Layout::NCDHW :
@@ -2299,7 +2302,7 @@ public:
addConstantData("biases", wrapToInfEngineBlob(biasesMat, {(size_t)numOutput}, InferenceEngine::Layout::C), l);
return Ptr<BackendNode>(new InfEngineBackendNode(l));
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH

@@ -924,7 +924,7 @@ public:
}
}
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >&) CV_OVERRIDE
{
InferenceEngine::Builder::DetectionOutputLayer ieLayer(name);
@@ -946,7 +946,7 @@ public:
return Ptr<BackendNode>(new InfEngineBackendNode(l));
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH

@@ -163,14 +163,14 @@ public:
return Ptr<BackendNode>();
}
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >&) CV_OVERRIDE
{
InferenceEngine::Builder::Layer ieLayer = func.initInfEngineBuilderAPI();
ieLayer.setName(this->name);
return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inputs, const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
@@ -299,9 +299,11 @@ struct ReLUFunctor : public BaseFunctor
bool supportBackend(int backendId, int)
{
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
return slope >= 0 || !INF_ENGINE_VER_MAJOR_EQ(INF_ENGINE_RELEASE_2019R1);
#endif
#ifdef HAVE_DNN_NGRAPH
if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
return true;
#endif
@@ -408,12 +410,12 @@ struct ReLUFunctor : public BaseFunctor
}
#endif // HAVE_HALIDE
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
InferenceEngine::Builder::Layer initInfEngineBuilderAPI()
{
return InferenceEngine::Builder::ReLULayer("").setNegativeSlope(slope);
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
@@ -535,12 +537,12 @@ struct ReLU6Functor : public BaseFunctor
}
#endif // HAVE_HALIDE
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
InferenceEngine::Builder::Layer initInfEngineBuilderAPI()
{
return InferenceEngine::Builder::ClampLayer("").setMinValue(minValue).setMaxValue(maxValue);
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
@@ -627,12 +629,12 @@ struct TanHFunctor : public BaseFunctor
}
#endif // HAVE_HALIDE
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
InferenceEngine::Builder::Layer initInfEngineBuilderAPI()
{
return InferenceEngine::Builder::TanHLayer("");
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
@@ -718,12 +720,12 @@ struct SwishFunctor : public BaseFunctor
}
#endif // HAVE_HALIDE
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
InferenceEngine::Builder::Layer initInfEngineBuilderAPI()
{
CV_Error(Error::StsNotImplemented, "");
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
@@ -810,12 +812,12 @@ struct MishFunctor : public BaseFunctor
}
#endif // HAVE_HALIDE
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
InferenceEngine::Builder::Layer initInfEngineBuilderAPI()
{
CV_Error(Error::StsNotImplemented, "");
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
@@ -908,12 +910,12 @@ struct SigmoidFunctor : public BaseFunctor
}
#endif // HAVE_HALIDE
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
InferenceEngine::Builder::Layer initInfEngineBuilderAPI()
{
return InferenceEngine::Builder::SigmoidLayer("");
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
@@ -1000,12 +1002,12 @@ struct ELUFunctor : public BaseFunctor
}
#endif // HAVE_HALIDE
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
InferenceEngine::Builder::Layer initInfEngineBuilderAPI()
{
return InferenceEngine::Builder::ELULayer("");
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
@@ -1095,12 +1097,12 @@ struct AbsValFunctor : public BaseFunctor
}
#endif // HAVE_HALIDE
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
InferenceEngine::Builder::Layer initInfEngineBuilderAPI()
{
return InferenceEngine::Builder::ReLULayer("").setNegativeSlope(-0.999999f);
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
@@ -1191,12 +1193,12 @@ struct BNLLFunctor : public BaseFunctor
}
#endif // HAVE_HALIDE
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
InferenceEngine::Builder::Layer initInfEngineBuilderAPI()
{
CV_Error(Error::StsNotImplemented, "");
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
@@ -1328,14 +1330,14 @@ struct PowerFunctor : public BaseFunctor
}
#endif // HAVE_HALIDE
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
InferenceEngine::Builder::Layer initInfEngineBuilderAPI()
{
return InferenceEngine::Builder::PowerLayer("").setPower(power)
.setScale(scale)
.setShift(shift);
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
@@ -1497,7 +1499,7 @@ struct ChannelsPReLUFunctor : public BaseFunctor
}
#endif // HAVE_HALIDE
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
InferenceEngine::Builder::Layer initInfEngineBuilderAPI()
{
InferenceEngine::Builder::Layer l = InferenceEngine::Builder::PReLULayer("");
@@ -1505,7 +1507,7 @@ struct ChannelsPReLUFunctor : public BaseFunctor
addConstantData("weights", wrapToInfEngineBlob(scale, {numChannels}, InferenceEngine::Layout::C), l);
return l;
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)

@@ -715,7 +715,7 @@ public:
return Ptr<BackendNode>();
}
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >& inputs) CV_OVERRIDE
{
InferenceEngine::Builder::EltwiseLayer ieLayer(name);
@@ -739,7 +739,7 @@ public:
return Ptr<BackendNode>(new InfEngineBackendNode(l));
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH

@@ -183,7 +183,7 @@ public:
}
#endif
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >& inputs) CV_OVERRIDE
{
InferenceEngine::Builder::Layer ieLayer(name);
@@ -195,7 +195,7 @@ public:
ieLayer.setOutputPorts(std::vector<InferenceEngine::Port>(1));
return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inputs,
@@ -223,7 +223,6 @@ virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inp
return Ptr<BackendNode>(new InfEngineNgraphNode(reshape));
}
#endif // HAVE_DNN_NGRAPH
// HAVE_INF_ENGINE
int _startAxis;
int _endAxis;

@@ -466,7 +466,7 @@ public:
return Ptr<BackendNode>();
}
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >&) CV_OVERRIDE
{
InferenceEngine::Builder::FullyConnectedLayer ieLayer(name);
@@ -481,7 +481,7 @@ public:
return Ptr<BackendNode>(new InfEngineBackendNode(l));
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH

@@ -444,7 +444,7 @@ public:
#endif // HAVE_HALIDE
}
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >&) CV_OVERRIDE
{
float alphaSize = alpha;
@@ -461,7 +461,7 @@ public:
l.getParameters()["k"] = bias;
return Ptr<BackendNode>(new InfEngineBackendNode(l));
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inputs, const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE

@@ -118,14 +118,17 @@ public:
virtual bool supportBackend(int backendId) CV_OVERRIDE
{
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
return !zeroDev && (preferableTarget != DNN_TARGET_MYRIAD || eps <= 1e-7f);
else if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
#endif
#ifdef HAVE_DNN_NGRAPH
if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
return true;
else
#endif // HAVE_INF_ENGINE
#endif
{
return backendId == DNN_BACKEND_OPENCV;
}
}
#ifdef HAVE_OPENCL
@@ -375,7 +378,7 @@ public:
}
}
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >&) CV_OVERRIDE
{
InferenceEngine::Builder::MVNLayer ieLayer(name);
@@ -384,7 +387,7 @@ public:
ieLayer.setEpsilon(eps);
return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inputs,

@@ -295,7 +295,7 @@ public:
}
#endif
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >& inputs) CV_OVERRIDE
{
InferenceEngine::DataPtr input = infEngineDataNode(inputs[0]);
@@ -344,7 +344,7 @@ public:
return Ptr<BackendNode>(new InfEngineBackendNode(l));
}
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inputs,

@@ -212,7 +212,7 @@ public:
return Ptr<BackendNode>();
}
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >&) CV_OVERRIDE
{
InferenceEngine::Builder::Layer ieLayer(name);

@@ -403,14 +403,14 @@ public:
return Ptr<BackendNode>();
}
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >&) CV_OVERRIDE
{
InferenceEngine::Builder::PermuteLayer ieLayer(name);
ieLayer.setOrder(_order);
return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inputs,

@@ -188,15 +188,15 @@ public:
{
return type == MAX || type == AVE || type == ROI;
}
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
else if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
{
if (computeMaxIdx)
return false;
#ifdef HAVE_INF_ENGINE
if (kernel_size.size() == 3)
return preferableTarget == DNN_TARGET_CPU;
if (preferableTarget == DNN_TARGET_MYRIAD) {
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LE(INF_ENGINE_RELEASE_2019R1)
#if INF_ENGINE_VER_MAJOR_LE(INF_ENGINE_RELEASE_2019R1)
if (type == MAX && (pad_l == 1 && pad_t == 1) && stride == Size(2, 2) ) {
return !isMyriadX();
}
@@ -205,14 +205,13 @@ public:
}
else
return type != STOCHASTIC;
#else
return false;
#endif
}
else if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH) {
#endif
else if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
{
return !computeMaxIdx && type != STOCHASTIC;
}
else
else if (backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_HALIDE || backendId == DNN_BACKEND_VKCOM)
{
if (kernel_size.size() == 3)
return (backendId == DNN_BACKEND_OPENCV && preferableTarget == DNN_TARGET_CPU);
@@ -225,6 +224,7 @@ public:
else
return false;
}
return false;
}
#ifdef HAVE_OPENCL
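
pooling_layer.cpp also shows the second recurring refactor: supportBackend() becomes a sequence of per-backend guarded branches with an explicit trailing return false, rather than a dangling else chain. A compilable sketch of the resulting control flow, with hypothetical backend ids:

enum Backend { BACKEND_OPENCV, BACKEND_NN_BUILDER, BACKEND_NGRAPH };  // stand-ins

static bool supportBackend(Backend backendId)
{
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
    if (backendId == BACKEND_NN_BUILDER)
        return true;  // the real code also checks kernel size, target, IE version
#endif
#ifdef HAVE_DNN_NGRAPH
    if (backendId == BACKEND_NGRAPH)
        return true;  // real code: !computeMaxIdx && type != STOCHASTIC
#endif
    if (backendId == BACKEND_OPENCV)
        return true;
    return false;  // explicit fall-through for unhandled backends
}

int main()
{
    return supportBackend(BACKEND_OPENCV) ? 0 : 1;
}
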
@@ -454,7 +454,7 @@ public:
return Ptr<BackendNode>();
}
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >&) CV_OVERRIDE
{
if (type == MAX || type == AVE)
@@ -500,7 +500,7 @@ public:
CV_Error(Error::StsNotImplemented, "Unsupported pooling type");
return Ptr<BackendNode>();
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019

@@ -555,7 +555,7 @@ public:
return Ptr<BackendNode>();
}
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >&) CV_OVERRIDE
{
if (_explicitSizes)
@@ -615,7 +615,7 @@ public:
return Ptr<BackendNode>(new InfEngineBackendNode(l));
}
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inputs, const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE

@@ -327,7 +327,7 @@ public:
layerOutputs[0].col(2).copyTo(dst);
}
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >&) CV_OVERRIDE
{
InferenceEngine::Builder::ProposalLayer ieLayer(name);
@@ -351,7 +351,7 @@ public:
return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH

@@ -205,14 +205,14 @@ public:
}
#endif
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >&) CV_OVERRIDE
{
InferenceEngine::Builder::ReorgYoloLayer ieLayer(name);
ieLayer.setStride(reorgStride);
return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> > &inputs,

@@ -279,7 +279,7 @@ public:
}
#endif
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >& inputs) CV_OVERRIDE
{
InferenceEngine::Builder::ReshapeLayer ieLayer(name);
@@ -287,7 +287,7 @@ public:
ieLayer.setDims(outShapes[0]);
return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inputs,

@@ -65,8 +65,7 @@ public:
return interpolation == "nearest" || interpolation == "bilinear";
#ifdef HAVE_INF_ENGINE
if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ||
backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 || backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
{
return (interpolation == "nearest" && scaleWidth == scaleHeight) ||
(interpolation == "bilinear");
@@ -192,9 +191,9 @@ public:
}
#endif
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >&) CV_OVERRIDE
{
#ifdef HAVE_INF_ENGINE
InferenceEngine::Builder::Layer ieLayer(name);
ieLayer.setName(name);
if (interpolation == "nearest")
@@ -220,9 +219,8 @@ public:
ieLayer.setInputPorts(std::vector<InferenceEngine::Port>(1));
ieLayer.setOutputPorts(std::vector<InferenceEngine::Port>(1));
return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
#endif // HAVE_INF_ENGINE
return Ptr<BackendNode>();
}
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH

@@ -227,7 +227,7 @@ public:
}
#endif // HAVE_HALIDE
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >&) CV_OVERRIDE
{
InferenceEngine::Builder::Layer l = InferenceEngine::Builder::ScaleShiftLayer(name);
@@ -253,7 +253,7 @@ public:
addConstantData("biases", wrapToInfEngineBlob(blobs.back(), {numChannels}, InferenceEngine::Layout::C), l);
return Ptr<BackendNode>(new InfEngineBackendNode(l));
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH

@@ -119,14 +119,17 @@ public:
virtual bool supportBackend(int backendId) CV_OVERRIDE
{
return backendId == DNN_BACKEND_OPENCV ||
backendId == DNN_BACKEND_CUDA ||
(backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && sliceRanges.size() == 1) ||
(backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 &&
#ifdef HAVE_INF_ENGINE
INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1) &&
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
return INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1) &&
sliceRanges.size() == 1 && sliceRanges[0].size() == 4;
#endif
#ifdef HAVE_DNN_NGRAPH
if (backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
return sliceRanges.size() == 1;
#endif
sliceRanges.size() == 1 && sliceRanges[0].size() == 4);
return backendId == DNN_BACKEND_OPENCV ||
backendId == DNN_BACKEND_CUDA;
}
bool getMemoryShapes(const std::vector<MatShape> &inputs,
@@ -292,7 +295,7 @@ public:
}
#endif
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1)
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >& inputs) CV_OVERRIDE
{

@@ -348,7 +348,7 @@ public:
return Ptr<BackendNode>();
}
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >& inputs) CV_OVERRIDE
{
InferenceEngine::DataPtr input = infEngineDataNode(inputs[0]);
@@ -358,7 +358,7 @@ public:
return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
}
#endif // HAVE_INF_ENGINE
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#ifdef HAVE_DNN_NGRAPH
virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inputs,

@@ -42,8 +42,8 @@ Backend& getInferenceEngineBackendTypeParam()
{
static Backend param = parseInferenceEngineBackendType(
utils::getConfigurationParameterString("OPENCV_DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019_TYPE",
#ifdef HAVE_NGRAPH
CV_DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_API // future: CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH
#ifndef HAVE_DNN_IE_NN_BUILDER_2019
CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH
#else
CV_DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_API
#endif
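
With the Builder API compiled out, getInferenceEngineBackendTypeParam() now defaults to the nGraph backend type. The flavour can still be chosen per process through the OPENCV_DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019_TYPE environment variable or through the public setter; a hedged sketch (assuming the opencv2/dnn/utils/inference_engine.hpp header of this era):

#include <iostream>
#include <opencv2/dnn/utils/inference_engine.hpp>

int main()
{
    // Equivalent to exporting
    // OPENCV_DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019_TYPE=NGRAPH;
    // the setter returns the previously active type.
    cv::String prev = cv::dnn::setInferenceEngineBackendType(
        CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
    std::cout << "previous IE backend type: " << prev << std::endl;
    return 0;
}
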
@@ -69,6 +69,36 @@ cv::String setInferenceEngineBackendType(const cv::String& newBackendType)
CV__DNN_INLINE_NS_END
Mat infEngineBlobToMat(const InferenceEngine::Blob::Ptr& blob)
{
// NOTE: Inference Engine sizes are reversed.
std::vector<size_t> dims = blob->getTensorDesc().getDims();
std::vector<int> size(dims.begin(), dims.end());
auto precision = blob->getTensorDesc().getPrecision();
int type = -1;
switch (precision)
{
case InferenceEngine::Precision::FP32: type = CV_32F; break;
case InferenceEngine::Precision::U8: type = CV_8U; break;
default:
CV_Error(Error::StsNotImplemented, "Unsupported blob precision");
}
return Mat(size, type, (void*)blob->buffer());
}
void infEngineBlobsToMats(const std::vector<InferenceEngine::Blob::Ptr>& blobs,
std::vector<Mat>& mats)
{
mats.resize(blobs.size());
for (int i = 0; i < blobs.size(); ++i)
mats[i] = infEngineBlobToMat(blobs[i]);
}
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
// For networks with input layer which has an empty name, IE generates a name id[some_number].
// OpenCV lets users use an empty input name and to prevent unexpected naming,
// we can use some predefined name.
@@ -556,6 +586,7 @@ void InfEngineBackendWrapper::setHostDirty()
}
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#if INF_ENGINE_VER_MAJOR_LE(INF_ENGINE_RELEASE_2019R1)
static std::map<std::string, InferenceEngine::InferenceEnginePluginPtr>& getSharedPlugins()
@@ -687,6 +718,9 @@ static bool detectMyriadX_()
}
#endif // !defined(OPENCV_DNN_IE_VPU_TYPE_DEFAULT)
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
void InfEngineBackendNet::initPlugin(InferenceEngine::CNNNetwork& net)
{
CV_Assert(!isInitialized());
@@ -985,32 +1019,6 @@ void InfEngineBackendNet::forward(const std::vector<Ptr<BackendWrapper> >& outBl
}
}
Mat infEngineBlobToMat(const InferenceEngine::Blob::Ptr& blob)
{
// NOTE: Inference Engine sizes are reversed.
std::vector<size_t> dims = blob->getTensorDesc().getDims();
std::vector<int> size(dims.begin(), dims.end());
auto precision = blob->getTensorDesc().getPrecision();
int type = -1;
switch (precision)
{
case InferenceEngine::Precision::FP32: type = CV_32F; break;
case InferenceEngine::Precision::U8: type = CV_8U; break;
default:
CV_Error(Error::StsNotImplemented, "Unsupported blob precision");
}
return Mat(size, type, (void*)blob->buffer());
}
void infEngineBlobsToMats(const std::vector<InferenceEngine::Blob::Ptr>& blobs,
std::vector<Mat>& mats)
{
mats.resize(blobs.size());
for (int i = 0; i < blobs.size(); ++i)
mats[i] = infEngineBlobToMat(blobs[i]);
}
bool InfEngineBackendLayer::getMemoryShapes(const std::vector<MatShape> &inputs,
const int requiredOutputs,
std::vector<MatShape> &outputs,
@@ -1077,6 +1085,8 @@ void addConstantData(const std::string& name, InferenceEngine::Blob::Ptr data,
#endif
}
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#endif // HAVE_INF_ENGINE
bool haveInfEngine()
@@ -1092,11 +1102,13 @@ void forwardInfEngine(const std::vector<Ptr<BackendWrapper> >& outBlobsWrappers,
Ptr<BackendNode>& node, bool isAsync)
{
CV_Assert(haveInfEngine());
#ifdef HAVE_INF_ENGINE
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
CV_Assert(!node.empty());
Ptr<InfEngineBackendNode> ieNode = node.dynamicCast<InfEngineBackendNode>();
CV_Assert(!ieNode.empty());
ieNode->net->forward(outBlobsWrappers, isAsync);
#else
CV_Error(Error::StsNotImplemented, "This OpenCV version is built without Inference Engine NN Builder API support");
#endif // HAVE_INF_ENGINE
}
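
infEngineBlobToMat() and infEngineBlobsToMats() move out of the Builder-only region because the nGraph fallback path in ie_ngraph.cpp needs them as well. Note the conversion wraps the blob's buffer in a Mat header without copying, so the blob must outlive the Mat. A hedged standalone sketch using the IE 2019/2020-era public blob API:

#include <vector>
#include <opencv2/core.hpp>
#include <inference_engine.hpp>

int main()
{
    namespace IE = InferenceEngine;

    // Build a small FP32 NCHW blob.
    IE::TensorDesc desc(IE::Precision::FP32, {1, 3, 4, 4}, IE::Layout::NCHW);
    IE::Blob::Ptr blob = IE::make_shared_blob<float>(desc);
    blob->allocate();

    // Same conversion as infEngineBlobToMat: note the reversed (IE-order)
    // dims and the zero-copy Mat header over the blob's memory.
    std::vector<size_t> dims = blob->getTensorDesc().getDims();
    std::vector<int> size(dims.begin(), dims.end());
    cv::Mat view(size, CV_32F, (void*)blob->buffer());

    view.setTo(cv::Scalar::all(0));  // writes through to the blob
    return 0;
}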

@@ -41,6 +41,7 @@
#pragma GCC diagnostic ignored "-Wsuggest-override"
#endif
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
//#define INFERENCE_ENGINE_DEPRECATED // turn off deprecation warnings from IE
//there is no way to suppress warnings from IE only at this moment, so we are forced to suppress warnings globally
#if defined(__GNUC__)
@@ -49,6 +50,7 @@
#ifdef _MSC_VER
#pragma warning(disable: 4996) // was declared deprecated
#endif
#endif // HAVE_DNN_IE_NN_BUILDER_2019
#if defined(__GNUC__) && INF_ENGINE_VER_MAJOR_LT(INF_ENGINE_RELEASE_2020_1)
#pragma GCC visibility push(default)
@@ -74,6 +76,13 @@ namespace cv { namespace dnn {
Backend& getInferenceEngineBackendTypeParam();
Mat infEngineBlobToMat(const InferenceEngine::Blob::Ptr& blob);
void infEngineBlobsToMats(const std::vector<InferenceEngine::Blob::Ptr>& blobs,
std::vector<Mat>& mats);
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
class InfEngineBackendNet
{
public:
@@ -180,11 +189,6 @@ InferenceEngine::Blob::Ptr wrapToInfEngineBlob(const Mat& m, const std::vector<s
InferenceEngine::DataPtr infEngineDataNode(const Ptr<BackendWrapper>& ptr);
Mat infEngineBlobToMat(const InferenceEngine::Blob::Ptr& blob);
void infEngineBlobsToMats(const std::vector<InferenceEngine::Blob::Ptr>& blobs,
std::vector<Mat>& mats);
// Convert Inference Engine blob with FP32 precision to FP16 precision.
// Allocates memory for a new blob.
InferenceEngine::Blob::Ptr convertFp16(const InferenceEngine::Blob::Ptr& blob);
@@ -233,6 +237,8 @@ public:
InferenceEngine::ResponseDesc* resp) noexcept;
};
#endif // HAVE_DNN_IE_NN_BUILDER_2019
CV__DNN_INLINE_NS_BEGIN

@@ -392,7 +392,10 @@ void initDNNTests()
#ifdef HAVE_DNN_NGRAPH
CV_TEST_TAG_DNN_SKIP_IE_NGRAPH,
#endif
CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER,
#endif
""
);
#endif
registerGlobalSkipTag(

@@ -130,14 +130,18 @@ void test_readNet_IE_do_not_call_setInput(Backend backendId)
EXPECT_TRUE(res.empty()) << res.size;
}
#ifdef HAVE_DNN_IE_NN_BUILDER_2019
TEST(readNet, do_not_call_setInput_IE_NN_BUILDER_2019)
{
test_readNet_IE_do_not_call_setInput(DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019);
}
#endif
#ifdef HAVE_DNN_NGRAPH
TEST(readNet, do_not_call_setInput_IE_NGRAPH)
{
test_readNet_IE_do_not_call_setInput(DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
}
#endif
#endif // HAVE_INF_ENGINE
typedef testing::TestWithParam<tuple<Backend, Target> > dump;

@@ -62,6 +62,8 @@ static std::vector<std::string>& getTestTagsSkipList()
void registerGlobalSkipTag(const std::string& skipTag)
{
if (skipTag.empty())
return; // do nothing
std::vector<std::string>& skipTags = getTestTagsSkipList();
for (size_t i = 0; i < skipTags.size(); ++i)
{
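
The last two hunks work as a pair: initDNNTests() may now contribute zero conditionally-compiled skip tags, so its argument list ends with an unconditional "" sentinel, and registerGlobalSkipTag() learns to ignore empty tags. A compilable sketch of the idiom, simplified to single-argument registration with a hypothetical guard:

#include <iostream>
#include <string>
#include <vector>

static std::vector<std::string> g_skipTags;

static void registerGlobalSkipTag(const std::string& skipTag)
{
    if (skipTag.empty())
        return;  // mirrors the early return added in ts_tags.cpp
    g_skipTags.push_back(skipTag);
}

int main()
{
    registerGlobalSkipTag(
#ifdef HAVE_DNN_IE_NN_BUILDER_2019  // guard mirrors test_common.impl.hpp
        "dnn_skip_ie_nn_builder"    // hypothetical tag string
#else
        ""                          // sentinel keeps the call well-formed
#endif
    );
    std::cout << g_skipTags.size() << " tag(s) registered" << std::endl;
    return 0;
}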
