|
|
@ -152,10 +152,16 @@ public: |
|
|
|
// Create an Inference Engine backend node for this element-wise layer.
// Dispatches to the functor member ("func" -- presumably the element-wise
// functor of the enclosing layer template; confirm in the full source):
//  - IE >= 2018R5: Builder API path (func.initInfEngineBuilderAPI()).
//  - older IE:     legacy CNNLayer path via LayerParams.
// Returns an empty Ptr when the IE backend is not compiled in.
virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >&) CV_OVERRIDE
{
#ifdef HAVE_INF_ENGINE
#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2018R5)
    InferenceEngine::Builder::Layer ieLayer = func.initInfEngineBuilderAPI();
    ieLayer.setName(this->name);
    return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
#else
    InferenceEngine::LayerParams lp;
    lp.name = this->name;
    lp.precision = InferenceEngine::Precision::FP32;
    return Ptr<BackendNode>(new InfEngineBackendNode(func.initInfEngine(lp)));
#endif
#endif  // HAVE_INF_ENGINE
    return Ptr<BackendNode>();
}
|
|
@ -345,6 +351,12 @@ struct ReLUFunctor |
|
|
|
#endif // HAVE_HALIDE
|
|
|
|
#endif // HAVE_HALIDE
|
|
|
|
|
|
|
|
|
|
|
|
#ifdef HAVE_INF_ENGINE |
|
|
|
#ifdef HAVE_INF_ENGINE |
|
|
|
|
|
|
|
#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2018R5) |
|
|
|
|
|
|
|
// Builder-API (IE >= 2018R5) node: ReLU with this functor's negative slope.
InferenceEngine::Builder::Layer initInfEngineBuilderAPI()
{
    return InferenceEngine::Builder::ReLULayer("").setNegativeSlope(slope);
}
|
|
|
|
|
|
|
#else |
|
|
|
InferenceEngine::CNNLayerPtr initInfEngine(InferenceEngine::LayerParams& lp) |
|
|
|
InferenceEngine::CNNLayerPtr initInfEngine(InferenceEngine::LayerParams& lp) |
|
|
|
{ |
|
|
|
{ |
|
|
|
lp.type = "ReLU"; |
|
|
|
lp.type = "ReLU"; |
|
|
@ -353,6 +365,7 @@ struct ReLUFunctor |
|
|
|
ieLayer->params["negative_slope"] = format("%f", slope); |
|
|
|
ieLayer->params["negative_slope"] = format("%f", slope); |
|
|
|
return ieLayer; |
|
|
|
return ieLayer; |
|
|
|
} |
|
|
|
} |
|
|
|
|
|
|
|
#endif |
|
|
|
#endif // HAVE_INF_ENGINE
|
|
|
|
#endif // HAVE_INF_ENGINE
|
|
|
|
|
|
|
|
|
|
|
|
bool tryFuse(Ptr<dnn::Layer>&) { return false; } |
|
|
|
bool tryFuse(Ptr<dnn::Layer>&) { return false; } |
|
|
@ -452,6 +465,12 @@ struct ReLU6Functor |
|
|
|
#endif // HAVE_HALIDE
|
|
|
|
#endif // HAVE_HALIDE
|
|
|
|
|
|
|
|
|
|
|
|
#ifdef HAVE_INF_ENGINE |
|
|
|
#ifdef HAVE_INF_ENGINE |
|
|
|
|
|
|
|
#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2018R5) |
|
|
|
|
|
|
|
// Builder-API (IE >= 2018R5) node: Clamp to [minValue, maxValue] (ReLU6).
InferenceEngine::Builder::Layer initInfEngineBuilderAPI()
{
    return InferenceEngine::Builder::ClampLayer("").setMinValue(minValue).setMaxValue(maxValue);
}
|
|
|
|
|
|
|
#else |
|
|
|
InferenceEngine::CNNLayerPtr initInfEngine(InferenceEngine::LayerParams& lp) |
|
|
|
InferenceEngine::CNNLayerPtr initInfEngine(InferenceEngine::LayerParams& lp) |
|
|
|
{ |
|
|
|
{ |
|
|
|
lp.type = "Clamp"; |
|
|
|
lp.type = "Clamp"; |
|
|
@ -462,6 +481,7 @@ struct ReLU6Functor |
|
|
|
ieLayer->params["max"] = format("%f", maxValue); |
|
|
|
ieLayer->params["max"] = format("%f", maxValue); |
|
|
|
return ieLayer; |
|
|
|
return ieLayer; |
|
|
|
} |
|
|
|
} |
|
|
|
|
|
|
|
#endif |
|
|
|
#endif // HAVE_INF_ENGINE
|
|
|
|
#endif // HAVE_INF_ENGINE
|
|
|
|
|
|
|
|
|
|
|
|
bool tryFuse(Ptr<dnn::Layer>&) { return false; } |
|
|
|
bool tryFuse(Ptr<dnn::Layer>&) { return false; } |
|
|
@ -530,12 +550,19 @@ struct TanHFunctor |
|
|
|
#endif // HAVE_HALIDE
|
|
|
|
#endif // HAVE_HALIDE
|
|
|
|
|
|
|
|
|
|
|
|
#ifdef HAVE_INF_ENGINE |
|
|
|
#ifdef HAVE_INF_ENGINE |
|
|
|
|
|
|
|
#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2018R5) |
|
|
|
|
|
|
|
// Builder-API (IE >= 2018R5) node: hyperbolic tangent activation.
InferenceEngine::Builder::Layer initInfEngineBuilderAPI()
{
    return InferenceEngine::Builder::TanHLayer("");
}
|
|
|
|
|
|
|
#else |
|
|
|
// Legacy (pre-2018R5) path: build a CNNLayer of type "TanH" from lp.
InferenceEngine::CNNLayerPtr initInfEngine(InferenceEngine::LayerParams& lp)
{
    lp.type = "TanH";
    std::shared_ptr<InferenceEngine::CNNLayer> ieLayer(new InferenceEngine::CNNLayer(lp));
    return ieLayer;
}
|
|
|
|
|
|
|
#endif |
|
|
|
#endif // HAVE_INF_ENGINE
|
|
|
|
#endif // HAVE_INF_ENGINE
|
|
|
|
|
|
|
|
|
|
|
|
bool tryFuse(Ptr<dnn::Layer>&) { return false; } |
|
|
|
bool tryFuse(Ptr<dnn::Layer>&) { return false; } |
|
|
@ -604,12 +631,19 @@ struct SigmoidFunctor |
|
|
|
#endif // HAVE_HALIDE
|
|
|
|
#endif // HAVE_HALIDE
|
|
|
|
|
|
|
|
|
|
|
|
#ifdef HAVE_INF_ENGINE |
|
|
|
#ifdef HAVE_INF_ENGINE |
|
|
|
|
|
|
|
#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2018R5) |
|
|
|
|
|
|
|
// Builder-API (IE >= 2018R5) node: logistic sigmoid activation.
InferenceEngine::Builder::Layer initInfEngineBuilderAPI()
{
    return InferenceEngine::Builder::SigmoidLayer("");
}
|
|
|
|
|
|
|
#else |
|
|
|
// Legacy (pre-2018R5) path: build a CNNLayer of type "Sigmoid" from lp.
InferenceEngine::CNNLayerPtr initInfEngine(InferenceEngine::LayerParams& lp)
{
    lp.type = "Sigmoid";
    std::shared_ptr<InferenceEngine::CNNLayer> ieLayer(new InferenceEngine::CNNLayer(lp));
    return ieLayer;
}
|
|
|
|
|
|
|
#endif |
|
|
|
#endif // HAVE_INF_ENGINE
|
|
|
|
#endif // HAVE_INF_ENGINE
|
|
|
|
|
|
|
|
|
|
|
|
bool tryFuse(Ptr<dnn::Layer>&) { return false; } |
|
|
|
bool tryFuse(Ptr<dnn::Layer>&) { return false; } |
|
|
@ -680,11 +714,18 @@ struct ELUFunctor |
|
|
|
#endif // HAVE_HALIDE
|
|
|
|
#endif // HAVE_HALIDE
|
|
|
|
|
|
|
|
|
|
|
|
#ifdef HAVE_INF_ENGINE |
|
|
|
#ifdef HAVE_INF_ENGINE |
|
|
|
|
|
|
|
#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2018R5) |
|
|
|
|
|
|
|
// Builder-API (IE >= 2018R5) node: ELU activation (default alpha).
InferenceEngine::Builder::Layer initInfEngineBuilderAPI()
{
    return InferenceEngine::Builder::ELULayer("");
}
|
|
|
|
|
|
|
#else |
|
|
|
// Legacy (pre-2018R5) path: build a CNNLayer of type "ELU" from lp.
InferenceEngine::CNNLayerPtr initInfEngine(InferenceEngine::LayerParams& lp)
{
    lp.type = "ELU";
    return InferenceEngine::CNNLayerPtr(new InferenceEngine::CNNLayer(lp));
}
|
|
|
|
|
|
|
#endif |
|
|
|
#endif // HAVE_INF_ENGINE
|
|
|
|
#endif // HAVE_INF_ENGINE
|
|
|
|
|
|
|
|
|
|
|
|
bool tryFuse(Ptr<dnn::Layer>&) { return false; } |
|
|
|
bool tryFuse(Ptr<dnn::Layer>&) { return false; } |
|
|
@ -753,6 +794,12 @@ struct AbsValFunctor |
|
|
|
#endif // HAVE_HALIDE
|
|
|
|
#endif // HAVE_HALIDE
|
|
|
|
|
|
|
|
|
|
|
|
#ifdef HAVE_INF_ENGINE |
|
|
|
#ifdef HAVE_INF_ENGINE |
|
|
|
|
|
|
|
#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2018R5) |
|
|
|
|
|
|
|
// Builder-API (IE >= 2018R5) node: |x| expressed as ReLU with slope -1
// (negative inputs are mirrored instead of zeroed).
InferenceEngine::Builder::Layer initInfEngineBuilderAPI()
{
    return InferenceEngine::Builder::ReLULayer("").setNegativeSlope(-1);
}
|
|
|
|
|
|
|
#else |
|
|
|
InferenceEngine::CNNLayerPtr initInfEngine(InferenceEngine::LayerParams& lp) |
|
|
|
InferenceEngine::CNNLayerPtr initInfEngine(InferenceEngine::LayerParams& lp) |
|
|
|
{ |
|
|
|
{ |
|
|
|
lp.type = "ReLU"; |
|
|
|
lp.type = "ReLU"; |
|
|
@ -761,6 +808,7 @@ struct AbsValFunctor |
|
|
|
ieLayer->params["negative_slope"] = "-1.0"; |
|
|
|
ieLayer->params["negative_slope"] = "-1.0"; |
|
|
|
return ieLayer; |
|
|
|
return ieLayer; |
|
|
|
} |
|
|
|
} |
|
|
|
|
|
|
|
#endif |
|
|
|
#endif // HAVE_INF_ENGINE
|
|
|
|
#endif // HAVE_INF_ENGINE
|
|
|
|
|
|
|
|
|
|
|
|
bool tryFuse(Ptr<dnn::Layer>&) { return false; } |
|
|
|
bool tryFuse(Ptr<dnn::Layer>&) { return false; } |
|
|
@ -808,11 +856,18 @@ struct BNLLFunctor |
|
|
|
#endif // HAVE_HALIDE
|
|
|
|
#endif // HAVE_HALIDE
|
|
|
|
|
|
|
|
|
|
|
|
#ifdef HAVE_INF_ENGINE |
|
|
|
#ifdef HAVE_INF_ENGINE |
|
|
|
|
|
|
|
#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2018R5) |
|
|
|
|
|
|
|
// BNLL has no Builder-API equivalent in IE; always raises StsNotImplemented.
InferenceEngine::Builder::Layer initInfEngineBuilderAPI()
{
    CV_Error(Error::StsNotImplemented, "");
}
|
|
|
|
|
|
|
#else |
|
|
|
// BNLL is not supported by the legacy IE path either; raises StsNotImplemented.
// The trailing return is unreachable but keeps compilers quiet.
InferenceEngine::CNNLayerPtr initInfEngine(InferenceEngine::LayerParams& lp)
{
    CV_Error(Error::StsNotImplemented, "BNLL");
    return InferenceEngine::CNNLayerPtr();
}
|
|
|
|
|
|
|
#endif |
|
|
|
#endif // HAVE_INF_ENGINE
|
|
|
|
#endif // HAVE_INF_ENGINE
|
|
|
|
|
|
|
|
|
|
|
|
bool tryFuse(Ptr<dnn::Layer>&) { return false; } |
|
|
|
bool tryFuse(Ptr<dnn::Layer>&) { return false; } |
|
|
@ -917,6 +972,14 @@ struct PowerFunctor |
|
|
|
#endif // HAVE_HALIDE
|
|
|
|
#endif // HAVE_HALIDE
|
|
|
|
|
|
|
|
|
|
|
|
#ifdef HAVE_INF_ENGINE |
|
|
|
#ifdef HAVE_INF_ENGINE |
|
|
|
|
|
|
|
#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2018R5) |
|
|
|
|
|
|
|
// Builder-API (IE >= 2018R5) node: (scale*x + shift)^power.
InferenceEngine::Builder::Layer initInfEngineBuilderAPI()
{
    return InferenceEngine::Builder::PowerLayer("").setPower(power)
                                                   .setScale(scale)
                                                   .setShift(shift);
}
|
|
|
|
|
|
|
#else |
|
|
|
InferenceEngine::CNNLayerPtr initInfEngine(InferenceEngine::LayerParams& lp) |
|
|
|
InferenceEngine::CNNLayerPtr initInfEngine(InferenceEngine::LayerParams& lp) |
|
|
|
{ |
|
|
|
{ |
|
|
|
if (power == 1.0f && scale == 1.0f && shift == 0.0f) |
|
|
|
if (power == 1.0f && scale == 1.0f && shift == 0.0f) |
|
|
@ -936,6 +999,7 @@ struct PowerFunctor |
|
|
|
return ieLayer; |
|
|
|
return ieLayer; |
|
|
|
} |
|
|
|
} |
|
|
|
} |
|
|
|
} |
|
|
|
|
|
|
|
#endif |
|
|
|
#endif // HAVE_INF_ENGINE
|
|
|
|
#endif // HAVE_INF_ENGINE
|
|
|
|
|
|
|
|
|
|
|
|
bool tryFuse(Ptr<dnn::Layer>& top) |
|
|
|
bool tryFuse(Ptr<dnn::Layer>& top) |
|
|
@ -1067,6 +1131,15 @@ struct ChannelsPReLUFunctor |
|
|
|
#endif // HAVE_HALIDE
|
|
|
|
#endif // HAVE_HALIDE
|
|
|
|
|
|
|
|
|
|
|
|
#ifdef HAVE_INF_ENGINE |
|
|
|
#ifdef HAVE_INF_ENGINE |
|
|
|
|
|
|
|
#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2018R5) |
|
|
|
|
|
|
|
// Builder-API (IE >= 2018R5) node: PReLU with per-channel slopes taken from
// the "scale" Mat (one weight per channel, layout C).
InferenceEngine::Builder::Layer initInfEngineBuilderAPI()
{
    InferenceEngine::Builder::PReLULayer ieLayer("");
    const size_t numChannels = scale.total();
    ieLayer.setWeights(wrapToInfEngineBlob(scale, {numChannels}, InferenceEngine::Layout::C));
    return ieLayer;
}
|
|
|
|
|
|
|
#else |
|
|
|
InferenceEngine::CNNLayerPtr initInfEngine(InferenceEngine::LayerParams& lp) |
|
|
|
InferenceEngine::CNNLayerPtr initInfEngine(InferenceEngine::LayerParams& lp) |
|
|
|
{ |
|
|
|
{ |
|
|
|
lp.type = "PReLU"; |
|
|
|
lp.type = "PReLU"; |
|
|
@ -1075,6 +1148,7 @@ struct ChannelsPReLUFunctor |
|
|
|
ieLayer->_weights = wrapToInfEngineBlob(scale, {numChannels}, InferenceEngine::Layout::C); |
|
|
|
ieLayer->_weights = wrapToInfEngineBlob(scale, {numChannels}, InferenceEngine::Layout::C); |
|
|
|
return ieLayer; |
|
|
|
return ieLayer; |
|
|
|
} |
|
|
|
} |
|
|
|
|
|
|
|
#endif |
|
|
|
#endif // HAVE_INF_ENGINE
|
|
|
|
#endif // HAVE_INF_ENGINE
|
|
|
|
|
|
|
|
|
|
|
|
bool tryFuse(Ptr<dnn::Layer>&) { return false; } |
|
|
|
bool tryFuse(Ptr<dnn::Layer>&) { return false; } |
|
|
|