Merge pull request #11739 from dkurt:more_ie_models

commit f040282bf8 (Alexander Alekhin, 7 years ago)
 modules/dnn/perf/perf_net.cpp                 | 12
 modules/dnn/src/layers/convolution_layer.cpp  |  7
 modules/dnn/src/layers/elementwise_layers.cpp | 56
 modules/dnn/src/layers/resize_layer.cpp       |  6
 modules/dnn/src/layers/slice_layer.cpp        | 30
 modules/dnn/test/test_backends.cpp            | 17
 modules/dnn/test/test_darknet_importer.cpp    |  2
 modules/dnn/test/test_torch_importer.cpp      |  1
 8 files changed

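Summary: this PR enables more models on the Inference Engine (IE) backend. Per the diffs below, activation layers now answer supportBackend() per functor (ReLU, ReLU6, TanH and Sigmoid gain IE support; Power gains it except on OpenCL targets with power != 1; ELU, AbsVal, BNLL and ChannelsPReLU stay OpenCV/Halide only), Slice maps to an IE Crop layer when it has a single slice range, Deconvolution with non-zero adjustPad is rejected on Myriad, and IE Resize is limited to nearest-neighbor on non-Myriad targets. A minimal sketch of opting a net into this backend (model path borrowed from the tests below; the dummy input is illustrative):

    #include <opencv2/dnn.hpp>
    using namespace cv;
    using namespace cv::dnn;

    int main()
    {
        // Torch model exercised by the OpenFace tests in this PR.
        Net net = readNetFromTorch("dnn/openface_nn4.small2.v1.t7");
        net.setPreferableBackend(DNN_BACKEND_INFERENCE_ENGINE);
        net.setPreferableTarget(DNN_TARGET_CPU);  // or DNN_TARGET_OPENCL / DNN_TARGET_MYRIAD

        Mat img(96, 96, CV_8UC3, Scalar::all(127));  // dummy 96x96 input
        net.setInput(blobFromImage(img));            // NCHW float blob
        Mat out = net.forward();
        return 0;
    }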
--- a/modules/dnn/perf/perf_net.cpp
+++ b/modules/dnn/perf/perf_net.cpp
@@ -144,7 +144,8 @@ PERF_TEST_P_(DNNTestNetwork, SSD)
 PERF_TEST_P_(DNNTestNetwork, OpenFace)
 {
     if (backend == DNN_BACKEND_HALIDE ||
-        backend == DNN_BACKEND_INFERENCE_ENGINE && target != DNN_TARGET_CPU)
+        (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16) ||
+        (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD))
         throw SkipTestException("");
     processNet("dnn/openface_nn4.small2.v1.t7", "", "",
             Mat(cv::Size(96, 96), CV_32FC3));
@@ -248,6 +249,15 @@ PERF_TEST_P_(DNNTestNetwork, EAST_text_detection)
     processNet("dnn/frozen_east_text_detection.pb", "", "", Mat(cv::Size(320, 320), CV_32FC3));
 }
 
+PERF_TEST_P_(DNNTestNetwork, FastNeuralStyle_eccv16)
+{
+    if (backend == DNN_BACKEND_HALIDE ||
+        (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16) ||
+        (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD))
+        throw SkipTestException("");
+    processNet("dnn/fast_neural_style_eccv16_starry_night.t7", "", "", Mat(cv::Size(320, 240), CV_32FC3));
+}
+
 const tuple<DNNBackend, DNNTarget> testCases[] = {
 #ifdef HAVE_HALIDE
     tuple<DNNBackend, DNNTarget>(DNN_BACKEND_HALIDE, DNN_TARGET_CPU),

--- a/modules/dnn/src/layers/convolution_layer.cpp
+++ b/modules/dnn/src/layers/convolution_layer.cpp
@@ -81,9 +81,10 @@ public:
 
     virtual bool supportBackend(int backendId) CV_OVERRIDE
     {
-        return backendId == DNN_BACKEND_OPENCV ||
-               backendId == DNN_BACKEND_HALIDE && haveHalide() ||
-               backendId == DNN_BACKEND_INFERENCE_ENGINE && haveInfEngine();
+        if (backendId == DNN_BACKEND_INFERENCE_ENGINE)
+            return preferableTarget != DNN_TARGET_MYRIAD || type != "Deconvolution" || adjustPad == Size();
+        else
+            return backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_HALIDE;
     }
 
     void finalize(const std::vector<Mat*> &inputs, std::vector<Mat> &outputs) CV_OVERRIDE

--- a/modules/dnn/src/layers/elementwise_layers.cpp
+++ b/modules/dnn/src/layers/elementwise_layers.cpp
@@ -115,9 +115,7 @@ public:
 
     virtual bool supportBackend(int backendId) CV_OVERRIDE
     {
-        return backendId == DNN_BACKEND_OPENCV ||
-               backendId == DNN_BACKEND_HALIDE && haveHalide() ||
-               backendId == DNN_BACKEND_INFERENCE_ENGINE && haveInfEngine();
+        return func.supportBackend(backendId, this->preferableTarget);
     }
 
     virtual Ptr<BackendNode> tryAttach(const Ptr<BackendNode>& node) CV_OVERRIDE
@@ -238,6 +236,12 @@ struct ReLUFunctor
     explicit ReLUFunctor(float slope_=1.f) : slope(slope_) {}
 
+    bool supportBackend(int backendId, int)
+    {
+        return backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_HALIDE ||
+               backendId == DNN_BACKEND_INFERENCE_ENGINE;
+    }
+
     void apply(const float* srcptr, float* dstptr, int len, size_t planeSize, int cn0, int cn1) const
     {
         float s = slope;
@@ -353,6 +357,12 @@ struct ReLU6Functor
         CV_Assert(minValue <= maxValue);
     }
 
+    bool supportBackend(int backendId, int)
+    {
+        return backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_HALIDE ||
+               backendId == DNN_BACKEND_INFERENCE_ENGINE;
+    }
+
     void apply(const float* srcptr, float* dstptr, int len, size_t planeSize, int cn0, int cn1) const
     {
         for( int cn = cn0; cn < cn1; cn++, srcptr += planeSize, dstptr += planeSize )
@@ -445,6 +455,12 @@ struct TanHFunctor
 {
     typedef TanHLayer Layer;
 
+    bool supportBackend(int backendId, int)
+    {
+        return backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_HALIDE ||
+               backendId == DNN_BACKEND_INFERENCE_ENGINE;
+    }
+
     void apply(const float* srcptr, float* dstptr, int len, size_t planeSize, int cn0, int cn1) const
     {
         for( int cn = cn0; cn < cn1; cn++, srcptr += planeSize, dstptr += planeSize )
@@ -509,6 +525,12 @@ struct SigmoidFunctor
 {
     typedef SigmoidLayer Layer;
 
+    bool supportBackend(int backendId, int)
+    {
+        return backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_HALIDE ||
+               backendId == DNN_BACKEND_INFERENCE_ENGINE;
+    }
+
     void apply(const float* srcptr, float* dstptr, int len, size_t planeSize, int cn0, int cn1) const
     {
         for( int cn = cn0; cn < cn1; cn++, srcptr += planeSize, dstptr += planeSize )
@@ -575,6 +597,11 @@ struct ELUFunctor
     explicit ELUFunctor() {}
 
+    bool supportBackend(int backendId, int)
+    {
+        return backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_HALIDE;
+    }
+
     void apply(const float* srcptr, float* dstptr, int len, size_t planeSize, int cn0, int cn1) const
     {
         for( int cn = cn0; cn < cn1; cn++, srcptr += planeSize, dstptr += planeSize )
@@ -638,6 +665,11 @@ struct AbsValFunctor
 {
     typedef AbsLayer Layer;
 
+    bool supportBackend(int backendId, int)
+    {
+        return backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_HALIDE;
+    }
+
     void apply(const float* srcptr, float* dstptr, int len, size_t planeSize, int cn0, int cn1) const
     {
         for( int cn = cn0; cn < cn1; cn++, srcptr += planeSize, dstptr += planeSize )
@@ -701,6 +733,11 @@ struct BNLLFunctor
 {
     typedef BNLLLayer Layer;
 
+    bool supportBackend(int backendId, int)
+    {
+        return backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_HALIDE;
+    }
+
     void apply(const float* srcptr, float* dstptr, int len, size_t planeSize, int cn0, int cn1) const
     {
         for( int cn = cn0; cn < cn1; cn++, srcptr += planeSize, dstptr += planeSize )
@@ -751,6 +788,14 @@ struct PowerFunctor
     explicit PowerFunctor(float power_ = 1.f, float scale_ = 1.f, float shift_ = 0.f)
         : power(power_), scale(scale_), shift(shift_) {}
 
+    bool supportBackend(int backendId, int targetId)
+    {
+        if (backendId == DNN_BACKEND_INFERENCE_ENGINE)
+            return (targetId != DNN_TARGET_OPENCL && targetId != DNN_TARGET_OPENCL_FP16) || power == 1.0;
+        else
+            return backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_HALIDE;
+    }
+
     void apply(const float* srcptr, float* dstptr, int len, size_t planeSize, int cn0, int cn1) const
     {
         float a = scale, b = shift, p = power;
@@ -853,6 +898,11 @@ struct ChannelsPReLUFunctor
         scale_umat = scale.getUMat(ACCESS_READ);
     }
 
+    bool supportBackend(int backendId, int)
+    {
+        return backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_HALIDE;
+    }
+
     void apply(const float* srcptr, float* dstptr, int len, size_t planeSize, int cn0, int cn1) const
     {
         CV_Assert(scale.isContinuous() && scale.type() == CV_32F);

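The first hunk above replaces ElementWiseLayer's blanket backend check with a per-functor query, so each activation decides for itself, optionally keyed on the target. A standalone sketch of that dispatch pattern (simplified names, not the real OpenCV class layout):

    #include <iostream>

    enum Backend { BACKEND_OPENCV, BACKEND_HALIDE, BACKEND_INFERENCE_ENGINE };
    enum Target  { TARGET_CPU, TARGET_OPENCL, TARGET_OPENCL_FP16, TARGET_MYRIAD };

    // Each functor reports its own support, possibly depending on the target.
    struct ReLU {
        bool supportBackend(Backend b, Target) {
            return b == BACKEND_OPENCV || b == BACKEND_HALIDE || b == BACKEND_INFERENCE_ENGINE;
        }
    };

    struct Power {
        double power;
        bool supportBackend(Backend b, Target t) {
            if (b == BACKEND_INFERENCE_ENGINE)
                return (t != TARGET_OPENCL && t != TARGET_OPENCL_FP16) || power == 1.0;
            return b == BACKEND_OPENCV || b == BACKEND_HALIDE;
        }
    };

    // The layer template just forwards the question, as in the diff above.
    template <typename Func>
    struct ElementWiseLayer {
        Func func;
        Target preferableTarget = TARGET_CPU;
        bool supportBackend(Backend b) { return func.supportBackend(b, preferableTarget); }
    };

    int main() {
        ElementWiseLayer<Power> p;
        p.func.power = 2.0;
        p.preferableTarget = TARGET_OPENCL;
        std::cout << p.supportBackend(BACKEND_INFERENCE_ENGINE) << "\n";  // 0: power != 1 on OpenCL
    }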
--- a/modules/dnn/src/layers/resize_layer.cpp
+++ b/modules/dnn/src/layers/resize_layer.cpp
@@ -53,8 +53,10 @@ public:
 
     virtual bool supportBackend(int backendId) CV_OVERRIDE
     {
-        return backendId == DNN_BACKEND_OPENCV ||
-               backendId == DNN_BACKEND_INFERENCE_ENGINE && haveInfEngine() && interpolation == "nearest";
+        if (backendId == DNN_BACKEND_INFERENCE_ENGINE)
+            return interpolation == "nearest" && preferableTarget != DNN_TARGET_MYRIAD;
+        else
+            return backendId == DNN_BACKEND_OPENCV;
     }
 
     virtual void finalize(const std::vector<Mat*>& inputs, std::vector<Mat> &outputs) CV_OVERRIDE

--- a/modules/dnn/src/layers/slice_layer.cpp
+++ b/modules/dnn/src/layers/slice_layer.cpp
@@ -41,6 +41,7 @@
 //M*/
 
 #include "../precomp.hpp"
+#include "../op_inf_engine.hpp"
 #include "layers_common.hpp"
 #include <opencv2/dnn/shape_utils.hpp>
@@ -107,6 +108,12 @@ public:
         }
     }
 
+    virtual bool supportBackend(int backendId) CV_OVERRIDE
+    {
+        return backendId == DNN_BACKEND_OPENCV ||
+               backendId == DNN_BACKEND_INFERENCE_ENGINE && sliceRanges.size() == 1;
+    }
+
     bool getMemoryShapes(const std::vector<MatShape> &inputs,
                          const int requiredOutputs,
                          std::vector<MatShape> &outputs,
@@ -247,6 +254,29 @@ public:
                 inpMat(sliceRanges[i]).copyTo(outputs[i]);
         }
     }
 
+    virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >& inputs) CV_OVERRIDE
+    {
+#ifdef HAVE_INF_ENGINE
+        InferenceEngine::DataPtr input = infEngineDataNode(inputs[0]);
+
+        InferenceEngine::LayerParams lp;
+        lp.name = name;
+        lp.type = "Crop";
+        lp.precision = InferenceEngine::Precision::FP32;
+        std::shared_ptr<InferenceEngine::CropLayer> ieLayer(new InferenceEngine::CropLayer(lp));
+
+        CV_Assert(sliceRanges.size() == 1);
+        for (int i = sliceRanges[0].size() - 1; i >= 0; --i)
+        {
+            ieLayer->axis.push_back(i);
+            ieLayer->offset.push_back(sliceRanges[0][i].start);
+            ieLayer->dim.push_back(sliceRanges[0][i].end - sliceRanges[0][i].start);
+        }
+        return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
+#endif  // HAVE_INF_ENGINE
+        return Ptr<BackendNode>();
+    }
 };
 
 Ptr<SliceLayer> SliceLayer::create(const LayerParams& params)

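A note on the Crop mapping above: the loop in initInfEngine walks sliceRanges[0] in reverse, so axis/offset/dim come out innermost-first. A self-contained illustration with an invented NCHW slice (the Range struct stands in for cv::Range):

    #include <iostream>
    #include <vector>

    struct Range { int start, end; };  // stand-in for cv::Range

    int main()
    {
        // Hypothetical slice of an NCHW tensor: batch [0,1), channels [0,3),
        // rows [10,60), cols [20,70).
        std::vector<Range> r = { {0, 1}, {0, 3}, {10, 60}, {20, 70} };

        std::vector<int> axis, offset, dim;
        for (int i = (int)r.size() - 1; i >= 0; --i)  // same reverse walk as the diff
        {
            axis.push_back(i);
            offset.push_back(r[i].start);
            dim.push_back(r[i].end - r[i].start);
        }
        // axis   = {3, 2, 1, 0}
        // offset = {20, 10, 0, 0}
        // dim    = {50, 50, 3, 1}
        for (size_t i = 0; i < axis.size(); ++i)
            std::cout << axis[i] << " " << offset[i] << " " << dim[i] << "\n";
    }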
--- a/modules/dnn/test/test_backends.cpp
+++ b/modules/dnn/test/test_backends.cpp
@@ -256,7 +256,8 @@ TEST_P(DNNTestNetwork, OpenPose_pose_mpi_faster_4_stages)
 TEST_P(DNNTestNetwork, OpenFace)
 {
     if (backend == DNN_BACKEND_HALIDE ||
-        backend == DNN_BACKEND_INFERENCE_ENGINE && target != DNN_TARGET_CPU)
+        (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16) ||
+        (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD))
         throw SkipTestException("");
     processNet("dnn/openface_nn4.small2.v1.t7", "", Size(96, 96), "");
 }
@@ -296,6 +297,20 @@ TEST_P(DNNTestNetwork, DenseNet_121)
     processNet("dnn/DenseNet_121.caffemodel", "dnn/DenseNet_121.prototxt", Size(224, 224), "", "caffe");
 }
 
+TEST_P(DNNTestNetwork, FastNeuralStyle_eccv16)
+{
+    if (backend == DNN_BACKEND_HALIDE ||
+        (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16) ||
+        (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD))
+        throw SkipTestException("");
+    Mat img = imread(findDataFile("dnn/googlenet_1.png", false));
+    Mat inp = blobFromImage(img, 1.0, Size(320, 240), Scalar(103.939, 116.779, 123.68), false, false);
+    // Output image has values in range [-143.526, 148.539].
+    float l1 = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.3 : 4e-5;
+    float lInf = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 7.0 : 2e-3;
+    processNet("dnn/fast_neural_style_eccv16_starry_night.t7", "", inp, "", "", l1, lInf);
+}
+
 const tuple<DNNBackend, DNNTarget> testCases[] = {
 #ifdef HAVE_HALIDE
     tuple<DNNBackend, DNNTarget>(DNN_BACKEND_HALIDE, DNN_TARGET_CPU),

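About the l1/lInf pair added above: it loosens the comparison against the reference output for FP16 and Myriad targets. A hedged sketch of that style of check follows; the helper name and the mean-absolute normalization are assumptions for illustration, not the test harness's actual code:

    #include <opencv2/core.hpp>
    #include <iostream>

    // Hypothetical helper mirroring the l1/lInf tolerances passed to processNet:
    // l1 bounds the mean absolute difference, lInf the maximum absolute difference.
    static bool closeEnough(const cv::Mat& ref, const cv::Mat& out, double l1, double lInf)
    {
        double meanAbs = cv::norm(ref, out, cv::NORM_L1) / ref.total();  // mean |diff|
        double maxAbs  = cv::norm(ref, out, cv::NORM_INF);               // max |diff|
        return meanAbs <= l1 && maxAbs <= lInf;
    }

    int main()
    {
        cv::Mat ref = cv::Mat::zeros(3, 320, CV_32F), out = ref.clone();
        out.at<float>(0, 0) = 0.5f;  // one outlier
        std::cout << closeEnough(ref, out, 0.3, 7.0) << "\n";    // 1: within FP16/Myriad bounds
        std::cout << closeEnough(ref, out, 4e-5, 2e-3) << "\n";  // 0: fails the strict FP32 bounds
    }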
--- a/modules/dnn/test/test_darknet_importer.cpp
+++ b/modules/dnn/test/test_darknet_importer.cpp
@@ -135,8 +135,6 @@ TEST_P(Test_Darknet_nets, YoloVoc)
 {
     int backendId = get<0>(GetParam());
     int targetId = get<1>(GetParam());
-    if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD)
-        throw SkipTestException("");
 
     std::vector<cv::String> outNames(1, "detection_out");
     std::vector<int> classIds(3);

--- a/modules/dnn/test/test_torch_importer.cpp
+++ b/modules/dnn/test/test_torch_importer.cpp
@@ -296,6 +296,7 @@ TEST_P(Test_Torch_nets, FastNeuralStyle_accuracy)
     Mat inputBlob = blobFromImage(img, 1.0, Size(), Scalar(103.939, 116.779, 123.68), false);
 
     net.setInput(inputBlob);
+    net.setPreferableBackend(DNN_BACKEND_OPENCV);
     Mat out = net.forward();
 
     // Deprocessing.
