Merge pull request #11635 from dkurt:dnn_fix_ie_fused_layer_output

Vadim Pisarevsky 7 years ago
commit 9d91c63502
Files changed:
  modules/dnn/src/dnn.cpp            (2 changed lines)
  modules/dnn/test/test_layers.cpp   (53 changed lines)

--- a/modules/dnn/src/dnn.cpp
+++ b/modules/dnn/src/dnn.cpp
@@ -1132,7 +1132,7 @@ struct Net::Impl
                 if (layerNet != ieInpNode->net)
                 {
                     // layerNet is empty or nodes are from different graphs.
-                    ieInpNode->net->addOutput(inpLd.name);
+                    ieInpNode->net->addOutput(ieInpNode->layer->name);
                 }
             }
         }

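Context for the hunk above, as suggested by the change and the new test: `ieInpNode` is the Inference Engine backend node that produces the input of a layer falling back to another backend, and that input has to be declared as an output of the IE sub-network. When layers have been fused inside that sub-network (for example a Scale folded into the preceding Convolution, which is exactly what the test below sets up), the OpenCV layer-data name `inpLd.name` no longer matches any layer of the IE network, so `addOutput(inpLd.name)` failed; requesting `ieInpNode->layer->name`, the name of the actual fused IE layer, keeps the output resolvable.
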
--- a/modules/dnn/test/test_layers.cpp
+++ b/modules/dnn/test/test_layers.cpp
@@ -834,6 +834,59 @@ TEST(Test_DLDT, two_inputs)
     normAssert(out, firstInp + secondInp);
 }
+
+class UnsupportedLayer : public Layer
+{
+public:
+    UnsupportedLayer(const LayerParams &params) {}
+
+    static Ptr<Layer> create(const LayerParams& params)
+    {
+        return Ptr<Layer>(new UnsupportedLayer(params));
+    }
+
+    virtual bool supportBackend(int backendId) CV_OVERRIDE
+    {
+        return backendId == DNN_BACKEND_DEFAULT;
+    }
+
+    virtual void forward(std::vector<cv::Mat*> &inputs, std::vector<cv::Mat> &outputs, std::vector<cv::Mat> &internals) CV_OVERRIDE {}
+    virtual void forward(cv::InputArrayOfArrays inputs, cv::OutputArrayOfArrays outputs, cv::OutputArrayOfArrays internals) CV_OVERRIDE {}
+};
+
+TEST(Test_DLDT, fused_output)
+{
+    static const int kNumChannels = 3;
+    CV_DNN_REGISTER_LAYER_CLASS(Unsupported, UnsupportedLayer);
+    Net net;
+    {
+        LayerParams lp;
+        lp.set("kernel_size", 1);
+        lp.set("num_output", 3);
+        lp.set("bias_term", false);
+        lp.type = "Convolution";
+        lp.name = "testConv";
+        lp.blobs.push_back(Mat({kNumChannels, 1, 1, 1}, CV_32F, Scalar(1)));
+        net.addLayerToPrev(lp.name, lp.type, lp);
+    }
+    {
+        LayerParams lp;
+        lp.set("bias_term", false);
+        lp.type = "Scale";
+        lp.name = "testScale";
+        lp.blobs.push_back(Mat({kNumChannels}, CV_32F, Scalar(1)));
+        net.addLayerToPrev(lp.name, lp.type, lp);
+    }
+    {
+        LayerParams lp;
+        net.addLayerToPrev("unsupported_layer", "Unsupported", lp);
+    }
+    net.setPreferableBackend(DNN_BACKEND_INFERENCE_ENGINE);
+    net.setInput(Mat({1, 1, 1, 1}, CV_32FC1, Scalar(1)));
+    ASSERT_NO_THROW(net.forward());
+    LayerFactory::unregisterLayer("Unsupported");
+}
 #endif // HAVE_INF_ENGINE
 // Test a custom layer.
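
For reference, the scenario covered by Test_DLDT.fused_output can also be reproduced outside the gtest harness. The sketch below is a standalone rearrangement of the test code above; the class, layer names, blob shapes, and backend constants are taken from the test, while the main() wrapper and the two includes are assumptions for an OpenCV 3.4-era build with HAVE_INF_ENGINE.

// Standalone sketch: Convolution and Scale are fused by the Inference Engine
// backend, the custom layer supports only the default backend, so the graph is
// split and the fused pair's output must be exported from the IE sub-network.
#include <opencv2/dnn.hpp>
#include <opencv2/dnn/layer.details.hpp>

using namespace cv;
using namespace cv::dnn;

class UnsupportedLayer : public Layer
{
public:
    UnsupportedLayer(const LayerParams &params) {}

    static Ptr<Layer> create(const LayerParams& params)
    {
        return Ptr<Layer>(new UnsupportedLayer(params));
    }

    // Claim support only for the default backend to force a CPU fallback.
    virtual bool supportBackend(int backendId) CV_OVERRIDE
    {
        return backendId == DNN_BACKEND_DEFAULT;
    }

    virtual void forward(std::vector<cv::Mat*> &inputs, std::vector<cv::Mat> &outputs, std::vector<cv::Mat> &internals) CV_OVERRIDE {}
    virtual void forward(cv::InputArrayOfArrays inputs, cv::OutputArrayOfArrays outputs, cv::OutputArrayOfArrays internals) CV_OVERRIDE {}
};

int main()
{
    CV_DNN_REGISTER_LAYER_CLASS(Unsupported, UnsupportedLayer);

    Net net;

    // 1x1 Convolution with 3 output channels, no bias.
    LayerParams conv;
    conv.set("kernel_size", 1);
    conv.set("num_output", 3);
    conv.set("bias_term", false);
    conv.type = "Convolution";
    conv.name = "testConv";
    conv.blobs.push_back(Mat({3, 1, 1, 1}, CV_32F, Scalar(1)));
    net.addLayerToPrev(conv.name, conv.type, conv);

    // Scale layer that the IE backend fuses into the preceding Convolution.
    LayerParams scale;
    scale.set("bias_term", false);
    scale.type = "Scale";
    scale.name = "testScale";
    scale.blobs.push_back(Mat({3}, CV_32F, Scalar(1)));
    net.addLayerToPrev(scale.name, scale.type, scale);

    // Layer that cannot run on the IE backend.
    LayerParams lp;
    net.addLayerToPrev("unsupported_layer", "Unsupported", lp);

    net.setPreferableBackend(DNN_BACKEND_INFERENCE_ENGINE);
    net.setInput(Mat({1, 1, 1, 1}, CV_32FC1, Scalar(1)));
    net.forward();  // threw before this fix when the graph was split at the unsupported layer

    LayerFactory::unregisterLayer("Unsupported");
    return 0;
}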
