diff --git a/modules/dnn/src/dnn.cpp b/modules/dnn/src/dnn.cpp
index 973c98abc3..a5656821c6 100644
--- a/modules/dnn/src/dnn.cpp
+++ b/modules/dnn/src/dnn.cpp
@@ -1132,7 +1132,7 @@ struct Net::Impl
                 if (layerNet != ieInpNode->net)
                 {
                     // layerNet is empty or nodes are from different graphs.
-                    ieInpNode->net->addOutput(inpLd.name);
+                    ieInpNode->net->addOutput(ieInpNode->layer->name);
                 }
             }
         }
diff --git a/modules/dnn/test/test_layers.cpp b/modules/dnn/test/test_layers.cpp
index 89c6ed8915..5cbfba5517 100644
--- a/modules/dnn/test/test_layers.cpp
+++ b/modules/dnn/test/test_layers.cpp
@@ -834,6 +834,59 @@ TEST(Test_DLDT, two_inputs)
     normAssert(out, firstInp + secondInp);
 }
 
+
+class UnsupportedLayer : public Layer
+{
+public:
+    UnsupportedLayer(const LayerParams &params) {}
+
+    static Ptr<Layer> create(const LayerParams& params)
+    {
+        return Ptr<Layer>(new UnsupportedLayer(params));
+    }
+
+    virtual bool supportBackend(int backendId) CV_OVERRIDE
+    {
+        return backendId == DNN_BACKEND_DEFAULT;
+    }
+
+    virtual void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals) CV_OVERRIDE {}
+
+    virtual void forward(cv::InputArrayOfArrays inputs, cv::OutputArrayOfArrays outputs, cv::OutputArrayOfArrays internals) CV_OVERRIDE {}
+};
+
+TEST(Test_DLDT, fused_output)
+{
+    static const int kNumChannels = 3;
+    CV_DNN_REGISTER_LAYER_CLASS(Unsupported, UnsupportedLayer);
+    Net net;
+    {
+        LayerParams lp;
+        lp.set("kernel_size", 1);
+        lp.set("num_output", 3);
+        lp.set("bias_term", false);
+        lp.type = "Convolution";
+        lp.name = "testConv";
+        lp.blobs.push_back(Mat({kNumChannels, 1, 1, 1}, CV_32F, Scalar(1)));
+        net.addLayerToPrev(lp.name, lp.type, lp);
+    }
+    {
+        LayerParams lp;
+        lp.set("bias_term", false);
+        lp.type = "Scale";
+        lp.name = "testScale";
+        lp.blobs.push_back(Mat({kNumChannels}, CV_32F, Scalar(1)));
+        net.addLayerToPrev(lp.name, lp.type, lp);
+    }
+    {
+        LayerParams lp;
+        net.addLayerToPrev("unsupported_layer", "Unsupported", lp);
+    }
+    net.setPreferableBackend(DNN_BACKEND_INFERENCE_ENGINE);
+    net.setInput(Mat({1, 1, 1, 1}, CV_32FC1, Scalar(1)));
+    ASSERT_NO_THROW(net.forward());
+    LayerFactory::unregisterLayer("Unsupported");
+}
 #endif // HAVE_INF_ENGINE
 
 // Test a custom layer.