Merge pull request #9996 from dkurt:dnn_multiple_inputs

pull/10152/head
Alexander Alekhin 7 years ago
commit 107582c767
Changed files:
  modules/dnn/src/caffe/caffe_importer.cpp    (8 lines changed)
  modules/dnn/src/dnn.cpp                     (24 lines changed)
  modules/dnn/test/test_caffe_importer.cpp    (27 lines changed)
  modules/dnn/test/test_layers.cpp            (42 lines changed)

modules/dnn/src/caffe/caffe_importer.cpp

@@ -327,8 +327,12 @@ public:
         if (type == "Input")
         {
-            addedBlobs.push_back(BlobNote(name, 0, netInputs.size()));
-            netInputs.push_back(name);
+            for (int outNum = 0; outNum < layer.top_size(); outNum++)
+            {
+                addOutput(layer, 0, outNum);
+                addedBlobs.back().outNum = netInputs.size();
+                netInputs.push_back(addedBlobs.back().name);
+            }
             continue;
         }
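Note: the importer now walks every top of an Input layer, so a single Input layer with several tops (or several Input layers) yields several named network inputs. A minimal sketch of the resulting user-facing behavior, assuming a hypothetical two_inputs.prototxt whose Input layer declares the tops "data_a" and "data_b":

    #include <opencv2/core.hpp>
    #include <opencv2/dnn.hpp>

    int main()
    {
        cv::dnn::Net net = cv::dnn::readNetFromCaffe("two_inputs.prototxt");

        cv::Mat a(10, 10, CV_32FC3, cv::Scalar::all(1));
        cv::Mat b(10, 10, CV_32FC3, cv::Scalar::all(2));

        // Each top of the Input layer is now a separately addressable input.
        net.setInput(cv::dnn::blobFromImage(a), "data_a");
        net.setInput(cv::dnn::blobFromImage(b), "data_b");

        cv::Mat out = net.forward();
        return 0;
    }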

modules/dnn/src/dnn.cpp

@@ -279,6 +279,16 @@ struct DataLayer : public Layer
         outNames.assign(names.begin(), names.end());
     }

+    bool getMemoryShapes(const std::vector<MatShape> &inputs,
+                         const int requiredOutputs,
+                         std::vector<MatShape> &outputs,
+                         std::vector<MatShape> &internals) const
+    {
+        CV_Assert(inputs.size() == requiredOutputs);
+        outputs.assign(inputs.begin(), inputs.end());
+        return false;
+    }
+
 private:
     std::vector<String> outNames;
 };
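The new DataLayer::getMemoryShapes reports one output shape per requested input instead of assuming a single input blob. The same contract, shown on a standalone custom layer (a sketch; PassThroughLayer is illustrative and not part of this PR):

    #include <opencv2/dnn.hpp>
    #include <vector>

    class PassThroughLayer : public cv::dnn::Layer
    {
    public:
        bool getMemoryShapes(const std::vector<cv::dnn::MatShape> &inputs,
                             const int requiredOutputs,
                             std::vector<cv::dnn::MatShape> &outputs,
                             std::vector<cv::dnn::MatShape> &internals) const
        {
            // One output blob per input blob, with identical shapes.
            // Returning false tells the engine the outputs are not computed
            // in-place over the input memory.
            CV_Assert(inputs.size() == (size_t)requiredOutputs);
            outputs.assign(inputs.begin(), inputs.end());
            return false;
        }
    };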
@@ -1385,7 +1395,7 @@ struct Net::Impl
                              layers[ld.inputBlobsId[i].lid].getLayerInstance()->name.c_str(),
                              inp_i_data->getLayerInstance()->name.c_str()));
-                if(inp_i_data->skipFlags[DNN_BACKEND_DEFAULT])
+                if(inp_i_data->skipFlags[DNN_BACKEND_DEFAULT] || inp_i_data->consumers.size() != 1)
                     break;
                 realinputs[i] = pin;
             }
@@ -1407,6 +1417,14 @@ struct Net::Impl
                 Mat& curr_output = inp_i_data->outputBlobs[pin.oid];
                 CV_Assert(output_slice.isContinuous() && output_slice.size == curr_output.size);
                 curr_output = output_slice;
+
+                pin = ld.inputBlobsId[i];
+                inp_i_data = &layers[pin.lid];
+                for (int j = 0; j < inp_i_data->consumers.size(); ++j)
+                {
+                    LayerPin consumer = inp_i_data->consumers[j];
+                    layers[consumer.lid].inputBlobs[consumer.oid] = &curr_output;
+                }
             }
             ld.skipFlags[DNN_BACKEND_DEFAULT] = true;
             printf_(("\toptimized out Concat layer %s\n", concatLayer->name.c_str()));
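Here the Concat optimization redirects every consumer of a fused input to the slice that now lives inside the Concat output buffer. A standalone sketch of the underlying memory-sharing trick, using plain cv::Mat rather than the dnn internals:

    #include <opencv2/core.hpp>

    int main()
    {
        int shape[] = {1, 4, 3, 3};
        cv::Mat fused(4, shape, CV_32F, cv::Scalar::all(0)); // future Concat output

        cv::Range r1[] = {cv::Range::all(), cv::Range(0, 2), cv::Range::all(), cv::Range::all()};
        cv::Range r2[] = {cv::Range::all(), cv::Range(2, 4), cv::Range::all(), cv::Range::all()};
        cv::Mat slice1 = fused(r1); // header over channels [0, 2) of the same buffer
        cv::Mat slice2 = fused(r2); // header over channels [2, 4)

        // If every producer writes directly into its slice, the Concat layer
        // has nothing left to do and can be skipped; consumers read `fused`.
        slice1.setTo(1);
        slice2.setTo(2);
        return 0;
    }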
@@ -1438,7 +1456,9 @@ struct Net::Impl
         blobManager.setPreferableTarget(preferableTarget);
         blobManager.setPreferableBackend(preferableBackend);
         backendWrappers.clear();
-        blobManager.addReference(LayerPin(0, 0));
+        // Fake references to input blobs.
+        for (int i = 0; i < layers[0].outputBlobs.size(); ++i)
+            blobManager.addReference(LayerPin(0, i));
         for (it = layers.begin(); it != layers.end(); ++it)
         {
             const LayerData& ld = it->second;
modules/dnn/test/test_caffe_importer.cpp

@@ -313,4 +313,31 @@ TEST(Reproducibility_DenseNet_121, Accuracy)
     normAssert(out, ref);
 }

+TEST(Test_Caffe, multiple_inputs)
+{
+    const string proto = findDataFile("dnn/layers/net_input.prototxt", false);
+    Net net = readNetFromCaffe(proto);
+
+    Mat first_image(10, 11, CV_32FC3);
+    Mat second_image(10, 11, CV_32FC3);
+    randu(first_image, -1, 1);
+    randu(second_image, -1, 1);
+
+    first_image = blobFromImage(first_image);
+    second_image = blobFromImage(second_image);
+
+    Mat first_image_blue_green = slice(first_image, Range::all(), Range(0, 2), Range::all(), Range::all());
+    Mat first_image_red = slice(first_image, Range::all(), Range(2, 3), Range::all(), Range::all());
+    Mat second_image_blue_green = slice(second_image, Range::all(), Range(0, 2), Range::all(), Range::all());
+    Mat second_image_red = slice(second_image, Range::all(), Range(2, 3), Range::all(), Range::all());
+
+    net.setInput(first_image_blue_green, "old_style_input_blue_green");
+    net.setInput(first_image_red, "different_name_for_red");
+    net.setInput(second_image_blue_green, "input_layer_blue_green");
+    net.setInput(second_image_red, "old_style_input_red");
+    Mat out = net.forward();
+
+    normAssert(out, first_image + second_image);
+}
+
 }
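The test splits each blob along the channel axis with a slice() helper; a minimal sketch of what such a helper can look like (assumed here, not copied from the test suite):

    #include <opencv2/core.hpp>

    static cv::Mat slice(const cv::Mat &m, const cv::Range &r0, const cv::Range &r1,
                         const cv::Range &r2, const cv::Range &r3)
    {
        cv::Range ranges[] = {r0, r1, r2, r3};
        return m(ranges).clone(); // clone() yields a continuous, standalone copy
    }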

modules/dnn/test/test_layers.cpp

@@ -274,6 +274,48 @@ OCL_TEST(Layer_Test_Concat, Accuracy)
     testLayerUsingCaffeModels("layer_concat", DNN_TARGET_OPENCL);
 }

+TEST(Layer_Test_Fused_Concat, Accuracy)
+{
+    // Test case
+    //   input
+    //     |
+    //     v
+    // some_layer
+    //   |   |
+    //   v   v
+    //   concat
+    Net net;
+    int interLayer;
+    {
+        LayerParams lp;
+        lp.type = "AbsVal";
+        lp.name = "someLayer";
+        interLayer = net.addLayerToPrev(lp.name, lp.type, lp);
+    }
+    {
+        LayerParams lp;
+        lp.set("axis", 1);
+        lp.type = "Concat";
+        lp.name = "testConcat";
+        int id = net.addLayer(lp.name, lp.type, lp);
+        net.connect(interLayer, 0, id, 0);
+        net.connect(interLayer, 0, id, 1);
+    }
+
+    int shape[] = {1, 2, 3, 4};
+    Mat input(4, shape, CV_32F);
+    randu(input, 0.0f, 1.0f);  // [0, 1] to make AbsVal an identity transformation.
+
+    net.setInput(input);
+    Mat out = net.forward();
+
+    normAssert(slice(out, Range::all(), Range(0, 2), Range::all(), Range::all()), input);
+    normAssert(slice(out, Range::all(), Range(2, 4), Range::all(), Range::all()), input);
+    //
+    testLayerUsingCaffeModels("layer_concat_optim", DNN_TARGET_CPU, true, false);
+}
+
 TEST(Layer_Test_Eltwise, Accuracy)
 {
     testLayerUsingCaffeModels("layer_eltwise");