diff --git a/modules/dnn/src/layers/fully_connected_layer.cpp b/modules/dnn/src/layers/fully_connected_layer.cpp
index e3d4a111c8..ba741aa662 100644
--- a/modules/dnn/src/layers/fully_connected_layer.cpp
+++ b/modules/dnn/src/layers/fully_connected_layer.cpp
@@ -167,10 +167,11 @@ public:
             cAxis = normalize_axis(axis, inputsTmp[0]);
         }
 
-        MatShape outShape(cAxis + 1);
+        MatShape outShape((!inputs[0].empty()) ? cAxis + 1 : cAxis);
         for (int i = 0; i < cAxis; ++i)
             outShape[i] = inputsTmp[0][i];
-        outShape.back() = numOutput;
+        if (!inputs[0].empty())
+            outShape.back() = numOutput;
 
         outputs.resize(1, outShape);
         return false;
diff --git a/modules/dnn/test/test_layers_1d.cpp b/modules/dnn/test/test_layers_1d.cpp
index 4dbc8ba696..4e5e82e031 100644
--- a/modules/dnn/test/test_layers_1d.cpp
+++ b/modules/dnn/test/test_layers_1d.cpp
@@ -567,4 +567,40 @@ INSTANTIATE_TEST_CASE_P(/*nothing*/, Layer_Slice_Test,
                            std::vector<int>({1, 4})
 ));
 
+typedef testing::TestWithParam<tuple<std::vector<int>>> Layer_FullyConnected_Test;
+TEST_P(Layer_FullyConnected_Test, Accuracy_01D)
+{
+    LayerParams lp;
+    lp.type = "InnerProduct";
+    lp.name = "InnerProductLayer";
+    lp.set("num_output", 1);
+    lp.set("bias_term", false);
+    lp.set("axis", 0);
+
+    std::vector<int> input_shape = get<0>(GetParam());
+
+    RNG& rng = TS::ptr()->get_rng();
+    float inp_value = rng.uniform(0.0, 10.0);
+    Mat weights(std::vector<int>{total(input_shape), 1}, CV_32F, inp_value);
+    lp.blobs.push_back(weights);
+
+    Ptr<Layer> layer = LayerFactory::createLayerInstance("InnerProduct", lp);
+
+    Mat input(input_shape.size(), input_shape.data(), CV_32F);
+    randn(input, 0, 1);
+    Mat output_ref = input.reshape(1, 1) * weights;
+    output_ref.dims = 1;
+
+    std::vector<Mat> inputs{input};
+    std::vector<Mat> outputs;
+    runLayer(layer, inputs, outputs);
+    normAssert(output_ref, outputs[0]);
+}
+INSTANTIATE_TEST_CASE_P(/*nothing*/, Layer_FullyConnected_Test,
+                        testing::Values(
+                            std::vector<int>({}),
+                            std::vector<int>({1}),
+                            std::vector<int>({4})
+));
+
 }}
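For context, a minimal standalone sketch (not part of the patch) of the shape rule the first hunk implements: a 0-D (empty-shape) input now produces a 0-D output, while a non-empty input keeps its first `cAxis` dims and gets `num_output` as its last dim. The helper name `inferFullyConnectedShape` and the `MatShape` alias below are illustrative stand-ins, not OpenCV API.

```cpp
#include <cassert>
#include <vector>

// Stand-in for cv::dnn::MatShape (a vector of dimension sizes; empty == 0-D).
using MatShape = std::vector<int>;

// Hypothetical helper mirroring the patched getMemoryShapes() logic:
// an empty (0-D) input keeps an empty (0-D) output shape; otherwise the
// first cAxis dims are copied and the trailing dim becomes numOutput.
static MatShape inferFullyConnectedShape(const MatShape& input, int cAxis, int numOutput)
{
    MatShape outShape(!input.empty() ? cAxis + 1 : cAxis);
    for (int i = 0; i < cAxis; ++i)
        outShape[i] = input[i];
    if (!input.empty())
        outShape.back() = numOutput;
    return outShape;
}

int main()
{
    // 0-D input (the new {} test case): output stays 0-D instead of {1}.
    assert(inferFullyConnectedShape({}, 0, 1).empty());
    // 1-D input {4} with axis = 0: all dims collapse into num_output.
    assert((inferFullyConnectedShape({4}, 0, 1) == MatShape{1}));
    // 2-D input with axis = 1: batch dim kept, last dim replaced.
    assert((inferFullyConnectedShape({2, 8}, 1, 16) == MatShape{2, 16}));
    return 0;
}
```

Under this rule the new Accuracy_01D parameters {}, {1}, and {4} map to output shapes {}, {1}, and {1} respectively, which is what the test's `output_ref.dims = 1` reference (and the 0-D pass-through in the layer code) checks.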