diff --git a/modules/dnn/src/layers/concat_layer.cpp b/modules/dnn/src/layers/concat_layer.cpp
index 3a6466bd80..6cb083e453 100644
--- a/modules/dnn/src/layers/concat_layer.cpp
+++ b/modules/dnn/src/layers/concat_layer.cpp
@@ -302,6 +302,8 @@ public:
         ranges[cAxis].start = 0;
         for (size_t i = 0; i < inputs.size(); i++)
         {
+            if (inputs[i].empty())
+                continue;
             ranges[cAxis].end = ranges[cAxis].start + inputs[i].size[cAxis];
             for (int j = 0; j < outMat.dims; ++j)
             {
diff --git a/modules/dnn/src/layers/slice_layer.cpp b/modules/dnn/src/layers/slice_layer.cpp
index de302ec291..195ed7cb24 100644
--- a/modules/dnn/src/layers/slice_layer.cpp
+++ b/modules/dnn/src/layers/slice_layer.cpp
@@ -69,10 +69,12 @@ Range normalizeRange(const Range& input_range, int n)
 {
     Range range = input_range;
 
-    range.start = std::min(std::max(range.start, -n), n - 1);
-    if (range.start < 0)
-    {
-        range.start += n;
+    if (range.start != n){
+        range.start = std::min(std::max(range.start, -n), n - 1);
+        if (range.start < 0)
+        {
+            range.start += n;
+        }
     }
 
     range.end = std::min(std::max(range.end, -n), n);
@@ -610,7 +612,9 @@ public:
         {
             for (size_t i = 0; i < outputs.size(); i++)
             {
-                inpMat(finalSliceRanges[i]).copyTo(outputs[i]);
+                if (finalSliceRanges[i][0].start != finalSliceRanges[i][0].end){
+                    inpMat(finalSliceRanges[i]).copyTo(outputs[i]);
+                }
             }
         }
         else
diff --git a/modules/dnn/test/test_onnx_importer.cpp b/modules/dnn/test/test_onnx_importer.cpp
index 8855eb6439..e560ff2dbe 100644
--- a/modules/dnn/test/test_onnx_importer.cpp
+++ b/modules/dnn/test/test_onnx_importer.cpp
@@ -3110,6 +3110,13 @@ TEST_P(Test_ONNX_layers, Attention) {
 TEST_P(Test_ONNX_layers, AttentionSingleHead) {
     testONNXModels("attention_single_head");
 }
+TEST_P(Test_ONNX_layers, PyTorchAttentionSingleHead){
+    testONNXModels("pytorch_attention_single_head");
+}
+
+TEST_P(Test_ONNX_layers, PyTorchUnflatten){
+    testONNXModels("unflatten");
+}
 
 TEST_P(Test_ONNX_nets, ViT_B_32) {
     applyTestTag(CV_TEST_TAG_LONG, CV_TEST_TAG_DEBUG_LONG);