diff --git a/modules/dnn/src/layers/concat_layer.cpp b/modules/dnn/src/layers/concat_layer.cpp
index 24ef4fd8b..82f782865 100644
--- a/modules/dnn/src/layers/concat_layer.cpp
+++ b/modules/dnn/src/layers/concat_layer.cpp
@@ -62,6 +62,7 @@ namespace dnn
         CV_Assert(axis < refShape.dims());
 
         int axisSum = 0;
+
         for (size_t i = 0; i < inputs.size(); i++)
         {
             BlobShape curShape = inputs[i]->shape();
@@ -83,18 +84,16 @@ namespace dnn
 
     void ConcatLayer::forward(std::vector<Blob*> &inputs, std::vector<Blob> &outputs)
     {
-        const Mat& outMat = outputs[0].matRef();
-        std::vector<Range> ranges(outputs[0].dims(), Range::all());
-        int sizeStart = 0;
+        // If the Blob shape used for allocation and the inner matrix shape do not match, this layer did not work in the previous implementation. This implementation is just a fix and should be rewritten.
+
+        size_t usedSize = 0;
         for (size_t i = 0; i < inputs.size(); i++)
         {
-            int sizeEnd = sizeStart + inputs[i]->size(axis);
-            ranges[axis] = Range(sizeStart, sizeEnd);
-
-            Mat outSubMat = outMat(&ranges[0]);
-            inputs[i]->matRef().copyTo(outSubMat);
+            Mat inMat(1, inputs[i]->total(), CV_32F, inputs[i]->ptrf());
+            Mat outMat(1, inputs[i]->total(), CV_32F, outputs[0].ptrf() + usedSize);
 
-            sizeStart = sizeEnd;
+            inMat.copyTo(outMat);
+            usedSize += inputs[i]->total();
        }
    }
}
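
The patch replaces the per-axis ROI copy with a flat copy: each input's elements are wrapped as a 1 x total() CV_32F row and copied into the output buffer at a running float offset, which places the inputs back-to-back (i.e. concatenation along the outermost non-trivial axis). Below is a minimal standalone sketch of that technique using plain cv::Mat instead of Blob; the helper name concatFlat and the use of cv::Mat are illustrative assumptions, not part of the dnn API or of this patch.

```cpp
// Standalone sketch of the flat-copy concatenation used by the new forward().
// Assumptions: inputs are continuous CV_32F matrices; concatFlat is a
// hypothetical helper, not dnn module code.
#include <opencv2/core.hpp>
#include <iostream>
#include <vector>

static void concatFlat(const std::vector<cv::Mat>& inputs, cv::Mat& output)
{
    size_t usedSize = 0;
    for (size_t i = 0; i < inputs.size(); i++)
    {
        const int n = (int)inputs[i].total();

        // Wrap the raw float data as flat 1 x n headers at the current offset;
        // no data is copied here, and copyTo below only reads from inFlat.
        cv::Mat inFlat(1, n, CV_32F, const_cast<float*>(inputs[i].ptr<float>()));
        cv::Mat outFlat(1, n, CV_32F, output.ptr<float>() + usedSize);

        inFlat.copyTo(outFlat);
        usedSize += (size_t)n;
    }
}

int main()
{
    cv::Mat a = (cv::Mat_<float>(2, 2) << 1, 2, 3, 4);  // 4 elements
    cv::Mat b = (cv::Mat_<float>(1, 2) << 5, 6);         // 2 elements
    cv::Mat out(1, (int)(a.total() + b.total()), CV_32F);

    concatFlat({a, b}, out);
    std::cout << out << std::endl;  // [1, 2, 3, 4, 5, 6]
    return 0;
}
```

Because only raw element counts are used, the copy works even when the Blob shape recorded at allocation and the inner matrix shape disagree, which is the mismatch the patch comment refers to.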