diff --git a/modules/dnn/src/darknet/darknet_io.cpp b/modules/dnn/src/darknet/darknet_io.cpp
index 99715df829..11aad453e3 100644
--- a/modules/dnn/src/darknet/darknet_io.cpp
+++ b/modules/dnn/src/darknet/darknet_io.cpp
@@ -376,7 +376,7 @@ namespace cv {
                 int begin[] = {0, split_size * group_id, 0, 0};
                 cv::dnn::DictValue paramBegin = cv::dnn::DictValue::arrayInt(begin, 4);
 
-                int end[] = {-1, begin[1] + split_size, -1, -1};
+                int end[] = {INT_MAX, begin[1] + split_size, INT_MAX, INT_MAX};
                 cv::dnn::DictValue paramEnd = cv::dnn::DictValue::arrayInt(end, 4);
 
                 darknet::LayerParameter lp;
diff --git a/modules/dnn/src/layers/slice_layer.cpp b/modules/dnn/src/layers/slice_layer.cpp
index a470772813..71de70e93f 100644
--- a/modules/dnn/src/layers/slice_layer.cpp
+++ b/modules/dnn/src/layers/slice_layer.cpp
@@ -58,12 +58,32 @@ namespace cv
 namespace dnn
 {
 
-void sliceRangesFromShape(const MatShape& inpShape, int& axis, std::vector<std::vector<Range> >& sliceRanges)
+Range normalizeRange(const Range& input_range, int n)
 {
+    Range range = input_range;
+
+    range.start = std::min(std::max(range.start, -n), n - 1);
+    if (range.start < 0)
+    {
+        range.start += n;
+    }
+
+    range.end = std::min(std::max(range.end, -n), n);
+    if (range.end < 0)
+    {
+        range.end += n;
+    }
+
+    return range;
+}
+
+std::vector<std::vector<Range> > finalizeSliceRange(const MatShape& inpShape, int& axis,
+                                                    const std::vector<std::vector<Range> >& inputSliceRanges)
+{
+    std::vector<std::vector<Range> > sliceRanges = inputSliceRanges;
     CV_Assert(inpShape.size() > 0);
     bool axisNeg = (axis < 0);
     axis = (axis + static_cast<int>(inpShape.size())) % inpShape.size();
-    int n = inpShape[axis];
 
     for (size_t i = 0; i < sliceRanges.size(); ++i){
         std::vector<Range>& ranges = sliceRanges[i];
@@ -71,16 +91,20 @@ void sliceRangesFromShape(const MatShape& inpShape, int& axis, std::vector<std:
         {
             ranges.insert(ranges.begin(), axis, Range::all());
         }
-        Range& range = ranges.back();
 
-        if (range.start >= 0)
+        for (size_t j = 0; j < ranges.size(); ++j)
         {
-            continue;
-        }
+            int n = inpShape[j];
+            if (n <= 0)
+            {
+                continue;
+            }
 
-        CV_Assert(n != 0);
-        range.start = (n + range.start) % n;
+            ranges[j] = normalizeRange(ranges[j], n);
+        }
     }
+
+    return sliceRanges;
 }
 
 class SliceLayerImpl : public SliceLayer
@@ -130,7 +154,7 @@ public:
             {
                 int size = sizeOrEnd;
                 CV_Assert(size == -1 || size > 0);  // -1 value means range [start, axis_size).
-                sliceRanges[0][i].end = size > 0 ? (start + size) : -1;  // We'll finalize a negative value later.
+                sliceRanges[0][i].end = size > 0 ? (start + size) : INT_MAX;  // We'll finalize a negative value later.
             }
             else
             {
@@ -181,8 +205,7 @@ public:
         MatShape inpShape = inputs[0];
 
         int axis_rw = axis;
-        std::vector<std::vector<Range> > sliceRanges_rw = sliceRanges;
-        sliceRangesFromShape(inpShape, axis_rw, sliceRanges_rw);
+        std::vector<std::vector<Range> > sliceRanges_rw = finalizeSliceRange(inpShape, axis_rw, sliceRanges);
 
         if (!sliceRanges_rw.empty())
         {
@@ -193,7 +216,7 @@ public:
                 for (int j = 0; j < sliceRanges_rw[i].size(); ++j)
                 {
                     if (shapesInitialized || inpShape[j] > 0)
-                        outputs[i][j] = normalize_axis_range(sliceRanges_rw[i][j], inpShape[j]).size();
+                        outputs[i][j] = normalizeRange(sliceRanges_rw[i][j], inpShape[j]).size();
 
                     if (!sliceSteps.empty() && (i < sliceSteps.size()) && (j < sliceSteps[i].size()) && (sliceSteps[i][j] > 1))
                         outputs[i][j] = (outputs[i][j] + sliceSteps[i][j] - 1) / sliceSteps[i][j];
@@ -230,8 +253,7 @@ public:
        CV_Assert(inputs.size() == 1);
        const MatSize& inpShape = inputs[0].size;
 
-        sliceRangesFromShape(shape(inputs[0]), axis, sliceRanges);
-        finalSliceRanges = sliceRanges;
+        finalSliceRanges = finalizeSliceRange(shape(inputs[0]), axis, sliceRanges);
 
         if (sliceRanges.empty())
         {
@@ -261,7 +283,7 @@ public:
             // Clamp.
             for (int j = 0; j < finalSliceRanges[i].size(); ++j)
             {
-                finalSliceRanges[i][j] = normalize_axis_range(finalSliceRanges[i][j], inpShape[j]);
+                finalSliceRanges[i][j] = normalizeRange(finalSliceRanges[i][j], inpShape[j]);
             }
         }
diff --git a/modules/dnn/src/onnx/onnx_importer.cpp b/modules/dnn/src/onnx/onnx_importer.cpp
index 736f3a27de..e753fbb103 100644
--- a/modules/dnn/src/onnx/onnx_importer.cpp
+++ b/modules/dnn/src/onnx/onnx_importer.cpp
@@ -1013,13 +1013,12 @@ void ONNXImporter::parseSlice(LayerParams& layerParams, const opencv_onnx::NodeP
         if (axis > 0) {
             begin.resize(axis, 0);
-            end.resize(axis, -1);
+            end.resize(axis, INT_MAX);
         }
 
         for (int i = 0; i < starts.size(); ++i)
         {
             begin.push_back(starts.get<int>(i));
-            int finish = ends.get<int>(i);
-            end.push_back((finish < 0) ? --finish : finish); // numpy doesn't include last dim
+            end.push_back(ends.get<int>(i));
         }
     } else { // inp_size > 1
         CV_Assert(inp_size >= 3);
@@ -1043,14 +1042,10 @@ void ONNXImporter::parseSlice(LayerParams& layerParams, const opencv_onnx::NodeP
         const int* ends = end_blob.ptr<int>();
         if (axis > 0) {
             begin.resize(axis, 0);
-            end.resize(axis, -1);
+            end.resize(axis, INT_MAX);
         }
         std::copy(starts, starts + start_blob.total(), std::back_inserter(begin));
-        for (int i = 0; i < end_blob.total(); ++i)
-        {
-            int finish = ends[i];
-            end.push_back((finish < 0) ? --finish : finish); // numpy doesn't include last dim
-        }
+        std::copy(ends, ends + end_blob.total(), std::back_inserter(end));
         if (inp_size == 5) {
             CV_Assert(constBlobs.find(node_proto.input(4)) != constBlobs.end());
@@ -2133,9 +2128,15 @@ void ONNXImporter::parseExpand(LayerParams& layerParams, const opencv_onnx::Node
 
     if (!haveVariables)
     {
-        if (broadcast_axes.size() != 1)
+        if (broadcast_axes.size() > 1)
             CV_Error(Error::StsNotImplemented, "Expand op doesn't support multiple axes for constant input");
 
+        if (broadcast_axes.empty())
+        {
+            addConstant(output_name, getBlob(node_proto, 0));
+            return;
+        }
+
         Mat input = getBlob(node_proto, 0);
         input = input.reshape(0, total(inpShape, 0, broadcast_axes[0]));
         Mat output = cv::repeat(input, 1, targetShape[broadcast_axes[0]]);
@@ -2354,7 +2355,7 @@ void ONNXImporter::parseGather(LayerParams& layerParams, const opencv_onnx::Node
         sliceLp.type = "Slice";
         sliceLp.name = inpShape.size() > 1 ? layerParams.name + "/slice" : layerParams.name;
         std::vector<int> begin(inpShape.size(), 0);
-        std::vector<int> end(inpShape.size(), -1);
+        std::vector<int> end(inpShape.size(), INT_MAX);
         begin[axis] = index;
         end[axis] = index + 1;
diff --git a/modules/dnn/src/tensorflow/tf_importer.cpp b/modules/dnn/src/tensorflow/tf_importer.cpp
index 8cbe1c4b23..f9e129622c 100644
--- a/modules/dnn/src/tensorflow/tf_importer.cpp
+++ b/modules/dnn/src/tensorflow/tf_importer.cpp
@@ -1490,10 +1490,8 @@ void TFImporter::parseStridedSlice(tensorflow::GraphDef& net, const tensorflow::
     int end_mask = getLayerAttr(layer, "end_mask").i();
     for (int i = 0; i < num; ++i)
     {
-        if (ends.at<int>(i) < 0)
-            ends.at<int>(i) -= 1;
         if (end_mask & (1 << i))
-            ends.at<int>(i) = -1;
+            ends.at<int>(i) = INT_MAX;
         if (strides.at<int>(i) != 1)
             CV_Error(Error::StsNotImplemented,
                      format("StridedSlice with stride %d", strides.at<int>(i)));
@@ -1791,15 +1789,16 @@ void TFImporter::parseConv2DBackpropInput(tensorflow::GraphDef& net, const tenso
     int64_t pads[8];
     bool explicit_pads = getExplicitPadding(layerParams, layer, pads);
     int64_t begs[4] = {};
-    int64_t ends[4] = {-1, -1, -1, -1};
+    int64_t ends[4] = {};
     if (explicit_pads)
     {
         name += "/deconv";
         layerParams.set("pad_mode", "VALID");
+        ends[0] = ends[1] = INT_MAX;
         for (int i = 2; i < 4; ++i) // begins=[0, 0, a, b], ends=[-1, -1, c, d]
         {
             begs[i] = pads[2*i];
-            ends[i] = -1 - pads[2*i + 1];
+            ends[i] = -pads[2*i + 1];
         }
     }
 
@@ -1819,8 +1818,8 @@ void TFImporter::parseConv2DBackpropInput(tensorflow::GraphDef& net, const tenso
         const int strideX = layerParams.get<int>("stride_w");
         Mat outShape = getTensorContent(getConstBlob(layer, value_id, 0));
         int shift = (getDataLayout(layer) == DATA_LAYOUT_NCHW);
-        const int outH = outShape.at<int>(1 + shift) + begs[2] - 1 - ends[2];
-        const int outW = outShape.at<int>(2 + shift) + begs[3] - 1 - ends[3];
+        const int outH = outShape.at<int>(1 + shift) + begs[2] - ends[2];
+        const int outW = outShape.at<int>(2 + shift) + begs[3] - ends[3];
         if (layerParams.get<String>("pad_mode") == "SAME")
         {
             layerParams.set("adj_w", (outW - 1) % strideX);
diff --git a/modules/dnn/src/torch/torch_importer.cpp b/modules/dnn/src/torch/torch_importer.cpp
index 5dd9e3e290..e595e993ef 100644
--- a/modules/dnn/src/torch/torch_importer.cpp
+++ b/modules/dnn/src/torch/torch_importer.cpp
@@ -949,7 +949,7 @@ struct TorchImporter
             int size = scalarParams.get<int>("size");
 
             int begins[] = {0, 0, size, size};
-            int ends[] = {-1, -1, -size - 1, -size - 1};
+            int ends[] = {INT_MAX, INT_MAX, -size, -size};
 
             newModule->apiType = "Slice";
             layerParams.set("begin", DictValue::arrayInt(&begins[0], 4));
diff --git a/modules/dnn/test/test_layers.cpp b/modules/dnn/test/test_layers.cpp
index 2d4f78c88c..2b17e6fa24 100644
--- a/modules/dnn/test/test_layers.cpp
+++ b/modules/dnn/test/test_layers.cpp
@@ -2028,7 +2028,7 @@ TEST_P(Layer_Test_Slice, variable_input_shape)
     int targetId = get<1>(GetParam());
 
     int begin[] = {0, 0, 0, 0};
-    int end[] = {-1, -1, -1, -1};
+    int end[] = {INT_MAX, INT_MAX, INT_MAX, INT_MAX};
 
     Net net;
     LayerParams lp;
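
Note on the new end-coordinate convention: a slice end of INT_MAX now means "through the end of the axis", and a genuinely negative end counts back from the end of the axis, numpy-style, so the importers no longer need the old `(finish < 0) ? --finish : finish` adjustment. The standalone sketch below (not part of the patch) demonstrates the clamping behaviour: `normalizeRange` is copied from the slice_layer.cpp hunk above, while the `Range` struct is a minimal stand-in for cv::Range so the example compiles without OpenCV.

#include <algorithm>
#include <cassert>
#include <climits>

// Minimal stand-in for cv::Range (start inclusive, end exclusive).
struct Range
{
    int start, end;
    int size() const { return end - start; }
};

// Copied from the new slice_layer.cpp: clamp start/end into the axis,
// then wrap negative values around, numpy-style.
Range normalizeRange(const Range& input_range, int n)
{
    Range range = input_range;

    range.start = std::min(std::max(range.start, -n), n - 1);
    if (range.start < 0)
        range.start += n;

    range.end = std::min(std::max(range.end, -n), n);
    if (range.end < 0)
        range.end += n;

    return range;
}

int main()
{
    const int n = 10; // axis size

    // end == INT_MAX clamps to n: the slice runs to the end of the axis.
    Range full = normalizeRange(Range{0, INT_MAX}, n);
    assert(full.start == 0 && full.end == 10);

    // A negative end counts back from the end: end == -2 maps to 8.
    Range tail = normalizeRange(Range{2, -2}, n);
    assert(tail.start == 2 && tail.end == 8 && tail.size() == 6);

    // Out-of-bounds coordinates are clamped rather than rejected.
    Range clamped = normalizeRange(Range{-100, 100}, n);
    assert(clamped.start == 0 && clamped.end == 10);

    return 0;
}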