Remove asymmetric padding checks

pull/22362/head
fengyuentau 2 years ago
parent cde18648dc
commit e7e814fa8c
  1. modules/dnn/src/layers/convolution_layer.cpp — 8 lines changed
  2. modules/dnn/src/onnx/onnx_importer.cpp — 38 lines changed

@@ -101,10 +101,6 @@ public:
if (kernel_size.size() == 2) {
kernel = Size(kernel_size[1], kernel_size[0]);
stride = Size(strides[1], strides[0]);
for (int i = 0; i < pads_begin.size(); i++) {
if (pads_begin[i] != pads_end[i])
CV_Error(Error::StsNotImplemented, "Unsupported asymmetric padding in convolution layer");
}
pad = Size(pads_begin[1], pads_begin[0]);
dilation = Size(dilations[1], dilations[0]);
@@ -163,10 +159,6 @@ public:
}
getConvPoolPaddings(inpShape, kernel_size, strides, padMode, pads_begin, pads_end);
if (pads_begin.size() == 2) {
for (int i = 0; i < pads_begin.size(); i++) {
if (pads_begin[i] != pads_end[i])
CV_Error(Error::StsNotImplemented, "Unsupported asymmetric padding in convolution layer");
}
pad = Size(pads_begin[1], pads_begin[0]);
}
fusedWeights = false;

@@ -2067,44 +2067,6 @@ void ONNXImporter::parseConv(LayerParams& layerParams, const opencv_onnx::NodeProto
int outCn = layerParams.blobs.empty() ? outShapes[node_proto.input(1)][0] : layerParams.blobs[0].size[0];
layerParams.set("num_output", outCn);
// Check for asymmetric padding in Conv2D
if (layerParams.has("pad"))
{
bool asymmetricPadding = false;
DictValue pads = layerParams.get("pad");
const int dims = pads.size() / 2;
for (int i = 0; i < dims; ++i)
{
if (pads.get<int>(i) != pads.get<int>(i + dims))
{
asymmetricPadding = true;
break;
}
}
if (asymmetricPadding && pads.size() == 4) // [pad_t, pad_l, pad_b, pad_r]
{
layerParams.erase("pad");
// No paddings required for N, C axis
std::vector<int> paddings(4, 0);
// Add paddings for H, W axis
for (int i = 0; i < dims; ++i)
{
paddings.push_back(pads.get<int>(i));
paddings.push_back(pads.get<int>(dims + i));
}
LayerParams padLp;
padLp.name = layerParams.name + "/pad";
padLp.type = "Padding";
padLp.set("paddings", DictValue::arrayInt(&paddings[0], paddings.size()));
opencv_onnx::NodeProto proto;
proto.add_input(node_proto.input(0));
proto.add_output(padLp.name);
addLayer(padLp, proto);
node_proto.set_input(0, padLp.name);
}
}
addLayer(layerParams, node_proto);
}

Loading…
Cancel
Save