Merge pull request #24613 from WanliZhong:softmax_default_axis

Make the default softmax axis in ONNX -1 when no opset option is present (#24613)

Attempts to solve the problem discussed in: https://github.com/opencv/opencv/pull/24476#discussion_r1404821158

**ONNX**
`opset <= 11` uses axis 1
otherwise it uses axis -1

**TensorFlow**
`TF version = 2.x` uses axis -1
otherwise it uses axis 1

**Darknet, Caffe, Torch**
use axis 1 by definition
pull/24702/head
Wanli 1 year ago committed by GitHub
parent 3d1f6465da
commit 6ae1709c6a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 5
      modules/dnn/src/caffe/caffe_importer.cpp
  2. 3
      modules/dnn/src/darknet/darknet_io.cpp
  3. 2
      modules/dnn/src/layers/softmax_layer.cpp
  4. 10
      modules/dnn/src/onnx/onnx_importer.cpp
  5. 6
      modules/dnn/src/tensorflow/tf_importer.cpp
  6. 3
      modules/dnn/src/torch/torch_importer.cpp
  7. 2
      modules/dnn/test/test_onnx_conformance_layer_filter_opencv_all_denylist.inl.hpp

@ -499,6 +499,11 @@ public:
{
type = "Convolution";
}
else if (type == "Softmax"){
// set default axis to 1
if(!layerParams.has("axis"))
layerParams.set("axis", 1);
}
int id = dstNet.addLayer(name, type, layerParams);

@ -314,6 +314,9 @@ namespace cv {
cv::dnn::LayerParams softmax_param;
softmax_param.name = "Softmax-name";
softmax_param.type = "Softmax";
// set default axis to 1
if(!softmax_param.has("axis"))
softmax_param.set("axis", 1);
darknet::LayerParameter lp;
std::string layer_name = cv::format("softmax_%d", layer_id);

@ -76,7 +76,7 @@ public:
SoftMaxLayerImpl(const LayerParams& params)
{
    // The default axis is -1 (the last dimension). Importers for
    // frameworks whose softmax defaults to axis 1 (Caffe, Darknet,
    // Torch, TF 1.x, ONNX opset <= 11) are expected to set "axis"
    // explicitly in `params`.
    // NOTE(review): removed a duplicated dead store that assigned
    // the old default (1) immediately before this line.
    axisRaw = params.get<int>("axis", -1);
    // Optional flag: compute log-softmax instead of softmax.
    logSoftMax = params.get<bool>("log_softmax", false);
    setParamsFrom(params);
}

@ -2788,10 +2788,10 @@ void ONNXImporter::parseSoftMax(LayerParams& layerParams, const opencv_onnx::Nod
{
const std::string& layer_type = node_proto.op_type();
int axis;
if (layerParams.has("opset") && layerParams.get<int>("opset") > 11) {
axis = layerParams.get<int>("axis", -1);
} else {
if (onnx_opset != 0 && onnx_opset <= 11) {
axis = layerParams.get<int>("axis", 1);
} else {
axis = layerParams.get<int>("axis", -1);
}
layerParams.set<int>("axis", axis);
layerParams.type = "Softmax";
@ -3962,7 +3962,7 @@ void ONNXImporter::buildDispatchMap_ONNX_AI(int opset_version)
dispatch["Concat"] = &ONNXImporter::parseConcat;
dispatch["Resize"] = &ONNXImporter::parseResize;
dispatch["Upsample"] = &ONNXImporter::parseUpsample;
dispatch["SoftMax"] = dispatch["LogSoftmax"] = &ONNXImporter::parseSoftMax;
dispatch["SoftMax"] = dispatch["Softmax"] = dispatch["LogSoftmax"] = &ONNXImporter::parseSoftMax;
dispatch["DetectionOutput"] = &ONNXImporter::parseDetectionOutput;
dispatch["CumSum"] = &ONNXImporter::parseCumSum;
dispatch["SpaceToDepth"] = dispatch["DepthToSpace"] = &ONNXImporter::parseDepthToSpace;
@ -3981,7 +3981,7 @@ void ONNXImporter::buildDispatchMap_ONNX_AI(int opset_version)
std::vector<std::string> simpleLayers{"Acos", "Acosh", "Asin", "Asinh", "Atan", "Atanh", "Ceil", "Celu", "Cos",
"Cosh", "Dropout", "Erf", "Exp", "Floor", "HardSigmoid", "HardSwish",
"Identity", "Log", "Round", "Reciprocal", "Selu", "Sign", "Sigmoid", "Sin", "Sinh", "Softmax",
"Identity", "Log", "Round", "Reciprocal", "Selu", "Sign", "Sigmoid", "Sin", "Sinh",
"Softplus", "Softsign", "Shrink", "Sqrt", "Tan", "ThresholdedRelu", "Gelu",
"GeluApproximation"};
for (const auto& name : simpleLayers)

@ -2300,6 +2300,12 @@ void TFImporter::parseSoftmax(tensorflow::GraphDef& net, const tensorflow::NodeD
CV_CheckGT(num_inputs, 0, "");
if (hasLayerAttr(layer, "axis"))
layerParams.set("axis", getLayerAttr(layer, "axis").i());
// if tf version is 2.x, use axis -1 as default
else if(netBin.has_versions() && (int)netBin.versions().producer() >= 2)
layerParams.set("axis", -1);
// else use axis 1 as default
else
layerParams.set("axis", 1);
int id = dstNet.addLayer(name, "Softmax", layerParams);
layer_id[name] = id;

@ -874,6 +874,9 @@ struct TorchImporter
{
newModule->apiType = "Softmax";
layerParams.set("log_softmax", nnName == "LogSoftMax");
// set default axis to 1
if(!layerParams.has("axis"))
layerParams.set("axis", 1);
curModule->modules.push_back(newModule);
}
else if (nnName == "SpatialCrossMapLRN")

@ -43,7 +43,6 @@
"test_castlike_STRING_to_FLOAT_expanded",
"test_concat_1d_axis_negative_1",
"test_div_uint8", // output type mismatch
"test_logsoftmax_default_axis",
"test_maxpool_2d_dilations",
"test_maxpool_2d_same_lower",
"test_maxpool_2d_uint8", // output type mismatch
@ -51,7 +50,6 @@
"test_maxpool_with_argmax_2d_precomputed_strides",
"test_maxunpool_export_with_output_shape", // exception during net.forward() call
"test_mul_uint8", // output type mismatch
"test_softmax_default_axis",
"test_sub_bcast",
"test_sub_uint8", // output type mismatch
"test_upsample_nearest",

Loading…
Cancel
Save