Fix LogSoftmax for ONNX

Also fix wrong indentation while at it
pull/14586/head
dianlujitao 6 years ago
parent 447116a93c
commit f0f50b757d
1. modules/dnn/src/onnx/onnx_importer.cpp (7 changes)
2. modules/dnn/test/test_onnx_importer.cpp (6 changes)

@@ -786,6 +786,11 @@ void ONNXImporter::populateNet(Net dstNet)
             }
             replaceLayerParam(layerParams, "mode", "interpolation");
         }
+        else if (layer_type == "LogSoftmax")
+        {
+            layerParams.type = "Softmax";
+            layerParams.set("log_softmax", true);
+        }
         else
         {
             for (int j = 0; j < node_proto.input_size(); j++) {
@@ -816,7 +821,7 @@ void ONNXImporter::populateNet(Net dstNet)
         CV_Assert(!layerOutShapes.empty());
         outShapes[layerParams.name] = layerOutShapes[0];
     }
-    }
+}
 
 Net readNetFromONNX(const String& onnxFile)
 {
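Note on the hunk above: OpenCV's Softmax layer can emit the logarithm of its output when the log_softmax parameter is set, so the importer translates an ONNX LogSoftmax node into a Softmax layer with that flag rather than falling through to the generic (and here incorrect) handling. As a minimal standalone sketch of what such a path is expected to compute, log_softmax(x) = (x - max(x)) - log(sum(exp(x - max(x)))), here is plain C++ with a hypothetical logSoftmax helper; this is illustration only, not code from the commit:

#include <algorithm>
#include <cmath>
#include <cstdio>
#include <vector>

// Numerically stable log-softmax over a 1-D vector (hypothetical helper).
std::vector<double> logSoftmax(const std::vector<double>& x)
{
    // Subtract the maximum first so exp() cannot overflow for large logits;
    // the shift cancels out in the final result.
    const double m = *std::max_element(x.begin(), x.end());
    double sum = 0.0;
    for (double v : x)
        sum += std::exp(v - m);
    const double logSum = std::log(sum);

    std::vector<double> y(x.size());
    for (size_t i = 0; i < x.size(); i++)
        y[i] = x[i] - m - logSum;   // equals log(softmax(x)[i])
    return y;
}

int main()
{
    const std::vector<double> x = {1.0, 2.0, 3.0};
    for (double v : logSoftmax(x))
        std::printf("%.6f\n", v);   // expected: -2.407606 -1.407606 -0.407606
    return 0;
}

Reusing the Softmax layer this way avoids a second kernel implementation: the exponentiation and normalization are shared, and only the final log differs.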

@@ -245,6 +245,12 @@ TEST_P(Test_ONNX_layers, Reshape)
     testONNXModels("unsqueeze");
 }
 
+TEST_P(Test_ONNX_layers, Softmax)
+{
+    testONNXModels("softmax");
+    testONNXModels("log_softmax");
+}
+
 INSTANTIATE_TEST_CASE_P(/*nothing*/, Test_ONNX_layers, dnnBackendsAndTargets());
 
 class Test_ONNX_nets : public Test_ONNX_layers {};
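For context, a minimal usage sketch of the public API this commit touches, readNetFromONNX (seen in the importer hunk above). The model path "logsoftmax.onnx" is a placeholder and the 1x10 input shape is an assumption for illustration, neither is part of this commit:

#include <opencv2/dnn.hpp>
#include <iostream>

int main()
{
    // Load an ONNX model that contains a LogSoftmax node; before this fix
    // the importer had no mapping for that layer type.
    // "logsoftmax.onnx" is a placeholder path.
    cv::dnn::Net net = cv::dnn::readNetFromONNX("logsoftmax.onnx");

    // Assumed 1x10 float input for illustration.
    cv::Mat input(1, 10, CV_32F, cv::Scalar(1.0f));
    net.setInput(input);

    cv::Mat out = net.forward();  // log-probabilities from the Softmax layer
    std::cout << out << std::endl;
    return 0;
}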
