Merge remote-tracking branch 'upstream/3.4' into merge-3.4

pull/22022/head
Alexander Alekhin 3 years ago
commit d9bf522b27
8 changed files:
  1. 3rdparty/libpng/CMakeLists.txt (8 changed lines)
  2. 3rdparty/libtiff/CMakeLists.txt (5 changed lines)
  3. apps/traincascade/cascadeclassifier.cpp (2 changed lines)
  4. modules/dnn/src/onnx/onnx_importer.cpp (24 changed lines)
  5. modules/dnn/test/test_int8_layers.cpp (22 changed lines)
  6. modules/dnn/test/test_onnx_importer.cpp (5 changed lines)
  7. modules/ts/src/ts_gtest.cpp (2 changed lines)
  8. modules/video/src/tracking/tracker_dasiamrpn.cpp (6 changed lines)

3rdparty/libpng/CMakeLists.txt

@@ -77,9 +77,11 @@ endif(MSVC)
 add_library(${PNG_LIBRARY} STATIC ${OPENCV_3RDPARTY_EXCLUDE_FROM_ALL} ${lib_srcs} ${lib_hdrs})
 target_link_libraries(${PNG_LIBRARY} ${ZLIB_LIBRARIES})
 
-ocv_warnings_disable(CMAKE_C_FLAGS -Wundef -Wcast-align -Wimplicit-fallthrough -Wunused-parameter -Wsign-compare)
-ocv_warnings_disable(CMAKE_C_FLAGS -Wnull-pointer-subtraction)  # clang15
-ocv_warnings_disable(CMAKE_C_FLAGS -Wunused-but-set-variable)  # clang15
+ocv_warnings_disable(CMAKE_C_FLAGS -Wundef -Wcast-align -Wimplicit-fallthrough -Wunused-parameter -Wsign-compare
+    -Wmaybe-uninitialized
+    -Wnull-pointer-subtraction  # clang15
+    -Wunused-but-set-variable  # clang15
+)
 
 set_target_properties(${PNG_LIBRARY}
     PROPERTIES OUTPUT_NAME ${PNG_LIBRARY}

3rdparty/libtiff/CMakeLists.txt

@@ -452,9 +452,10 @@ ocv_warnings_disable(CMAKE_C_FLAGS -Wno-unused-but-set-variable -Wmissing-protot
                      -Wcast-align -Wshadow -Wno-maybe-uninitialized -Wno-pointer-to-int-cast -Wno-int-to-pointer-cast
                      -Wmisleading-indentation
                      -Wimplicit-fallthrough
+                     -Wunused-parameter  # clang
+                     -Warray-parameter
+                     -Wstrict-prototypes  # clang15
 )
-ocv_warnings_disable(CMAKE_C_FLAGS -Wunused-parameter)  # clang
-ocv_warnings_disable(CMAKE_C_FLAGS -Wstrict-prototypes)  # clang15
 ocv_warnings_disable(CMAKE_CXX_FLAGS -Wmissing-declarations -Wunused-parameter -Wmissing-prototypes
     -Wundef  # tiffiop.h: #if __clang_major__ >= 4
 )

apps/traincascade/cascadeclassifier.cpp

@@ -252,7 +252,7 @@ bool CvCascadeClassifier::train( const string _cascadeDirName,
         fs << "}";
     }
     // save current stage
-    char buf[10];
+    char buf[32];
     sprintf(buf, "%s%d", "stage", i );
     string stageFilename = dirName + buf + ".xml";
     FileStorage fs( stageFilename, FileStorage::WRITE );
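
The buffer grows from 10 to 32 bytes because "stage" plus a multi-digit stage index plus the terminating NUL can exceed 10 characters under the unbounded sprintf. A minimal sketch of the same formatting with a bounded write; the helper name stageName is hypothetical and not part of this patch:

    #include <cstdio>
    #include <string>

    // Hypothetical helper: format "stage<i>" without risking a buffer overrun.
    static std::string stageName(int i)
    {
        char buf[32];                                    // room for "stage" + any int + '\0'
        std::snprintf(buf, sizeof(buf), "stage%d", i);   // bounded write, unlike sprintf
        return std::string(buf);
    }

A call site could then build the file name as dirName + stageName(i) + ".xml", matching the existing code path.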

modules/dnn/src/onnx/onnx_importer.cpp

@@ -197,8 +197,18 @@ private:
     void parseOperatorSet();
 
     const std::string str_domain_ai_onnx = "ai.onnx";
+
+    bool useLegacyNames;
+    bool getParamUseLegacyNames()
+    {
+        bool param = utils::getConfigurationParameterBool("OPENCV_DNN_ONNX_USE_LEGACY_NAMES", false);
+        return param;
+    }
+
+    const std::string extractNodeName(const opencv_onnx::NodeProto& node_proto);
 };
 
 class ONNXLayerHandler : public detail::LayerHandler
 {
 public:
@@ -233,6 +243,7 @@ ONNXImporter::ONNXImporter(Net& net, const char *onnxFile)
     : layerHandler(DNN_DIAGNOSTICS_RUN ? new ONNXLayerHandler(this) : nullptr)
     , dstNet(net)
     , onnx_opset(0)
+    , useLegacyNames(getParamUseLegacyNames())
 {
     hasDynamicShapes = false;
     CV_Assert(onnxFile);
@@ -256,6 +267,7 @@ ONNXImporter::ONNXImporter(Net& net, const char* buffer, size_t sizeBuffer)
     : layerHandler(DNN_DIAGNOSTICS_RUN ? new ONNXLayerHandler(this) : nullptr)
     , dstNet(net)
     , onnx_opset(0)
+    , useLegacyNames(getParamUseLegacyNames())
 {
     hasDynamicShapes = false;
     CV_LOG_DEBUG(NULL, "DNN/ONNX: processing in-memory ONNX model (" << sizeBuffer << " bytes)");
@@ -278,6 +290,7 @@ ONNXImporter::ONNXImporter(Net& net, const char* buffer, size_t sizeBuffer)
     populateNet();
 }
 
 inline void replaceLayerParam(LayerParams& layerParams, const String& oldKey, const String& newKey)
 {
     if (layerParams.has(oldKey)) {
@@ -909,11 +922,14 @@ const ONNXImporter::DispatchMap& ONNXImporter::getDispatchMap(const opencv_onnx:
     return it->second;
 }
 
-const std::string& extractNodeName(const opencv_onnx::NodeProto& node_proto)
+const std::string ONNXImporter::extractNodeName(const opencv_onnx::NodeProto& node_proto)
 {
+    // We need to rework DNN outputs API, this is a workaround for #21698
     if (node_proto.has_name() && !node_proto.name().empty())
     {
-        return node_proto.name();
+        if (useLegacyNames)
+            return node_proto.name();
+        return cv::format("onnx_node!%s", node_proto.name().c_str());
     }
     for (int i = 0; i < node_proto.output_size(); ++i)
     {
@@ -923,7 +939,9 @@ const std::string& extractNodeName(const opencv_onnx::NodeProto& node_proto)
         // the second method is to use an empty string in place of an input or output name.
         if (!name.empty())
         {
-            return name;
+            if (useLegacyNames)
+                return name.c_str();
+            return cv::format("onnx_node_output_%d!%s", i, name.c_str());
         }
     }
     CV_Error(Error::StsAssert, "Couldn't deduce Node name.");
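
With this change an ONNX node that carries a name, say "relu_1" (a hypothetical example), is registered in the DNN graph as "onnx_node!relu_1", and a nameless node is registered under its first non-empty output as "onnx_node_output_<i>!<output>". Setting the environment variable OPENCV_DNN_ONNX_USE_LEGACY_NAMES to 1 restores the old bare names. A minimal sketch of inspecting the resulting layer names; the model path is a placeholder:

    #include <opencv2/dnn.hpp>
    #include <iostream>

    int main()
    {
        // "model.onnx" is a placeholder path, not a file shipped with OpenCV.
        cv::dnn::Net net = cv::dnn::readNetFromONNX("model.onnx");

        // Layer names now follow the "onnx_node!<name>" /
        // "onnx_node_output_<i>!<output>" convention introduced above.
        for (const cv::String& name : net.getLayerNames())
            std::cout << name << std::endl;
        return 0;
    }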

modules/dnn/test/test_int8_layers.cpp

@@ -265,14 +265,32 @@ TEST_P(Test_Int8_layers, Mish)
     testLayer("mish", "ONNX", 0.0015, 0.0025);
 }
 
-TEST_P(Test_Int8_layers, Softmax)
+TEST_P(Test_Int8_layers, Softmax_Caffe)
 {
     testLayer("layer_softmax", "Caffe", 0.0011, 0.0036);
+}
+TEST_P(Test_Int8_layers, Softmax_keras_TF)
+{
     testLayer("keras_softmax", "TensorFlow", 0.00093, 0.0027);
+}
+TEST_P(Test_Int8_layers, Softmax_slim_TF)
+{
     testLayer("slim_softmax", "TensorFlow", 0.0016, 0.0034);
+}
+TEST_P(Test_Int8_layers, Softmax_slim_v2_TF)
+{
     testLayer("slim_softmax_v2", "TensorFlow", 0.0029, 0.017);
+}
+TEST_P(Test_Int8_layers, Softmax_ONNX)
+{
     testLayer("softmax", "ONNX", 0.0016, 0.0028);
+}
+TEST_P(Test_Int8_layers, Softmax_log_ONNX)
+{
     testLayer("log_softmax", "ONNX", 0.014, 0.025);
+}
+TEST_P(Test_Int8_layers, DISABLED_Softmax_unfused_ONNX)  // FIXIT Support 'Identity' layer for outputs (#22022)
+{
     testLayer("softmax_unfused", "ONNX", 0.0009, 0.0021);
 }
@@ -389,7 +407,7 @@ TEST_P(Test_Int8_layers, Slice_strided_tf)
     testLayer("strided_slice", "TensorFlow", 0.008, 0.0142);
 }
 
-TEST_P(Test_Int8_layers, Slice_onnx)
+TEST_P(Test_Int8_layers, DISABLED_Slice_onnx)  // FIXIT Support 'Identity' layer for outputs (#22022)
 {
     testLayer("slice", "ONNX", 0.0046, 0.0077);
 }

modules/dnn/test/test_onnx_importer.cpp

@@ -1855,6 +1855,11 @@ TEST_P(Test_ONNX_layers, Quantized_Constant)
     testONNXModels("quantized_constant", npy, 0.002, 0.008);
 }
 
+TEST_P(Test_ONNX_layers, OutputRegistration)
+{
+    testONNXModels("output_registration", npy, 0, 0, false, true, 2);
+}
+
 INSTANTIATE_TEST_CASE_P(/*nothing*/, Test_ONNX_layers, dnnBackendsAndTargets());
 
 class Test_ONNX_nets : public Test_ONNX_layers
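
The new OutputRegistration test passes extra arguments to testONNXModels, apparently exercising a model with two registered outputs (the trailing 2). A hedged sketch of fetching several outputs through the public API, independent of the test harness; the function name collectAllOutputs is illustrative only:

    #include <opencv2/dnn.hpp>
    #include <vector>

    // Run one forward pass and collect every unconnected (registered) output.
    void collectAllOutputs(cv::dnn::Net& net, const cv::Mat& blob, std::vector<cv::Mat>& outs)
    {
        net.setInput(blob);
        std::vector<cv::String> outNames = net.getUnconnectedOutLayersNames();
        net.forward(outs, outNames);   // one blob per requested output name
    }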

modules/ts/src/ts_gtest.cpp

@@ -8718,7 +8718,7 @@ static void StackLowerThanAddress(const void* ptr, bool* result) {
 // Make sure AddressSanitizer does not tamper with the stack here.
 GTEST_ATTRIBUTE_NO_SANITIZE_ADDRESS_
 static bool StackGrowsDown() {
-  int dummy;
+  int dummy = 0;
   bool result;
   StackLowerThanAddress(&dummy, &result);
   return result;

modules/video/src/tracking/tracker_dasiamrpn.cpp

@@ -160,7 +160,7 @@ void TrackerDaSiamRPNImpl::trackerInit(Mat img)
     dnn::blobFromImage(zCrop, blob, 1.0, Size(trackState.exemplarSize, trackState.exemplarSize), Scalar(), trackState.swapRB, false, CV_32F);
     siamRPN.setInput(blob);
     Mat out1;
-    siamRPN.forward(out1, "63");
+    siamRPN.forward(out1, "onnx_node_output_0!63");
 
     siamKernelCL1.setInput(out1);
     siamKernelR1.setInput(out1);
@@ -169,8 +169,8 @@ void TrackerDaSiamRPNImpl::trackerInit(Mat img)
     Mat r1 = siamKernelR1.forward();
     std::vector<int> r1_shape = { 20, 256, 4, 4 }, cls1_shape = { 10, 256, 4, 4 };
 
-    siamRPN.setParam(siamRPN.getLayerId("65"), 0, r1.reshape(0, r1_shape));
-    siamRPN.setParam(siamRPN.getLayerId("68"), 0, cls1.reshape(0, cls1_shape));
+    siamRPN.setParam(siamRPN.getLayerId("onnx_node_output_0!65"), 0, r1.reshape(0, r1_shape));
+    siamRPN.setParam(siamRPN.getLayerId("onnx_node_output_0!68"), 0, cls1.reshape(0, cls1_shape));
 }
 
 bool TrackerDaSiamRPNImpl::update(InputArray image, Rect& boundingBox)
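
The tracker previously addressed layers by the bare ONNX output names "63", "65" and "68"; under the new naming scheme those become "onnx_node_output_0!63" and so on, so any code that hard-codes layer names needs the same update. A small sketch for discovering the renamed layers at runtime instead of hard-coding them; the model path argument is a placeholder:

    #include <opencv2/dnn.hpp>
    #include <iostream>
    #include <string>

    // Print every layer id and name so string references such as
    // "onnx_node_output_0!63" can be checked against the loaded model.
    void dumpLayerNames(const std::string& modelPath)
    {
        cv::dnn::Net net = cv::dnn::readNetFromONNX(modelPath);
        for (const cv::String& name : net.getLayerNames())
            std::cout << net.getLayerId(name) << "  " << name << std::endl;
    }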
