dnn: modified IE search, R2 compatibility fixed

pull/12105/head
Maksim Shabunin 7 years ago
parent 9433784e47
commit 7cf52de47e
  1. CMakeLists.txt (20)
  2. cmake/OpenCVDetectInferenceEngine.cmake (122)
  3. modules/dnn/CMakeLists.txt (16)
  4. modules/dnn/src/op_inf_engine.cpp (2)
  5. modules/dnn/src/op_inf_engine.hpp (14)

@@ -1407,15 +1407,19 @@ if(WITH_HALIDE OR HAVE_HALIDE)
status(" Halide:" HAVE_HALIDE THEN "YES (${HALIDE_LIBRARIES} ${HALIDE_INCLUDE_DIRS})" ELSE NO)
endif()
if(WITH_INF_ENGINE OR HAVE_INF_ENGINE)
if(HAVE_INF_ENGINE)
set(__msg "YES")
if(DEFINED INF_ENGINE_VERSION)
set(__msg "YES (ver ${INF_ENGINE_VERSION})")
endif()
if(WITH_INF_ENGINE OR INF_ENGINE_TARGET)
if(INF_ENGINE_TARGET)
set(__msg "YES (${INF_ENGINE_RELEASE} / ${INF_ENGINE_VERSION})")
get_target_property(_lib ${INF_ENGINE_TARGET} IMPORTED_LOCATION)
if(NOT _lib)
get_target_property(_lib_rel ${INF_ENGINE_TARGET} IMPORTED_IMPLIB_RELEASE)
get_target_property(_lib_dbg ${INF_ENGINE_TARGET} IMPORTED_IMPLIB_DEBUG)
set(_lib "${_lib_rel} / ${_lib_dbg}")
endif()
get_target_property(_inc ${INF_ENGINE_TARGET} INTERFACE_INCLUDE_DIRECTORIES)
status(" Inference Engine:" "${__msg}")
status(" libs:" "${INF_ENGINE_LIBRARIES}")
status(" includes:" "${INF_ENGINE_INCLUDE_DIRS}")
status(" libs:" "${_lib}")
status(" includes:" "${_inc}")
else()
status(" Inference Engine:" "NO")
endif()

@@ -1,83 +1,87 @@
# The script detects Intel(R) Inference Engine installation
#
# Parameters:
# INTEL_CVSDK_DIR - Path to Inference Engine root folder
# IE_PLUGINS_PATH - Path to folder with Inference Engine plugins
# Cache variables:
# INF_ENGINE_OMP_DIR - directory with OpenMP library to link with (needed by some versions of IE)
# INF_ENGINE_RELEASE - a number reflecting IE source interface (linked with OpenVINO release)
#
# On return this will define:
# Detect parameters:
# 1. Native cmake IE package:
# - environment variable InferenceEngine_DIR is set to the location of the cmake module
# 2. Custom location:
# - INF_ENGINE_INCLUDE_DIRS - headers search location
# - INF_ENGINE_LIB_DIRS - library search location
# 3. OpenVINO location:
# - environment variable INTEL_CVSDK_DIR is set to location of OpenVINO installation dir
# - INF_ENGINE_PLATFORM - part of name of library directory representing its platform (default ubuntu_16.04)
#
# HAVE_INF_ENGINE - True if Intel Inference Engine was found
# INF_ENGINE_INCLUDE_DIRS - Inference Engine include folder
# INF_ENGINE_LIBRARIES - Inference Engine libraries and its dependencies
# Result:
# INF_ENGINE_TARGET - set to name of imported library target representing InferenceEngine
#
macro(ie_fail)
set(HAVE_INF_ENGINE FALSE)
return()
endmacro()
if(NOT HAVE_CXX11)
message(WARNING "DL Inference Engine requires C++11. You can turn it on via the ENABLE_CXX11=ON CMake flag.")
ie_fail()
endif()
find_package(InferenceEngine QUIET)
if(InferenceEngine_FOUND)
set(INF_ENGINE_LIBRARIES "${InferenceEngine_LIBRARIES}")
set(INF_ENGINE_INCLUDE_DIRS "${InferenceEngine_INCLUDE_DIRS}")
set(INF_ENGINE_VERSION "${InferenceEngine_VERSION}")
set(HAVE_INF_ENGINE TRUE)
return()
endif()
ocv_check_environment_variables(INTEL_CVSDK_DIR INF_ENGINE_ROOT_DIR IE_PLUGINS_PATH)
# =======================
if(NOT INF_ENGINE_ROOT_DIR OR NOT EXISTS "${INF_ENGINE_ROOT_DIR}/include/inference_engine.hpp")
set(ie_root_paths "${INF_ENGINE_ROOT_DIR}")
if(DEFINED INTEL_CVSDK_DIR)
list(APPEND ie_root_paths "${INTEL_CVSDK_DIR}/")
list(APPEND ie_root_paths "${INTEL_CVSDK_DIR}/deployment_tools/inference_engine")
function(add_custom_ie_build _inc _lib _lib_rel _lib_dbg _msg)
if(NOT _inc OR NOT (_lib OR _lib_rel OR _lib_dbg))
return()
endif()
if(NOT ie_root_paths)
list(APPEND ie_root_paths "/opt/intel/computer_vision_sdk/deployment_tools/inference_engine/")
add_library(inference_engine UNKNOWN IMPORTED)
set_target_properties(inference_engine PROPERTIES
IMPORTED_LOCATION "${_lib}"
IMPORTED_IMPLIB_RELEASE "${_lib_rel}"
IMPORTED_IMPLIB_DEBUG "${_lib_dbg}"
INTERFACE_INCLUDE_DIRECTORIES "${_inc}"
)
find_library(omp_lib iomp5 PATHS "${INF_ENGINE_OMP_DIR}" NO_DEFAULT_PATH)
if(NOT omp_lib)
message(WARNING "OpenMP for IE has not been found. Set the INF_ENGINE_OMP_DIR variable if you experience build errors.")
else()
set_target_properties(inference_engine PROPERTIES IMPORTED_LINK_INTERFACE_LIBRARIES "${omp_lib}")
endif()
set(INF_ENGINE_VERSION "Unknown" CACHE STRING "")
set(INF_ENGINE_TARGET inference_engine PARENT_SCOPE)
message(STATUS "Detected InferenceEngine: ${_msg}")
endfunction()
find_path(INF_ENGINE_ROOT_DIR include/inference_engine.hpp PATHS ${ie_root_paths})
if(INF_ENGINE_ROOT_DIR MATCHES "-NOTFOUND$")
unset(INF_ENGINE_ROOT_DIR CACHE)
endif()
endif()
# ======================
set(INF_ENGINE_INCLUDE_DIRS "${INF_ENGINE_ROOT_DIR}/include" CACHE PATH "Path to Inference Engine include directory")
find_package(InferenceEngine QUIET)
if(InferenceEngine_FOUND)
set(INF_ENGINE_TARGET IE::inference_engine)
set(INF_ENGINE_VERSION "${InferenceEngine_VERSION}" CACHE STRING "")
message(STATUS "Detected InferenceEngine: cmake package")
endif()
if(NOT INF_ENGINE_ROOT_DIR
OR NOT EXISTS "${INF_ENGINE_ROOT_DIR}"
OR NOT EXISTS "${INF_ENGINE_ROOT_DIR}/include/inference_engine.hpp"
)
message(WARNING "DL IE: Can't detect INF_ENGINE_ROOT_DIR location.")
ie_fail()
if(NOT INF_ENGINE_TARGET AND INF_ENGINE_LIB_DIRS AND INF_ENGINE_INCLUDE_DIRS)
find_path(ie_custom_inc "inference_engine.hpp" PATHS "${INF_ENGINE_INCLUDE_DIRS}" NO_DEFAULT_PATH)
find_library(ie_custom_lib "inference_engine" PATHS "${INF_ENGINE_LIB_DIRS}" NO_DEFAULT_PATH)
find_library(ie_custom_lib_rel "inference_engine" PATHS "${INF_ENGINE_LIB_DIRS}/Release" NO_DEFAULT_PATH)
find_library(ie_custom_lib_dbg "inference_engine" PATHS "${INF_ENGINE_LIB_DIRS}/Debug" NO_DEFAULT_PATH)
add_custom_ie_build("${ie_custom_inc}" "${ie_custom_lib}" "${ie_custom_lib_rel}" "${ie_custom_lib_dbg}" "INF_ENGINE_{INCLUDE,LIB}_DIRS")
endif()
set(INF_ENGINE_LIBRARIES "")
set(_loc "$ENV{INTEL_CVSDK_DIR}")
if(NOT INF_ENGINE_TARGET AND _loc)
set(INF_ENGINE_PLATFORM "ubuntu_16.04" CACHE STRING "InferenceEngine platform (library dir)")
find_path(ie_custom_env_inc "inference_engine.hpp" PATHS "${_loc}/deployment_tools/inference_engine/include" NO_DEFAULT_PATH)
find_library(ie_custom_env_lib "inference_engine" PATHS "${_loc}/deployment_tools/inference_engine/lib/${INF_ENGINE_PLATFORM}/intel64" NO_DEFAULT_PATH)
find_library(ie_custom_env_lib_rel "inference_engine" PATHS "${_loc}/deployment_tools/inference_engine/lib/intel64/Release" NO_DEFAULT_PATH)
find_library(ie_custom_env_lib_dbg "inference_engine" PATHS "${_loc}/deployment_tools/inference_engine/lib/intel64/Debug" NO_DEFAULT_PATH)
add_custom_ie_build("${ie_custom_env_inc}" "${ie_custom_env_lib}" "${ie_custom_env_lib_rel}" "${ie_custom_env_lib_dbg}" "OpenVINO (${_loc})")
endif()
set(ie_lib_list inference_engine)
# Add more features to the target
if(NOT IS_ABSOLUTE "${IE_PLUGINS_PATH}")
set(IE_PLUGINS_PATH "${INF_ENGINE_ROOT_DIR}/${IE_PLUGINS_PATH}")
if(INF_ENGINE_TARGET)
if(NOT INF_ENGINE_RELEASE)
message(WARNING "InferenceEngine version has not been set; 2018R2 will be used by default. Set the INF_ENGINE_RELEASE variable if you experience build errors.")
endif()
link_directories(
${INF_ENGINE_ROOT_DIR}/external/mkltiny_lnx/lib
${INF_ENGINE_ROOT_DIR}/external/cldnn/lib
set(INF_ENGINE_RELEASE "2018020000" CACHE STRING "Force IE version, should be in form YYYYAABBCC (e.g. 2018R2.0.2 -> 2018020002)")
set_target_properties(${INF_ENGINE_TARGET} PROPERTIES
INTERFACE_COMPILE_DEFINITIONS "HAVE_INF_ENGINE=1;INF_ENGINE_RELEASE=${INF_ENGINE_RELEASE}"
)
foreach(lib ${ie_lib_list})
find_library(${lib} NAMES ${lib} HINTS ${IE_PLUGINS_PATH})
if(NOT ${lib})
message(WARNING "DL IE: Can't find library: '${lib}'")
ie_fail()
endif()
list(APPEND INF_ENGINE_LIBRARIES ${${lib}})
endforeach()
set(HAVE_INF_ENGINE TRUE)
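
The header comment of the new script lists three detection modes (native cmake package, custom location, OpenVINO install dir). As a minimal sketch of the custom-location mode, the relevant cache variables could be primed from an initial-cache file passed to cmake with -C; all paths below are placeholders, not values taken from this commit.

# ie-custom.cmake -- hypothetical initial-cache script, used as: cmake -C ie-custom.cmake <opencv-src>
# All paths are placeholders for an actual Inference Engine build layout.
set(INF_ENGINE_INCLUDE_DIRS "/opt/ie/include" CACHE PATH "IE headers search location")
set(INF_ENGINE_LIB_DIRS "/opt/ie/lib/intel64" CACHE PATH "IE library search location")
set(INF_ENGINE_OMP_DIR "/opt/ie/external/omp/lib" CACHE PATH "OpenMP runtime needed by some IE versions")
set(INF_ENGINE_RELEASE "2018020000" CACHE STRING "IE source interface in YYYYAABBCC form")
set(WITH_INF_ENGINE ON CACHE BOOL "Enable the Inference Engine backend")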

@@ -85,12 +85,6 @@ else()
set(sources_options EXCLUDE_OPENCL)
endif()
if(HAVE_INF_ENGINE)
add_definitions(-DHAVE_INF_ENGINE=1)
list(APPEND include_dirs ${INF_ENGINE_INCLUDE_DIRS})
list(APPEND libs ${INF_ENGINE_LIBRARIES})
endif()
ocv_module_include_directories(${include_dirs})
if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
ocv_append_source_files_cxx_compiler_options(fw_srcs "-Wno-suggest-override") # GCC
@@ -98,9 +92,9 @@ elseif(CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
ocv_append_source_files_cxx_compiler_options(fw_srcs "-Wno-inconsistent-missing-override") # Clang
endif()
ocv_glob_module_sources(${sources_options} SOURCES ${fw_srcs})
ocv_create_module(${libs})
ocv_create_module(${libs} ${INF_ENGINE_TARGET})
ocv_add_samples()
ocv_add_accuracy_tests()
ocv_add_accuracy_tests(${INF_ENGINE_TARGET})
ocv_add_perf_tests()
ocv_option(${the_module}_PERF_CAFFE "Add performance tests of Caffe framework" OFF)
@@ -120,9 +114,3 @@ if(BUILD_PERF_TESTS)
endif()
endif()
endif()
# Test Intel's Inference Engine models
if(HAVE_INF_ENGINE AND TARGET opencv_test_dnn)
ocv_target_include_directories(opencv_test_dnn PRIVATE ${INF_ENGINE_INCLUDE_DIRS})
ocv_target_link_libraries(opencv_test_dnn LINK_PRIVATE ${INF_ENGINE_LIBRARIES})
endif()
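
With the detection result exposed as an imported target, the dnn module no longer needs explicit add_definitions, include, and link lists: usage requirements travel with the target. A self-contained sketch of that mechanism, using made-up target and path names rather than anything from this commit:

# Hypothetical standalone CMakeLists.txt fragment illustrating imported-target usage requirements.
cmake_minimum_required(VERSION 3.1)
project(ie_demo CXX)
add_library(ie_demo_lib UNKNOWN IMPORTED)
set_target_properties(ie_demo_lib PROPERTIES
  IMPORTED_LOCATION "/opt/ie/lib/intel64/libinference_engine.so"                  # placeholder path
  INTERFACE_INCLUDE_DIRECTORIES "/opt/ie/include"                                 # propagated to consumers
  INTERFACE_COMPILE_DEFINITIONS "HAVE_INF_ENGINE=1;INF_ENGINE_RELEASE=2018020000" # propagated too
)
add_executable(demo_app main.cpp)
target_link_libraries(demo_app PRIVATE ie_demo_lib)  # pulls in includes and definitions automatically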

@@ -334,6 +334,7 @@ size_t InfEngineBackendNet::getBatchSize() const noexcept
return 0;
}
#if INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2018R2)
InferenceEngine::StatusCode InfEngineBackendNet::AddExtension(const InferenceEngine::IShapeInferExtensionPtr &extension, InferenceEngine::ResponseDesc *resp) noexcept
{
CV_Error(Error::StsNotImplemented, "");
@@ -345,6 +346,7 @@ InferenceEngine::StatusCode InfEngineBackendNet::reshape(const InferenceEngine::
CV_Error(Error::StsNotImplemented, "");
return InferenceEngine::StatusCode::OK;
}
#endif
void InfEngineBackendNet::init(int targetId)
{

@@ -21,6 +21,17 @@
#if defined(__GNUC__) && __GNUC__ >= 5
//#pragma GCC diagnostic pop
#endif
#define INF_ENGINE_RELEASE_2018R1 2018010000
#define INF_ENGINE_RELEASE_2018R2 2018020000
#ifndef INF_ENGINE_RELEASE
#warning("IE version has not been provided via the command line. Using 2018R2 by default")
#define INF_ENGINE_RELEASE INF_ENGINE_RELEASE_2018R2
#endif
#define INF_ENGINE_VER_MAJOR_GT(ver) (((INF_ENGINE_RELEASE) / 10000) > ((ver) / 10000))
#endif // HAVE_INF_ENGINE
namespace cv { namespace dnn {
@@ -92,9 +103,10 @@ public:
virtual size_t getBatchSize() const noexcept CV_OVERRIDE;
#if INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2018R2)
virtual InferenceEngine::StatusCode AddExtension(const InferenceEngine::IShapeInferExtensionPtr& extension, InferenceEngine::ResponseDesc* resp) noexcept;
virtual InferenceEngine::StatusCode reshape(const InputShapes& inputShapes, InferenceEngine::ResponseDesc* resp) noexcept;
#endif
void init(int targetId);
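
Because INF_ENGINE_VER_MAJOR_GT divides by 10000 before comparing, only the YYYYAA (year plus release) digits of the YYYYAABBCC number matter; the BBCC patch/build digits never change the outcome. A small standalone C++ illustration with example values (the release macros are copied from the header above, the chosen INF_ENGINE_RELEASE is hypothetical):

// version_check_demo.cpp -- illustrative only; 2018020002 stands for a hypothetical 2018R2.0.2 build.
#include <cstdio>

#define INF_ENGINE_RELEASE_2018R1 2018010000
#define INF_ENGINE_RELEASE_2018R2 2018020000
#define INF_ENGINE_RELEASE 2018020002
#define INF_ENGINE_VER_MAJOR_GT(ver) (((INF_ENGINE_RELEASE) / 10000) > ((ver) / 10000))

int main()
{
    // 2018020002 / 10000 == 201802 and 2018010000 / 10000 == 201801, so this prints 1 (true)
    std::printf("GT 2018R1: %d\n", (int)INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2018R1));
    // 201802 > 201802 is false: patch digits do not make a release "greater" than its own major version
    std::printf("GT 2018R2: %d\n", (int)INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2018R2));
    return 0;
}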
