From a817813b5090808925bc474a31b4d61e43cafc85 Mon Sep 17 00:00:00 2001 From: Anatoliy Talamanov Date: Wed, 26 Jul 2023 14:00:20 +0100 Subject: [PATCH] Merge pull request #24045 from TolyaTalamanov:at/add-onnx-directml-execution-provider G-API: Support DirectML Execution Provider for ONNXRT Backend #24045 ### Pull Request Readiness Checklist See details at https://github.com/opencv/opencv/wiki/How_to_contribute#making-a-good-pull-request - [ ] I agree to contribute to the project under Apache 2 License. - [ ] To the best of my knowledge, the proposed patch is not based on a code under GPL or another license that is incompatible with OpenCV - [ ] The PR is proposed to the proper branch - [ ] There is a reference to the original bug report and related work - [ ] There is accuracy test, performance test and test data in opencv_extra repository, if applicable Patch to opencv_extra has the same branch name. - [ ] The feature is well documented and sample code can be built with the project CMake --- cmake/FindONNX.cmake | 15 +++ modules/gapi/CMakeLists.txt | 4 + .../opencv2/gapi/infer/bindings_onnx.hpp | 5 +- .../gapi/include/opencv2/gapi/infer/onnx.hpp | 99 ++++++++++++------- modules/gapi/misc/python/pyopencv_gapi.hpp | 1 + .../gapi/src/backends/onnx/bindings_onnx.cpp | 10 +- modules/gapi/src/backends/onnx/dml_ep.cpp | 40 ++++++++ modules/gapi/src/backends/onnx/dml_ep.hpp | 23 +++++ .../gapi/src/backends/onnx/gonnxbackend.cpp | 64 +++++++----- 9 files changed, 198 insertions(+), 63 deletions(-) create mode 100644 modules/gapi/src/backends/onnx/dml_ep.cpp create mode 100644 modules/gapi/src/backends/onnx/dml_ep.hpp diff --git a/cmake/FindONNX.cmake b/cmake/FindONNX.cmake index 56dd6d5098..b2c79a9031 100644 --- a/cmake/FindONNX.cmake +++ b/cmake/FindONNX.cmake @@ -16,7 +16,22 @@ if(ONNXRT_ROOT_DIR) CMAKE_FIND_ROOT_PATH_BOTH) endif() +macro(detect_onxxrt_ep filename dir have_ep_var) + find_path(ORT_EP_INCLUDE ${filename} ${dir} CMAKE_FIND_ROOT_PATH_BOTH) + 
if(ORT_EP_INCLUDE) + set(${have_ep_var} TRUE) + endif() +endmacro() + if(ORT_LIB AND ORT_INCLUDE) + # Check DirectML Execution Provider availability + get_filename_component(dml_dir ${ONNXRT_ROOT_DIR}/include/onnxruntime/core/providers/dml ABSOLUTE) + detect_onxxrt_ep( + dml_provider_factory.h + ${dml_dir} + HAVE_ONNX_DML + ) + set(HAVE_ONNX TRUE) # For CMake output only set(ONNX_LIBRARIES "${ORT_LIB}" CACHE STRING "ONNX Runtime libraries") diff --git a/modules/gapi/CMakeLists.txt b/modules/gapi/CMakeLists.txt index 31322d533a..e30cb77e9e 100644 --- a/modules/gapi/CMakeLists.txt +++ b/modules/gapi/CMakeLists.txt @@ -162,6 +162,7 @@ set(gapi_srcs # ONNX backend src/backends/onnx/gonnxbackend.cpp + src/backends/onnx/dml_ep.cpp # Render backend src/backends/render/grenderocv.cpp @@ -366,6 +367,9 @@ endif() if(HAVE_ONNX) ocv_target_link_libraries(${the_module} PRIVATE ${ONNX_LIBRARY}) ocv_target_compile_definitions(${the_module} PRIVATE HAVE_ONNX=1) + if(HAVE_ONNX_DML) + ocv_target_compile_definitions(${the_module} PRIVATE HAVE_ONNX_DML=1) + endif() if(TARGET opencv_test_gapi) ocv_target_compile_definitions(opencv_test_gapi PRIVATE HAVE_ONNX=1) ocv_target_link_libraries(opencv_test_gapi PRIVATE ${ONNX_LIBRARY}) diff --git a/modules/gapi/include/opencv2/gapi/infer/bindings_onnx.hpp b/modules/gapi/include/opencv2/gapi/infer/bindings_onnx.hpp index 0f764fe2cd..4ba829df09 100644 --- a/modules/gapi/include/opencv2/gapi/infer/bindings_onnx.hpp +++ b/modules/gapi/include/opencv2/gapi/infer/bindings_onnx.hpp @@ -34,7 +34,10 @@ public: PyParams& cfgNormalize(const std::string &layer_name, bool flag); GAPI_WRAP - PyParams& cfgExecutionProvider(ep::OpenVINO ov_ep); + PyParams& cfgAddExecutionProvider(ep::OpenVINO ep); + + GAPI_WRAP + PyParams& cfgAddExecutionProvider(ep::DirectML ep); GAPI_WRAP PyParams& cfgDisableMemPattern(); diff --git a/modules/gapi/include/opencv2/gapi/infer/onnx.hpp b/modules/gapi/include/opencv2/gapi/infer/onnx.hpp index 9c2118c3ad..64b855acd7 100644 --- 
a/modules/gapi/include/opencv2/gapi/infer/onnx.hpp +++ b/modules/gapi/include/opencv2/gapi/infer/onnx.hpp @@ -45,28 +45,13 @@ struct GAPI_EXPORTS_W_SIMPLE OpenVINO { /** @brief Class constructor. - Constructs OpenVINO parameters based on device information. + Constructs OpenVINO parameters based on device type information. - @param device Target device to use. + @param dev_type Target device type to use. ("CPU_FP32", "GPU_FP16", etc) */ GAPI_WRAP - OpenVINO(const std::string &device) - : device_id(device) { - } - - /** @brief Specifies OpenVINO Execution Provider device type. - - This function is used to override the accelerator hardware type - and precision at runtime. If this option is not explicitly configured, default - hardware and precision specified during onnxruntime build time is used. - - @param type Device type ("CPU_FP32", "GPU_FP16", etc) - @return reference to this parameter structure. - */ - GAPI_WRAP - OpenVINO& cfgDeviceType(const std::string &type) { - device_type = cv::util::make_optional(type); - return *this; + explicit OpenVINO(const std::string &dev_type) + : device_type(dev_type) { } /** @brief Specifies OpenVINO Execution Provider cache dir. @@ -86,15 +71,14 @@ struct GAPI_EXPORTS_W_SIMPLE OpenVINO { /** @brief Specifies OpenVINO Execution Provider number of threads. This function is used to override the accelerator default value - of number of threads with this value at runtime. If this option - is not explicitly set, default value of 8 is used during build time. + of number of threads with this value at runtime. @param nthreads Number of threads. @return reference to this parameter structure. 
*/ GAPI_WRAP OpenVINO& cfgNumThreads(size_t nthreads) { - num_of_threads = cv::util::make_optional(nthreads); + num_of_threads = nthreads; return *this; } @@ -127,15 +111,39 @@ struct GAPI_EXPORTS_W_SIMPLE OpenVINO { return *this; } - std::string device_id; + std::string device_type; std::string cache_dir; - cv::optional device_type; - cv::optional num_of_threads; + size_t num_of_threads = 0; bool enable_opencl_throttling = false; bool enable_dynamic_shapes = false; }; -using EP = cv::util::variant; +/** + * @brief This structure provides functions + * that fill inference options for ONNX DirectML Execution Provider. + * Please follow https://onnxruntime.ai/docs/execution-providers/DirectML-ExecutionProvider.html#directml-execution-provider + */ +class GAPI_EXPORTS_W_SIMPLE DirectML { +public: + // NB: Used from python. + /// @private -- Exclude this constructor from OpenCV documentation + GAPI_WRAP + DirectML() = default; + + /** @brief Class constructor. + + Constructs DirectML parameters based on device id. + + @param device_id Target device id to use. ("0", "1", etc) + */ + GAPI_WRAP + explicit DirectML(const int device_id) : ddesc(device_id) { }; + + using DeviceDesc = cv::util::variant; + DeviceDesc ddesc; +}; + +using EP = cv::util::variant; } // namespace ep @@ -191,7 +199,7 @@ struct ParamDesc { std::unordered_map > generic_mstd; std::unordered_map generic_norm; - cv::gapi::onnx::ep::EP execution_provider; + std::vector execution_providers; bool disable_mem_pattern; }; } // namespace detail @@ -395,17 +403,31 @@ public: return *this; } - /** @brief Specifies execution provider for runtime. + /** @brief Adds execution provider for runtime. - The function is used to set ONNX Runtime OpenVINO Execution Provider options. + The function is used to add ONNX Runtime OpenVINO Execution Provider options. - @param ovep OpenVINO Execution Provider options. + @param ep OpenVINO Execution Provider options. @see cv::gapi::onnx::ep::OpenVINO. 
@return the reference on modified object. */ - Params& cfgExecutionProvider(ep::OpenVINO&& ovep) { - desc.execution_provider = std::move(ovep); + Params& cfgAddExecutionProvider(ep::OpenVINO&& ep) { + desc.execution_providers.emplace_back(std::move(ep)); + return *this; + } + + /** @brief Adds execution provider for runtime. + + The function is used to add ONNX Runtime DirectML Execution Provider options. + + @param ep DirectML Execution Provider options. + @see cv::gapi::onnx::ep::DirectML. + + @return the reference on modified object. + */ + Params& cfgAddExecutionProvider(ep::DirectML&& ep) { + desc.execution_providers.emplace_back(std::move(ep)); return *this; } @@ -447,20 +469,29 @@ public: Params(const std::string& tag, const std::string& model_path) : desc{model_path, 0u, 0u, {}, {}, {}, {}, {}, {}, {}, {}, {}, true, {}, {}, {}, false }, m_tag(tag) {} + /** @see onnx::Params::cfgMeanStdDev. */ void cfgMeanStdDev(const std::string &layer, const cv::Scalar &m, const cv::Scalar &s) { desc.generic_mstd[layer] = std::make_pair(m, s); } + /** @see onnx::Params::cfgNormalize. */ void cfgNormalize(const std::string &layer, bool flag) { desc.generic_norm[layer] = flag; } - void cfgExecutionProvider(ep::OpenVINO&& ov_ep) { - desc.execution_provider = std::move(ov_ep); + /** @see onnx::Params::cfgAddExecutionProvider. */ + void cfgAddExecutionProvider(ep::OpenVINO&& ep) { + desc.execution_providers.emplace_back(std::move(ep)); + } + + /** @see onnx::Params::cfgAddExecutionProvider. */ + void cfgAddExecutionProvider(ep::DirectML&& ep) { + desc.execution_providers.emplace_back(std::move(ep)); } + /** @see onnx::Params::cfgDisableMemPattern. 
*/ void cfgDisableMemPattern() { desc.disable_mem_pattern = true; } diff --git a/modules/gapi/misc/python/pyopencv_gapi.hpp b/modules/gapi/misc/python/pyopencv_gapi.hpp index bdd0f0232f..60d5f85479 100644 --- a/modules/gapi/misc/python/pyopencv_gapi.hpp +++ b/modules/gapi/misc/python/pyopencv_gapi.hpp @@ -30,6 +30,7 @@ using map_string_and_vector_size_t = std::map> using map_string_and_vector_float = std::map>; using map_int_and_double = std::map; using ep_OpenVINO = cv::gapi::onnx::ep::OpenVINO; +using ep_DirectML = cv::gapi::onnx::ep::DirectML; // NB: Python wrapper generate T_U for T // This behavior is only observed for inputs diff --git a/modules/gapi/src/backends/onnx/bindings_onnx.cpp b/modules/gapi/src/backends/onnx/bindings_onnx.cpp index ada90bd130..6051c6bb4d 100644 --- a/modules/gapi/src/backends/onnx/bindings_onnx.cpp +++ b/modules/gapi/src/backends/onnx/bindings_onnx.cpp @@ -22,8 +22,14 @@ cv::gapi::onnx::PyParams& cv::gapi::onnx::PyParams::cfgNormalize(const std::stri } cv::gapi::onnx::PyParams& -cv::gapi::onnx::PyParams::cfgExecutionProvider(cv::gapi::onnx::ep::OpenVINO ov_ep) { - m_priv->cfgExecutionProvider(std::move(ov_ep)); +cv::gapi::onnx::PyParams::cfgAddExecutionProvider(cv::gapi::onnx::ep::OpenVINO ep) { + m_priv->cfgAddExecutionProvider(std::move(ep)); + return *this; +} + +cv::gapi::onnx::PyParams& +cv::gapi::onnx::PyParams::cfgAddExecutionProvider(cv::gapi::onnx::ep::DirectML ep) { + m_priv->cfgAddExecutionProvider(std::move(ep)); return *this; } diff --git a/modules/gapi/src/backends/onnx/dml_ep.cpp b/modules/gapi/src/backends/onnx/dml_ep.cpp new file mode 100644 index 0000000000..7f59e1f3d6 --- /dev/null +++ b/modules/gapi/src/backends/onnx/dml_ep.cpp @@ -0,0 +1,40 @@ +// This file is part of OpenCV project. +// It is subject to the license terms in the LICENSE file found in the top-level directory +// of this distribution and at http://opencv.org/license.html. 
+// +// Copyright (C) 2023 Intel Corporation + +#include "backends/onnx/dml_ep.hpp" +#include "logger.hpp" + +#ifdef HAVE_ONNX +#include + +#ifdef HAVE_ONNX_DML +#include "../providers/dml/dml_provider_factory.h" + +void cv::gimpl::onnx::addDMLExecutionProvider(Ort::SessionOptions *session_options, + const cv::gapi::onnx::ep::DirectML &dml_ep) { + namespace ep = cv::gapi::onnx::ep; + GAPI_Assert(cv::util::holds_alternative(dml_ep.ddesc)); + const int device_id = cv::util::get(dml_ep.ddesc); + try { + OrtSessionOptionsAppendExecutionProvider_DML(*session_options, device_id); + } catch (const std::exception &e) { + std::stringstream ss; + ss << "ONNX Backend: Failed to enable DirectML" + << " Execution Provider: " << e.what(); + cv::util::throw_error(std::runtime_error(ss.str())); + } +} + +#else // HAVE_ONNX_DML + +void cv::gimpl::onnx::addDMLExecutionProvider(Ort::SessionOptions*, + const cv::gapi::onnx::ep::DirectML&) { + util::throw_error(std::runtime_error("G-API has been compiled with ONNXRT" + " without DirectML support")); +} + +#endif // HAVE_ONNX_DML +#endif // HAVE_ONNX diff --git a/modules/gapi/src/backends/onnx/dml_ep.hpp b/modules/gapi/src/backends/onnx/dml_ep.hpp new file mode 100644 index 0000000000..d7e43dc888 --- /dev/null +++ b/modules/gapi/src/backends/onnx/dml_ep.hpp @@ -0,0 +1,23 @@ +// This file is part of OpenCV project. +// It is subject to the license terms in the LICENSE file found in the top-level directory +// of this distribution and at http://opencv.org/license.html. 
+// +// Copyright (C) 2023 Intel Corporation + +#ifndef OPENCV_GAPI_DML_EP_HPP +#define OPENCV_GAPI_DML_EP_HPP + +#include "opencv2/gapi/infer/onnx.hpp" +#ifdef HAVE_ONNX + +#include + +namespace cv { +namespace gimpl { +namespace onnx { +void addDMLExecutionProvider(Ort::SessionOptions *session_options, + const cv::gapi::onnx::ep::DirectML &dml_ep); +}}} + +#endif // HAVE_ONNX +#endif // OPENCV_GAPI_DML_EP_HPP diff --git a/modules/gapi/src/backends/onnx/gonnxbackend.cpp b/modules/gapi/src/backends/onnx/gonnxbackend.cpp index 79bdad93d9..b90d4d6974 100644 --- a/modules/gapi/src/backends/onnx/gonnxbackend.cpp +++ b/modules/gapi/src/backends/onnx/gonnxbackend.cpp @@ -9,6 +9,8 @@ #ifdef HAVE_ONNX +#include "backends/onnx/dml_ep.hpp" + #include // any_of #include #include @@ -143,37 +145,44 @@ public: void run(); }; -static void appendExecutionProvider(Ort::SessionOptions *session_options, - const cv::gapi::onnx::ep::EP &execution_provider) { +static void addOpenVINOExecutionProvider(Ort::SessionOptions *session_options, + const cv::gapi::onnx::ep::OpenVINO &ov_ep) { + OrtOpenVINOProviderOptions options; + options.device_type = ov_ep.device_type.c_str(); + options.cache_dir = ov_ep.cache_dir.c_str(); + options.num_of_threads = ov_ep.num_of_threads; + options.enable_opencl_throttling = ov_ep.enable_opencl_throttling; + options.enable_dynamic_shapes = ov_ep.enable_dynamic_shapes; + options.context = nullptr; + + try { + session_options->AppendExecutionProvider_OpenVINO(options); + } catch (const std::exception &e) { + std::stringstream ss; + ss << "ONNX Backend: Failed to enable OpenVINO" + << " Execution Provider: " << e.what(); + cv::util::throw_error(std::runtime_error(ss.str())); + } +} + +static void addExecutionProvider(Ort::SessionOptions *session_options, + const cv::gapi::onnx::ep::EP &execution_provider) { namespace ep = cv::gapi::onnx::ep; switch (execution_provider.index()) { case ep::EP::index_of(): { - GAPI_LOG_INFO(NULL, "OpenVINO Execution Provider is 
selected."); - const auto &ovep = cv::util::get(execution_provider); - OrtOpenVINOProviderOptions options; - options.device_id = ovep.device_id.c_str(); - options.cache_dir = ovep.cache_dir.c_str(); - options.enable_opencl_throttling = ovep.enable_opencl_throttling; - options.enable_dynamic_shapes = ovep.enable_dynamic_shapes; - // NB: If are not specified, will be taken from onnxruntime build. - if (ovep.device_type) { - options.device_type = ovep.device_type->c_str(); - } - if (ovep.num_of_threads) { - options.num_of_threads = *ovep.num_of_threads; - } - try { - session_options->AppendExecutionProvider_OpenVINO(options); - } catch (const std::exception &e) { - std::stringstream ss; - ss << "ONNX Backend: Failed to enable OpenVINO Execution Provider: " - << e.what() << "\nMake sure that onnxruntime has" - " been compiled with OpenVINO support."; - cv::util::throw_error(std::runtime_error(ss.str())); - } + GAPI_LOG_INFO(NULL, "OpenVINO Execution Provider is added."); + const auto &ov_ep = cv::util::get(execution_provider); + addOpenVINOExecutionProvider(session_options, ov_ep); break; } + case ep::EP::index_of(): { + GAPI_LOG_INFO(NULL, "DirectML Execution Provider is added."); + const auto &dml_ep = cv::util::get(execution_provider); + addDMLExecutionProvider(session_options, dml_ep); + break; + } default: + GAPI_LOG_INFO(NULL, "CPU Execution Provider is added."); break; } } @@ -629,7 +638,10 @@ ONNXCompiled::ONNXCompiled(const gapi::onnx::detail::ParamDesc &pp) } // Create and initialize the ONNX session Ort::SessionOptions session_options; - cv::gimpl::onnx::appendExecutionProvider(&session_options, pp.execution_provider); + GAPI_LOG_INFO(NULL, "Adding Execution Providers for \"" << pp.model_path << "\""); + for (const auto &ep : pp.execution_providers) { + cv::gimpl::onnx::addExecutionProvider(&session_options, ep); + } if (pp.disable_mem_pattern) { session_options.DisableMemPattern();