Apply ov::Core WA

pull/23859/head
TolyaTalamanov 1 year ago
parent 2849a774e3
commit c0fda696f3
 modules/gapi/src/backends/ov/govbackend.cpp     | 42
 modules/gapi/src/backends/ov/util.hpp           | 10
 modules/gapi/test/infer/gapi_infer_ov_tests.cpp | 11

modules/gapi/src/backends/ov/govbackend.cpp
@@ -18,6 +18,7 @@
 #include <opencv2/gapi/gcommon.hpp>
 #include <opencv2/gapi/infer/ov.hpp>
+#include <opencv2/core/utils/configuration.private.hpp> // getConfigurationParameterBool

 #if defined(HAVE_TBB)
 # include <tbb/concurrent_queue.h> // FIXME: drop it from here!
@@ -37,11 +38,37 @@ template<typename T> using QueueClass = cv::gapi::own::concurrent_bounded_queue<
 using ParamDesc = cv::gapi::ov::detail::ParamDesc;

-static ov::Core getCore() {
+// NB: Some OV plugins fail during ov::Core destruction in specific cases.
+// The workaround is to allocate ov::Core on the heap and never destroy it,
+// which leaks but keeps the CI tests passing. This behaviour is configurable
+// via OPENCV_GAPI_INFERENCE_ENGINE_CORE_LIFETIME_WORKAROUND=0
+static ov::Core create_OV_Core_pointer() {
+    // NB: 'delete' is never called
+    static ov::Core* core = new ov::Core();
+    return *core;
+}
+
+static ov::Core create_OV_Core_instance() {
     static ov::Core core;
     return core;
 }
+
+ov::Core cv::gapi::ov::wrap::getCore() {
+    // NB: to make memory leak tools happy, use:
+    // - OPENCV_GAPI_INFERENCE_ENGINE_CORE_LIFETIME_WORKAROUND=0
+    static bool param_GAPI_INFERENCE_ENGINE_CORE_LIFETIME_WORKAROUND =
+        utils::getConfigurationParameterBool(
+            "OPENCV_GAPI_INFERENCE_ENGINE_CORE_LIFETIME_WORKAROUND",
+#if defined(_WIN32) || defined(__APPLE__)
+            true
+#else
+            false
+#endif
+        );
+    return param_GAPI_INFERENCE_ENGINE_CORE_LIFETIME_WORKAROUND
+        ? create_OV_Core_pointer() : create_OV_Core_instance();
+}

 static ov::AnyMap toOV(const ParamDesc::PluginConfigT &config) {
     return {config.begin(), config.end()};
 }
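Aside, not part of the commit: the change above boils down to an intentionally leaked singleton selected by a boolean configuration flag. A minimal standalone sketch of that pattern, using illustrative names (Resource, LEAK_WORKAROUND) that do not exist in OpenCV or OpenVINO:

    #include <cstdlib>
    #include <iostream>
    #include <string>

    struct Resource {
        Resource()  { std::cout << "created\n"; }
        ~Resource() { std::cout << "destroyed\n"; } // never runs when leaked
    };

    // Heap-allocated, never deleted: the destructor is skipped at process exit.
    static Resource& create_pointer() {
        static Resource* r = new Resource();
        return *r;
    }

    // Plain function-local static: destroyed during normal static teardown.
    static Resource& create_instance() {
        static Resource r;
        return r;
    }

    static Resource& get_resource() {
        // Stand-in for utils::getConfigurationParameterBool: the env var
        // toggles between the leaking and the non-leaking variant.
        static const char* v = std::getenv("LEAK_WORKAROUND");
        static bool leak = (v == nullptr) || std::string(v) != "0";
        return leak ? create_pointer() : create_instance();
    }

    int main() {
        get_resource();
        // With the workaround enabled, "destroyed" is never printed, so nothing
        // can fail inside the destructor at shutdown, which is the ov::Core
        // failure mode the comment above describes.
    }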
@@ -175,7 +202,8 @@ struct OVUnit {
         // FIXME: Can this logic be encapsulated to prevent checking every time?
         if (cv::util::holds_alternative<ParamDesc::Model>(params.kind)) {
             const auto desc = cv::util::get<ParamDesc::Model>(params.kind);
-            model = getCore().read_model(desc.model_path, desc.bin_path);
+            model = cv::gapi::ov::wrap::getCore()
+                .read_model(desc.model_path, desc.bin_path);
             GAPI_Assert(model);

             if (params.num_in == 1u && params.input_names.empty()) {
@@ -190,9 +218,8 @@ struct OVUnit {
             std::ifstream file(cv::util::get<ParamDesc::CompiledModel>(params.kind).blob_path,
                                std::ios_base::in | std::ios_base::binary);
             GAPI_Assert(file.is_open());
-            compiled_model = getCore().import_model(file,
-                                                    params.device,
-                                                    toOV(params.config));
+            compiled_model = cv::gapi::ov::wrap::getCore()
+                .import_model(file, params.device, toOV(params.config));

             if (params.num_in == 1u && params.input_names.empty()) {
                 params.input_names = { compiled_model.inputs().begin()->get_any_name() };
@@ -205,9 +232,8 @@ struct OVUnit {

     cv::gimpl::ov::OVCompiled compile() {
         if (cv::util::holds_alternative<ParamDesc::Model>(params.kind)) {
-            compiled_model = getCore().compile_model(model,
-                                                     params.device,
-                                                     toOV(params.config));
+            compiled_model = cv::gapi::ov::wrap::getCore()
+                .compile_model(model, params.device, toOV(params.config));
         }
         return {compiled_model};
     }

modules/gapi/src/backends/ov/util.hpp
@@ -22,15 +22,19 @@ namespace cv {
 namespace gapi {
 namespace ov {
 namespace util {

 // NB: These functions are EXPORTed to make them accessible by the
 // test suite only.
 GAPI_EXPORTS std::vector<int> to_ocv(const ::ov::Shape &shape);
 GAPI_EXPORTS int to_ocv(const ::ov::element::Type &type);
 GAPI_EXPORTS void to_ov(const cv::Mat &mat, ::ov::Tensor &tensor);
 GAPI_EXPORTS void to_ocv(const ::ov::Tensor &tensor, cv::Mat &mat);

-}}}}
+} // namespace util
+
+namespace wrap {
+GAPI_EXPORTS ::ov::Core getCore();
+} // namespace wrap
+
+} // namespace ov
+} // namespace gapi
+} // namespace cv

 #endif // HAVE_INF_ENGINE && INF_ENGINE_RELEASE >= 2022010000
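Aside, not part of the commit: a hedged sketch of how test code can reach the shared core and the conversion helpers through this header once wrap::getCore() is exported. The include path and the model file names are assumptions, not taken from the repository:

    #include "backends/ov/util.hpp" // assumed path, relative to modules/gapi/src
    #include <openvino/openvino.hpp>
    #include <vector>

    static void use_shared_core() {
        // One process-wide core, possibly leaked depending on the workaround flag.
        ::ov::Core core = cv::gapi::ov::wrap::getCore();
        auto model = core.read_model("model.xml", "model.bin"); // placeholder paths

        // ov::Shape -> OpenCV-style dims via the exported helper declared above,
        // assuming a single-input model with a static shape.
        std::vector<int> dims = cv::gapi::ov::util::to_ocv(model->input().get_shape());
        (void)dims;
    }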

modules/gapi/test/infer/gapi_infer_ov_tests.cpp
@@ -52,11 +52,6 @@ void normAssert(cv::InputArray ref, cv::InputArray test,
     EXPECT_LE(normInf, lInf) << comment;
 }

-ov::Core getCore() {
-    static ov::Core core;
-    return core;
-}
-
 // TODO: AGNetGenComp, AGNetTypedComp, AGNetOVComp, AGNetOVCompiled
 // can be generalized to work with any model and used as parameters for tests.
@@ -228,7 +223,8 @@ public:
                 const std::string &bin_path,
                 const std::string &device)
         : m_device(device) {
-        m_model = getCore().read_model(xml_path, bin_path);
+        m_model = cv::gapi::ov::wrap::getCore()
+            .read_model(xml_path, bin_path);
     }

     using PrePostProcessF = std::function<void(ov::preprocess::PrePostProcessor&)>;
@@ -240,7 +236,8 @@ public:
     }

     AGNetOVCompiled compile() {
-        auto compiled_model = getCore().compile_model(m_model, m_device);
+        auto compiled_model = cv::gapi::ov::wrap::getCore()
+            .compile_model(m_model, m_device);
         return {std::move(compiled_model)};
     }
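With the local getCore() helper removed, the tests and the OV backend share a single ov::Core obtained through cv::gapi::ov::wrap::getCore(), so the lifetime workaround applies consistently to both. Per the comment in govbackend.cpp, running with OPENCV_GAPI_INFERENCE_ENGINE_CORE_LIFETIME_WORKAROUND=0 selects the plain static instance, which keeps memory-leak tools happy at the cost of running the ov::Core destructor that the workaround exists to avoid.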
