Merge pull request #17668 from OrestChura:oc/giebackend_migration_to_core

GAPI: Migration to IE Core API

* Migration to IE Core API
 - both API versions (the legacy plugin API and the new Core API) are maintained
 - checked the build with all the OpenVINO versions (2019.R1, R2, R3, 2020.4 (the newest))

* Commit to wake up the builders

* Addressing comments
 - migrated to the Core API in 'gapi_infer_ie_test.cpp'
 - made Core a singleton object (see the sketch after this group)
 - dropped redundant steps
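
A minimal sketch of the singleton approach, assuming the post-2019.R2 Core API is available; it mirrors the getCore() helper introduced in giewrapper.cpp further below:

    // Sketch only: a function-local static gives one IE::Core shared by all callers;
    // its initialization is thread-safe since C++11.
    #include <inference_engine.hpp>

    namespace IE = InferenceEngine;

    IE::Core getCore() {
        static IE::Core core;  // constructed once, on first use
        return core;
    }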

* Addressing comments
 - modified Mutex locking

* Update

* Addressing comments
 - remove getInitMutex()
 - reduce the amount of #ifdefs by abstracting the version-dependent code into functions

* Return to a single IE::Core

* Divide the readNet and loadNet functions to avoid warnings on GCC

* Fix deprecated code warnings

* Fix deprecated code warnings on CMake level

* Functions wrapped
 - All the functions that depend on the IE version are wrapped into a cv::gapi::ie::wrap namespace (see the usage sketch after this group)
 - All of this is contained in a new "giebackend/gieapi.hpp" header
 - The header is shared with the G-API infer tests to avoid code duplication
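
For reference, a rough usage sketch of the wrapper from the caller's point of view, using the final cv::gimpl::ie::wrap naming (the namespace is renamed in a later commit below); the runInference() function is made up for illustration and blob setup is omitted. The same calling code works for both IE API generations because the version checks live inside the wrapper:

    // Sketch: the version-dependent IE calls are hidden behind the wrapper functions.
    #include "backends/ie/giebackend/giewrapper.hpp"

    void runInference(const cv::gapi::ie::detail::ParamDesc &params) {
        auto plugin  = cv::gimpl::ie::wrap::getPlugin(params);   // InferencePlugin or Core
        auto net     = cv::gimpl::ie::wrap::readNetwork(params); // CNNNetReader or Core::ReadNetwork
        auto exe_net = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
        auto request = exe_net.CreateInferRequest();
        // ...set input blobs via request.SetBlob(...), then:
        request.Infer();
    }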

* Addressing comments
 - Renamed `gieapi.hpp` -> `giewrapper.hpp`, `cv::gapi::ie::wrap` -> `cv::gimpl::ie::wrap`
 - Created a new `giewrapper.cpp` source file to avoid potential "multiple definition" problems
 - Removed the unnecessary SetLayout() step in tests

* Enabling the two-NN infer test

* Two-NN infer test change for CI
 - deleted the additional network
 - inference of two identical NNs is used instead

* Fix CI fileNotFound

* Disable the MYRIAD test so that it does not fail custom CI runs
Branch: pull/17866/head
Author: Orest Chura (committed via GitHub)
Commit: d17ab271e8 (parent: a216b8bf87)
Changed files:
  1. modules/gapi/CMakeLists.txt (5 lines changed)
  2. modules/gapi/src/backends/ie/giebackend.cpp (69 lines changed)
  3. modules/gapi/src/backends/ie/giebackend.hpp (26 lines changed)
  4. modules/gapi/src/backends/ie/giebackend/giewrapper.cpp (123 lines changed)
  5. modules/gapi/src/backends/ie/giebackend/giewrapper.hpp (51 lines changed)
  6. modules/gapi/test/infer/gapi_infer_ie_test.cpp (173 lines changed)

modules/gapi/CMakeLists.txt

@@ -30,6 +30,10 @@ if(MSVC)
   # Disable obsollete warning C4503 popping up on MSVC <<2017
   # https://docs.microsoft.com/en-us/cpp/error-messages/compiler-warnings/compiler-warning-level-1-c4503?view=vs-2019
   ocv_warnings_disable(CMAKE_CXX_FLAGS /wd4503)
+  if (OPENCV_GAPI_INF_ENGINE AND NOT INF_ENGINE_RELEASE VERSION_GREATER "2021000000")
+    # Disable IE deprecated code warning C4996 for releases < 2021.1
+    ocv_warnings_disable(CMAKE_CXX_FLAGS /wd4996)
+  endif()
 endif()

 file(GLOB gapi_ext_hdrs
@@ -116,6 +120,7 @@ set(gapi_srcs
     # IE Backend. FIXME: should be included by CMake
     # if and only if IE support is enabled
     src/backends/ie/giebackend.cpp
+    src/backends/ie/giebackend/giewrapper.cpp

     # Render Backend.
     src/backends/render/grenderocvbackend.cpp

modules/gapi/src/backends/ie/giebackend.cpp

@@ -2,7 +2,7 @@
 // It is subject to the license terms in the LICENSE file found in the top-level directory
 // of this distribution and at http://opencv.org/license.html.
 //
-// Copyright (C) 2018 Intel Corporation
+// Copyright (C) 2018-2020 Intel Corporation

 #include "precomp.hpp"
@@ -41,6 +41,7 @@
 #include "compiler/gmodel.hpp"

 #include "backends/ie/util.hpp"
+#include "backends/ie/giebackend/giewrapper.hpp"

 #include "api/gbackend_priv.hpp" // FIXME: Make it part of Backend SDK!
@@ -176,14 +177,9 @@ struct IEUnit {
     explicit IEUnit(const cv::gapi::ie::detail::ParamDesc &pp)
         : params(pp) {
-        IE::CNNNetReader reader;
-        reader.ReadNetwork(params.model_path);
-        reader.ReadWeights(params.weights_path);
-        net = reader.getNetwork();
+        net = cv::gimpl::ie::wrap::readNetwork(params);
         inputs  = net.getInputsInfo();
         outputs = net.getOutputsInfo();
         // The practice shows that not all inputs and not all outputs
         // are mandatory to specify in IE model.
         // So what we're concerned here about is:
@@ -208,55 +204,9 @@
     }

     // This method is [supposed to be] called at Island compilation stage
-    // TODO: Move to a new OpenVINO Core API!
     cv::gimpl::ie::IECompiled compile() const {
-        auto this_plugin = IE::PluginDispatcher().getPluginByDevice(params.device_id);
-
-#if INF_ENGINE_RELEASE < 2020000000  // <= 2019.R3
-        // Load extensions (taken from DNN module)
-        if (params.device_id == "CPU" || params.device_id == "FPGA")
-        {
-            const std::string suffixes[] = { "_avx2", "_sse4", ""};
-            const bool haveFeature[] = {
-                cv::checkHardwareSupport(CPU_AVX2),
-                cv::checkHardwareSupport(CPU_SSE4_2),
-                true
-            };
-            std::vector<std::string> candidates;
-            for (auto &&it : ade::util::zip(ade::util::toRange(suffixes),
-                                            ade::util::toRange(haveFeature)))
-            {
-                std::string suffix;
-                bool available = false;
-                std::tie(suffix, available) = it;
-                if (!available) continue;
-#ifdef _WIN32
-                candidates.push_back("cpu_extension" + suffix + ".dll");
-#elif defined(__APPLE__)
-                candidates.push_back("libcpu_extension" + suffix + ".so");    // built as loadable module
-                candidates.push_back("libcpu_extension" + suffix + ".dylib"); // built as shared library
-#else
-                candidates.push_back("libcpu_extension" + suffix + ".so");
-#endif // _WIN32
-            }
-            for (auto &&extlib : candidates)
-            {
-                try
-                {
-                    this_plugin.AddExtension(IE::make_so_pointer<IE::IExtension>(extlib));
-                    CV_LOG_INFO(NULL, "DNN-IE: Loaded extension plugin: " << extlib);
-                    break;
-                }
-                catch(...)
-                {
-                    CV_LOG_INFO(NULL, "Failed to load IE extension: " << extlib);
-                }
-            }
-        }
-#endif
-
-        auto this_network = this_plugin.LoadNetwork(net, {}); // FIXME: 2nd parameter to be
-                                                              // configurable via the API
+        auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
+        auto this_network = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
         auto this_request = this_network.CreateInferRequest();

         // Bind const data to infer request
@@ -267,8 +217,7 @@
         // Still, constant data is to set only once.
             this_request.SetBlob(p.first, wrapIE(p.second.first, p.second.second));
         }
-        return {this_plugin, this_network, this_request};
+        return {plugin, this_network, this_request};
     }
 };
@@ -796,18 +745,18 @@ cv::gapi::GBackend cv::gapi::ie::backend() {
     return this_backend;
 }

-cv::Mat cv::gapi::ie::util::to_ocv(InferenceEngine::Blob::Ptr blob) {
+cv::Mat cv::gapi::ie::util::to_ocv(IE::Blob::Ptr blob) {
     const auto& tdesc = blob->getTensorDesc();
     return cv::Mat(toCV(tdesc.getDims()),
                    toCV(tdesc.getPrecision()),
                    blob->buffer().as<uint8_t*>());
 }

-std::vector<int> cv::gapi::ie::util::to_ocv(const InferenceEngine::SizeVector &dims) {
+std::vector<int> cv::gapi::ie::util::to_ocv(const IE::SizeVector &dims) {
     return toCV(dims);
 }

-InferenceEngine::Blob::Ptr cv::gapi::ie::util::to_ie(cv::Mat &blob) {
+IE::Blob::Ptr cv::gapi::ie::util::to_ie(cv::Mat &blob) {
     return wrapIE(blob, cv::gapi::ie::TraitAs::IMAGE);
 }

modules/gapi/src/backends/ie/giebackend.hpp

@@ -2,7 +2,7 @@
 // It is subject to the license terms in the LICENSE file found in the top-level directory
 // of this distribution and at http://opencv.org/license.html.
 //
-// Copyright (C) 2018 Intel Corporation
+// Copyright (C) 2018-2020 Intel Corporation

 #ifndef OPENCV_GAPI_GIEBACKEND_HPP
 #define OPENCV_GAPI_GIEBACKEND_HPP
@@ -14,28 +14,8 @@
 #include <ade/util/algorithm.hpp> // type_list_index

-////////////////////////////////////////////////////////////////////////////////
-// FIXME: Suppress deprecation warnings for OpenVINO 2019R2+
-// BEGIN {{{
-#if defined(__GNUC__)
-#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
-#endif
-#ifdef _MSC_VER
-#pragma warning(disable: 4996) // was declared deprecated
-#endif
-#if defined(__GNUC__)
-#pragma GCC visibility push(default)
-#endif
 #include <inference_engine.hpp>
-#if defined(__GNUC__)
-#pragma GCC visibility pop
-#endif
-// END }}}
-////////////////////////////////////////////////////////////////////////////////

 #include <opencv2/gapi/garg.hpp>
 #include <opencv2/gapi/gproto.hpp>
@@ -48,7 +28,11 @@ namespace gimpl {
 namespace ie {

 struct IECompiled {
+#if INF_ENGINE_RELEASE < 2019020000 // < 2019.R2
     InferenceEngine::InferencePlugin   this_plugin;
+#else
+    InferenceEngine::Core              this_core;
+#endif
     InferenceEngine::ExecutableNetwork this_network;
     InferenceEngine::InferRequest      this_request;
 };

modules/gapi/src/backends/ie/giebackend/giewrapper.cpp (new file)

@@ -0,0 +1,123 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2020 Intel Corporation

#ifdef HAVE_INF_ENGINE

#include <vector>
#include <string>
#include <tuple>

#include "backends/ie/giebackend/giewrapper.hpp"

#include <ade/util/range.hpp>
#include <ade/util/zip_range.hpp>

#include <opencv2/core/utility.hpp>
#include <opencv2/core/utils/logger.hpp>

namespace IE = InferenceEngine;
namespace giewrap = cv::gimpl::ie::wrap;
using GIEParam = cv::gapi::ie::detail::ParamDesc;

#if INF_ENGINE_RELEASE < 2020000000  // < 2020.1
// Load extensions (taken from DNN module)
std::vector<std::string> giewrap::getExtensions(const GIEParam& params) {
    std::vector<std::string> candidates;
    if (params.device_id == "CPU" || params.device_id == "FPGA")
    {
        const std::string suffixes[] = { "_avx2", "_sse4", ""};
        const bool haveFeature[] = {
            cv::checkHardwareSupport(CPU_AVX2),
            cv::checkHardwareSupport(CPU_SSE4_2),
            true
        };
        for (auto &&it : ade::util::zip(ade::util::toRange(suffixes),
                                        ade::util::toRange(haveFeature)))
        {
            std::string suffix;
            bool available = false;
            std::tie(suffix, available) = it;
            if (!available) continue;
#ifdef _WIN32
            candidates.push_back("cpu_extension" + suffix + ".dll");
#elif defined(__APPLE__)
            candidates.push_back("libcpu_extension" + suffix + ".so");    // built as loadable module
            candidates.push_back("libcpu_extension" + suffix + ".dylib"); // built as shared library
#else
            candidates.push_back("libcpu_extension" + suffix + ".so");
#endif // _WIN32
        }
    }
    return candidates;
}

IE::CNNNetwork giewrap::readNetwork(const GIEParam& params) {
    IE::CNNNetReader reader;
    reader.ReadNetwork(params.model_path);
    reader.ReadWeights(params.weights_path);
    return reader.getNetwork();
}
#else // >= 2020.1
std::vector<std::string> giewrap::getExtensions(const GIEParam&) {
    return std::vector<std::string>();
}

IE::CNNNetwork giewrap::readNetwork(const GIEParam& params) {
    auto core = giewrap::getCore();
    return core.ReadNetwork(params.model_path, params.weights_path);
}
#endif // INF_ENGINE_RELEASE < 2020000000

#if INF_ENGINE_RELEASE < 2019020000 // < 2019.R2
IE::InferencePlugin giewrap::getPlugin(const GIEParam& params) {
    auto plugin = IE::PluginDispatcher().getPluginByDevice(params.device_id);
    if (params.device_id == "CPU" || params.device_id == "FPGA")
    {
        for (auto &&extlib : giewrap::getExtensions(params))
        {
            try
            {
                plugin.AddExtension(IE::make_so_pointer<IE::IExtension>(extlib));
                CV_LOG_INFO(NULL, "DNN-IE: Loaded extension plugin: " << extlib);
                break;
            }
            catch(...)
            {
                CV_LOG_INFO(NULL, "Failed to load IE extension: " << extlib);
            }
        }
    }
    return plugin;
}
#else // >= 2019.R2
IE::Core giewrap::getCore() {
    static IE::Core core;
    return core;
}

IE::Core giewrap::getPlugin(const GIEParam& params) {
    auto plugin = giewrap::getCore();
    if (params.device_id == "CPU" || params.device_id == "FPGA")
    {
        for (auto &&extlib : giewrap::getExtensions(params))
        {
            try
            {
                plugin.AddExtension(IE::make_so_pointer<IE::IExtension>(extlib), params.device_id);
                CV_LOG_INFO(NULL, "DNN-IE: Loaded extension plugin: " << extlib);
                break;
            }
            catch(...)
            {
                CV_LOG_INFO(NULL, "Failed to load IE extension: " << extlib);
            }
        }
    }
    return plugin;
}
#endif // INF_ENGINE_RELEASE < 2019020000

#endif //HAVE_INF_ENGINE

modules/gapi/src/backends/ie/giebackend/giewrapper.hpp (new file)

@@ -0,0 +1,51 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2020 Intel Corporation

#ifndef OPENCV_GAPI_IEWRAPPER_HPP
#define OPENCV_GAPI_IEWRAPPER_HPP

#ifdef HAVE_INF_ENGINE

#include <inference_engine.hpp>

#include <vector>
#include <string>

#include "opencv2/gapi/infer/ie.hpp"

namespace IE = InferenceEngine;
using GIEParam = cv::gapi::ie::detail::ParamDesc;

namespace cv {
namespace gimpl {
namespace ie {
namespace wrap {
// NB: These functions are EXPORTed to make them accessible by the
// test suite only.
GAPI_EXPORTS std::vector<std::string> getExtensions(const GIEParam& params);
GAPI_EXPORTS IE::CNNNetwork readNetwork(const GIEParam& params);

#if INF_ENGINE_RELEASE < 2019020000 // < 2019.R2
GAPI_EXPORTS IE::InferencePlugin getPlugin(const GIEParam& params);
GAPI_EXPORTS inline IE::ExecutableNetwork loadNetwork(      IE::InferencePlugin& plugin,
                                                      const IE::CNNNetwork&      net,
                                                      const GIEParam&) {
    return plugin.LoadNetwork(net, {}); // FIXME: 2nd parameter to be
                                        // configurable via the API
}
#else // >= 2019.R2
GAPI_EXPORTS IE::Core getCore();
GAPI_EXPORTS IE::Core getPlugin(const GIEParam& params);
GAPI_EXPORTS inline IE::ExecutableNetwork loadNetwork(      IE::Core&       core,
                                                      const IE::CNNNetwork& net,
                                                      const GIEParam&       params) {
    return core.LoadNetwork(net, params.device_id);
}
#endif // INF_ENGINE_RELEASE < 2019020000
}}}}

#endif //HAVE_INF_ENGINE
#endif // OPENCV_GAPI_IEWRAPPER_HPP

modules/gapi/test/infer/gapi_infer_ie_test.cpp

@@ -2,7 +2,7 @@
 // It is subject to the license terms in the LICENSE file found in the top-level directory
 // of this distribution and at http://opencv.org/license.html.
 //
-// Copyright (C) 2019 Intel Corporation
+// Copyright (C) 2019-2020 Intel Corporation

 #include "../test_precomp.hpp"
@@ -10,33 +10,14 @@
 #include <stdexcept>

-////////////////////////////////////////////////////////////////////////////////
-// FIXME: Suppress deprecation warnings for OpenVINO 2019R2+
-// BEGIN {{{
-#if defined(__GNUC__)
-#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
-#endif
-#ifdef _MSC_VER
-#pragma warning(disable: 4996) // was declared deprecated
-#endif
-#if defined(__GNUC__)
-#pragma GCC visibility push(default)
-#endif
 #include <inference_engine.hpp>
-#if defined(__GNUC__)
-#pragma GCC visibility pop
-#endif
-// END }}}
-////////////////////////////////////////////////////////////////////////////////

 #include <ade/util/iota_range.hpp>

 #include <opencv2/gapi/infer/ie.hpp>

 #include "backends/ie/util.hpp"
+#include "backends/ie/giebackend/giewrapper.hpp"

 namespace opencv_test
 {
@@ -78,8 +59,10 @@ void normAssert(cv::InputArray ref, cv::InputArray test,
 std::vector<std::string> modelPathByName(const std::string &model_name) {
     // Handle OMZ model layout changes among OpenVINO versions here
     static const std::unordered_multimap<std::string, std::string> map = {
+#if INF_ENGINE_RELEASE >= 2019040000 // >= 2019.R4
         {"age-gender-recognition-retail-0013",
          "2020.3.0/intel/age-gender-recognition-retail-0013/FP32"},
+#endif // INF_ENGINE_RELEASE >= 2019040000
         {"age-gender-recognition-retail-0013",
          "Retail/object_attributes/age_gender/dldt"},
     };
@@ -113,6 +96,13 @@ std::tuple<std::string, std::string> findModel(const std::string &model_name) {
     throw SkipTestException("Files for " + model_name + " were not found");
 }

+namespace IE = InferenceEngine;
+
+void setNetParameters(IE::CNNNetwork& net) {
+    auto &ii = net.getInputsInfo().at("data");
+    ii->setPrecision(IE::Precision::U8);
+    ii->getPreProcess().setResizeAlgorithm(IE::RESIZE_BILINEAR);
+}
 } // anonymous namespace

 // TODO: Probably DNN/IE part can be further parametrized with a template
@@ -121,30 +111,26 @@ TEST(TestAgeGenderIE, InferBasicTensor)
 {
     initDLDTDataPath();

-    std::string topology_path, weights_path;
-    std::tie(topology_path, weights_path) = findModel("age-gender-recognition-retail-0013");
+    cv::gapi::ie::detail::ParamDesc params;
+    std::tie(params.model_path, params.weights_path) = findModel("age-gender-recognition-retail-0013");
+    params.device_id = "CPU";

     // Load IE network, initialize input data using that.
-    namespace IE = InferenceEngine;
     cv::Mat in_mat;
     cv::Mat gapi_age, gapi_gender;

     IE::Blob::Ptr ie_age, ie_gender;
     {
-        IE::CNNNetReader reader;
-        reader.ReadNetwork(topology_path);
-        reader.ReadWeights(weights_path);
-        auto net = reader.getNetwork();
+        auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
+        auto net = cv::gimpl::ie::wrap::readNetwork(params);
+        auto this_network = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
+        auto infer_request = this_network.CreateInferRequest();
         const auto &iedims = net.getInputsInfo().begin()->second->getTensorDesc().getDims();
         auto cvdims = cv::gapi::ie::util::to_ocv(iedims);
         in_mat.create(cvdims, CV_32F);
         cv::randu(in_mat, -1, 1);

-        auto plugin = IE::PluginDispatcher().getPluginByDevice("CPU");
-        auto plugin_net = plugin.LoadNetwork(net, {});
-        auto infer_request = plugin_net.CreateInferRequest();
         infer_request.SetBlob("data", cv::gapi::ie::util::to_ie(in_mat));
         infer_request.Infer();
         ie_age = infer_request.GetBlob("age_conv3");
@@ -161,7 +147,7 @@ TEST(TestAgeGenderIE, InferBasicTensor)
     cv::GComputation comp(cv::GIn(in), cv::GOut(age, gender));

     auto pp = cv::gapi::ie::Params<AgeGender> {
-        topology_path, weights_path, "CPU"
+        params.model_path, params.weights_path, params.device_id
     }.cfgOutputLayers({ "age_conv3", "prob" });
     comp.apply(cv::gin(in_mat), cv::gout(gapi_age, gapi_gender),
                cv::compile_args(cv::gapi::networks(pp)));
@@ -175,8 +161,9 @@ TEST(TestAgeGenderIE, InferBasicImage)
 {
     initDLDTDataPath();

-    std::string topology_path, weights_path;
-    std::tie(topology_path, weights_path) = findModel("age-gender-recognition-retail-0013");
+    cv::gapi::ie::detail::ParamDesc params;
+    std::tie(params.model_path, params.weights_path) = findModel("age-gender-recognition-retail-0013");
+    params.device_id = "CPU";

     // FIXME: Ideally it should be an image from disk
     // cv::Mat in_mat = cv::imread(findDataFile("grace_hopper_227.png"));
@@ -186,21 +173,13 @@ TEST(TestAgeGenderIE, InferBasicImage)
     cv::Mat gapi_age, gapi_gender;

     // Load & run IE network
-    namespace IE = InferenceEngine;
     IE::Blob::Ptr ie_age, ie_gender;
     {
-        IE::CNNNetReader reader;
-        reader.ReadNetwork(topology_path);
-        reader.ReadWeights(weights_path);
-        auto net = reader.getNetwork();
-        auto &ii = net.getInputsInfo().at("data");
-        ii->setPrecision(IE::Precision::U8);
-        ii->getPreProcess().setResizeAlgorithm(IE::RESIZE_BILINEAR);
-        auto plugin = IE::PluginDispatcher().getPluginByDevice("CPU");
-        auto plugin_net = plugin.LoadNetwork(net, {});
-        auto infer_request = plugin_net.CreateInferRequest();
+        auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
+        auto net = cv::gimpl::ie::wrap::readNetwork(params);
+        setNetParameters(net);
+        auto this_network = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
+        auto infer_request = this_network.CreateInferRequest();
         infer_request.SetBlob("data", cv::gapi::ie::util::to_ie(in_mat));
         infer_request.Infer();
         ie_age = infer_request.GetBlob("age_conv3");
@@ -217,7 +196,7 @@ TEST(TestAgeGenderIE, InferBasicImage)
     cv::GComputation comp(cv::GIn(in), cv::GOut(age, gender));

     auto pp = cv::gapi::ie::Params<AgeGender> {
-        topology_path, weights_path, "CPU"
+        params.model_path, params.weights_path, params.device_id
     }.cfgOutputLayers({ "age_conv3", "prob" });
     comp.apply(cv::gin(in_mat), cv::gout(gapi_age, gapi_gender),
                cv::compile_args(cv::gapi::networks(pp)));
@@ -228,8 +207,7 @@ TEST(TestAgeGenderIE, InferBasicImage)
 }

 struct ROIList: public ::testing::Test {
-    std::string m_model_path;
-    std::string m_weights_path;
+    cv::gapi::ie::detail::ParamDesc params;

     cv::Mat m_in_mat;
     std::vector<cv::Rect> m_roi_list;
@@ -245,7 +223,8 @@ struct ROIList: public ::testing::Test {
     ROIList() {
         initDLDTDataPath();
-        std::tie(m_model_path, m_weights_path) = findModel("age-gender-recognition-retail-0013");
+        std::tie(params.model_path, params.weights_path) = findModel("age-gender-recognition-retail-0013");
+        params.device_id = "CPU";

         // FIXME: it must be cv::imread(findDataFile("../dnn/grace_hopper_227.png", false));
         m_in_mat = cv::Mat(cv::Size(320, 240), CV_8UC3);
@@ -258,19 +237,12 @@ struct ROIList: public ::testing::Test {
         };

         // Load & run IE network
-        namespace IE = InferenceEngine;
         {
-            IE::CNNNetReader reader;
-            reader.ReadNetwork(m_model_path);
-            reader.ReadWeights(m_weights_path);
-            auto net = reader.getNetwork();
-            auto &ii = net.getInputsInfo().at("data");
-            ii->setPrecision(IE::Precision::U8);
-            ii->getPreProcess().setResizeAlgorithm(IE::RESIZE_BILINEAR);
-            auto plugin = IE::PluginDispatcher().getPluginByDevice("CPU");
-            auto plugin_net = plugin.LoadNetwork(net, {});
-            auto infer_request = plugin_net.CreateInferRequest();
+            auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
+            auto net = cv::gimpl::ie::wrap::readNetwork(params);
+            setNetParameters(net);
+            auto this_network = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
+            auto infer_request = this_network.CreateInferRequest();

             auto frame_blob = cv::gapi::ie::util::to_ie(m_in_mat);
             for (auto &&rc : m_roi_list) {
@@ -314,7 +286,7 @@ TEST_F(ROIList, TestInfer)
     cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));

     auto pp = cv::gapi::ie::Params<AgeGender> {
-        m_model_path, m_weights_path, "CPU"
+        params.model_path, params.weights_path, params.device_id
     }.cfgOutputLayers({ "age_conv3", "prob" });
     comp.apply(cv::gin(m_in_mat, m_roi_list),
                cv::gout(m_out_gapi_ages, m_out_gapi_genders),
@@ -331,7 +303,7 @@ TEST_F(ROIList, TestInfer2)
     cv::GComputation comp(cv::GIn(in, rr), cv::GOut(age, gender));

     auto pp = cv::gapi::ie::Params<AgeGender> {
-        m_model_path, m_weights_path, "CPU"
+        params.model_path, params.weights_path, params.device_id
     }.cfgOutputLayers({ "age_conv3", "prob" });
     comp.apply(cv::gin(m_in_mat, m_roi_list),
                cv::gout(m_out_gapi_ages, m_out_gapi_genders),
@@ -339,6 +311,75 @@ TEST_F(ROIList, TestInfer2)
     validate();
 }

+TEST(DISABLED_TestTwoIENNPipeline, InferBasicImage)
+{
+    initDLDTDataPath();
+
+    cv::gapi::ie::detail::ParamDesc AGparams;
+    std::tie(AGparams.model_path, AGparams.weights_path) = findModel("age-gender-recognition-retail-0013");
+    AGparams.device_id = "MYRIAD";
+
+    // FIXME: Ideally it should be an image from disk
+    // cv::Mat in_mat = cv::imread(findDataFile("grace_hopper_227.png"));
+    cv::Mat in_mat(cv::Size(320, 240), CV_8UC3);
+    cv::randu(in_mat, 0, 255);
+
+    cv::Mat gapi_age1, gapi_gender1, gapi_age2, gapi_gender2;
+
+    // Load & run IE network
+    IE::Blob::Ptr ie_age1, ie_gender1, ie_age2, ie_gender2;
+    {
+        auto AGplugin1 = cv::gimpl::ie::wrap::getPlugin(AGparams);
+        auto AGnet1 = cv::gimpl::ie::wrap::readNetwork(AGparams);
+        setNetParameters(AGnet1);
+        auto AGplugin_network1 = cv::gimpl::ie::wrap::loadNetwork(AGplugin1, AGnet1, AGparams);
+        auto AGinfer_request1 = AGplugin_network1.CreateInferRequest();
+        AGinfer_request1.SetBlob("data", cv::gapi::ie::util::to_ie(in_mat));
+        AGinfer_request1.Infer();
+        ie_age1    = AGinfer_request1.GetBlob("age_conv3");
+        ie_gender1 = AGinfer_request1.GetBlob("prob");
+
+        auto AGplugin2 = cv::gimpl::ie::wrap::getPlugin(AGparams);
+        auto AGnet2 = cv::gimpl::ie::wrap::readNetwork(AGparams);
+        setNetParameters(AGnet2);
+        auto AGplugin_network2 = cv::gimpl::ie::wrap::loadNetwork(AGplugin2, AGnet2, AGparams);
+        auto AGinfer_request2 = AGplugin_network2.CreateInferRequest();
+        AGinfer_request2.SetBlob("data", cv::gapi::ie::util::to_ie(in_mat));
+        AGinfer_request2.Infer();
+        ie_age2    = AGinfer_request2.GetBlob("age_conv3");
+        ie_gender2 = AGinfer_request2.GetBlob("prob");
+    }
+
+    // Configure & run G-API
+    using AGInfo = std::tuple<cv::GMat, cv::GMat>;
+    G_API_NET(AgeGender1, <AGInfo(cv::GMat)>, "test-age-gender1");
+    G_API_NET(AgeGender2, <AGInfo(cv::GMat)>, "test-age-gender2");
+
+    cv::GMat in;
+    cv::GMat age1, gender1;
+    std::tie(age1, gender1) = cv::gapi::infer<AgeGender1>(in);
+
+    cv::GMat age2, gender2;
+    // FIXME: "Multi-node inference is not supported!", workarounded 'till enabling proper tools
+    std::tie(age2, gender2) = cv::gapi::infer<AgeGender2>(cv::gapi::copy(in));
+
+    cv::GComputation comp(cv::GIn(in), cv::GOut(age1, gender1, age2, gender2));
+
+    auto age_net1 = cv::gapi::ie::Params<AgeGender1> {
+        AGparams.model_path, AGparams.weights_path, AGparams.device_id
+    }.cfgOutputLayers({ "age_conv3", "prob" });
+    auto age_net2 = cv::gapi::ie::Params<AgeGender2> {
+        AGparams.model_path, AGparams.weights_path, AGparams.device_id
+    }.cfgOutputLayers({ "age_conv3", "prob" });
+
+    comp.apply(cv::gin(in_mat), cv::gout(gapi_age1, gapi_gender1, gapi_age2, gapi_gender2),
+               cv::compile_args(cv::gapi::networks(age_net1, age_net2)));
+
+    // Validate with IE itself (avoid DNN module dependency here)
+    normAssert(cv::gapi::ie::util::to_ocv(ie_age1),    gapi_age1,    "Test age output 1");
+    normAssert(cv::gapi::ie::util::to_ocv(ie_gender1), gapi_gender1, "Test gender output 1");
+    normAssert(cv::gapi::ie::util::to_ocv(ie_age2),    gapi_age2,    "Test age output 2");
+    normAssert(cv::gapi::ie::util::to_ocv(ie_gender2), gapi_gender2, "Test gender output 2");
+}
 } // namespace opencv_test

 #endif // HAVE_INF_ENGINE
