Merge pull request #18542 from TolyaTalamanov:at/import-network

[G-API] Support ImportNetwork for cv::gapi::infer
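A minimal usage sketch of what this change enables (the network type, file paths and device below are hypothetical): the existing three-string `Params` constructor reads an IR model (`Kind::Load`), while the new two-string constructor imports a pre-compiled blob (`Kind::Import`).

```cpp
#include <opencv2/gapi.hpp>
#include <opencv2/gapi/infer.hpp>
#include <opencv2/gapi/infer/ie.hpp>

// Hypothetical network declaration for illustration only
G_API_NET(Faces, <cv::GMat(cv::GMat)>, "hypothetical-face-detector");

void example() {
    // Existing path: read an IR model and load it onto the device (Kind::Load)
    auto load_params = cv::gapi::ie::Params<Faces>{"face.xml", "face.bin", "CPU"};

    // New path: import a pre-compiled blob directly (Kind::Import)
    auto import_params = cv::gapi::ie::Params<Faces>{"face.blob", "MYRIAD"};

    cv::GMat in;
    cv::GMat out = cv::gapi::infer<Faces>(in);
    cv::GComputation comp(cv::GIn(in), cv::GOut(out));

    cv::Mat frame(480, 640, CV_8UC3), result;
    comp.apply(cv::gin(frame), cv::gout(result),
               cv::compile_args(cv::gapi::networks(import_params)));
}
```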

* wip

* Refactoring

* Address review comments

* Fix warning

Co-authored-by: Ruslan Garnov <ruslan.garnov@intel.com>
Anatoliy Talamanov 4 years ago committed by GitHub
parent 0d3e05f9b3
commit 8bf451a3e0
  1. modules/gapi/include/opencv2/gapi/infer/ie.hpp (20 changed lines)
  2. modules/gapi/src/backends/common/gbackend.hpp (1 changed line)
  3. modules/gapi/src/backends/ie/giebackend.cpp (37 changed lines)
  4. modules/gapi/src/backends/ie/giebackend/giewrapper.cpp (18 changed lines)
  5. modules/gapi/src/backends/ie/giebackend/giewrapper.hpp (13 changed lines)

modules/gapi/include/opencv2/gapi/infer/ie.hpp
@@ -60,6 +60,8 @@ namespace detail {
std::size_t num_in; // How many inputs are defined in the operation
std::size_t num_out; // How many outputs are defined in the operation
enum class Kind { Load, Import };
Kind kind;
bool is_generic;
};
} // namespace detail
@@ -83,6 +85,16 @@ public:
: desc{ model, weights, device, {}, {}, {}
, std::tuple_size<typename Net::InArgs>::value // num_in
, std::tuple_size<typename Net::OutArgs>::value // num_out
, detail::ParamDesc::Kind::Load
, false} {
};
Params(const std::string &model,
const std::string &device)
: desc{ model, {}, device, {}, {}, {}
, std::tuple_size<typename Net::InArgs>::value // num_in
, std::tuple_size<typename Net::OutArgs>::value // num_out
, detail::ParamDesc::Kind::Import
, false} {
};
@@ -126,7 +138,13 @@ public:
const std::string &model,
const std::string &weights,
const std::string &device)
: desc{ model, weights, device, {}, {}, {}, 0u, 0u, true}, m_tag(tag) {
: desc{ model, weights, device, {}, {}, {}, 0u, 0u, detail::ParamDesc::Kind::Load, true}, m_tag(tag) {
};
Params(const std::string &tag,
const std::string &model,
const std::string &device)
: desc{ model, {}, device, {}, {}, {}, 0u, 0u, detail::ParamDesc::Kind::Import, true}, m_tag(tag) {
};
// BEGIN(G-API's network parametrization API)
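The same Import path is also exposed for generic inference via the new (tag, model, device) overload above. A hedged sketch of how it is expected to be used (tag, blob path, device and layer names are hypothetical):

```cpp
#include <opencv2/gapi.hpp>
#include <opencv2/gapi/infer.hpp>
#include <opencv2/gapi/infer/ie.hpp>

void generic_import_example() {
    // The (tag, model, device) overload selects ParamDesc::Kind::Import
    cv::gapi::ie::Params<cv::gapi::Generic> blob_net{"age-gender", "age-gender.blob", "MYRIAD"};

    cv::GMat in;
    cv::GInferInputs ins;
    ins["data"] = in;                                                  // input layer name is hypothetical
    auto outs = cv::gapi::infer<cv::gapi::Generic>("age-gender", ins); // tag must match Params' tag
    cv::GMat age = outs.at("age");                                     // output layer name is hypothetical

    cv::GComputation comp(cv::GIn(in), cv::GOut(age));
    // comp.apply(...) would then take cv::compile_args(cv::gapi::networks(blob_net))
}
```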

modules/gapi/src/backends/common/gbackend.hpp
@@ -27,6 +27,7 @@ namespace gimpl {
: cv::Mat(v.dims(), v.type(), v.ptr());
}
inline RMat::View asView(const Mat& m, RMat::View::DestroyCallback&& cb = nullptr) {
// FIXME: View doesn't support multidimensional cv::Mat's
return RMat::View(cv::descr_of(m), m.data, m.step, std::move(cb));
}

modules/gapi/src/backends/ie/giebackend.cpp
@@ -175,11 +175,26 @@ struct IEUnit {
IE::InputsDataMap inputs;
IE::OutputsDataMap outputs;
IE::ExecutableNetwork this_network;
cv::gimpl::ie::wrap::Plugin this_plugin;
explicit IEUnit(const cv::gapi::ie::detail::ParamDesc &pp)
: params(pp) {
if (params.kind == cv::gapi::ie::detail::ParamDesc::Kind::Load) {
net = cv::gimpl::ie::wrap::readNetwork(params);
inputs = net.getInputsInfo();
outputs = net.getOutputsInfo();
} else if (params.kind == cv::gapi::ie::detail::ParamDesc::Kind::Import) {
this_plugin = cv::gimpl::ie::wrap::getPlugin(params);
this_network = cv::gimpl::ie::wrap::importNetwork(this_plugin, params);
// FIXME: CNNNetwork returns InputsDataMap/OutputsDataMap,
// but ExecutableNetwork returns ConstInputsDataMap/ConstOutputsDataMap
inputs = cv::gimpl::ie::wrap::toInputsDataMap(this_network.GetInputsInfo());
outputs = cv::gimpl::ie::wrap::toOutputsDataMap(this_network.GetOutputsInfo());
} else {
cv::util::throw_error(std::logic_error("Unsupported ParamDesc::Kind"));
}
// The practice shows that not all inputs and not all outputs
// are mandatory to specify in IE model.
// So what we're concerned here about is:
@@ -205,10 +220,23 @@ struct IEUnit {
// This method is [supposed to be] called at Island compilation stage
cv::gimpl::ie::IECompiled compile() const {
auto plugin = cv::gimpl::ie::wrap::getPlugin(params);
auto this_network = cv::gimpl::ie::wrap::loadNetwork(plugin, net, params);
auto this_request = this_network.CreateInferRequest();
IEUnit* non_const_this = const_cast<IEUnit*>(this);
if (params.kind == cv::gapi::ie::detail::ParamDesc::Kind::Load) {
// FIXME: In the importNetwork case the inputs/outputs can only be obtained from the ExecutableNetwork,
// while in the loadNetwork case they are already available via readNetwork
non_const_this->this_plugin = cv::gimpl::ie::wrap::getPlugin(params);
non_const_this->this_network = cv::gimpl::ie::wrap::loadNetwork(non_const_this->this_plugin, net, params);
}
auto this_request = non_const_this->this_network.CreateInferRequest();
// Bind const data to infer request
for (auto &&p : params.const_inputs) {
// FIXME: SetBlob is known to be inefficient,
// it is worth to make a customizable "initializer" and pass the
// cv::Mat-wrapped blob there to support IE's optimal "GetBlob idiom"
// Still, constant data is to set only once.
this_request.SetBlob(p.first, wrapIE(p.second.first, p.second.second));
}
@@ -217,7 +245,8 @@ struct IEUnit {
// Still, constant data is to set only once.
this_request.SetBlob(p.first, wrapIE(p.second.first, p.second.second));
}
return {plugin, this_network, this_request};
return {this_plugin, this_network, this_request};
}
};

modules/gapi/src/backends/ie/giebackend/giewrapper.cpp
@@ -22,6 +22,24 @@ namespace IE = InferenceEngine;
namespace giewrap = cv::gimpl::ie::wrap;
using GIEParam = cv::gapi::ie::detail::ParamDesc;
IE::InputsDataMap giewrap::toInputsDataMap (const IE::ConstInputsDataMap& inputs) {
IE::InputsDataMap transformed;
auto convert = [](const std::pair<std::string, IE::InputInfo::CPtr>& p) {
return std::make_pair(p.first, std::make_shared<IE::InputInfo>(*p.second));
};
std::transform(inputs.begin(), inputs.end(), std::inserter(transformed, transformed.end()), convert);
return transformed;
}
IE::OutputsDataMap giewrap::toOutputsDataMap (const IE::ConstOutputsDataMap& outputs) {
IE::OutputsDataMap transformed;
auto convert = [](const std::pair<std::string, IE::CDataPtr>& p) {
return std::make_pair(p.first, std::make_shared<IE::Data>(*p.second));
};
std::transform(outputs.begin(), outputs.end(), std::inserter(transformed, transformed.end()), convert);
return transformed;
}
#if INF_ENGINE_RELEASE < 2020000000 // < 2020.1
// Load extensions (taken from DNN module)
std::vector<std::string> giewrap::getExtensions(const GIEParam& params) {

modules/gapi/src/backends/ie/giebackend/giewrapper.hpp
@@ -28,7 +28,11 @@ namespace wrap {
GAPI_EXPORTS std::vector<std::string> getExtensions(const GIEParam& params);
GAPI_EXPORTS IE::CNNNetwork readNetwork(const GIEParam& params);
IE::InputsDataMap toInputsDataMap (const IE::ConstInputsDataMap& inputs);
IE::OutputsDataMap toOutputsDataMap(const IE::ConstOutputsDataMap& outputs);
#if INF_ENGINE_RELEASE < 2019020000 // < 2019.R2
using Plugin = IE::InferencePlugin;
GAPI_EXPORTS IE::InferencePlugin getPlugin(const GIEParam& params);
GAPI_EXPORTS inline IE::ExecutableNetwork loadNetwork( IE::InferencePlugin& plugin,
const IE::CNNNetwork& net,
@@ -36,7 +40,12 @@ GAPI_EXPORTS inline IE::ExecutableNetwork loadNetwork( IE::InferencePlugin&
return plugin.LoadNetwork(net, {}); // FIXME: 2nd parameter to be
// configurable via the API
}
GAPI_EXPORTS inline IE::ExecutableNetwork importNetwork( IE::InferencePlugin& plugin,
const GIEParam& param) {
return plugin.ImportNetwork(param.model_path, {});
}
#else // >= 2019.R2
using Plugin = IE::Core;
GAPI_EXPORTS IE::Core getCore();
GAPI_EXPORTS IE::Core getPlugin(const GIEParam& params);
GAPI_EXPORTS inline IE::ExecutableNetwork loadNetwork( IE::Core& core,
@@ -44,6 +53,10 @@ GAPI_EXPORTS inline IE::ExecutableNetwork loadNetwork( IE::Core& core
const GIEParam& params) {
return core.LoadNetwork(net, params.device_id);
}
GAPI_EXPORTS inline IE::ExecutableNetwork importNetwork( IE::Core& core,
const GIEParam& param) {
return core.ImportNetwork(param.model_path, param.device_id, {});
}
#endif // INF_ENGINE_RELEASE < 2019020000
}}}}
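For reference, a standalone sketch of the Inference Engine calls the >= 2019.R2 branch above wraps (blob path and device are hypothetical). It also shows why the backend needs toInputsDataMap/toOutputsDataMap: an imported network only exposes its I/O info as Const{Inputs,Outputs}DataMap on the ExecutableNetwork.

```cpp
#include <inference_engine.hpp>
namespace IE = InferenceEngine;

int main() {
    IE::Core core;
    // Import a pre-compiled blob directly onto the device (no CNNNetwork involved)
    IE::ExecutableNetwork exec = core.ImportNetwork("face.blob", "MYRIAD", {});

    // Unlike CNNNetwork, ExecutableNetwork only gives const views of its I/O info
    IE::ConstInputsDataMap  inputs  = exec.GetInputsInfo();
    IE::ConstOutputsDataMap outputs = exec.GetOutputsInfo();

    IE::InferRequest req = exec.CreateInferRequest();
    (void)inputs; (void)outputs; (void)req; // silence unused-variable warnings
    return 0;
}
```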
