Merge pull request #23843 from TolyaTalamanov:at/fix-missing-opaque-kind-for-kernel

G-API: Fix incorrect OpaqueKind for Kernel outputs #23843

### Pull Request Readiness Checklist

#### Overview
This PR fixes two problems:
1. Major: `GKernel` doesn't hold the `kind` for its outputs. Since `GModelBuilder` traverses the graph from outputs to inputs, once it reaches any output of an operation it uses that output's `kind` to create the `Data` meta for all of the operation's outputs. Because Python relies on `GTypeInfo` (which is `shape` plus `kind`) being correct, it gets confused.

Consider this operation:
```python
@cv.gapi.op('custom.square_mean', in_types=[cv.GArray.Int], out_types=[cv.GOpaque.Float, cv.GArray.Int])
class GSquareMean:
    @staticmethod
    def outMeta(desc):
        return cv.empty_gopaque_desc(), cv.empty_array_desc()
```
Even though the `GOpaque` output is `Float`, its metadata might end up with the `Int` kind, because the kind might be taken from the `cv.GArray.Int` output instead.
This becomes a problem when one of these outputs is a graph output: Python will cast it to the wrong type based on the `Data` meta. (See the C++ sketch below for the per-output kinds involved.)
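For illustration only (this snippet is not part of the patch), a tiny standalone C++ program showing that the two output G-types above indeed carry different `OpaqueKind` values; this per-output information is exactly what the new `GKernel::outKinds` field preserves:
```cpp
#include <iostream>
#include <opencv2/gapi/garray.hpp>
#include <opencv2/gapi/gopaque.hpp>
#include <opencv2/gapi/gtype_traits.hpp>

int main() {
    using cv::detail::GTypeTraits;

    // Kinds of the two outputs of 'custom.square_mean' above.
    const auto mean_kind    = GTypeTraits<cv::GOpaque<float>>::op_kind; // OpaqueKind::CV_FLOAT
    const auto squares_kind = GTypeTraits<cv::GArray<int>>::op_kind;    // OpaqueKind::CV_INT

    // Before the fix, the Data meta of both outputs could receive a single kind
    // (whichever output GModelBuilder reached first), so a GOpaque<float> graph
    // output could be reported to Python with the Int kind.
    std::cout << std::boolalpha << (mean_kind != squares_kind) << std::endl; // prints "true"
    return 0;
}
```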

2. Minor: Some OpenVINO `IR`s don't carry any layout information for their inputs. This is usually true only for `IRv10`, but since `OpenVINO 2.0` needs this information to correctly configure resize, we have to set a default layout when none is assigned in the `ov::Model` (see the sketch below).
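
A rough sketch of that fallback, assuming the `ov::preprocess::PrePostProcessor`-based input configuration the OV backend uses; the free function and its parameters here are illustrative, not the actual backend API:
```cpp
#include <memory>
#include <string>
#include <openvino/openvino.hpp>

// Sketch: fall back to a default NCHW layout when the IR provides none.
void set_default_input_layout_if_missing(ov::preprocess::PrePostProcessor &ppp,
                                         const std::shared_ptr<ov::Model> &model,
                                         const std::string &input_name) {
    const auto port = model->input(input_name);
    // Only 4D inputs get the default so that auto-resize for image inputs can
    // still be configured; IRv10 models often carry no layout information.
    if (ov::layout::get_layout(port).empty() && port.get_shape().size() == 4u) {
        ppp.input(input_name).model().set_layout(ov::Layout("NCHW"));
    }
}
```
In the patch itself this is done inside `govbackend.cpp`: when no explicit model layout is configured and the input is 4D, a warning is logged and `NCHW` is set as the model layout.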

See details at https://github.com/opencv/opencv/wiki/How_to_contribute#making-a-good-pull-request

- [ ] I agree to contribute to the project under Apache 2 License.
- [ ] To the best of my knowledge, the proposed patch is not based on a code under GPL or another license that is incompatible with OpenCV
- [ ] The PR is proposed to the proper branch
- [ ] There is a reference to the original bug report and related work
- [ ] There is accuracy test, performance test and test data in opencv_extra repository, if applicable
      Patch to opencv_extra has the same branch name.
- [ ] The feature is well documented and sample code can be built with the project CMake
Files changed (lines changed):
- modules/gapi/include/opencv2/gapi/gkernel.hpp (7)
- modules/gapi/include/opencv2/gapi/infer.hpp (3)
- modules/gapi/include/opencv2/gapi/streaming/desync.hpp (1)
- modules/gapi/include/opencv2/gapi/streaming/meta.hpp (1)
- modules/gapi/misc/python/python_bridge.hpp (27)
- modules/gapi/misc/python/test/test_gapi_sample_pipelines.py (41)
- modules/gapi/src/backends/ov/govbackend.cpp (14)
- modules/gapi/src/compiler/gmodelbuilder.cpp (18)
- modules/gapi/test/internal/gapi_int_gmodel_builder_test.cpp (6)

@@ -51,6 +51,7 @@ struct GAPI_EXPORTS GKernel
GShapes outShapes; // types (shapes) kernel's outputs
GKinds inKinds; // kinds of kernel's inputs (fixme: below)
GCtors outCtors; // captured constructors for template output types
GKinds outKinds; // kinds of kernel's outputs (fixme: below)
};
// TODO: It's questionable if inKinds should really be here. Instead,
// this information could come from meta.
@@ -227,7 +228,8 @@ public:
, &K::getOutMeta
, {detail::GTypeTraits<R>::shape...}
, {detail::GTypeTraits<Args>::op_kind...}
, {detail::GObtainCtor<R>::get()...}});
, {detail::GObtainCtor<R>::get()...}
, {detail::GTypeTraits<R>::op_kind...}});
call.pass(args...); // TODO: std::forward() here?
return yield(call, typename detail::MkSeq<sizeof...(R)>::type());
}
@@ -251,7 +253,8 @@ public:
, &K::getOutMeta
, {detail::GTypeTraits<R>::shape}
, {detail::GTypeTraits<Args>::op_kind...}
, {detail::GObtainCtor<R>::get()}});
, {detail::GObtainCtor<R>::get()}
, {detail::GTypeTraits<R>::op_kind}});
call.pass(args...);
return detail::Yield<R>::yield(call, 0);
}

@@ -101,8 +101,10 @@ public:
if (it == m_priv->blobs.end()) {
// FIXME: Avoid modifying GKernel
auto shape = cv::detail::GTypeTraits<OutT>::shape;
auto kind = cv::detail::GTypeTraits<OutT>::op_kind;
m_priv->call->kernel().outShapes.push_back(shape);
m_priv->call->kernel().outCtors.emplace_back(cv::detail::GObtainCtor<OutT>::get());
m_priv->call->kernel().outKinds.emplace_back(kind);
auto out_idx = static_cast<int>(m_priv->blobs.size());
it = m_priv->blobs.emplace(name,
cv::detail::Yield<OutT>::yield(*(m_priv->call), out_idx)).first;
@@ -175,6 +177,7 @@ std::shared_ptr<cv::GCall> makeCall(const std::string &tag,
{}, // outShape will be filled later
std::move(kinds),
{}, // outCtors will be filled later
{}, // outKinds will be filled later
});
call->setArgs(std::move(args));

@@ -46,6 +46,7 @@ G desync(const G &g) {
, {cv::detail::GTypeTraits<G>::shape} // output Shape
, {cv::detail::GTypeTraits<G>::op_kind} // input data kinds
, {cv::detail::GObtainCtor<G>::get()} // output template ctors
, {cv::detail::GTypeTraits<G>::op_kind} // output data kinds
};
cv::GCall call(std::move(k));
call.pass(g);

@@ -50,6 +50,7 @@ cv::GOpaque<T> meta(G g, const std::string &tag) {
, {cv::detail::GTypeTraits<O>::shape} // output Shape
, {cv::detail::GTypeTraits<G>::op_kind} // input data kinds
, {cv::detail::GObtainCtor<O>::get()} // output template ctors
, {cv::detail::GTypeTraits<O>::op_kind} // output data kind
};
cv::GCall call(std::move(k));
call.pass(g);

@@ -267,13 +267,14 @@ cv::gapi::wip::GOutputs::Priv::Priv(const std::string& id, cv::GKernel::M outMet
std::transform(args.begin(), args.end(), std::back_inserter(kinds),
[](const cv::GArg& arg) { return arg.opaque_kind; });
m_call.reset(new cv::GCall{cv::GKernel{id, {}, outMeta, {}, std::move(kinds), {}}});
m_call.reset(new cv::GCall{cv::GKernel{id, {}, outMeta, {}, std::move(kinds), {}, {}}});
m_call->setArgs(std::move(args));
}
cv::GMat cv::gapi::wip::GOutputs::Priv::getGMat()
{
m_call->kernel().outShapes.push_back(cv::GShape::GMAT);
m_call->kernel().outKinds.push_back(cv::detail::OpaqueKind::CV_UNKNOWN);
// ...so _empty_ constructor is passed here.
m_call->kernel().outCtors.emplace_back(cv::util::monostate{});
return m_call->yield(output++);
@@ -282,6 +283,7 @@ cv::GMat cv::gapi::wip::GOutputs::Priv::getGMat()
cv::GScalar cv::gapi::wip::GOutputs::Priv::getGScalar()
{
m_call->kernel().outShapes.push_back(cv::GShape::GSCALAR);
m_call->kernel().outKinds.push_back(cv::detail::OpaqueKind::CV_UNKNOWN);
// ...so _empty_ constructor is passed here.
m_call->kernel().outCtors.emplace_back(cv::util::monostate{});
return m_call->yieldScalar(output++);
@@ -290,10 +292,14 @@ cv::GScalar cv::gapi::wip::GOutputs::Priv::getGScalar()
cv::GArrayT cv::gapi::wip::GOutputs::Priv::getGArray(cv::gapi::ArgType type)
{
m_call->kernel().outShapes.push_back(cv::GShape::GARRAY);
#define HC(T, K) \
case K: \
m_call->kernel().outCtors.emplace_back(cv::detail::GObtainCtor<cv::GArray<T>>::get()); \
return cv::GArrayT(m_call->yieldArray<T>(output++)); \
#define HC(T, K) \
case K: { \
const auto kind = cv::detail::GTypeTraits<cv::GArray<T>>::op_kind; \
m_call->kernel().outKinds.emplace_back(kind); \
m_call->kernel().outCtors.emplace_back(cv::detail::GObtainCtor<cv::GArray<T>>::get()); \
return cv::GArrayT(m_call->yieldArray<T>(output++)); \
}
SWITCH(type, GARRAY_TYPE_LIST_G, HC)
#undef HC
@@ -302,10 +308,13 @@ cv::GArrayT cv::gapi::wip::GOutputs::Priv::getGArray(cv::gapi::ArgType type)
cv::GOpaqueT cv::gapi::wip::GOutputs::Priv::getGOpaque(cv::gapi::ArgType type)
{
m_call->kernel().outShapes.push_back(cv::GShape::GOPAQUE);
#define HC(T, K) \
case K: \
m_call->kernel().outCtors.emplace_back(cv::detail::GObtainCtor<cv::GOpaque<T>>::get()); \
return cv::GOpaqueT(m_call->yieldOpaque<T>(output++)); \
#define HC(T, K) \
case K: { \
const auto kind = cv::detail::GTypeTraits<cv::GOpaque<T>>::op_kind; \
m_call->kernel().outKinds.emplace_back(kind); \
m_call->kernel().outCtors.emplace_back(cv::detail::GObtainCtor<cv::GOpaque<T>>::get()); \
return cv::GOpaqueT(m_call->yieldOpaque<T>(output++)); \
}
SWITCH(type, GOPAQUE_TYPE_LIST_G, HC)
#undef HC

@@ -207,7 +207,48 @@ try:
        return Op

    # NB: Just mock operation to test different kinds for output G-types.
    @cv.gapi.op('custom.square_mean', in_types=[cv.GArray.Int], out_types=[cv.GOpaque.Float, cv.GArray.Int])
    class GSquareMean:
        @staticmethod
        def outMeta(desc):
            return cv.empty_gopaque_desc(), cv.empty_array_desc()

    @cv.gapi.kernel(GSquareMean)
    class GSquareMeanImpl:
        @staticmethod
        def run(arr):
            squares = [val**2 for val in arr]
            return sum(arr) / len(arr), squares

    @cv.gapi.op('custom.squares', in_types=[cv.GArray.Int], out_types=[cv.GArray.Int])
    class GSquare:
        @staticmethod
        def outMeta(desc):
            return cv.empty_array_desc()

    @cv.gapi.kernel(GSquare)
    class GSquareImpl:
        @staticmethod
        def run(arr):
            squares = [val**2 for val in arr]
            return squares

    class gapi_sample_pipelines(NewOpenCVTests):

        def test_different_output_opaque_kinds(self):
            g_in = cv.GArray.Int()
            g_mean, g_squares = GSquareMean.on(g_in)

            comp = cv.GComputation(cv.GIn(g_in), cv.GOut(g_mean, g_squares))

            pkg = cv.gapi.kernels(GSquareMeanImpl)
            mean, squares = comp.apply(cv.gin([1,2,3]), args=cv.gapi.compile_args(pkg))

            self.assertEqual([1,4,9], list(squares))
            self.assertEqual(2.0, mean)

        def test_custom_op_add(self):
            sz = (3, 3)

@@ -738,6 +738,15 @@ public:
const auto explicit_in_model_layout = lookUp(m_input_model_layout, input_name);
if (explicit_in_model_layout) {
input_info.model().set_layout(::ov::Layout(*explicit_in_model_layout));
} else if (m_model->input(input_name).get_shape().size() == 4u) {
// NB: Back compatibility with IR's without any layout information.
// Note that default is only applicable for 4D inputs in order to
// support auto resize for image use cases.
GAPI_LOG_WARNING(NULL, "Failed to find layout for input layer \""
<< input_name << "\" - NCHW is set by default");
const std::string default_layout = "NCHW";
input_info.model().set_layout(::ov::Layout(default_layout));
m_input_model_layout.emplace(input_name, default_layout);
}
const auto explicit_in_tensor_layout = lookUp(m_input_tensor_layout, input_name);
if (explicit_in_tensor_layout) {
@@ -765,6 +774,7 @@ public:
const auto &matdesc = cv::util::get<cv::GMatDesc>(input_meta);
const auto explicit_in_tensor_layout = lookUp(m_input_tensor_layout, input_name);
const auto explicit_in_model_layout = lookUp(m_input_model_layout, input_name);
const auto explicit_resize = lookUp(m_interpolation, input_name);
if (disable_img_resize && explicit_resize.has_value()) {
@@ -810,7 +820,9 @@ public:
if (matdesc.isND()) {
// NB: ND case - need to obtain "H" and "W" positions
// in order to configure resize.
const auto model_layout = ::ov::layout::get_layout(m_model->input(input_name));
const auto model_layout = explicit_in_model_layout
? ::ov::Layout(*explicit_in_model_layout)
: ::ov::layout::get_layout(m_model->input(input_name));
if (!explicit_in_tensor_layout && model_layout.empty()) {
std::stringstream ss;
ss << "Resize for input layer: " << input_name

@@ -59,7 +59,6 @@ private:
} // namespace
cv::gimpl::Unrolled cv::gimpl::unrollExpr(const GProtoArgs &ins,
const GProtoArgs &outs)
{
@@ -135,18 +134,19 @@ cv::gimpl::Unrolled cv::gimpl::unrollExpr(const GProtoArgs &ins,
// Put the outputs object description of the node
// so that they are not lost if they are not consumed by other operations
GAPI_Assert(call_p.m_k.outCtors.size() == call_p.m_k.outShapes.size());
for (const auto it : ade::util::indexed(call_p.m_k.outShapes))
for (const auto it : ade::util::indexed(ade::util::zip(call_p.m_k.outShapes,
call_p.m_k.outCtors,
call_p.m_k.outKinds)))
{
std::size_t port = ade::util::index(it);
GShape shape = ade::util::value(it);
// FIXME: then use ZIP
HostCtor ctor = call_p.m_k.outCtors[port];
auto port = ade::util::index(it);
auto &val = ade::util::value(it);
auto shape = std::get<0>(val);
auto ctor = std::get<1>(val);
auto kind = std::get<2>(val);
// NB: Probably this fixes all other "missing host ctor"
// problems.
// TODO: Clean-up the old workarounds if it really is.
GOrigin org {shape, node, port, std::move(ctor), origin.kind};
GOrigin org {shape, node, port, std::move(ctor), kind};
origins.insert(org);
}

@@ -30,7 +30,8 @@ namespace
, nullptr
, { GShape::GMAT }
, { D::OpaqueKind::CV_UNKNOWN }
, { cv::detail::HostCtor{cv::util::monostate{}} }
, { D::HostCtor{cv::util::monostate{}} }
, { D::OpaqueKind::CV_UNKNOWN }
}).pass(m).yield(0);
}
@@ -41,7 +42,8 @@ namespace
, nullptr
, { GShape::GMAT }
, { D::OpaqueKind::CV_UNKNOWN, D::OpaqueKind::CV_UNKNOWN }
, { cv::detail::HostCtor{cv::util::monostate{}} }
, { D::HostCtor{cv::util::monostate{}} }
, { D::OpaqueKind::CV_UNKNOWN}
}).pass(m1, m2).yield(0);
}
