Merge pull request #19009 from TolyaTalamanov:at/media-frame-copy

[G-API] GStreamingBackend

* Snapshot

* Implement StreamingBackend

* Refactoring

* Refactoring 2

* Clean up

* Add missing functionality to support MediaFrame as output

* Partially address review comments

* Fix build

* Implement reshape for gstreamingbackend and add a test on it

* Address more comments

* Add format.hpp to gapi.hpp

* Fix debug build

* Address review comments

Co-authored-by: Smirnov Alexey <alexey.smirnov@intel.com>
pull/19085/head
Anatoliy Talamanov 4 years ago committed by GitHub
parent 9f01b97e14
commit 8ed0fc6f0c
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 3
      modules/gapi/CMakeLists.txt
  2. 3
      modules/gapi/include/opencv2/gapi.hpp
  3. 8
      modules/gapi/include/opencv2/gapi/cpu/gcpukernel.hpp
  4. 1
      modules/gapi/include/opencv2/gapi/garg.hpp
  5. 2
      modules/gapi/include/opencv2/gapi/gcall.hpp
  6. 3
      modules/gapi/include/opencv2/gapi/gframe.hpp
  7. 6
      modules/gapi/include/opencv2/gapi/gkernel.hpp
  8. 52
      modules/gapi/include/opencv2/gapi/streaming/format.hpp
  9. 12
      modules/gapi/src/api/gbackend.cpp
  10. 5
      modules/gapi/src/api/gcall.cpp
  11. 5
      modules/gapi/src/api/gframe.cpp
  12. 4
      modules/gapi/src/api/gproto.cpp
  13. 10
      modules/gapi/src/api/kernels_streaming.cpp
  14. 5
      modules/gapi/src/backends/cpu/gcpukernel.cpp
  15. 203
      modules/gapi/src/backends/streaming/gstreamingbackend.cpp
  16. 89
      modules/gapi/src/backends/streaming/gstreamingbackend.hpp
  17. 37
      modules/gapi/src/backends/streaming/gstreamingkernel.hpp
  18. 4
      modules/gapi/src/compiler/gcompiler.cpp
  19. 2
      modules/gapi/src/compiler/gstreaming_priv.hpp
  20. 2
      modules/gapi/src/compiler/passes/intrin.cpp
  21. 2
      modules/gapi/src/compiler/passes/streaming.cpp
  22. 10
      modules/gapi/src/executor/gstreamingexecutor.cpp
  23. 270
      modules/gapi/test/streaming/gapi_streaming_tests.cpp

@ -157,6 +157,9 @@ set(gapi_srcs
src/api/s11n.cpp
src/backends/common/serialization.cpp
# Streaming backend
src/backends/streaming/gstreamingbackend.cpp
# Python bridge
src/backends/ie/bindings_ie.cpp
)

@ -33,8 +33,9 @@
#include <opencv2/gapi/gkernel.hpp>
#include <opencv2/gapi/operators.hpp>
// Include this file here to avoid cyclic dependency between
// Include these files here to avoid cyclic dependency between
// Desync & GKernel & GComputation & GStreamingCompiled.
#include <opencv2/gapi/streaming/desync.hpp>
#include <opencv2/gapi/streaming/format.hpp>
#endif // OPENCV_GAPI_HPP

@ -101,6 +101,7 @@ public:
const cv::Scalar& inVal(int input);
cv::Scalar& outValR(int output); // FIXME: Avoid cv::Scalar s = ctx.outValR()
cv::MediaFrame& outFrame(int output);
template<typename T> std::vector<T>& outVecR(int output) // FIXME: the same issue
{
return outVecRef(output).wref<T>();
@ -258,6 +259,13 @@ template<> struct get_out<cv::GScalar>
return ctx.outValR(idx);
}
};
template<> struct get_out<cv::GFrame>
{
static cv::MediaFrame& get(GCPUContext &ctx, int idx)
{
return ctx.outFrame(idx);
}
};
template<typename U> struct get_out<cv::GArray<U>>
{
static std::vector<U>& get(GCPUContext &ctx, int idx)

@ -210,6 +210,7 @@ using GRunArgP = util::variant<
cv::Mat*,
cv::RMat*,
cv::Scalar*,
cv::MediaFrame*,
cv::detail::VectorRef,
cv::detail::OpaqueRef
>;

@ -11,6 +11,7 @@
#include <opencv2/gapi/garg.hpp> // GArg
#include <opencv2/gapi/gmat.hpp> // GMat
#include <opencv2/gapi/gscalar.hpp> // GScalar
#include <opencv2/gapi/gframe.hpp> // GFrame
#include <opencv2/gapi/garray.hpp> // GArray<T>
#include <opencv2/gapi/gopaque.hpp> // GOpaque<T>
@ -41,6 +42,7 @@ public:
GMat yield (int output = 0);
GMatP yieldP (int output = 0);
GScalar yieldScalar(int output = 0);
GFrame yieldFrame (int output = 0);
template<class T> GArray<T> yieldArray(int output = 0)
{

@ -62,6 +62,9 @@ struct GAPI_EXPORTS GFrameDesc
static inline GFrameDesc empty_gframe_desc() { return GFrameDesc{}; }
/** @} */
class MediaFrame;
GAPI_EXPORTS GFrameDesc descr_of(const MediaFrame &frame);
GAPI_EXPORTS std::ostream& operator<<(std::ostream& os, const cv::GFrameDesc &desc);
} // namespace cv

@ -90,6 +90,10 @@ namespace detail
{
static inline cv::GOpaque<U> yield(cv::GCall &call, int i) { return call.yieldOpaque<U>(i); }
};
template<> struct Yield<GFrame>
{
static inline cv::GFrame yield(cv::GCall &call, int i) { return call.yieldFrame(i); }
};
////////////////////////////////////////////////////////////////////////////
// Helper classes which brings outputMeta() marshalling to kernel
@ -239,8 +243,6 @@ public:
using InArgs = std::tuple<Args...>;
using OutArgs = std::tuple<R>;
static_assert(!cv::detail::contains<GFrame, OutArgs>::value, "Values of GFrame type can't be used as operation outputs");
static R on(Args... args)
{
cv::GCall call(GKernel{ K::id()

@ -0,0 +1,52 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2020 Intel Corporation
#ifndef OPENCV_GAPI_GSTREAMING_FORMAT_HPP
#define OPENCV_GAPI_GSTREAMING_FORMAT_HPP
#include <opencv2/gapi/gkernel.hpp> // GKernelPackage
namespace cv {
namespace gapi {
namespace streaming {
// NOTE(review): GAPI_EXPORTS added to kernels()/backend() for consistency
// with copy()/BGR() below -- without it these symbols are not exported
// from the G-API shared library on Windows builds.
/** @brief Returns the kernel package of the streaming backend intrinsics. */
GAPI_EXPORTS cv::gapi::GKernelPackage kernels();
/** @brief Returns the G-API backend which executes the streaming intrinsics. */
GAPI_EXPORTS cv::gapi::GBackend backend();
// FIXME: Make a generic kernel
// Operation: pass a MediaFrame through unchanged (metadata is forwarded as-is).
G_API_OP(GCopy, <GFrame(GFrame)>, "org.opencv.streaming.copy")
{
static GFrameDesc outMeta(const GFrameDesc& in) { return in; }
};
// Operation: extract a 3-channel 8-bit BGR image from a MediaFrame.
G_API_OP(GBGR, <GMat(GFrame)>, "org.opencv.streaming.BGR")
{
static GMatDesc outMeta(const GFrameDesc& in) { return GMatDesc{CV_8U, 3, in.size}; }
};
/** @brief Gets copy from the input frame
@note Function textual ID is "org.opencv.streaming.copy"
@param in Input frame
@return Copy of the input frame
*/
GAPI_EXPORTS cv::GFrame copy(const cv::GFrame& in);
/** @brief Gets bgr plane from input frame
@note Function textual ID is "org.opencv.streaming.BGR"
@param in Input frame
@return Image in BGR format
*/
GAPI_EXPORTS cv::GMat BGR (const cv::GFrame& in);
} // namespace streaming
} // namespace gapi
} // namespace cv
#endif // OPENCV_GAPI_GSTREAMING_FORMAT_HPP

@ -211,6 +211,9 @@ void bindOutArg(Mag& mag, const RcDesc &rc, const GRunArgP &arg, HandleRMat hand
}
break;
}
case GShape::GFRAME:
mag.template slot<cv::MediaFrame>()[rc.id] = *util::get<cv::MediaFrame*>(arg);
break;
case GShape::GARRAY:
mag.template slot<cv::detail::VectorRef>()[rc.id] = util::get<cv::detail::VectorRef>(arg);
break;
@ -319,6 +322,9 @@ cv::GRunArgP getObjPtr(Mag& mag, const RcDesc &rc, bool is_umat)
// debugging this!!!1
return GRunArgP(const_cast<const Mag&>(mag)
.template slot<cv::detail::OpaqueRef>().at(rc.id));
case GShape::GFRAME:
return GRunArgP(&mag.template slot<cv::MediaFrame>()[rc.id]);
default:
util::throw_error(std::logic_error("Unsupported GShape type"));
break;
@ -345,6 +351,12 @@ void writeBack(const Mag& mag, const RcDesc &rc, GRunArgP &g_arg)
break;
}
case GShape::GFRAME:
{
*util::get<cv::MediaFrame*>(g_arg) = mag.template slot<cv::MediaFrame>().at(rc.id);
break;
}
default:
util::throw_error(std::logic_error("Unsupported GShape type"));
break;

@ -69,6 +69,11 @@ cv::detail::GOpaqueU cv::GCall::yieldOpaque(int output)
return cv::detail::GOpaqueU(m_priv->m_node, output);
}
cv::GFrame cv::GCall::yieldFrame(int output)
{
return cv::GFrame(m_priv->m_node, output);
}
cv::GCall::Priv& cv::GCall::priv()
{
return *m_priv;

@ -8,6 +8,7 @@
#include "precomp.hpp"
#include <opencv2/gapi/gframe.hpp>
#include <opencv2/gapi/media.hpp>
#include "api/gorigin.hpp"
@ -34,6 +35,10 @@ bool GFrameDesc::operator== (const GFrameDesc &rhs) const {
return fmt == rhs.fmt && size == rhs.size;
}
GFrameDesc descr_of(const cv::MediaFrame &frame) {
return frame.desc();
}
std::ostream& operator<<(std::ostream& os, const cv::GFrameDesc &d) {
os << '[';
switch (d.fmt) {

@ -146,6 +146,7 @@ cv::GMetaArg cv::descr_of(const cv::GRunArgP &argp)
#endif // !defined(GAPI_STANDALONE)
case GRunArgP::index_of<cv::Mat*>(): return GMetaArg(cv::descr_of(*util::get<cv::Mat*>(argp)));
case GRunArgP::index_of<cv::Scalar*>(): return GMetaArg(descr_of(*util::get<cv::Scalar*>(argp)));
case GRunArgP::index_of<cv::MediaFrame*>(): return GMetaArg(descr_of(*util::get<cv::MediaFrame*>(argp)));
case GRunArgP::index_of<cv::detail::VectorRef>(): return GMetaArg(util::get<cv::detail::VectorRef>(argp).descr_of());
case GRunArgP::index_of<cv::detail::OpaqueRef>(): return GMetaArg(util::get<cv::detail::OpaqueRef>(argp).descr_of());
default: util::throw_error(std::logic_error("Unsupported GRunArgP type"));
@ -163,6 +164,7 @@ bool cv::can_describe(const GMetaArg& meta, const GRunArgP& argp)
case GRunArgP::index_of<cv::Mat*>(): return util::holds_alternative<GMatDesc>(meta) &&
util::get<GMatDesc>(meta).canDescribe(*util::get<cv::Mat*>(argp));
case GRunArgP::index_of<cv::Scalar*>(): return meta == GMetaArg(cv::descr_of(*util::get<cv::Scalar*>(argp)));
case GRunArgP::index_of<cv::MediaFrame*>(): return meta == GMetaArg(cv::descr_of(*util::get<cv::MediaFrame*>(argp)));
case GRunArgP::index_of<cv::detail::VectorRef>(): return meta == GMetaArg(util::get<cv::detail::VectorRef>(argp).descr_of());
case GRunArgP::index_of<cv::detail::OpaqueRef>(): return meta == GMetaArg(util::get<cv::detail::OpaqueRef>(argp).descr_of());
default: util::throw_error(std::logic_error("Unsupported GRunArgP type"));
@ -288,6 +290,8 @@ const void* cv::gimpl::proto::ptr(const GRunArgP &arg)
return cv::util::get<cv::detail::VectorRef>(arg).ptr();
case GRunArgP::index_of<cv::detail::OpaqueRef>():
return cv::util::get<cv::detail::OpaqueRef>(arg).ptr();
case GRunArgP::index_of<cv::MediaFrame*>():
return static_cast<const void*>(cv::util::get<cv::MediaFrame*>(arg));
default:
util::throw_error(std::logic_error("Unknown GRunArgP type!"));
}

@ -7,6 +7,8 @@
#include "precomp.hpp"
#include <opencv2/gapi/streaming/desync.hpp>
#include <opencv2/gapi/streaming/format.hpp>
#include <opencv2/gapi/core.hpp>
cv::GMat cv::gapi::streaming::desync(const cv::GMat &g) {
@ -72,3 +74,11 @@ cv::GMat cv::gapi::streaming::desync(const cv::GMat &g) {
// connected to a desynchronized data object, and this sole last_written_value
// object will feed both branches of the streaming executable.
}
cv::GFrame cv::gapi::streaming::copy(const cv::GFrame& in) {
return cv::gapi::streaming::GCopy::on(in);
}
cv::GMat cv::gapi::streaming::BGR(const cv::GFrame& in) {
return cv::gapi::streaming::GBGR::on(in);
}

@ -41,6 +41,11 @@ cv::detail::OpaqueRef& cv::GCPUContext::outOpaqueRef(int output)
return util::get<cv::detail::OpaqueRef>(m_results.at(output));
}
cv::MediaFrame& cv::GCPUContext::outFrame(int output)
{
return *util::get<cv::MediaFrame*>(m_results.at(output));
}
cv::GCPUKernel::GCPUKernel()
{
}

@ -0,0 +1,203 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2020 Intel Corporation
#include <opencv2/imgproc.hpp>
#include <opencv2/gapi/util/throw.hpp> // throw_error
#include <opencv2/gapi/streaming/format.hpp> // kernels
#include "api/gbackend_priv.hpp"
#include "backends/common/gbackend.hpp"
#include "gstreamingbackend.hpp"
#include "gstreamingkernel.hpp"
namespace {
// Graph metadata entry: stores the factory used to instantiate the actor
// object which performs the actual per-frame processing for an operation.
struct StreamingCreateFunction
{
static const char *name() { return "StreamingCreateFunction"; }
cv::gapi::streaming::CreateActorFunction createActorFunction;
};
// Typed views over the ADE graph carrying this backend's metadata.
using StreamingGraph = ade::TypedGraph
< cv::gimpl::Op
, StreamingCreateFunction
>;
using ConstStreamingGraph = ade::ConstTypedGraph
< cv::gimpl::Op
, StreamingCreateFunction
>;
// Island executable which delegates all work to a single IActor instance.
// Each island compiled by this backend must contain exactly one operation
// (enforced in the constructor below).
class GStreamingIntrinExecutable final: public cv::gimpl::GIslandExecutable
{
// Non-streaming (regular) execution path is not supported by this backend.
virtual void run(std::vector<InObj> &&,
std::vector<OutObj> &&) override {
GAPI_Assert(false && "Not implemented");
}
// Streaming execution path -- forwards to the actor (defined out-of-line).
virtual void run(GIslandExecutable::IInput &in,
GIslandExecutable::IOutput &out) override;
// This executable produces its outputs itself (see allocate() below).
virtual bool allocatesOutputs() const override { return true; }
// Return an empty RMat since we will reuse the input.
// There is no need to allocate and copy 4k image here.
virtual cv::RMat allocate(const cv::GMatDesc&) const override { return {}; }
// No internal state depends on metadata, so reshape is a no-op.
virtual bool canReshape() const override { return true; }
virtual void reshape(ade::Graph&, const cv::GCompileArgs&) override {
// Do nothing here
}
public:
GStreamingIntrinExecutable(const ade::Graph &,
const std::vector<ade::NodeHandle> &);
const ade::Graph& m_g;
cv::gimpl::GModel::ConstGraph m_gm;
cv::gapi::streaming::IActor::Ptr m_actor; // does the actual processing
};
void GStreamingIntrinExecutable::run(GIslandExecutable::IInput &in,
GIslandExecutable::IOutput &out)
{
m_actor->run(in, out);
}
// Backend implementation: stores each kernel's actor factory in the graph
// metadata at compile time and creates the island executable from it.
class GStreamingBackendImpl final: public cv::gapi::GBackend::Priv
{
virtual void unpackKernel(ade::Graph &graph,
const ade::NodeHandle &op_node,
const cv::GKernelImpl &impl) override
{
StreamingGraph gm(graph);
const auto &kimpl = cv::util::any_cast<cv::gapi::streaming::GStreamingKernel>(impl.opaque);
gm.metadata(op_node).set(StreamingCreateFunction{kimpl.createActorFunction});
}
virtual EPtr compile(const ade::Graph &graph,
const cv::GCompileArgs &,
const std::vector<ade::NodeHandle> &nodes) const override
{
return EPtr{new GStreamingIntrinExecutable(graph, nodes)};
}
// This backend manages island fusion itself: no operation of this
// backend may be merged into an island with any other operation.
virtual bool controlsMerge() const override
{
return true;
}
virtual bool allowsMerge(const cv::gimpl::GIslandModel::Graph &,
const ade::NodeHandle &,
const ade::NodeHandle &,
const ade::NodeHandle &) const override
{
return false;
}
};
GStreamingIntrinExecutable::GStreamingIntrinExecutable(const ade::Graph& g,
const std::vector<ade::NodeHandle>& nodes)
: m_g(g), m_gm(m_g)
{
using namespace cv::gimpl;
const auto is_op = [this](const ade::NodeHandle &nh)
{
return m_gm.metadata(nh).get<NodeType>().t == NodeType::OP;
};
auto it = std::find_if(nodes.begin(), nodes.end(), is_op);
GAPI_Assert(it != nodes.end() && "No operators found for this island?!");
// Instantiate the actor from the factory stored by unpackKernel().
ConstStreamingGraph cag(m_g);
m_actor = cag.metadata(*it).get<StreamingCreateFunction>().createActorFunction();
// Ensure this the only op in the graph
if (std::any_of(it+1, nodes.end(), is_op))
{
cv::util::throw_error
(std::logic_error
("Internal error: Streaming subgraph has multiple operations"));
}
}
} // anonymous namespace
cv::gapi::GBackend cv::gapi::streaming::backend()
{
    // The backend object is process-wide; it is created lazily on the
    // first call and shared by all subsequent callers.
    static cv::gapi::GBackend s_instance(std::make_shared<GStreamingBackendImpl>());
    return s_instance;
}
cv::gapi::GKernelPackage cv::gapi::streaming::kernels()
{
    // Bundle both intrinsics implemented by this backend into one package.
    auto pkg = cv::gapi::kernels<cv::gimpl::Copy, cv::gimpl::BGR>();
    return pkg;
}
void cv::gimpl::Copy::Actor::run(cv::gimpl::GIslandExecutable::IInput &in,
                                 cv::gimpl::GIslandExecutable::IOutput &out)
{
    // Pump input messages until the end-of-stream marker arrives.
    for (;;)
    {
        const auto msg = in.get();
        if (cv::util::holds_alternative<cv::gimpl::EndOfStream>(msg))
        {
            // Propagate stream termination downstream and stop.
            out.post(cv::gimpl::EndOfStream{});
            break;
        }

        const auto &args = cv::util::get<cv::GRunArgs>(msg);
        GAPI_Assert(args.size() == 1u);

        // Assign the input frame to the output slot and publish it.
        auto out_slot = out.get(0);
        *cv::util::get<cv::MediaFrame*>(out_slot) = cv::util::get<cv::MediaFrame>(args[0]);
        out.post(std::move(out_slot));
    }
}
// Converts incoming MediaFrames to BGR cv::RMat outputs.
// BGR frames are wrapped zero-copy; NV12 frames are converted via
// cv::cvtColorTwoPlane into a freshly allocated buffer.
void cv::gimpl::BGR::Actor::run(cv::gimpl::GIslandExecutable::IInput &in,
cv::gimpl::GIslandExecutable::IOutput &out)
{
while (true)
{
const auto in_msg = in.get();
if (cv::util::holds_alternative<cv::gimpl::EndOfStream>(in_msg))
{
// Forward the end-of-stream marker downstream and stop.
out.post(cv::gimpl::EndOfStream{});
return;
}
const cv::GRunArgs &in_args = cv::util::get<cv::GRunArgs>(in_msg);
GAPI_Assert(in_args.size() == 1u);
cv::GRunArgP out_arg = out.get(0);
auto frame = cv::util::get<cv::MediaFrame>(in_args[0]);
const auto& desc = frame.desc();
auto& rmat = *cv::util::get<cv::RMat*>(out_arg);
switch (desc.fmt)
{
case cv::MediaFormat::BGR:
// Already BGR: wrap the frame without copying pixel data.
rmat = cv::make_rmat<cv::gimpl::RMatMediaBGRAdapter>(frame);
break;
case cv::MediaFormat::NV12:
{
// Map the frame read-only, build Mat headers over the Y and
// interleaved UV planes, and convert into a new BGR buffer.
// The view only needs to stay alive until the conversion is done.
cv::Mat bgr;
auto view = frame.access(cv::MediaFrame::Access::R);
cv::Mat y_plane (desc.size, CV_8UC1, view.ptr[0]);
cv::Mat uv_plane(desc.size / 2, CV_8UC2, view.ptr[1]);
cv::cvtColorTwoPlane(y_plane, uv_plane, bgr, cv::COLOR_YUV2BGR_NV12);
rmat = cv::make_rmat<cv::gimpl::RMatAdapter>(bgr);
break;
}
default:
cv::util::throw_error(
std::logic_error("Unsupported MediaFormat for cv::gapi::streaming::BGR"));
}
out.post(std::move(out_arg));
}
}

@ -0,0 +1,89 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2020 Intel Corporation
#ifndef OPENCV_GAPI_GSTREAMINGBACKEND_HPP
#define OPENCV_GAPI_GSTREAMINGBACKEND_HPP
#include <memory>  // make_shared
#include <utility> // move
#include <opencv2/gapi/gkernel.hpp>
#include <opencv2/gapi/streaming/format.hpp>
#include "gstreamingkernel.hpp"
namespace cv {
namespace gimpl {
// Zero-copy adapter exposing a BGR cv::MediaFrame as a cv::RMat.
// Pixel data is not copied: RMat views are backed by the frame's own
// memory, and the frame mapping is kept alive via the view callback.
struct RMatMediaBGRAdapter final: public cv::RMat::Adapter
{
    // Taken by value and moved in to avoid an extra MediaFrame copy.
    RMatMediaBGRAdapter(cv::MediaFrame frame) : m_frame(std::move(frame)) { }
    virtual cv::RMat::View access(cv::RMat::Access a) override
    {
        auto view = m_frame.access(a == cv::RMat::Access::W ? cv::MediaFrame::Access::W
                                                            : cv::MediaFrame::Access::R);
        auto ptr    = reinterpret_cast<uchar*>(view.ptr[0]);
        auto stride = view.stride[0];
        // Move the MediaFrame::View into a shared_ptr captured by the RMat
        // view's destruction callback so the underlying mapping stays valid
        // for as long as the returned RMat::View is alive.
        std::shared_ptr<cv::MediaFrame::View> view_ptr =
            std::make_shared<cv::MediaFrame::View>(std::move(view));
        auto callback = [view_ptr]() mutable { view_ptr.reset(); };
        return cv::RMat::View(desc(), ptr, stride, callback);
    }
    virtual cv::GMatDesc desc() const override
    {
        const auto& desc = m_frame.desc();
        GAPI_Assert(desc.fmt == cv::MediaFormat::BGR);
        return cv::GMatDesc{CV_8U, 3, desc.size};
    }
    cv::MediaFrame m_frame;
};
// Kernel implementing cv::gapi::streaming::copy() (see GCopy in format.hpp).
struct Copy: public cv::detail::KernelTag
{
    using API = cv::gapi::streaming::GCopy;
    static gapi::GBackend backend() { return cv::gapi::streaming::backend(); }
    // Actor performing the per-frame work (defined in gstreamingbackend.cpp).
    class Actor final: public cv::gapi::streaming::IActor
    {
    public:
        Actor() = default;
        virtual void run(cv::gimpl::GIslandExecutable::IInput &in,
                         cv::gimpl::GIslandExecutable::IOutput &out) override;
    };
    static cv::gapi::streaming::IActor::Ptr create()
    {
        return std::make_shared<Actor>();
    }
    static cv::gapi::streaming::GStreamingKernel kernel() { return {&create}; }
};
// Kernel implementing cv::gapi::streaming::BGR() (see GBGR in format.hpp).
struct BGR: public cv::detail::KernelTag
{
    using API = cv::gapi::streaming::GBGR;
    static gapi::GBackend backend() { return cv::gapi::streaming::backend(); }
    // Actor performing the per-frame work (defined in gstreamingbackend.cpp).
    class Actor final: public cv::gapi::streaming::IActor {
    public:
        Actor() = default;
        virtual void run(cv::gimpl::GIslandExecutable::IInput &in,
                         cv::gimpl::GIslandExecutable::IOutput &out) override;
    };
    static cv::gapi::streaming::IActor::Ptr create()
    {
        return std::make_shared<Actor>();
    }
    static cv::gapi::streaming::GStreamingKernel kernel() { return {&create}; }
};
} // namespace gimpl
} // namespace cv
#endif // OPENCV_GAPI_GSTREAMINGBACKEND_HPP

@ -0,0 +1,37 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2020 Intel Corporation
#ifndef OPENCV_GAPI_GSTREAMINGKERNEL_HPP
#define OPENCV_GAPI_GSTREAMINGKERNEL_HPP
#include "compiler/gislandmodel.hpp"
namespace cv {
namespace gapi {
namespace streaming {
// Interface for the object which performs the actual per-frame processing
// of a streaming intrinsic inside its island executable.
class IActor {
public:
using Ptr = std::shared_ptr<IActor>;
// Process messages from `in` and publish results to `out` until the
// end-of-stream marker is received.
virtual void run(cv::gimpl::GIslandExecutable::IInput &in,
cv::gimpl::GIslandExecutable::IOutput &out) = 0;
virtual ~IActor() = default;
};
using CreateActorFunction = std::function<IActor::Ptr()>;
// Kernel descriptor for the streaming backend: just a factory which
// creates the actor at island-compile time (see unpackKernel()).
struct GStreamingKernel
{
CreateActorFunction createActorFunction;
};
} // namespace streaming
} // namespace gapi
} // namespace cv
#endif // OPENCV_GAPI_GSTREAMINGKERNEL_HPP

@ -43,6 +43,7 @@
#include <opencv2/gapi/cpu/imgproc.hpp> // ...Imgproc
#include <opencv2/gapi/cpu/video.hpp> // ...and Video kernel implementations
#include <opencv2/gapi/render/render.hpp> // render::ocv::backend()
#include <opencv2/gapi/streaming/format.hpp> // streaming::kernels()
#endif // !defined(GAPI_STANDALONE)
// </FIXME:>
@ -72,7 +73,8 @@ namespace
combine(cv::gapi::core::cpu::kernels(),
cv::gapi::imgproc::cpu::kernels(),
cv::gapi::video::cpu::kernels(),
cv::gapi::render::ocv::kernels());
cv::gapi::render::ocv::kernels(),
cv::gapi::streaming::kernels());
#else
cv::gapi::GKernelPackage();
#endif // !defined(GAPI_STANDALONE)

@ -20,7 +20,7 @@ namespace gimpl
// FIXME: GAPI_EXPORTS is here only due to tests and Windows linker issues
// FIXME: It seems it clearly duplicates the GStreamingCompiled and
// GStreamingExecutable APIs so is highly redundant now.
// GStreamingIntrinExecutable APIs so is highly redundant now.
// Same applies to GCompiled/GCompiled::Priv/GExecutor.
class GAPI_EXPORTS GStreamingCompiled::Priv
{

@ -201,7 +201,7 @@ void traceDown(cv::gimpl::GModel::Graph &g,
// Streaming case: ensure the graph has proper isolation of the
// desynchronized parts, set proper Edge metadata hints for
// GStreamingExecutable
// GStreamingIntrinExecutable
void apply(cv::gimpl::GModel::Graph &g) {
using namespace cv::gimpl;

@ -32,7 +32,7 @@ namespace cv { namespace gimpl { namespace passes {
* connected to a new "Sink" node which becomes its _consumer_.
*
* These extra nodes are required to streamline the queues
* initialization by the GStreamingExecutable and its derivatives.
* initialization by the GStreamingIntrinExecutable and its derivatives.
*/
void addStreaming(ade::passes::PassContext &ctx)
{

@ -144,6 +144,9 @@ void sync_data(cv::GRunArgs &results, cv::GRunArgsP &outputs)
case T::index_of<cv::detail::OpaqueRef>():
cv::util::get<cv::detail::OpaqueRef>(out_obj).mov(cv::util::get<cv::detail::OpaqueRef>(res_obj));
break;
case T::index_of<cv::MediaFrame*>():
*cv::util::get<cv::MediaFrame*>(out_obj) = std::move(cv::util::get<cv::MediaFrame>(res_obj));
break;
default:
GAPI_Assert(false && "This value type is not supported!"); // ...maybe because of STANDALONE mode.
break;
@ -636,6 +639,13 @@ class StreamingOutput final: public cv::gimpl::GIslandExecutable::IOutput
ret_val = cv::GRunArgP(rr);
}
break;
case cv::GShape::GFRAME:
{
cv::MediaFrame frame;
out_arg = cv::GRunArg(std::move(frame));
ret_val = cv::GRunArgP(&cv::util::get<cv::MediaFrame>(out_arg));
}
break;
default:
cv::util::throw_error(std::logic_error("Unsupported GShape"));
}

@ -21,6 +21,7 @@
#include <opencv2/gapi/streaming/cap.hpp>
#include <opencv2/gapi/streaming/desync.hpp>
#include <opencv2/gapi/streaming/format.hpp>
namespace opencv_test
{
@ -113,6 +114,111 @@ GAPI_OCV_KERNEL(OCVDelay, Delay) {
}
};
// Test adapter exposing a cv::Mat as a single-plane BGR MediaFrame.
// The optional callback is attached to every View handed out (used by
// tests to observe view lifetime).
class TestMediaBGR final: public cv::MediaFrame::IAdapter {
cv::Mat m_mat;
using Cb = cv::MediaFrame::View::Callback;
Cb m_cb;
public:
explicit TestMediaBGR(cv::Mat m, Cb cb = [](){})
: m_mat(m), m_cb(cb) {
}
cv::GFrameDesc meta() const override {
return cv::GFrameDesc{cv::MediaFormat::BGR, cv::Size(m_mat.cols, m_mat.rows)};
}
cv::MediaFrame::View access(cv::MediaFrame::Access) override {
// Single interleaved plane; remaining plane slots stay null.
cv::MediaFrame::View::Ptrs pp = { m_mat.ptr(), nullptr, nullptr, nullptr };
cv::MediaFrame::View::Strides ss = { m_mat.step, 0u, 0u, 0u };
return cv::MediaFrame::View(std::move(pp), std::move(ss), Cb{m_cb});
}
};
// Test adapter exposing separate Y and interleaved UV planes as an NV12
// MediaFrame. Frame size is taken from the Y plane.
class TestMediaNV12 final: public cv::MediaFrame::IAdapter {
cv::Mat m_y;
cv::Mat m_uv;
public:
TestMediaNV12(cv::Mat y, cv::Mat uv) : m_y(y), m_uv(uv) {
}
cv::GFrameDesc meta() const override {
return cv::GFrameDesc{cv::MediaFormat::NV12, cv::Size(m_y.cols, m_y.rows)};
}
cv::MediaFrame::View access(cv::MediaFrame::Access) override {
// Plane 0: luma (Y), plane 1: interleaved chroma (UV).
cv::MediaFrame::View::Ptrs pp = {
m_y.ptr(), m_uv.ptr(), nullptr, nullptr
};
cv::MediaFrame::View::Strides ss = {
m_y.step, m_uv.step, 0u, 0u
};
return cv::MediaFrame::View(std::move(pp), std::move(ss));
}
};
// Test source: wraps GCaptureSource so every pulled cv::Mat is re-wrapped
// into a BGR MediaFrame (via TestMediaBGR), and meta reports GFrameDesc.
class BGRSource : public cv::gapi::wip::GCaptureSource {
public:
explicit BGRSource(const std::string& pipeline)
: cv::gapi::wip::GCaptureSource(pipeline) {
}
// NOTE(review): not marked `override` -- confirm this matches (and hides or
// overrides) the base class pull() as intended.
bool pull(cv::gapi::wip::Data& data) {
if (cv::gapi::wip::GCaptureSource::pull(data)) {
// Replace the Mat payload with a MediaFrame wrapping the same data.
data = cv::MediaFrame::Create<TestMediaBGR>(cv::util::get<cv::Mat>(data));
return true;
}
return false;
}
GMetaArg descr_of() const override {
// Reuse the base source's GMatDesc size, but report a BGR frame meta.
return cv::GMetaArg{cv::GFrameDesc{cv::MediaFormat::BGR,
cv::util::get<cv::GMatDesc>(
cv::gapi::wip::GCaptureSource::descr_of()).size}};
}
};
// Converts a BGR image to NV12 layout: `y` receives the full-resolution
// luma plane, `uv` the half-resolution interleaved chroma plane (CV_8UC2).
// Goes through I420 and re-packs the planar U/V rows into interleaved UV.
void cvtBGR2NV12(const cv::Mat& bgr, cv::Mat& y, cv::Mat& uv) {
cv::Size frame_sz = bgr.size();
cv::Size half_sz = frame_sz / 2;
cv::Mat yuv;
cv::cvtColor(bgr, yuv, cv::COLOR_BGR2YUV_I420);
// Copy Y plane
yuv.rowRange(0, frame_sz.height).copyTo(y);
// Merge sampled U and V planes
// In I420 the U and V planes each hold half_sz.height*half_sz.width bytes,
// stored as range_h full-width rows; reshape them to half-size 2-D planes
// before interleaving.
std::vector<int> dims = {half_sz.height, half_sz.width};
auto start = frame_sz.height;
auto range_h = half_sz.height/2;
std::vector<cv::Mat> uv_planes = {
yuv.rowRange(start, start + range_h) .reshape(0, dims),
yuv.rowRange(start + range_h, start + range_h*2).reshape(0, dims)
};
cv::merge(uv_planes, uv);
}
// Test source: wraps GCaptureSource so every pulled cv::Mat is converted
// to NV12 planes and re-wrapped into a MediaFrame (via TestMediaNV12).
class NV12Source : public cv::gapi::wip::GCaptureSource {
public:
explicit NV12Source(const std::string& pipeline)
: cv::gapi::wip::GCaptureSource(pipeline) {
}
// NOTE(review): not marked `override` -- confirm this matches (and hides or
// overrides) the base class pull() as intended.
bool pull(cv::gapi::wip::Data& data) {
if (cv::gapi::wip::GCaptureSource::pull(data)) {
// Convert the captured BGR Mat to NV12 planes and wrap them.
cv::Mat bgr = cv::util::get<cv::Mat>(data);
cv::Mat y, uv;
cvtBGR2NV12(bgr, y, uv);
data = cv::MediaFrame::Create<TestMediaNV12>(y, uv);
return true;
}
return false;
}
GMetaArg descr_of() const override {
// Reuse the base source's GMatDesc size, but report an NV12 frame meta.
return cv::GMetaArg{cv::GFrameDesc{cv::MediaFormat::NV12,
cv::util::get<cv::GMatDesc>(
cv::gapi::wip::GCaptureSource::descr_of()).size}};
}
};
} // anonymous namespace
TEST_P(GAPI_Streaming, SmokeTest_ConstInput_GMat)
@ -1247,7 +1353,6 @@ TEST(GAPI_Streaming_Desync, SmokeTest_Streaming)
}
EXPECT_EQ(100u, out1_hits); // out1 must be available for all frames
EXPECT_LE(out2_hits, out1_hits); // out2 must appear less times than out1
std::cout << "Got " << out1_hits << " out1's and " << out2_hits << " out2's" << std::endl;
}
TEST(GAPI_Streaming_Desync, SmokeTest_Streaming_TwoParts)
@ -1588,4 +1693,167 @@ TEST(GAPI_Streaming_Desync, DesyncObjectConsumedByTwoIslandsViaSameDesync) {
EXPECT_NO_THROW(c.compileStreaming(cv::compile_args(p)));
}
// Checks that streaming::copy() delivers frames identical to what
// cv::VideoCapture reads from the same file.
TEST(GAPI_Streaming, CopyFrame)
{
initTestDataPath();
std::string filepath = findDataFile("cv/video/768x576.avi");
cv::GFrame in;
auto out = cv::gapi::streaming::copy(in);
cv::GComputation comp(cv::GIn(in), cv::GOut(out));
auto cc = comp.compileStreaming();
cc.setSource<BGRSource>(filepath);
// Reference reader over the same video file.
cv::VideoCapture cap;
cap.open(filepath);
if (!cap.isOpened())
throw SkipTestException("Video file can not be opened");
cv::MediaFrame frame;
cv::Mat ocv_mat;
std::size_t num_frames = 0u;
std::size_t max_frames = 10u;
cc.start();
while (cc.pull(cv::gout(frame)) && num_frames < max_frames)
{
// Wrap the frame's plane into a Mat header (no copy) for comparison.
auto view = frame.access(cv::MediaFrame::Access::R);
cv::Mat gapi_mat(frame.desc().size, CV_8UC3, view.ptr[0]);
num_frames++;
cap >> ocv_mat;
EXPECT_EQ(0, cvtest::norm(ocv_mat, gapi_mat, NORM_INF));
}
}
// Checks that a compiled streaming graph can be stopped and restarted
// with a source of different resolution (exercises backend reshape).
TEST(GAPI_Streaming, Reshape)
{
initTestDataPath();
std::string filepath = findDataFile("cv/video/768x576.avi");
cv::GFrame in;
auto out = cv::gapi::streaming::copy(in);
cv::GComputation comp(cv::GIn(in), cv::GOut(out));
auto cc = comp.compileStreaming();
cc.setSource<BGRSource>(filepath);
// Reference reader over the same video file.
cv::VideoCapture cap;
cap.open(filepath);
if (!cap.isOpened())
throw SkipTestException("Video file can not be opened");
cv::MediaFrame frame;
cv::Mat ocv_mat;
std::size_t num_frames = 0u;
std::size_t max_frames = 10u;
cc.start();
// First run: original resolution.
while (cc.pull(cv::gout(frame)) && num_frames < max_frames)
{
auto view = frame.access(cv::MediaFrame::Access::R);
cv::Mat gapi_mat(frame.desc().size, CV_8UC3, view.ptr[0]);
num_frames++;
cap >> ocv_mat;
EXPECT_EQ(0, cvtest::norm(ocv_mat, gapi_mat, NORM_INF));
}
// Reshape the graph meta
filepath = findDataFile("cv/video/1920x1080.avi");
cc.stop();
cc.setSource<BGRSource>(filepath);
cap.open(filepath);
if (!cap.isOpened())
throw SkipTestException("Video file can not be opened");
cv::MediaFrame frame2;
cv::Mat ocv_mat2;
num_frames = 0u;
cc.start();
// Second run: same compiled object, new (larger) resolution.
while (cc.pull(cv::gout(frame2)) && num_frames < max_frames)
{
auto view = frame2.access(cv::MediaFrame::Access::R);
cv::Mat gapi_mat(frame2.desc().size, CV_8UC3, view.ptr[0]);
num_frames++;
cap >> ocv_mat2;
EXPECT_EQ(0, cvtest::norm(ocv_mat2, gapi_mat, NORM_INF));
}
}
// Checks streaming::BGR() on a source that already produces BGR frames:
// the output must match what cv::VideoCapture reads directly.
TEST(GAPI_Streaming, AccessBGRFromBGRFrame)
{
initTestDataPath();
std::string filepath = findDataFile("cv/video/768x576.avi");
cv::GFrame in;
auto out = cv::gapi::streaming::BGR(in);
cv::GComputation comp(cv::GIn(in), cv::GOut(out));
auto cc = comp.compileStreaming();
cc.setSource<BGRSource>(filepath);
// Reference reader over the same video file.
cv::VideoCapture cap;
cap.open(filepath);
if (!cap.isOpened())
throw SkipTestException("Video file can not be opened");
cv::Mat ocv_mat, gapi_mat;
std::size_t num_frames = 0u;
std::size_t max_frames = 10u;
cc.start();
while (cc.pull(cv::gout(gapi_mat)) && num_frames < max_frames)
{
num_frames++;
cap >> ocv_mat;
EXPECT_EQ(0, cvtest::norm(ocv_mat, gapi_mat, NORM_INF));
}
}
// Checks streaming::BGR() on NV12 input: the reference is produced by
// pushing the captured BGR image through the same BGR->NV12->BGR round
// trip, so both sides suffer identical chroma subsampling loss.
TEST(GAPI_Streaming, AccessBGRFromNV12Frame)
{
initTestDataPath();
std::string filepath = findDataFile("cv/video/768x576.avi");
cv::GFrame in;
auto out = cv::gapi::streaming::BGR(in);
cv::GComputation comp(cv::GIn(in), cv::GOut(out));
auto cc = comp.compileStreaming();
cc.setSource<NV12Source>(filepath);
// Reference reader over the same video file.
cv::VideoCapture cap;
cap.open(filepath);
if (!cap.isOpened())
throw SkipTestException("Video file can not be opened");
cv::Mat ocv_mat, gapi_mat;
std::size_t num_frames = 0u;
std::size_t max_frames = 10u;
cc.start();
while (cc.pull(cv::gout(gapi_mat)) && num_frames < max_frames)
{
num_frames++;
cap >> ocv_mat;
// Apply the same NV12 round trip the source applies to its frames.
cv::Mat y, uv;
cvtBGR2NV12(ocv_mat, y, uv);
cv::cvtColorTwoPlane(y, uv, ocv_mat, cv::COLOR_YUV2BGR_NV12);
EXPECT_EQ(0, cvtest::norm(ocv_mat, gapi_mat, NORM_INF));
}
}
} // namespace opencv_test

Loading…
Cancel
Save