Merge pull request #21560 from dbudniko:dbudniko/gapi_media_format_gray_plus_gst_source

G-API gst source gray support
Dmitry Budnikov 3 years ago committed by GitHub
parent 08356007c9
commit 3eeec4faae
Changed files (5):

1. modules/gapi/include/opencv2/gapi/streaming/gstreamer/gstreamersource.hpp (4 changed lines)
2. modules/gapi/src/streaming/gstreamer/gstreamer_media_adapter.cpp (105 changed lines)
3. modules/gapi/src/streaming/gstreamer/gstreamersource.cpp (87 changed lines)
4. modules/gapi/src/streaming/gstreamer/gstreamersource_priv.hpp (1 changed line)
5. modules/gapi/test/streaming/gapi_gstreamersource_tests.cpp (200 changed lines)

@@ -32,13 +32,13 @@ namespace gst {
 * Pipeline can actually contain many sink elements, but it must have one and only one
 * appsink among them.
 *
-* - data passed to appsink should be video-frame in NV12 format.
+* - data passed to appsink should be video-frame in NV12 or GRAY8 format.
 *
 * 'outputType' is used to select type of output data to produce: 'cv::MediaFrame' or 'cv::Mat'.
 * To produce 'cv::MediaFrame'-s you need to pass 'GStreamerSource::OutputType::FRAME' and,
 * correspondingly, 'GStreamerSource::OutputType::MAT' to produce 'cv::Mat'-s.
 * Please note, that in the last case, output 'cv::Mat' will be of BGR format, internal conversion
-* from NV12 GStreamer data will happen.
+* from NV12 / GRAY8 GStreamer data will happen.
 * Default value for 'outputType' is 'GStreamerSource::OutputType::MAT'.
 *
 * @note Stream sources are passed to G-API via shared pointers, so please use gapi::make_src<>
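
For reference, a minimal usage sketch of what this documentation describes (not from the diff; the pipeline string is illustrative, and make_src is assumed to be cv::gapi::wip::make_src):

    // A minimal sketch, assuming a GRAY8-producing pipeline ending in an appsink.
    // With OutputType::MAT the source emits BGR cv::Mat-s after the internal
    // GRAY8 -> BGR conversion mentioned above.
    auto src = cv::gapi::wip::make_src<cv::gapi::wip::GStreamerSource>(
        "videotestsrc num-buffers=10 ! video/x-raw,format=GRAY8 ! appsink",
        cv::gapi::wip::GStreamerSource::OutputType::MAT);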

@@ -28,13 +28,41 @@ GStreamerMediaAdapter::GStreamerMediaAdapter(const cv::GFrameDesc& frameDesc,
     GstVideoMeta* videoMeta = gst_buffer_get_video_meta(m_buffer);
     if (videoMeta != nullptr) {
-        m_strides = { videoMeta->stride[0], videoMeta->stride[1] };
-        m_offsets = { videoMeta->offset[0], videoMeta->offset[1] };
+        switch (m_frameDesc.fmt) {
+            case cv::MediaFormat::NV12: {
+                m_strides = { videoMeta->stride[0], videoMeta->stride[1] };
+                m_offsets = { videoMeta->offset[0], videoMeta->offset[1] };
+                break;
+            }
+            case cv::MediaFormat::GRAY: {
+                m_strides = { videoMeta->stride[0] };
+                m_offsets = { videoMeta->offset[0] };
+                break;
+            }
+            default: {
+                GAPI_Assert(false && "Non NV12 or GRAY Media format is not expected here");
+                break;
+            }
+        }
     } else {
-        m_strides = { GST_VIDEO_INFO_PLANE_STRIDE(m_videoInfo.get(), 0),
-                      GST_VIDEO_INFO_PLANE_STRIDE(m_videoInfo.get(), 1) };
-        m_offsets = { GST_VIDEO_INFO_PLANE_OFFSET(m_videoInfo.get(), 0),
-                      GST_VIDEO_INFO_PLANE_OFFSET(m_videoInfo.get(), 1) };
+        switch (m_frameDesc.fmt) {
+            case cv::MediaFormat::NV12: {
+                m_strides = { GST_VIDEO_INFO_PLANE_STRIDE(m_videoInfo.get(), 0),
+                              GST_VIDEO_INFO_PLANE_STRIDE(m_videoInfo.get(), 1) };
+                m_offsets = { GST_VIDEO_INFO_PLANE_OFFSET(m_videoInfo.get(), 0),
+                              GST_VIDEO_INFO_PLANE_OFFSET(m_videoInfo.get(), 1) };
+                break;
+            }
+            case cv::MediaFormat::GRAY: {
+                m_strides = { GST_VIDEO_INFO_PLANE_STRIDE(m_videoInfo.get(), 0) };
+                m_offsets = { GST_VIDEO_INFO_PLANE_OFFSET(m_videoInfo.get(), 0) };
+                break;
+            }
+            default: {
+                GAPI_Assert(false && "Non NV12 or GRAY Media format is not expected here");
+                break;
+            }
+        }
     }
 }
@@ -71,8 +99,10 @@ cv::MediaFrame::View GStreamerMediaAdapter::access(cv::MediaFrame::Access access
     if(!m_isMapped.load(std::memory_order_relaxed)) {
-        GAPI_Assert(GST_VIDEO_INFO_N_PLANES(m_videoInfo.get()) == 2);
-        GAPI_Assert(GST_VIDEO_INFO_FORMAT(m_videoInfo.get()) == GST_VIDEO_FORMAT_NV12);
+        GAPI_Assert(GST_VIDEO_INFO_N_PLANES(m_videoInfo.get()) == 2 ||
+                    GST_VIDEO_INFO_N_PLANES(m_videoInfo.get()) == 1);
+        GAPI_Assert(GST_VIDEO_INFO_FORMAT(m_videoInfo.get()) == GST_VIDEO_FORMAT_NV12 ||
+                    GST_VIDEO_INFO_FORMAT(m_videoInfo.get()) == GST_VIDEO_FORMAT_GRAY8);
 
         // TODO: Use RAII for map/unmap
         if (access == cv::MediaFrame::Access::W) {
@@ -85,27 +115,56 @@ cv::MediaFrame::View GStreamerMediaAdapter::access(cv::MediaFrame::Access access
             }
             GAPI_Assert(GST_VIDEO_FRAME_PLANE_STRIDE(&m_videoFrame, 0) == m_strides[0]);
-            GAPI_Assert(GST_VIDEO_FRAME_PLANE_STRIDE(&m_videoFrame, 1) == m_strides[1]);
             GAPI_Assert(GST_VIDEO_FRAME_PLANE_OFFSET(&m_videoFrame, 0) == m_offsets[0]);
-            GAPI_Assert(GST_VIDEO_FRAME_PLANE_OFFSET(&m_videoFrame, 1) == m_offsets[1]);
+            if (m_frameDesc.fmt == cv::MediaFormat::NV12) {
+                GAPI_Assert(GST_VIDEO_FRAME_PLANE_STRIDE(&m_videoFrame, 1) == m_strides[1]);
+                GAPI_Assert(GST_VIDEO_FRAME_PLANE_OFFSET(&m_videoFrame, 1) == m_offsets[1]);
+            }
 
             m_isMapped.store(true, std::memory_order_release);
         }
     }
 
-    cv::MediaFrame::View::Ptrs ps {
-        static_cast<uint8_t*>(GST_VIDEO_FRAME_PLANE_DATA(&m_videoFrame, 0)) + m_offsets[0], // Y-plane
-        static_cast<uint8_t*>(GST_VIDEO_FRAME_PLANE_DATA(&m_videoFrame, 0)) + m_offsets[1], // UV-plane
-        nullptr,
-        nullptr
-    };
-    cv::MediaFrame::View::Strides ss = {
-        static_cast<std::size_t>(m_strides[0]), // Y-plane stride
-        static_cast<std::size_t>(m_strides[1]), // UV-plane stride
-        0u,
-        0u
-    };
+    cv::MediaFrame::View::Ptrs ps;
+    cv::MediaFrame::View::Strides ss;
+
+    switch (m_frameDesc.fmt) {
+        case cv::MediaFormat::NV12: {
+            ps = {
+                static_cast<uint8_t*>(GST_VIDEO_FRAME_PLANE_DATA(&m_videoFrame, 0)) + m_offsets[0], // Y-plane
+                static_cast<uint8_t*>(GST_VIDEO_FRAME_PLANE_DATA(&m_videoFrame, 0)) + m_offsets[1], // UV-plane
+                nullptr,
+                nullptr
+            };
+            ss = {
+                static_cast<std::size_t>(m_strides[0]), // Y-plane stride
+                static_cast<std::size_t>(m_strides[1]), // UV-plane stride
+                0u,
+                0u
+            };
+            break;
+        }
+        case cv::MediaFormat::GRAY: {
+            ps = {
+                static_cast<uint8_t*>(GST_VIDEO_FRAME_PLANE_DATA(&m_videoFrame, 0)) + m_offsets[0], // Y-plane
+                nullptr,
+                nullptr,
+                nullptr
+            };
+            ss = {
+                static_cast<std::size_t>(m_strides[0]), // Y-plane stride
+                0u,
+                0u,
+                0u
+            };
+            break;
+        }
+        default: {
+            GAPI_Assert(false && "Non NV12 or GRAY Media format is not expected here");
+            break;
+        }
+    }
 
     --thread_counters;
     return cv::MediaFrame::View(std::move(ps), std::move(ss));
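
Downstream consumers read the returned view directly; a minimal sketch of pulling the single GRAY plane out of a frame (assuming 'frame' is a cv::MediaFrame produced by this source; the same pattern appears in the test kernels below):

    // A sketch, assuming 'frame' is a GRAY cv::MediaFrame from GStreamerSource:
    auto view = frame.access(cv::MediaFrame::Access::R);
    cv::Mat y(frame.desc().size, CV_8UC1, view.ptr[0], view.stride[0]);
    // For NV12, view.ptr[1] / view.stride[1] would additionally address the UV plane.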

@@ -30,8 +30,9 @@ namespace gst {
 #ifdef HAVE_GSTREAMER
-constexpr char NV12_CAPS_STRING[] =
-    "video/x-raw,format=NV12;video/x-raw(memory:DMABuf),format=NV12";
+constexpr char ALLOWED_CAPS_STRING[] =
+    "video/x-raw,format=(string){NV12, GRAY8};video/x-raw(memory:DMABuf),format=(string){NV12, GRAY8}";
 
 namespace {
 GstPadProbeReturn appsinkQueryCallback(GstPad*, GstPadProbeInfo* info, gpointer)
@@ -137,17 +138,17 @@ void GStreamerSource::Priv::configureAppsink() {
     // Do not emit signals: all calls will be synchronous and blocking.
     gst_app_sink_set_emit_signals(GST_APP_SINK(m_appsink.get()), FALSE);
 
-    GStreamerPtr<GstCaps> nv12Caps(gst_caps_from_string(NV12_CAPS_STRING));
+    GStreamerPtr<GstCaps> gstCaps(gst_caps_from_string(ALLOWED_CAPS_STRING));
 
     GStreamerPtr<GstPad> appsinkPad(gst_element_get_static_pad(m_appsink, "sink"));
     GStreamerPtr<GstCaps> peerCaps(gst_pad_peer_query_caps(appsinkPad, NULL));
-    if (!gst_caps_can_intersect(peerCaps, nv12Caps)) {
+    if (!gst_caps_can_intersect(peerCaps, gstCaps)) {
         cv::util::throw_error(
-            std::logic_error("appsink element can only consume video-frame in NV12 format in "
+            std::logic_error("appsink element can only consume video-frame in NV12 or GRAY8 format in "
                              "GStreamerSource"));
     }
 
-    gst_app_sink_set_caps(GST_APP_SINK(m_appsink.get()), nv12Caps);
+    gst_app_sink_set_caps(GST_APP_SINK(m_appsink.get()), gstCaps);
 
     gst_pad_add_probe(appsinkPad, GST_PAD_PROBE_TYPE_QUERY_DOWNSTREAM, appsinkQueryCallback,
                       NULL, NULL);
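
The negotiation above can be checked standalone; a minimal sketch, assuming gst_init() has already been called:

    // A sketch: GRAY8 caps intersect the widened allowed-caps set.
    GstCaps* allowed = gst_caps_from_string("video/x-raw,format=(string){NV12, GRAY8}");
    GstCaps* gray8   = gst_caps_from_string("video/x-raw,format=GRAY8");
    g_assert(gst_caps_can_intersect(gray8, allowed));
    gst_caps_unref(gray8);
    gst_caps_unref(allowed);
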
@@ -184,10 +185,29 @@ void GStreamerSource::Priv::prepareVideoMeta()
         cv::util::throw_error(std::logic_error("Cannot query video width/height."));
     }
 
+    // Fill GstVideoInfo structure to work further with GstVideoFrame class.
+    if (!gst_video_info_from_caps(&m_videoInfo, prerollCaps)) {
+        cv::util::throw_error(std::logic_error("preroll sample has invalid caps."));
+    }
+
+    m_type = GST_VIDEO_INFO_FORMAT(&m_videoInfo);
+
     switch(m_outputType) {
         case GStreamerSource::OutputType::FRAME: {
             // Construct metadata for media frame.
-            m_mediaFrameMeta = GFrameDesc { cv::MediaFormat::NV12, cv::Size(width, height) };
+            switch (m_type) {
+                case GST_VIDEO_FORMAT_NV12: {
+                    m_mediaFrameMeta = GFrameDesc{ cv::MediaFormat::NV12, cv::Size(width, height) };
+                    GAPI_Assert(GST_VIDEO_INFO_N_PLANES(&m_videoInfo) == 2);
+                    break;
+                }
+                case GST_VIDEO_FORMAT_GRAY8: {
+                    m_mediaFrameMeta = GFrameDesc{ cv::MediaFormat::GRAY, cv::Size(width, height) };
+                    GAPI_Assert(GST_VIDEO_INFO_N_PLANES(&m_videoInfo) == 1);
+                    break;
+                }
+                default: {
+                    GAPI_Assert(false && "Unsupported GStreamerSource FRAME type.");
+                }
+            }
             break;
         }
         case GStreamerSource::OutputType::MAT: {
@@ -197,13 +217,6 @@ void GStreamerSource::Priv::prepareVideoMeta()
         }
     }
 
-    // Fill GstVideoInfo structure to work further with GstVideoFrame class.
-    if (!gst_video_info_from_caps(&m_videoInfo, prerollCaps)) {
-        cv::util::throw_error(std::logic_error("preroll sample has invalid caps."));
-    }
-
-    GAPI_Assert(GST_VIDEO_INFO_N_PLANES(&m_videoInfo) == 2);
-    GAPI_Assert(GST_VIDEO_INFO_FORMAT(&m_videoInfo) == GST_VIDEO_FORMAT_NV12);
-
     m_isMetaPrepared = true;
 }
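
Taken together, the two hunks above move the GstVideoInfo parsing ahead of the output-type switch and map the queried GStreamer format onto cv::MediaFormat; a condensed, hypothetical helper (not code from this commit) capturing that mapping:

    // Hypothetical helper summarizing prepareVideoMeta()'s format mapping:
    //   GST_VIDEO_FORMAT_NV12  (2 planes) -> cv::MediaFormat::NV12
    //   GST_VIDEO_FORMAT_GRAY8 (1 plane)  -> cv::MediaFormat::GRAY
    static cv::MediaFormat toMediaFormat(GstVideoFormat fmt) {
        switch (fmt) {
            case GST_VIDEO_FORMAT_NV12:  return cv::MediaFormat::NV12;
            case GST_VIDEO_FORMAT_GRAY8: return cv::MediaFormat::GRAY;
            default: GAPI_Assert(false && "unsupported GStreamer format"); // throws
        }
        return cv::MediaFormat::GRAY; // unreachable
    }
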
@@ -272,28 +285,46 @@ bool GStreamerSource::Priv::retrieveFrame(cv::Mat& data)
     try
     {
-        // m_matMeta holds width and height for 8U BGR frame, but actual
-        // frame m_buffer we request from GStreamer pipeline has 8U NV12 format.
-        // Constructing y and uv cv::Mat-s from such a m_buffer:
-        GAPI_Assert((uint8_t*)GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 1) ==
-                    (uint8_t*)GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0) +
-                    GST_VIDEO_FRAME_PLANE_OFFSET(&videoFrame, 1));
-
-        cv::Mat y(m_matMeta.size, CV_8UC1,
-                  (uint8_t*)GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0) +
-                  GST_VIDEO_FRAME_PLANE_OFFSET(&videoFrame, 0),
-                  GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 0));
-        cv::Mat uv(m_matMeta.size / 2, CV_8UC2,
-                   (uint8_t*)GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0) +
-                   GST_VIDEO_FRAME_PLANE_OFFSET(&videoFrame, 1),
-                   GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 1));
-
-        cv::cvtColorTwoPlane(y, uv, data, cv::COLOR_YUV2BGR_NV12);
+        switch (m_type) {
+            case GST_VIDEO_FORMAT_NV12: {
+                // m_matMeta holds width and height for 8U BGR frame, but actual
+                // frame m_buffer we request from GStreamer pipeline has 8U NV12 format.
+                // Constructing y and uv cv::Mat-s from such a m_buffer:
+                GAPI_Assert((uint8_t*)GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 1) ==
+                            (uint8_t*)GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0) +
+                            GST_VIDEO_FRAME_PLANE_OFFSET(&videoFrame, 1));
+                GAPI_Assert(GST_VIDEO_INFO_N_PLANES(&m_videoInfo) == 2);
+
+                cv::Mat y(m_matMeta.size, CV_8UC1,
+                          (uint8_t*)GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0) +
+                          GST_VIDEO_FRAME_PLANE_OFFSET(&videoFrame, 0),
+                          GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 0));
+                cv::Mat uv(m_matMeta.size / 2, CV_8UC2,
+                           (uint8_t*)GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0) +
+                           GST_VIDEO_FRAME_PLANE_OFFSET(&videoFrame, 1),
+                           GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 1));
+
+                cv::cvtColorTwoPlane(y, uv, data, cv::COLOR_YUV2BGR_NV12);
+                break;
+            }
+            case GST_VIDEO_FORMAT_GRAY8: {
+                GAPI_Assert(GST_VIDEO_INFO_N_PLANES(&m_videoInfo) == 1);
+                cv::Mat y(m_matMeta.size, CV_8UC1,
+                          (uint8_t*)GST_VIDEO_FRAME_PLANE_DATA(&videoFrame, 0) +
+                          GST_VIDEO_FRAME_PLANE_OFFSET(&videoFrame, 0),
+                          GST_VIDEO_FRAME_PLANE_STRIDE(&videoFrame, 0));
+                cv::cvtColor(y, data, cv::COLOR_GRAY2BGR);
+                break;
+            }
+            default: {
+                GAPI_Assert(false && "retrieveFrame - unsupported GStreamerSource FRAME type.");
+            }
+        }
     }
     catch (...)
     {
         gst_video_frame_unmap(&videoFrame);
-        cv::util::throw_error(std::runtime_error("NV12 buffer conversion to BGR is failed!"));
+        cv::util::throw_error(std::runtime_error("NV12 or GRAY8 buffer conversion to BGR is failed!"));
     }
 
     gst_video_frame_unmap(&videoFrame);
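
For illustration, the new GRAY8 branch boils down to one cvtColor call; a standalone sketch with made-up dimensions:

    // A sketch: GRAY8 -> BGR replicates the single channel into three.
    cv::Mat gray(480, 640, CV_8UC1, cv::Scalar(128)); // illustrative 8-bit frame
    cv::Mat bgr;
    cv::cvtColor(gray, bgr, cv::COLOR_GRAY2BGR);
    CV_Assert(bgr.type() == CV_8UC3);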

@@ -59,6 +59,7 @@ protected:
     bool m_isPipelinePlaying = false;
     int64_t m_frameId = 0L;
+    size_t m_type = 0; //Gstreamer video format type
 
 protected:
     void configureAppsink();
@@ -29,6 +29,7 @@ namespace opencv_test
 struct GStreamerSourceTest : public TestWithParam<std::tuple<std::string, cv::Size, std::size_t>>
 { };
 
 TEST_P(GStreamerSourceTest, AccuracyTest)
 {
     std::string pipeline;
@@ -143,6 +144,16 @@ G_TYPED_KERNEL(GGstFrameCopyToNV12, <std::tuple<cv::GMat,cv::GMat>(GFrame)>,
     }
 };
 
+G_TYPED_KERNEL(GGstFrameCopyToGRAY8, <cv::GMat(GFrame)>,
+               "org.opencv.test.gstframe_copy_to_gray8")
+{
+    static GMatDesc outMeta(GFrameDesc desc) {
+        GMatDesc y{ CV_8U, 1, desc.size, false };
+        return y;
+    }
+};
+
 GAPI_OCV_KERNEL(GOCVGstFrameCopyToNV12, GGstFrameCopyToNV12)
 {
     static void run(const cv::MediaFrame& in, cv::Mat& y, cv::Mat& uv)
@@ -156,21 +167,50 @@ GAPI_OCV_KERNEL(GOCVGstFrameCopyToNV12, GGstFrameCopyToNV12)
     }
 };
 
+GAPI_OCV_KERNEL(GOCVGstFrameCopyToGRAY8, GGstFrameCopyToGRAY8)
+{
+    static void run(const cv::MediaFrame& in, cv::Mat& y)
+    {
+        auto view = in.access(cv::MediaFrame::Access::R);
+        cv::Mat ly(y.size(), y.type(), view.ptr[0], view.stride[0]);
+        ly.copyTo(y);
+    }
+};
+
 TEST_P(GStreamerSourceTest, GFrameTest)
 {
     std::string pipeline;
     cv::Size expectedFrameSize;
     std::size_t streamLength { };
+    bool isNV12 = false;
     std::tie(pipeline, expectedFrameSize, streamLength) = GetParam();
 
+    //Check if pipline string contains NV12 sub-string
+    if (pipeline.find("NV12") != std::string::npos) {
+        isNV12 = true;
+    }
+
     // Graph declaration:
     cv::GFrame in;
     cv::GMat copiedY, copiedUV;
-    std::tie(copiedY, copiedUV) = GGstFrameCopyToNV12::on(in);
-
-    cv::GComputation c(cv::GIn(in), cv::GOut(copiedY, copiedUV));
+    if (isNV12) {
+        std::tie(copiedY, copiedUV) = GGstFrameCopyToNV12::on(in);
+    }
+    else {
+        copiedY = GGstFrameCopyToGRAY8::on(in);
+    }
+
+    cv::GComputation c(cv::GIn(in), isNV12 ? cv::GOut(copiedY, copiedUV) : cv::GOut(copiedY));
 
     // Graph compilation for streaming mode:
-    auto ccomp = c.compileStreaming(cv::compile_args(cv::gapi::kernels<GOCVGstFrameCopyToNV12>()));
+    cv::GStreamingCompiled ccomp;
+    if (isNV12) {
+        ccomp = c.compileStreaming(cv::compile_args(cv::gapi::kernels<GOCVGstFrameCopyToNV12>()));
+    } else {
+        ccomp = c.compileStreaming(cv::compile_args(cv::gapi::kernels<GOCVGstFrameCopyToGRAY8>()));
+    }
 
     EXPECT_TRUE(ccomp);
     EXPECT_FALSE(ccomp.running());
@@ -186,29 +226,41 @@ TEST_P(GStreamerSourceTest, GFrameTest)
     // Streaming - pulling of frames until the end:
     cv::Mat y_mat, uv_mat;
-    EXPECT_TRUE(ccomp.pull(cv::gout(y_mat, uv_mat)));
+    EXPECT_TRUE(isNV12 ? ccomp.pull(cv::gout(y_mat, uv_mat)) : ccomp.pull(cv::gout(y_mat)));
     EXPECT_TRUE(!y_mat.empty());
-    EXPECT_TRUE(!uv_mat.empty());
+    if (isNV12) {
+        EXPECT_TRUE(!uv_mat.empty());
+    }
 
     cv::Size expectedYSize = expectedFrameSize;
     cv::Size expectedUVSize = expectedFrameSize / 2;
 
     EXPECT_EQ(expectedYSize, y_mat.size());
-    EXPECT_EQ(expectedUVSize, uv_mat.size());
+    if (isNV12) {
+        EXPECT_EQ(expectedUVSize, uv_mat.size());
+    }
     EXPECT_EQ(CV_8UC1, y_mat.type());
-    EXPECT_EQ(CV_8UC2, uv_mat.type());
+    if (isNV12) {
+        EXPECT_EQ(CV_8UC2, uv_mat.type());
+    }
 
     std::size_t framesCount = 1UL;
-    while (ccomp.pull(cv::gout(y_mat, uv_mat))) {
+    while (isNV12 ? ccomp.pull(cv::gout(y_mat, uv_mat)) : ccomp.pull(cv::gout(y_mat))) {
         EXPECT_TRUE(!y_mat.empty());
-        EXPECT_TRUE(!uv_mat.empty());
+        if (isNV12) {
+            EXPECT_TRUE(!uv_mat.empty());
+        }
         EXPECT_EQ(expectedYSize, y_mat.size());
-        EXPECT_EQ(expectedUVSize, uv_mat.size());
+        if (isNV12) {
+            EXPECT_EQ(expectedUVSize, uv_mat.size());
+        }
         EXPECT_EQ(CV_8UC1, y_mat.type());
-        EXPECT_EQ(CV_8UC2, uv_mat.type());
+        if (isNV12) {
+            EXPECT_EQ(CV_8UC2, uv_mat.type());
+        }
         framesCount++;
     }
@@ -221,36 +273,56 @@ TEST_P(GStreamerSourceTest, GFrameTest)
     EXPECT_EQ(streamLength, framesCount);
 }
 
 // FIXME: Need to launch with sudo. May be infrastructure problems.
 // TODO: It is needed to add tests for streaming from native KMB camera: kmbcamsrc
 //       GStreamer element.
 INSTANTIATE_TEST_CASE_P(CameraEmulatingPipeline, GStreamerSourceTest,
                         Combine(Values("videotestsrc is-live=true pattern=colors num-buffers=10 ! "
                                        "videorate ! videoscale ! "
-                                       "video/x-raw,width=1920,height=1080,framerate=3/1 ! "
-                                       "appsink"),
+                                       "video/x-raw,format=NV12,width=1920,height=1080,framerate=3/1 ! "
+                                       "appsink",
+                                       "videotestsrc is-live=true pattern=colors num-buffers=10 ! "
+                                       "videorate ! videoscale ! "
+                                       "video/x-raw,format=GRAY8,width=1920,height=1080,framerate=3/1 ! "
+                                       "appsink"),
                                 Values(cv::Size(1920, 1080)),
                                 Values(10UL)));
 
 INSTANTIATE_TEST_CASE_P(FileEmulatingPipeline, GStreamerSourceTest,
                         Combine(Values("videotestsrc pattern=colors num-buffers=10 ! "
                                        "videorate ! videoscale ! "
-                                       "video/x-raw,width=640,height=420,framerate=3/1 ! "
-                                       "appsink"),
+                                       "video/x-raw,format=NV12,width=640,height=420,framerate=3/1 ! "
+                                       "appsink",
+                                       "videotestsrc pattern=colors num-buffers=10 ! "
+                                       "videorate ! videoscale ! "
+                                       "video/x-raw,format=GRAY8,width=640,height=420,framerate=3/1 ! "
+                                       "appsink"),
                                 Values(cv::Size(640, 420)),
                                 Values(10UL)));
 
 INSTANTIATE_TEST_CASE_P(MultipleLiveSources, GStreamerSourceTest,
                         Combine(Values("videotestsrc is-live=true pattern=colors num-buffers=10 ! "
-                                       "videoscale ! video/x-raw,width=1280,height=720 ! appsink "
+                                       "videoscale ! video/x-raw,format=NV12,width=1280,height=720 ! appsink "
                                        "videotestsrc is-live=true pattern=colors num-buffers=10 ! "
-                                       "fakesink"),
+                                       "fakesink",
+                                       "videotestsrc is-live=true pattern=colors num-buffers=10 ! "
+                                       "videoscale ! video/x-raw,format=GRAY8,width=1280,height=720 ! appsink "
+                                       "videotestsrc is-live=true pattern=colors num-buffers=10 ! "
+                                       "fakesink"),
                                 Values(cv::Size(1280, 720)),
                                 Values(10UL)));
 
 INSTANTIATE_TEST_CASE_P(MultipleNotLiveSources, GStreamerSourceTest,
                         Combine(Values("videotestsrc pattern=colors num-buffers=10 ! "
-                                       "videoscale ! video/x-raw,width=1280,height=720 ! appsink "
+                                       "videoscale ! video/x-raw,format=NV12,width=1280,height=720 ! appsink "
                                        "videotestsrc pattern=colors num-buffers=10 ! "
-                                       "fakesink"),
+                                       "fakesink",
+                                       "videotestsrc pattern=colors num-buffers=10 ! "
+                                       "videoscale ! video/x-raw,format=GRAY8,width=1280,height=720 ! appsink "
+                                       "videotestsrc pattern=colors num-buffers=10 ! "
+                                       "fakesink"),
                                 Values(cv::Size(1280, 720)),
@@ -308,11 +380,11 @@ TEST(GStreamerMultiSourceSmokeTest, Test)
     EXPECT_FALSE(ccomp.running());
 }
 
-struct GStreamerMultiSourceTest :
+struct GStreamerMultiSourceTestNV12 :
     public TestWithParam<std::tuple<cv::GComputation, cv::gapi::wip::GStreamerSource::OutputType>>
 { };
 
-TEST_P(GStreamerMultiSourceTest, ImageDataTest)
+TEST_P(GStreamerMultiSourceTestNV12, ImageDataTest)
 {
     std::string pathToLeftIm = findDataFile("cv/stereomatching/datasets/tsukuba/im6.png");
     std::string pathToRightIm = findDataFile("cv/stereomatching/datasets/tsukuba/im2.png");
@@ -377,7 +449,96 @@ TEST_P(GStreamerMultiSourceTest, ImageDataTest)
     EXPECT_FALSE(compiled.running());
 }
 
-INSTANTIATE_TEST_CASE_P(GStreamerMultiSourceViaGMatsTest, GStreamerMultiSourceTest,
+INSTANTIATE_TEST_CASE_P(GStreamerMultiSourceViaGMatsTest, GStreamerMultiSourceTestNV12,
                         Combine(Values(cv::GComputation([]()
                                 {
                                     cv::GMat in1, in2;
                                     return cv::GComputation(cv::GIn(in1, in2),
                                                             cv::GOut(cv::gapi::copy(in1),
                                                                      cv::gapi::copy(in2)));
                                 })),
                                 Values(cv::gapi::wip::GStreamerSource::OutputType::MAT)));
 
+INSTANTIATE_TEST_CASE_P(GStreamerMultiSourceViaGFramesTest, GStreamerMultiSourceTestNV12,
+                        Combine(Values(cv::GComputation([]()
+                                {
+                                    cv::GFrame in1, in2;
+                                    return cv::GComputation(cv::GIn(in1, in2),
+                                                            cv::GOut(cv::gapi::streaming::BGR(in1),
+                                                                     cv::gapi::streaming::BGR(in2)));
+                                })),
+                                Values(cv::gapi::wip::GStreamerSource::OutputType::FRAME)));
+
+struct GStreamerMultiSourceTestGRAY8 :
+    public TestWithParam<std::tuple<cv::GComputation, cv::gapi::wip::GStreamerSource::OutputType>>
+{ };
+
+TEST_P(GStreamerMultiSourceTestGRAY8, ImageDataTest)
+{
+    std::string pathToLeftIm = findDataFile("cv/stereomatching/datasets/tsukuba/im6.png");
+    std::string pathToRightIm = findDataFile("cv/stereomatching/datasets/tsukuba/im2.png");
+
+    std::string pipelineToReadImage("filesrc location=LOC ! pngdec ! videoconvert ! "
+                                    "videoscale ! video/x-raw,format=GRAY8 ! appsink");
+
+    cv::gapi::wip::GStreamerSource leftImageProvider(
+        std::regex_replace(pipelineToReadImage, std::regex("LOC"), pathToLeftIm));
+    cv::gapi::wip::GStreamerSource rightImageProvider(
+        std::regex_replace(pipelineToReadImage, std::regex("LOC"), pathToRightIm));
+
+    cv::gapi::wip::Data leftImData, rightImData;
+    leftImageProvider.pull(leftImData);
+    rightImageProvider.pull(rightImData);
+    cv::Mat leftRefMat = cv::util::get<cv::Mat>(leftImData);
+    cv::Mat rightRefMat = cv::util::get<cv::Mat>(rightImData);
+
+    // Retrieve test parameters:
+    std::tuple<cv::GComputation, cv::gapi::wip::GStreamerSource::OutputType> params = GetParam();
+    cv::GComputation extractImage = std::move(std::get<0>(params));
+    cv::gapi::wip::GStreamerSource::OutputType outputType = std::get<1>(params);
+
+    // Graph compilation for streaming mode:
+    auto compiled =
+        extractImage.compileStreaming();
+
+    EXPECT_TRUE(compiled);
+    EXPECT_FALSE(compiled.running());
+
+    cv::gapi::wip::GStreamerPipeline
+        pipeline(std::string("multifilesrc location=" + pathToLeftIm + " index=0 loop=true ! "
+                             "pngdec ! videoconvert ! videoscale ! video/x-raw,format=GRAY8 ! "
+                             "appsink name=sink1 ") +
+                 std::string("multifilesrc location=" + pathToRightIm + " index=0 loop=true ! "
+                             "pngdec ! videoconvert ! videoscale ! video/x-raw,format=GRAY8 ! "
+                             "appsink name=sink2"));
+
+    // GStreamer streaming sources configuration:
+    auto src1 = pipeline.getStreamingSource("sink1", outputType);
+    auto src2 = pipeline.getStreamingSource("sink2", outputType);
+
+    compiled.setSource(cv::gin(src1, src2));
+
+    // Start of streaming:
+    compiled.start();
+    EXPECT_TRUE(compiled.running());
+
+    // Streaming - pulling of frames:
+    cv::Mat in_mat1, in_mat2;
+    std::size_t counter { }, limit { 10 };
+    while(compiled.pull(cv::gout(in_mat1, in_mat2)) && (counter < limit)) {
+        EXPECT_EQ(0, cv::norm(in_mat1, leftRefMat, cv::NORM_INF));
+        EXPECT_EQ(0, cv::norm(in_mat2, rightRefMat, cv::NORM_INF));
+        ++counter;
+    }
+
+    compiled.stop();
+    EXPECT_FALSE(compiled.running());
+}
+
+INSTANTIATE_TEST_CASE_P(GStreamerMultiSourceViaGMatsTest, GStreamerMultiSourceTestGRAY8,
+                        Combine(Values(cv::GComputation([]()
+                                {
+                                    cv::GMat in1, in2;
@@ -387,7 +548,7 @@ INSTANTIATE_TEST_CASE_P(GStreamerMultiSourceViaGMatsTest, GStreamerMultiSourceTe
                                 })),
                                 Values(cv::gapi::wip::GStreamerSource::OutputType::MAT)));
 
-INSTANTIATE_TEST_CASE_P(GStreamerMultiSourceViaGFramesTest, GStreamerMultiSourceTest,
+INSTANTIATE_TEST_CASE_P(GStreamerMultiSourceViaGFramesTest, GStreamerMultiSourceTestGRAY8,
                         Combine(Values(cv::GComputation([]()
                                 {
                                     cv::GFrame in1, in2;
@@ -396,6 +557,7 @@ INSTANTIATE_TEST_CASE_P(GStreamerMultiSourceViaGFramesTest, GStreamerMultiSource
                                                             cv::gapi::streaming::BGR(in2)));
                                 })),
                                 Values(cv::gapi::wip::GStreamerSource::OutputType::FRAME)));
 
 } // namespace opencv_test
 
 #endif // HAVE_GSTREAMER
