Merge pull request #22919 from asmorkalov:as/gstreamer_read_timeout

Addresses https://github.com/opencv/opencv/issues/22868.
Uses the same default timeouts as the FFmpeg backend (30 seconds for both open and read).
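
For illustration, a minimal sketch of how the new read timeout can be used from application code. It mirrors the new test below; the `videotestsrc ! appsink` pipeline is just an example source.

```cpp
#include <opencv2/videoio.hpp>
#include <iostream>

int main()
{
    cv::VideoCapture cap("videotestsrc ! appsink", cv::CAP_GSTREAMER);
    if (!cap.isOpened())
    {
        std::cerr << "failed to open the pipeline" << std::endl;
        return 1;
    }
    // Lower the read timeout from the 30 s default to 2 s (value is in milliseconds).
    // On GStreamer older than 1.10 the backend reports the property as unsupported.
    if (!cap.set(cv::CAP_PROP_READ_TIMEOUT_MSEC, 2000))
        std::cerr << "read timeout is not supported by this GStreamer version" << std::endl;

    cv::Mat frame;
    // grabFrame() now gives up once no sample arrives within the timeout,
    // so a stalled source shows up as an ordinary read failure.
    if (!cap.read(frame))
        std::cerr << "read failed or timed out" << std::endl;
    return 0;
}
```

CAP_PROP_OPEN_TIMEOUT_MSEC works the same way and bounds the gst_element_get_state() waits that previously used GST_CLOCK_TIME_NONE.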

### Pull Request Readiness Checklist

See details at https://github.com/opencv/opencv/wiki/How_to_contribute#making-a-good-pull-request

- [x] I agree to contribute to the project under Apache 2 License.
- [x] To the best of my knowledge, the proposed patch is not based on code under GPL or another license that is incompatible with OpenCV
- [x] The PR is proposed to the proper branch
- [x] There is a reference to the original bug report and related work
- [ ] There is accuracy test, performance test and test data in opencv_extra repository, if applicable
      Patch to opencv_extra has the same branch name.
- [ ] The feature is well documented and sample code can be built with the project CMake


```
force_builders=Custom
build_image:Custom=gstreamer:16.04
buildworker:Custom=linux-1
```
Commit 3f22f4727c (parent 0153e796cc), authored by Alexander Smorkalov, committed via GitHub.
Files changed:

1. modules/videoio/include/opencv2/videoio.hpp (4 changed lines)
2. modules/videoio/src/cap_gstreamer.cpp (58 changed lines)
3. modules/videoio/test/test_gstreamer.cpp (27 changed lines)

modules/videoio/include/opencv2/videoio.hpp:

```diff
@@ -187,8 +187,8 @@ enum VideoCaptureProperties {
     CAP_PROP_HW_ACCELERATION=50, //!< (**open-only**) Hardware acceleration type (see #VideoAccelerationType). Setting supported only via `params` parameter in cv::VideoCapture constructor / .open() method. Default value is backend-specific.
     CAP_PROP_HW_DEVICE =51, //!< (**open-only**) Hardware device index (select GPU if multiple available). Device enumeration is acceleration type specific.
     CAP_PROP_HW_ACCELERATION_USE_OPENCL=52, //!< (**open-only**) If non-zero, create new OpenCL context and bind it to current thread. The OpenCL context created with Video Acceleration context attached it (if not attached yet) for optimized GPU data copy between HW accelerated decoder and cv::UMat.
-    CAP_PROP_OPEN_TIMEOUT_MSEC=53, //!< (**open-only**) timeout in milliseconds for opening a video capture (applicable for FFmpeg back-end only)
-    CAP_PROP_READ_TIMEOUT_MSEC=54, //!< (**open-only**) timeout in milliseconds for reading from a video capture (applicable for FFmpeg back-end only)
+    CAP_PROP_OPEN_TIMEOUT_MSEC=53, //!< (**open-only**) timeout in milliseconds for opening a video capture (applicable for FFmpeg and GStreamer back-ends only)
+    CAP_PROP_READ_TIMEOUT_MSEC=54, //!< (**open-only**) timeout in milliseconds for reading from a video capture (applicable for FFmpeg and GStreamer back-ends only)
     CAP_PROP_STREAM_OPEN_TIME_USEC =55, //<! (read-only) time in microseconds since Jan 1 1970 when stream was opened. Applicable for FFmpeg backend only. Useful for RTSP and other live streams
     CAP_PROP_VIDEO_TOTAL_CHANNELS = 56, //!< (read-only) Number of video channels
     CAP_PROP_VIDEO_STREAM = 57, //!< (**open-only**) Specify video stream, 0-based index. Use -1 to disable video stream from file or IP cameras. Default value is 0.
```

modules/videoio/src/cap_gstreamer.cpp:

```diff
@@ -77,6 +77,8 @@
 #define COLOR_ELEM_NAME COLOR_ELEM
 #define CV_GST_FORMAT(format) (format)
+#define GSTREAMER_INTERRUPT_OPEN_DEFAULT_TIMEOUT_NS (30 * GST_SECOND)
+#define GSTREAMER_INTERRUPT_READ_DEFAULT_TIMEOUT_NS (30 * GST_SECOND)
 namespace cv {
@@ -328,6 +330,8 @@ private:
     gint width;
     gint height;
     double fps;
+    GstClockTime openTimeout; // measured in nanoseconds
+    GstClockTime readTimeout; // measured in nanoseconds
     bool isPosFramesSupported;
     bool isPosFramesEmulated;
     gint64 emulatedFrameNumber;
@@ -372,6 +376,8 @@ GStreamerCapture::GStreamerCapture() :
     videoStream(0),
     audioStream(-1),
     duration(-1), width(-1), height(-1), fps(-1),
+    openTimeout(GSTREAMER_INTERRUPT_OPEN_DEFAULT_TIMEOUT_NS),
+    readTimeout(GSTREAMER_INTERRUPT_READ_DEFAULT_TIMEOUT_NS),
     isPosFramesSupported(false),
     isPosFramesEmulated(false),
     emulatedFrameNumber(-1),
@@ -504,7 +510,11 @@ bool GStreamerCapture::grabFrame()
     if (gst_app_sink_is_eos(GST_APP_SINK(sink.get())))
         return false;
+#if FULL_GST_VERSION >= VERSION_NUM(1,10,0)
+    sample.attach(gst_app_sink_try_pull_sample(GST_APP_SINK(sink.get()), readTimeout));
+#else
     sample.attach(gst_app_sink_pull_sample(GST_APP_SINK(sink.get())));
+#endif
     if (!sample)
         return false;
@@ -675,7 +685,12 @@ bool GStreamerCapture::retrieveVideoFrame(int, OutputArray dst)
     // the data. The gst_video_frame_map will parse the meta for us, or default to
     // regular strides/offsets if no meta is present.
     GstVideoFrame frame = {};
+#if FULL_GST_VERSION >= VERSION_NUM(1,6,0)
     GstMapFlags flags = static_cast<GstMapFlags>(GST_MAP_READ | GST_VIDEO_FRAME_MAP_FLAG_NO_REF);
+#else
+    GstMapFlags flags = static_cast<GstMapFlags>(GST_MAP_READ);
+#endif
     if (!gst_video_frame_map(&frame, &info, buf, flags))
     {
         CV_LOG_ERROR(NULL, "GStreamer: Failed to map GStreamer buffer to system memory");
@@ -895,7 +910,7 @@ void GStreamerCapture::startPipeline()
     if (status == GST_STATE_CHANGE_ASYNC)
     {
         // wait for status update
-        status = gst_element_get_state(pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
+        status = gst_element_get_state(pipeline, NULL, NULL, openTimeout);
     }
     if (status == GST_STATE_CHANGE_FAILURE)
     {
@@ -1353,7 +1368,7 @@ bool GStreamerCapture::open(const String &filename_, const cv::VideoCaptureParam
     if (status == GST_STATE_CHANGE_ASYNC)
     {
         // wait for status update
-        status = gst_element_get_state(pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);
+        status = gst_element_get_state(pipeline, NULL, NULL, openTimeout);
     }
     if (status == GST_STATE_CHANGE_FAILURE)
     {
@@ -1565,6 +1580,14 @@ double GStreamerCapture::getProperty(int propId) const
         return outputAudioFormat;
     case CAP_PROP_AUDIO_BASE_INDEX:
         return audioBaseIndex;
+    case CAP_PROP_OPEN_TIMEOUT_MSEC:
+        return GST_TIME_AS_MSECONDS(openTimeout);
+    case CAP_PROP_READ_TIMEOUT_MSEC:
+#if FULL_GST_VERSION >= VERSION_NUM(1,10,0)
+        return GST_TIME_AS_MSECONDS(readTimeout);
+#else
+        return 0;
+#endif
     default:
         CV_WARN("unhandled property: " << propId);
         break;
@@ -1719,6 +1742,37 @@ bool GStreamerCapture::setProperty(int propId, double value)
         gst_app_sink_set_max_buffers(GST_APP_SINK(sink.get()), (guint) value);
         return true;
     }
+    case CAP_PROP_OPEN_TIMEOUT_MSEC:
+    {
+        if(value > 0)
+        {
+            openTimeout = GstClockTime(value * GST_MSECOND); // convert from ms to ns
+            return true;
+        }
+        else
+        {
+            CV_WARN("GStreamer open timeout should be positive");
+            return false;
+        }
+    }
+    case CAP_PROP_READ_TIMEOUT_MSEC:
+    {
+#if FULL_GST_VERSION >= VERSION_NUM(1,10,0)
+        if(value > 0)
+        {
+            readTimeout = GstClockTime(value * GST_MSECOND); // convert from ms to ns
+            return true;
+        }
+        else
+        {
+            CV_WARN("GStreamer read timeout should be positive");
+            return false;
+        }
+#else
+        CV_WARN("GStreamer before 1.10 does not support read timeout");
+        return false;
+#endif
+    }
     default:
         CV_WARN("GStreamer: unhandled property");
     }
```
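
The properties are exposed in milliseconds while GStreamer clock times are nanoseconds, so set() multiplies by GST_MSECOND and get() converts back with GST_TIME_AS_MSECONDS; the 30 * GST_SECOND defaults therefore read back as 30000 ms, the same value FFmpeg uses. A standalone sanity check of that round trip (a sketch only, not part of the patch; `check.cpp` is a placeholder file name):

```cpp
// Sketch: verify the ms <-> ns conversion used by the patch.
// Build e.g. with: g++ check.cpp $(pkg-config --cflags --libs gstreamer-1.0)
#include <gst/gst.h>
#include <cassert>

int main()
{
    double value_ms = 5678.0;                                // value passed to set()
    GstClockTime t = GstClockTime(value_ms * GST_MSECOND);   // stored internally in nanoseconds
    assert(GST_TIME_AS_MSECONDS(t) == 5678u);                // value reported back by get()
    assert(GST_TIME_AS_MSECONDS(30 * GST_SECOND) == 30000u); // default equals FFmpeg's 30000 ms
    return 0;
}
```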

modules/videoio/test/test_gstreamer.cpp:

```diff
@@ -151,4 +151,31 @@ TEST(videoio_gstreamer, gray16_writing)
     EXPECT_EQ(0, remove(temp_file.c_str()));
 }
+TEST(videoio_gstreamer, timeout_property)
+{
+    if (!videoio_registry::hasBackend(CAP_GSTREAMER))
+        throw SkipTestException("GStreamer backend was not found");
+    VideoCapture cap;
+    cap.open("videotestsrc ! appsink", CAP_GSTREAMER);
+    ASSERT_TRUE(cap.isOpened());
+    const double default_timeout = 30000; // 30 seconds
+    const double open_timeout = 5678; // 5.678 seconds
+    const double read_timeout = 1234; // 1.234 seconds
+    EXPECT_NEAR(default_timeout, cap.get(CAP_PROP_OPEN_TIMEOUT_MSEC), 1e-3);
+    const double current_read_timeout = cap.get(CAP_PROP_READ_TIMEOUT_MSEC);
+    const bool read_timeout_supported = current_read_timeout > 0.0;
+    if (read_timeout_supported)
+    {
+        EXPECT_NEAR(default_timeout, current_read_timeout, 1e-3);
+    }
+    cap.set(CAP_PROP_OPEN_TIMEOUT_MSEC, open_timeout);
+    EXPECT_NEAR(open_timeout, cap.get(CAP_PROP_OPEN_TIMEOUT_MSEC), 1e-3);
+    if (read_timeout_supported)
+    {
+        cap.set(CAP_PROP_READ_TIMEOUT_MSEC, read_timeout);
+        EXPECT_NEAR(read_timeout, cap.get(CAP_PROP_READ_TIMEOUT_MSEC), 1e-3);
+    }
+}
 }} // namespace
```
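
If needed, the new test can be run in isolation with the usual gtest filter, e.g. `opencv_test_videoio --gtest_filter=videoio_gstreamer.timeout_property` (assuming a build with the GStreamer backend enabled).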
