diff --git a/modules/videoio/include/opencv2/videoio.hpp b/modules/videoio/include/opencv2/videoio.hpp index bbe392266f..a7fbe36544 100644 --- a/modules/videoio/include/opencv2/videoio.hpp +++ b/modules/videoio/include/opencv2/videoio.hpp @@ -204,6 +204,7 @@ enum VideoCaptureProperties { CAP_PROP_LRF_HAS_KEY_FRAME = 67, //!< FFmpeg back-end only - Indicates whether the Last Raw Frame (LRF), output from VideoCapture::read() when VideoCapture is initialized with VideoCapture::open(CAP_FFMPEG, {CAP_PROP_FORMAT, -1}) or VideoCapture::set(CAP_PROP_FORMAT,-1) is called before the first call to VideoCapture::read(), contains encoded data for a key frame. CAP_PROP_CODEC_EXTRADATA_INDEX = 68, //!< Positive index indicates that returning extra data is supported by the video back end. This can be retrieved as cap.retrieve(data, <returned index>). E.g. When reading from a h264 encoded RTSP stream, the FFmpeg backend could return the SPS and/or PPS if available (if sent in reply to a DESCRIBE request), from calls to cap.retrieve(data, <returned index>). CAP_PROP_FRAME_TYPE = 69, //!< (read-only) FFmpeg back-end only - Frame type ascii code (73 = 'I', 80 = 'P', 66 = 'B' or 63 = '?' if unknown) of the most recently read frame. + CAP_PROP_N_THREADS = 70, //!< (**open-only**) Set the maximum number of threads to use. Use 0 to use as many threads as CPU cores (applicable for FFmpeg back-end only). #ifndef CV_DOXYGEN CV__CAP_PROP_LATEST #endif diff --git a/modules/videoio/src/cap_ffmpeg_impl.hpp b/modules/videoio/src/cap_ffmpeg_impl.hpp index caeae31e1f..c25ab0c40a 100644 --- a/modules/videoio/src/cap_ffmpeg_impl.hpp +++ b/modules/videoio/src/cap_ffmpeg_impl.hpp @@ -987,7 +987,8 @@ inline void fill_codec_context(AVCodecContext * enc, AVDictionary * dict) //#ifdef FF_API_THREAD_INIT // avcodec_thread_init(enc, get_number_of_cpus()); //#else - enc->thread_count = get_number_of_cpus(); + const int nCpus = get_number_of_cpus(); + enc->thread_count = enc->thread_count ? 
enc->thread_count: nCpus; //#endif AVDictionaryEntry* avdiscard_entry = av_dict_get(dict, "avdiscard", NULL, 0); @@ -1024,6 +1025,7 @@ bool CvCapture_FFMPEG::open(const char* _filename, const VideoCaptureParameters& unsigned i; bool valid = false; + int nThreads = 0; close(); @@ -1081,6 +1083,10 @@ bool CvCapture_FFMPEG::open(const char* _filename, const VideoCaptureParameters& read_timeout = params.get<int>(CAP_PROP_READ_TIMEOUT_MSEC); } #endif + if (params.has(CAP_PROP_N_THREADS)) + { + nThreads = params.get<int>(CAP_PROP_N_THREADS); + } if (params.warnUnusedParameters()) { CV_LOG_ERROR(NULL, "VIDEOIO/FFMPEG: unsupported parameters in .open(), see logger INFO channel for details. Bailout"); @@ -1248,6 +1254,7 @@ bool CvCapture_FFMPEG::open(const char* _filename, const VideoCaptureParameters& #endif continue; } + context->thread_count = nThreads; fill_codec_context(context, dict); #ifdef CV_FFMPEG_CODECPAR avcodec_parameters_to_context(context, par); @@ -1444,6 +1451,7 @@ bool CvCapture_FFMPEG::grabFrame() #if USE_AV_INTERRUPT_CALLBACK // activate interrupt callback + interrupt_metadata.timeout = 0; get_monotonic_time(&interrupt_metadata.value); interrupt_metadata.timeout_after_ms = read_timeout; #endif @@ -1774,6 +1782,8 @@ double CvCapture_FFMPEG::getProperty( int property_id ) const case CAP_PROP_STREAM_OPEN_TIME_USEC: //ic->start_time_realtime is in microseconds return ((double)ic->start_time_realtime); + case CAP_PROP_N_THREADS: + return static_cast<double>(context->thread_count); default: break; } diff --git a/modules/videoio/test/test_ffmpeg.cpp b/modules/videoio/test/test_ffmpeg.cpp index 6f0f2f28b4..3578637cdb 100644 --- a/modules/videoio/test/test_ffmpeg.cpp +++ b/modules/videoio/test/test_ffmpeg.cpp @@ -95,6 +95,54 @@ TEST(videoio_ffmpeg, image) //========================================================================== +#define THREADS testing::ValuesIn({ 0,1,2,2000 }) +#define RAW_READ testing::ValuesIn({true, false}) +typedef tuple<string, int, bool> videoio_read_params_t; +typedef 
testing::TestWithParam< testing::tuple<videoio_read_params_t, int, bool> > videoio_read; + +TEST_P(videoio_read, threads) +{ + const VideoCaptureAPIs api = CAP_FFMPEG; + if (!videoio_registry::hasBackend(api)) + throw SkipTestException("Backend was not found"); + const string fileName = get<0>(get<0>(GetParam())); + const int nFrames = get<1>(get<0>(GetParam())); + const bool fixedThreadCount = get<2>(get<0>(GetParam())); + const int nThreads = get<1>(GetParam()); + const bool rawRead = get<2>(GetParam()); + VideoCapture cap(findDataFile(fileName), api, { CAP_PROP_N_THREADS, nThreads }); + if (!cap.isOpened()) + throw SkipTestException("Video stream is not supported"); + if (nThreads == 0 || fixedThreadCount) + EXPECT_EQ(cap.get(CAP_PROP_N_THREADS), VideoCapture(findDataFile(fileName), api).get(CAP_PROP_N_THREADS)); + else + EXPECT_EQ(cap.get(CAP_PROP_N_THREADS), nThreads); + if (rawRead && !cap.set(CAP_PROP_FORMAT, -1)) // turn off video decoder (extract stream) + throw SkipTestException("Fetching of RAW video streams is not supported"); + Mat frame; + int n = 0; + while (cap.read(frame)) { + ASSERT_FALSE(frame.empty()); + n++; + } + ASSERT_EQ(n, nFrames); +} + +const videoio_read_params_t videoio_read_params[] = +{ + videoio_read_params_t("video/big_buck_bunny.h264", 125, false), + //videoio_read_params_t("video/big_buck_bunny.h265", 125, false), + videoio_read_params_t("video/big_buck_bunny.mjpg.avi", 125, true), + //videoio_read_params_t("video/big_buck_bunny.mov", 125, false), + //videoio_read_params_t("video/big_buck_bunny.mp4", 125, false), + //videoio_read_params_t("video/big_buck_bunny.mpg", 125, false), + //videoio_read_params_t("video/big_buck_bunny.wmv", 125, true), +}; + +INSTANTIATE_TEST_CASE_P(/**/, videoio_read, testing::Combine(testing::ValuesIn(videoio_read_params), THREADS, RAW_READ)); + +//========================================================================== + typedef tuple<VideoCaptureAPIs, string, string, string, string, string> videoio_container_params_t; typedef testing::TestWithParam< videoio_container_params_t > 
videoio_container;