Merge pull request #19460 from mikhail-nikolskiy:videoio-hw

videoio: HW decode/encode in FFMPEG backend; new properties with support in FFMPEG/GST/MSMF

* HW acceleration in FFMPEG backend

* fixes on Windows, remove D3D9

* improve va test

* Copyright

* check LIBAVUTIL_BUILD >= AV_VERSION_INT(55, 78, 100) // FFMPEG 3.4+

* CAP_MSMF test on .mp4

* .mp4 in test

* .avi for GStreamer test

* revert changes around seek()

* cv_writer_open_with_params

* params.warnUnusedParameters

* VideoCaptureParameters in GStreamer

* open_with_params

* params->getUnused

* Reduce PSNR threshold 33->32 (other tests use 30)

* require FFMPEG 4.0+; PSNR 30 as in other tests

* GStreamer AVI-demux plugin not installed in Ubuntu test environment?

* fix build on very old ffmpeg

* fix build on very old ffmpeg

* fix build issues

* fix build issues (static_cast)

* FFMPEG built on Windows without H264 encoder?

* fix for write_nothing test on VAAPI

* fix warnings

* fix cv_writer_get_prop in plugins

* use avcodec_get_hw_frames_parameters; more robust fallback to SW codecs

* internal function hw_check_device() for device check/logging

* two separate tests for HW read and write

* image size 640x480 in encode test

* WITH_VA=ON (only .h headers used in OpenCV, no linkage dependency)

* exception on VP9 SW encoder?

* rebase master; refine info message

* videoio: fix FFmpeg standalone plugin build

* videoio(ffmpeg): eliminate MSVC build warnings

* address review comments

* videoio(hw): update videocapture_acceleration.read test

- remove parallel decoding by SW code path
- check PSNR against the original generated image

* videoio: minor fixes

* videoio(test): disable unsupported MSMF cases (SW and HW)

* videoio(test): update PSNR thresholds for HW acceleration read

* videoio(test): update debug messages

* "hw_acceleration" whitelisting parameter

* little optimization in test

* D3D11VA supports decoders, doesn't support encoders

* videoio(test): adjust PSNR threshold in write_read_position tests

* videoio(ffmpeg): fix rejecting on acceleration device name mismatch

* videoio(ffmpeg): fix compilation USE_AV_HW_CODECS=0, add more debug logging

* videoio: rework VideoAccelerationType behavior

- enum is not a bitset
- default value is backend specific
- only '_NONE' and '_ANY' may fallback on software processing
- specific H/W acceleration doesn't fallback on software processing. It fails if there is no support for specified H/W acceleration.

* videoio(test): fix for current FFmpeg wrapper

Co-authored-by: Alexander Alekhin <alexander.a.alekhin@gmail.com>
Author: Mikhail Nikolskii (committed by GitHub)
Parent: f70e80a6ba
Commit: 7bcb51eded
Changed files (13):
  1. modules/videoio/include/opencv2/videoio.hpp (35 changed lines)
  2. modules/videoio/src/backend_plugin.cpp (4 changed lines)
  3. modules/videoio/src/backend_static.cpp (7 changed lines)
  4. modules/videoio/src/cap_ffmpeg.cpp (59 changed lines)
  5. modules/videoio/src/cap_ffmpeg_hw.hpp (555 changed lines)
  6. modules/videoio/src/cap_ffmpeg_impl.hpp (543 changed lines)
  7. modules/videoio/src/cap_gstreamer.cpp (254 changed lines)
  8. modules/videoio/src/cap_interface.hpp (34 changed lines)
  9. modules/videoio/src/cap_msmf.cpp (368 changed lines)
  10. modules/videoio/src/precomp.hpp (1 changed line)
  11. modules/videoio/test/test_precomp.hpp (26 changed lines)
  12. modules/videoio/test/test_video_io.cpp (332 changed lines)
  13. samples/tapi/video_acceleration.cpp (211 changed lines)

@@ -78,7 +78,7 @@ namespace cv
 //! @{
-/** @brief %VideoCapture API backends identifier.
+/** @brief cv::VideoCapture API backends identifier.
 Select preferred API for a capture object.
 To be used in the VideoCapture::VideoCapture() constructor or VideoCapture::open()
@@ -124,7 +124,7 @@ enum VideoCaptureAPIs {
     CAP_UEYE = 2500, //!< uEye Camera API
 };
-/** @brief %VideoCapture generic properties identifier.
+/** @brief cv::VideoCapture generic properties identifier.
 Reading / writing properties involves many layers. Some unexpected result might happens along this chain.
 Effective behaviour depends from device hardware, driver and API Backend.
@@ -182,12 +182,14 @@ enum VideoCaptureProperties {
     CAP_PROP_BITRATE         =47, //!< (read-only) Video bitrate in kbits/s
     CAP_PROP_ORIENTATION_META=48, //!< (read-only) Frame rotation defined by stream meta (applicable for FFmpeg back-end only)
     CAP_PROP_ORIENTATION_AUTO=49, //!< if true - rotates output frames of CvCapture considering video file's metadata (applicable for FFmpeg back-end only) (https://github.com/opencv/opencv/issues/15499)
+    CAP_PROP_HW_ACCELERATION =50, //!< (**open-only**) Hardware acceleration type (see #VideoAccelerationType). Setting supported only via `params` parameter in cv::VideoCapture constructor / .open() method. Default value is backend-specific.
+    CAP_PROP_HW_DEVICE       =51, //!< (**open-only**) Hardware device index (select GPU if multiple available)
 #ifndef CV_DOXYGEN
     CV__CAP_PROP_LATEST
 #endif
 };
-/** @brief %VideoWriter generic properties identifier.
+/** @brief cv::VideoWriter generic properties identifier.
 @sa VideoWriter::get(), VideoWriter::set()
 */
 enum VideoWriterProperties {
@@ -196,7 +198,12 @@ enum VideoWriterProperties {
     VIDEOWRITER_PROP_NSTRIPES = 3, //!< Number of stripes for parallel encoding. -1 for auto detection.
     VIDEOWRITER_PROP_IS_COLOR = 4, //!< If it is not zero, the encoder will expect and encode color frames, otherwise it
                                    //!< will work with grayscale frames.
-    VIDEOWRITER_PROP_DEPTH = 5 //!< Defaults to CV_8U.
+    VIDEOWRITER_PROP_DEPTH = 5, //!< Defaults to CV_8U.
+    VIDEOWRITER_PROP_HW_ACCELERATION = 6, //!< (**open-only**) Hardware acceleration type (see #VideoAccelerationType). Setting supported only via `params` parameter in VideoWriter constructor / .open() method. Default value is backend-specific.
+    VIDEOWRITER_PROP_HW_DEVICE = 7, //!< (**open-only**) Hardware device index (select GPU if multiple available)
+#ifndef CV_DOXYGEN
+    CV__VIDEOWRITER_PROP_LATEST
+#endif
 };
 //! @} videoio_flags_base
@@ -204,6 +211,26 @@ enum VideoWriterProperties {
 //! @addtogroup videoio_flags_others
 //! @{
+/** @brief Video Acceleration type
+ *
+ * Used as value in #CAP_PROP_HW_ACCELERATION and #VIDEOWRITER_PROP_HW_ACCELERATION
+ *
+ * @note In case of FFmpeg backend, it translated to enum AVHWDeviceType (https://github.com/FFmpeg/FFmpeg/blob/master/libavutil/hwcontext.h)
+ */
+enum VideoAccelerationType
+{
+    VIDEO_ACCELERATION_NONE  = 0,  //!< Do not require any specific H/W acceleration, prefer software processing.
+                                   //!< Reading of this value means that special H/W accelerated handling is not added or not detected by OpenCV.
+    VIDEO_ACCELERATION_ANY   = 1,  //!< Prefer to use H/W acceleration. If no one supported, then fallback to software processing.
+                                   //!< @note H/W acceleration may require special configuration of used environment.
+                                   //!< @note Results in encoding scenario may differ between software and hardware accelerated encoders.
+    VIDEO_ACCELERATION_D3D11 = 2,  //!< DirectX 11
+    VIDEO_ACCELERATION_VAAPI = 3,  //!< VAAPI
+    VIDEO_ACCELERATION_MFX   = 4,  //!< libmfx (Intel MediaSDK/oneVPL)
+};
 /** @name IEEE 1394 drivers
 @{
 */
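
A minimal usage sketch for the new open-only properties (file names, FOURCC and device index are illustrative; assumes an OpenCV build with this patch, and H.264 encoder availability depends on the local FFmpeg build):

    #include <opencv2/videoio.hpp>
    #include <iostream>

    int main()
    {
        using namespace cv;
        // VIDEO_ACCELERATION_ANY: prefer H/W decoding, fall back to software if none is usable.
        VideoCapture cap("input.mp4", CAP_FFMPEG,
                         { CAP_PROP_HW_ACCELERATION, VIDEO_ACCELERATION_ANY });
        if (!cap.isOpened())
            return 1;
        // The effective acceleration type can be read back after open().
        std::cout << "capture acceleration: " << cap.get(CAP_PROP_HW_ACCELERATION) << std::endl;

        // A specific type (here VAAPI) does not fall back to software: open() fails instead.
        VideoWriter writer("output.mp4", CAP_FFMPEG,
                           VideoWriter::fourcc('H', '2', '6', '4'), 30.0, Size(640, 480),
                           { VIDEOWRITER_PROP_HW_ACCELERATION, VIDEO_ACCELERATION_VAAPI,
                             VIDEOWRITER_PROP_HW_DEVICE, 0 });
        if (!writer.isOpened())
            return 2;
        Mat frame;
        while (cap.read(frame))
            writer.write(frame);
        return 0;
    }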

@@ -415,7 +415,7 @@ public:
     if (plugin_api->api_header.api_version >= 1 && plugin_api->v1.Capture_open_with_params)
     {
         std::vector<int> vint_params = params.getIntVector();
-        int* c_params = &vint_params[0];
+        int* c_params = vint_params.data();
         unsigned n_params = (unsigned)(vint_params.size() / 2);
         if (CV_ERROR_OK == plugin_api->v1.Capture_open_with_params(
@@ -547,7 +547,7 @@ public:
     }
     if (params.warnUnusedParameters())
     {
-        CV_LOG_ERROR(NULL, "VIDEOIO/FFMPEG: unsupported parameters in VideoWriter, see logger INFO channel for details");
+        CV_LOG_ERROR(NULL, "VIDEOIO: unsupported parameters in VideoWriter, see logger INFO channel for details");
         return Ptr<PluginWriter>();
     }
     if (CV_ERROR_OK == plugin_api->v0.Writer_open(filename.c_str(), fourcc, fps, sz.width, sz.height, isColor, &writer))

@@ -19,16 +19,17 @@ void applyParametersFallback(const Ptr<IVideoCapture>& cap, const VideoCapturePa
     {
         double value = params.get<double>(prop, -1);
         CV_LOG_INFO(NULL, "VIDEOIO: apply parameter: [" << prop << "]=" <<
-                cv::format("%g / %lld / 0x%16llx", value, (long long)value, (long long)value));
+                cv::format("%g / %lld / 0x%016llx", value, (long long)value, (long long)value));
         if (!cap->setProperty(prop, value))
         {
-            CV_Error_(cv::Error::StsNotImplemented, ("VIDEOIO: Failed to apply invalid or unsupported parameter: [%d]=%g / %lld / 0x%08llx", prop, value, (long long)value, (long long)value));
+            if (prop != CAP_PROP_HW_ACCELERATION && prop != CAP_PROP_HW_DEVICE) { // optional parameters
+                CV_Error_(cv::Error::StsNotImplemented, ("VIDEOIO: Failed to apply invalid or unsupported parameter: [%d]=%g / %lld / 0x%08llx", prop, value, (long long)value, (long long)value));
+            }
         }
     }
     // NB: there is no dedicated "commit" parameters event, implementations should commit after each property automatically
 }
 // Legacy API. Modern API with parameters is below
 class StaticBackend: public IBackend
 {
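
Because the fallback path above treats the two H/W properties as optional, a backend without dedicated support still opens successfully; a caller-side sketch (illustrative file name) that verifies what was actually applied instead of trusting the request:

    #include <opencv2/videoio.hpp>
    #include <string>

    // Returns true only if the opened backend reports an effective H/W acceleration type.
    static bool openedWithHwAcceleration(const std::string& path)
    {
        cv::VideoCapture cap(path, cv::CAP_ANY,
                             { cv::CAP_PROP_HW_ACCELERATION, cv::VIDEO_ACCELERATION_ANY });
        return cap.isOpened()
            && (int)cap.get(cv::CAP_PROP_HW_ACCELERATION) != cv::VIDEO_ACCELERATION_NONE;
    }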

@@ -165,7 +165,7 @@ class CvVideoWriter_FFMPEG_proxy CV_FINAL :
 {
 public:
     CvVideoWriter_FFMPEG_proxy() { ffmpegWriter = 0; }
-    CvVideoWriter_FFMPEG_proxy(const cv::String& filename, int fourcc, double fps, cv::Size frameSize, bool isColor) { ffmpegWriter = 0; open(filename, fourcc, fps, frameSize, isColor); }
+    CvVideoWriter_FFMPEG_proxy(const cv::String& filename, int fourcc, double fps, cv::Size frameSize, const VideoWriterParameters& params) { ffmpegWriter = 0; open(filename, fourcc, fps, frameSize, params); }
     virtual ~CvVideoWriter_FFMPEG_proxy() { close(); }
     int getCaptureDomain() const CV_OVERRIDE { return cv::CAP_FFMPEG; }
@@ -178,10 +178,10 @@ public:
         icvWriteFrame_FFMPEG_p(ffmpegWriter, (const uchar*)image.getMat().ptr(), (int)image.step(), image.cols(), image.rows(), image.channels(), 0);
     }
-    virtual bool open( const cv::String& filename, int fourcc, double fps, cv::Size frameSize, bool isColor )
+    virtual bool open( const cv::String& filename, int fourcc, double fps, cv::Size frameSize, const VideoWriterParameters& params )
     {
         close();
-        ffmpegWriter = icvCreateVideoWriter_FFMPEG_p( filename.c_str(), fourcc, fps, frameSize.width, frameSize.height, isColor );
+        ffmpegWriter = cvCreateVideoWriterWithParams_FFMPEG( filename.c_str(), fourcc, fps, frameSize.width, frameSize.height, params );
         return ffmpegWriter != 0;
     }
@@ -193,7 +193,12 @@ public:
         ffmpegWriter = 0;
     }
-    virtual double getProperty(int) const CV_OVERRIDE { return 0; }
+    virtual double getProperty(int propId) const CV_OVERRIDE {
+        if(!ffmpegWriter)
+            return 0;
+        return ffmpegWriter->getProperty(propId);
+    }
     virtual bool setProperty(int, double) CV_OVERRIDE { return false; }
     virtual bool isOpened() const CV_OVERRIDE { return ffmpegWriter != 0; }
@@ -207,8 +212,7 @@ cv::Ptr<cv::IVideoWriter> cvCreateVideoWriter_FFMPEG_proxy(const std::string& fi
                                                            double fps, const cv::Size& frameSize,
                                                            const VideoWriterParameters& params)
 {
-    const bool isColor = params.get(VIDEOWRITER_PROP_IS_COLOR, true);
-    cv::Ptr<CvVideoWriter_FFMPEG_proxy> writer = cv::makePtr<CvVideoWriter_FFMPEG_proxy>(filename, fourcc, fps, frameSize, isColor);
+    cv::Ptr<CvVideoWriter_FFMPEG_proxy> writer = cv::makePtr<CvVideoWriter_FFMPEG_proxy>(filename, fourcc, fps, frameSize, params);
     if (writer && writer->isOpened())
         return writer;
     return cv::Ptr<cv::IVideoWriter>();
@@ -233,7 +237,7 @@ cv::Ptr<cv::IVideoWriter> cvCreateVideoWriter_FFMPEG_proxy(const std::string& fi
 #define CAPTURE_API_VERSION 1
 #include "plugin_capture_api.hpp"
 #define WRITER_ABI_VERSION 1
-#define WRITER_API_VERSION 0
+#define WRITER_API_VERSION 1
 #include "plugin_writer_api.hpp"
 #endif
@@ -400,7 +404,7 @@ CvResult CV_API_CALL cv_capture_retrieve(CvPluginCapture handle, int stream_idx,
             Mat img;
             // TODO: avoid unnecessary copying
             if (instance->retrieveFrame(stream_idx, img))
-                return callback(stream_idx, img.data, img.step, img.cols, img.rows, img.channels(), userdata);
+                return callback(stream_idx, img.data, (int)img.step, img.cols, img.rows, img.channels(), userdata);
             return CV_ERROR_FAIL;
         }
         catch (const std::exception& e)
@@ -426,7 +430,7 @@ CvResult CV_API_CALL cv_capture_retrieve(CvPluginCapture handle, int stream_idx,
             Mat img;
             // TODO: avoid unnecessary copying
             if (instance->retrieveFrame(stream_idx, img))
-                return callback(stream_idx, img.data, img.step, img.cols, img.rows, img.type(), userdata);
+                return callback(stream_idx, img.data, (int)img.step, img.cols, img.rows, img.type(), userdata);
             return CV_ERROR_FAIL;
         }
         catch (const std::exception& e)
@@ -443,14 +447,17 @@ CvResult CV_API_CALL cv_capture_retrieve(CvPluginCapture handle, int stream_idx,
 #endif
 static
-CvResult CV_API_CALL cv_writer_open(const char* filename, int fourcc, double fps, int width, int height, int isColor,
-                                    CV_OUT CvPluginWriter* handle)
+CvResult CV_API_CALL cv_writer_open_with_params(
+        const char* filename, int fourcc, double fps, int width, int height,
+        int* params, unsigned n_params,
+        CV_OUT CvPluginWriter* handle)
 {
     Size sz(width, height);
     CvVideoWriter_FFMPEG_proxy* wrt = 0;
     try
     {
-        wrt = new CvVideoWriter_FFMPEG_proxy(filename, fourcc, fps, sz, isColor != 0);
+        VideoWriterParameters parameters(params, n_params);
+        wrt = new CvVideoWriter_FFMPEG_proxy(filename, fourcc, fps, sz, parameters);
         if(wrt && wrt->isOpened())
         {
             *handle = (CvPluginWriter)wrt;
@@ -470,6 +477,14 @@ CvResult CV_API_CALL cv_writer_open(const char* filename, int fourcc, double fps
     return CV_ERROR_FAIL;
 }
+static
+CvResult CV_API_CALL cv_writer_open(const char* filename, int fourcc, double fps, int width, int height, int isColor,
+                                    CV_OUT CvPluginWriter* handle)
+{
+    int params[2] = { VIDEOWRITER_PROP_IS_COLOR, isColor };
+    return cv_writer_open_with_params(filename, fourcc, fps, width, height, params, 1, handle);
+}
 static
 CvResult CV_API_CALL cv_writer_release(CvPluginWriter handle)
 {
@@ -481,9 +496,22 @@ CvResult CV_API_CALL cv_writer_release(CvPluginWriter handle)
 }
 static
-CvResult CV_API_CALL cv_writer_get_prop(CvPluginWriter /*handle*/, int /*prop*/, CV_OUT double* /*val*/)
+CvResult CV_API_CALL cv_writer_get_prop(CvPluginWriter handle, int prop, CV_OUT double* val)
 {
-    return CV_ERROR_FAIL;
+    if (!handle)
+        return CV_ERROR_FAIL;
+    if (!val)
+        return CV_ERROR_FAIL;
+    try
+    {
+        CvVideoWriter_FFMPEG_proxy* instance = (CvVideoWriter_FFMPEG_proxy*)handle;
+        *val = instance->getProperty(prop);
+        return CV_ERROR_OK;
+    }
+    catch (...)
+    {
+        return CV_ERROR_FAIL;
+    }
 }
 static
@@ -594,6 +622,9 @@ static const OpenCV_VideoIO_Writer_Plugin_API writer_plugin_api =
         /*  4*/cv_writer_get_prop,
         /*  5*/cv_writer_set_prop,
         /*  6*/cv_writer_write
+    },
+    {
+        /*  7*/cv_writer_open_with_params
     }
 };
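
For plugin authors: the parameters reach cv_writer_open_with_params() as a flat array of (propertyId, value) int pairs, and n_params counts pairs rather than array elements, exactly as the legacy cv_writer_open() wrapper above forwards isColor. A small sketch of that convention, with illustrative values only:

    #include <opencv2/videoio.hpp>

    int main()
    {
        using namespace cv;
        // Flat (propertyId, value) pairs, as consumed by VideoWriterParameters(params, n_params).
        int params[] = {
            VIDEOWRITER_PROP_IS_COLOR,        1,
            VIDEOWRITER_PROP_HW_ACCELERATION, VIDEO_ACCELERATION_D3D11,
            VIDEOWRITER_PROP_HW_DEVICE,       0,
        };
        unsigned n_params = (unsigned)(sizeof(params) / sizeof(params[0]) / 2);  // 3 pairs
        (void)n_params;  // a plugin host would hand 'params'/'n_params' to Writer_open_with_params
        return 0;
    }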

@@ -0,0 +1,555 @@
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2020-2021 Intel Corporation
#include "opencv2/videoio.hpp"
#if defined(__OPENCV_BUILD) || defined(OPENCV_HAVE_CVCONFIG_H) // TODO Properly detect and add D3D11 / LIBVA dependencies for standalone plugins
#include "cvconfig.h"
#endif
#include <sstream>
#ifdef HAVE_D3D11
#define D3D11_NO_HELPERS
#include <d3d11.h>
#include <codecvt>
#endif
#ifdef HAVE_VA
#include <va/va_backend.h>
#endif
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavutil/avutil.h>
#include <libavutil/hwcontext.h>
#ifdef HAVE_D3D11
#include <libavutil/hwcontext_d3d11va.h>
#endif
#ifdef HAVE_VA
#include <libavutil/hwcontext_vaapi.h>
#endif
}
static
const char* getVideoAccelerationName(VideoAccelerationType va_type)
{
switch (va_type)
{
case VIDEO_ACCELERATION_NONE: return "none";
case VIDEO_ACCELERATION_ANY: return "any";
case VIDEO_ACCELERATION_D3D11: return "d3d11";
case VIDEO_ACCELERATION_VAAPI: return "vaapi";
case VIDEO_ACCELERATION_MFX: return "mfx";
}
return "unknown";
}
static
std::string getDecoderConfiguration(VideoAccelerationType va_type, AVDictionary *dict)
{
std::string va_name = getVideoAccelerationName(va_type);
std::string key_name = std::string("hw_decoders_") + va_name;
const char *hw_acceleration = NULL;
if (dict)
{
AVDictionaryEntry* entry = av_dict_get(dict, key_name.c_str(), NULL, 0);
if (entry)
hw_acceleration = entry->value;
}
if (hw_acceleration)
return hw_acceleration;
// some default values (FFMPEG_DECODE_ACCELERATION_TYPES)
#ifdef _WIN32
switch (va_type)
{
case VIDEO_ACCELERATION_NONE: return "";
case VIDEO_ACCELERATION_ANY: return "d3d11va";
case VIDEO_ACCELERATION_D3D11: return "d3d11va";
case VIDEO_ACCELERATION_VAAPI: return "";
case VIDEO_ACCELERATION_MFX: return "";
}
return "";
#else
switch (va_type)
{
case VIDEO_ACCELERATION_NONE: return "";
case VIDEO_ACCELERATION_ANY: return "vaapi.iHD";
case VIDEO_ACCELERATION_D3D11: return "";
case VIDEO_ACCELERATION_VAAPI: return "vaapi.iHD";
case VIDEO_ACCELERATION_MFX: return "";
}
return "";
#endif
}
static
std::string getEncoderConfiguration(VideoAccelerationType va_type, AVDictionary *dict)
{
std::string va_name = getVideoAccelerationName(va_type);
std::string key_name = std::string("hw_encoders_") + va_name;
const char *hw_acceleration = NULL;
if (dict)
{
AVDictionaryEntry* entry = av_dict_get(dict, key_name.c_str(), NULL, 0);
if (entry)
hw_acceleration = entry->value;
}
if (hw_acceleration)
return hw_acceleration;
// some default values (FFMPEG_ENCODE_ACCELERATION_TYPES)
#ifdef _WIN32
switch (va_type)
{
case VIDEO_ACCELERATION_NONE: return "";
case VIDEO_ACCELERATION_ANY: return "qsv";
case VIDEO_ACCELERATION_D3D11: return "";
case VIDEO_ACCELERATION_VAAPI: return "";
case VIDEO_ACCELERATION_MFX: return "qsv";
}
return "";
#else
switch (va_type)
{
case VIDEO_ACCELERATION_NONE: return "";
case VIDEO_ACCELERATION_ANY: return "qsv.iHD,vaapi.iHD";
case VIDEO_ACCELERATION_D3D11: return "";
case VIDEO_ACCELERATION_VAAPI: return "vaapi.iHD";
case VIDEO_ACCELERATION_MFX: return "qsv.iHD";
}
return "unknown";
#endif
}
static
std::string getDecoderDisabledCodecs(AVDictionary *dict)
{
std::string key_name = std::string("hw_disable_decoders");
const char *disabled_codecs = NULL;
if (dict)
{
AVDictionaryEntry* entry = av_dict_get(dict, key_name.c_str(), NULL, 0);
if (entry)
disabled_codecs = entry->value;
}
if (disabled_codecs)
return disabled_codecs;
// some default values (FFMPEG_DECODE_DISABLE_CODECS)
#ifdef _WIN32
return "none";
#else
return "av1.vaapi,av1_qsv,vp8.vaapi,vp8_qsv"; // "vp9_qsv"
#endif
}
static
std::string getEncoderDisabledCodecs(AVDictionary *dict)
{
std::string key_name = std::string("hw_disabled_encoders");
const char *disabled_codecs = NULL;
if (dict)
{
AVDictionaryEntry* entry = av_dict_get(dict, key_name.c_str(), NULL, 0);
if (entry)
disabled_codecs = entry->value;
}
if (disabled_codecs)
return disabled_codecs;
// some default values (FFMPEG_ENCODE_DISABLE_CODECS)
#ifdef _WIN32
return "mjpeg_qsv";
#else
return "mjpeg_vaapi,mjpeg_qsv,vp8_vaapi";
#endif
}
#define HW_DEFAULT_POOL_SIZE 32
#define HW_DEFAULT_SW_FORMAT AV_PIX_FMT_NV12
using namespace cv;
static AVCodec *hw_find_codec(AVCodecID id, AVHWDeviceType hw_type, int (*check_category)(const AVCodec *),
const char *disabled_codecs, AVPixelFormat *hw_pix_fmt);
static AVBufferRef* hw_create_device(AVHWDeviceType hw_type, int hw_device, const std::string& device_subname);
static AVBufferRef* hw_create_frames(struct AVCodecContext* ctx, AVBufferRef *hw_device_ctx, int width, int height, AVPixelFormat hw_format);
static AVPixelFormat hw_get_format_callback(struct AVCodecContext *ctx, const enum AVPixelFormat * fmt);
static VideoAccelerationType hw_type_to_va_type(AVHWDeviceType hw_type);
static
bool hw_check_device(AVBufferRef* ctx, AVHWDeviceType hw_type, const std::string& device_subname) {
if (!ctx)
return false;
AVHWDeviceContext* hw_device_ctx = (AVHWDeviceContext*)ctx->data;
if (!hw_device_ctx->hwctx)
return false;
const char *hw_name = av_hwdevice_get_type_name(hw_type);
if (hw_type == AV_HWDEVICE_TYPE_QSV)
hw_name = "MFX";
bool ret = true;
std::string device_name;
#if defined(HAVE_D3D11)
if (hw_device_ctx->type == AV_HWDEVICE_TYPE_D3D11VA) {
ID3D11Device* device = ((AVD3D11VADeviceContext*)hw_device_ctx->hwctx)->device;
IDXGIDevice* dxgiDevice = nullptr;
if (device && SUCCEEDED(device->QueryInterface(__uuidof(IDXGIDevice), reinterpret_cast<void**>(&dxgiDevice)))) {
IDXGIAdapter* adapter = nullptr;
if (SUCCEEDED(dxgiDevice->GetAdapter(&adapter))) {
DXGI_ADAPTER_DESC desc;
if (SUCCEEDED(adapter->GetDesc(&desc))) {
std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>> conv;
device_name = conv.to_bytes(desc.Description);
}
adapter->Release();
}
dxgiDevice->Release();
}
}
#endif
if (hw_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
#if defined(HAVE_VA) && (VA_MAJOR_VERSION >= 1)
VADisplay display = ((AVVAAPIDeviceContext *) hw_device_ctx->hwctx)->display;
if (display) {
VADriverContext *va_ctx = ((VADisplayContext *) display)->pDriverContext;
device_name = va_ctx->str_vendor;
if (hw_type == AV_HWDEVICE_TYPE_QSV) {
// Workaround for issue fixed in MediaSDK 21.x https://github.com/Intel-Media-SDK/MediaSDK/issues/2595
// Checks VAAPI driver for support of VideoProc operation required by MediaSDK
ret = false;
int n_entrypoints = va_ctx->max_entrypoints;
std::vector<VAEntrypoint> entrypoints(n_entrypoints);
if (va_ctx->vtable->vaQueryConfigEntrypoints(va_ctx, VAProfileNone, entrypoints.data(), &n_entrypoints) == VA_STATUS_SUCCESS) {
for (int i = 0; i < n_entrypoints; i++) {
if (entrypoints[i] == VAEntrypointVideoProc) {
ret = true;
break;
}
}
}
if (!ret)
CV_LOG_INFO(NULL, "FFMPEG: Skipping MFX video acceleration as entrypoint VideoProc not found in: " << device_name);
}
}
#else
ret = (hw_type != AV_HWDEVICE_TYPE_QSV); // disable MFX if we can't check VAAPI for VideoProc entrypoint
#endif
}
if (ret && !device_subname.empty() && device_name.find(device_subname) == std::string::npos)
{
CV_LOG_INFO(NULL, "FFMPEG: Skipping '" << hw_name <<
"' video acceleration on the following device name as not matching substring '" << device_subname << "': " << device_name);
ret = false; // reject configuration
}
if (ret)
{
if (!device_name.empty()) {
CV_LOG_INFO(NULL, "FFMPEG: Using " << hw_name << " video acceleration on device: " << device_name);
} else {
CV_LOG_INFO(NULL, "FFMPEG: Using " << hw_name << " video acceleration");
}
}
return ret;
}
static
AVBufferRef* hw_create_device(AVHWDeviceType hw_type, int hw_device, const std::string& device_subname) {
if (AV_HWDEVICE_TYPE_NONE == hw_type)
return NULL;
AVHWDeviceType child_type = hw_type;
if (hw_type == AV_HWDEVICE_TYPE_QSV) {
#ifdef _WIN32
child_type = AV_HWDEVICE_TYPE_DXVA2;
#else
child_type = AV_HWDEVICE_TYPE_VAAPI;
#endif
}
AVBufferRef* hw_device_ctx = NULL;
char device[128] = "";
char* pdevice = NULL;
if (hw_device >= 0 && hw_device < 100000) {
if (child_type == AV_HWDEVICE_TYPE_VAAPI) {
snprintf(device, sizeof(device), "/dev/dri/renderD%d", 128 + hw_device);
} else {
snprintf(device, sizeof(device), "%d", hw_device);
}
pdevice = device;
}
const char *hw_child_name = av_hwdevice_get_type_name(child_type);
const char *device_name = pdevice ? pdevice : "'default'";
int err = av_hwdevice_ctx_create(&hw_device_ctx, child_type, pdevice, NULL, 0);
if (hw_device_ctx && err >= 0)
{
CV_LOG_DEBUG(NULL, "FFMPEG: Created video acceleration context (av_hwdevice_ctx_create) for " << hw_child_name << " on device " << device_name);
if (!hw_check_device(hw_device_ctx, hw_type, device_subname)) {
av_buffer_unref(&hw_device_ctx);
return NULL;
}
if (hw_type != child_type) {
AVBufferRef *derived_ctx = NULL;
const char *hw_name = av_hwdevice_get_type_name(hw_type);
err = av_hwdevice_ctx_create_derived(&derived_ctx, hw_type, hw_device_ctx, 0);
if (!derived_ctx || err < 0)
{
if (derived_ctx)
av_buffer_unref(&derived_ctx);
CV_LOG_INFO(NULL, "FFMPEG: Failed to create derived video acceleration (av_hwdevice_ctx_create_derived) for " << hw_name << ". Error=" << err);
}
else
{
CV_LOG_DEBUG(NULL, "FFMPEG: Created derived video acceleration context (av_hwdevice_ctx_create_derived) for " << hw_name);
}
av_buffer_unref(&hw_device_ctx);
return derived_ctx;
} else {
return hw_device_ctx;
}
}
else
{
const char *hw_name = hw_child_name;
CV_LOG_INFO(NULL, "FFMPEG: Failed to create " << hw_name << " video acceleration (av_hwdevice_ctx_create) on device " << device_name);
return NULL;
}
}
static
AVBufferRef* hw_create_frames(struct AVCodecContext* ctx, AVBufferRef *hw_device_ctx, int width, int height, AVPixelFormat hw_format)
{
AVBufferRef *hw_frames_ref = nullptr;
if (ctx)
{
int res = avcodec_get_hw_frames_parameters(ctx, hw_device_ctx, hw_format, &hw_frames_ref);
if (res < 0)
{
CV_LOG_DEBUG(NULL, "FFMPEG: avcodec_get_hw_frames_parameters() call failed: " << res)
}
}
if (!hw_frames_ref)
{
hw_frames_ref = av_hwframe_ctx_alloc(hw_device_ctx);
}
if (!hw_frames_ref)
{
CV_LOG_INFO(NULL, "FFMPEG: Failed to create HW frame context (av_hwframe_ctx_alloc)");
return NULL;
}
AVHWFramesContext *frames_ctx = (AVHWFramesContext *)(hw_frames_ref->data);
frames_ctx->width = width;
frames_ctx->height = height;
if (frames_ctx->format == AV_PIX_FMT_NONE)
frames_ctx->format = hw_format;
if (frames_ctx->sw_format == AV_PIX_FMT_NONE)
frames_ctx->sw_format = HW_DEFAULT_SW_FORMAT;
if (frames_ctx->initial_pool_size == 0)
frames_ctx->initial_pool_size = HW_DEFAULT_POOL_SIZE;
int res = av_hwframe_ctx_init(hw_frames_ref);
if (res < 0)
{
CV_LOG_INFO(NULL, "FFMPEG: Failed to initialize HW frame context (av_hwframe_ctx_init): " << res);
av_buffer_unref(&hw_frames_ref);
return NULL;
}
return hw_frames_ref;
}
static
bool hw_check_codec(AVCodec* codec, AVHWDeviceType hw_type, const char *disabled_codecs)
{
CV_Assert(disabled_codecs);
std::string hw_name = std::string(".") + av_hwdevice_get_type_name(hw_type);
std::stringstream s_stream(disabled_codecs);
while (s_stream.good()) {
std::string name;
getline(s_stream, name, ',');
if (name == codec->name || name == hw_name || name == codec->name + hw_name || name == "hw") {
CV_LOG_INFO(NULL, "FFMPEG: skipping codec " << codec->name << hw_name);
return false;
}
}
return true;
}
static
AVCodec *hw_find_codec(AVCodecID id, AVHWDeviceType hw_type, int (*check_category)(const AVCodec *), const char *disabled_codecs, AVPixelFormat *hw_pix_fmt) {
AVCodec *c = 0;
void *opaque = 0;
while (NULL != (c = (AVCodec*)av_codec_iterate(&opaque)))
{
if (!check_category(c))
continue;
if (c->id != id)
continue;
if (c->capabilities & AV_CODEC_CAP_EXPERIMENTAL)
continue;
if (hw_type != AV_HWDEVICE_TYPE_NONE) {
AVPixelFormat hw_native_fmt = AV_PIX_FMT_NONE;
#if LIBAVUTIL_BUILD < AV_VERSION_INT(56, 51, 100) // VAAPI encoders support avcodec_get_hw_config() starting ffmpeg 4.3
if (hw_type == AV_HWDEVICE_TYPE_VAAPI)
hw_native_fmt = AV_PIX_FMT_VAAPI_VLD;
#endif
if (hw_type == AV_HWDEVICE_TYPE_CUDA) // CUDA encoders don't support avcodec_get_hw_config()
hw_native_fmt = AV_PIX_FMT_CUDA;
if (av_codec_is_encoder(c) && hw_native_fmt != AV_PIX_FMT_NONE && c->pix_fmts) {
for (int i = 0; c->pix_fmts[i] != AV_PIX_FMT_NONE; i++) {
if (c->pix_fmts[i] == hw_native_fmt) {
*hw_pix_fmt = hw_native_fmt;
if (hw_check_codec(c, hw_type, disabled_codecs))
return c;
}
}
}
for (int i = 0;; i++) {
const AVCodecHWConfig *hw_config = avcodec_get_hw_config(c, i);
if (!hw_config)
break;
if (hw_config->device_type == hw_type) {
*hw_pix_fmt = hw_config->pix_fmt;
if (hw_check_codec(c, hw_type, disabled_codecs))
return c;
}
}
} else {
return c;
}
}
return NULL;
}
// Callback to select hardware pixel format (not software format) and allocate frame pool (hw_frames_ctx)
static
AVPixelFormat hw_get_format_callback(struct AVCodecContext *ctx, const enum AVPixelFormat * fmt) {
if (!ctx->hw_device_ctx)
return fmt[0];
AVHWDeviceType hw_type = ((AVHWDeviceContext*)ctx->hw_device_ctx->data)->type;
for (int j = 0;; j++) {
const AVCodecHWConfig *hw_config = avcodec_get_hw_config(ctx->codec, j);
if (!hw_config)
break;
if (hw_config->device_type == hw_type) {
for (int i = 0; fmt[i] != AV_PIX_FMT_NONE; i++) {
if (fmt[i] == hw_config->pix_fmt) {
if (hw_config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_FRAMES_CTX) {
ctx->sw_pix_fmt = HW_DEFAULT_SW_FORMAT;
ctx->hw_frames_ctx = hw_create_frames(ctx, ctx->hw_device_ctx, ctx->width, ctx->height, fmt[i]);
if (ctx->hw_frames_ctx) {
//ctx->sw_pix_fmt = ((AVHWFramesContext *)(ctx->hw_frames_ctx->data))->sw_format;
return fmt[i];
}
}
}
}
}
}
CV_LOG_DEBUG(NULL, "FFMPEG: Can't select HW format in 'get_format()' callback, use default");
return fmt[0];
}
static
VideoAccelerationType hw_type_to_va_type(AVHWDeviceType hw_type) {
struct HWTypeFFMPEG {
AVHWDeviceType hw_type;
VideoAccelerationType va_type;
} known_hw_types[] = {
{ AV_HWDEVICE_TYPE_D3D11VA, VIDEO_ACCELERATION_D3D11 },
{ AV_HWDEVICE_TYPE_VAAPI, VIDEO_ACCELERATION_VAAPI },
{ AV_HWDEVICE_TYPE_QSV, VIDEO_ACCELERATION_MFX },
{ AV_HWDEVICE_TYPE_CUDA, (VideoAccelerationType)(1 << 11) },
};
for (const HWTypeFFMPEG& hw : known_hw_types) {
if (hw_type == hw.hw_type)
return hw.va_type;
}
return VIDEO_ACCELERATION_NONE;
}
class HWAccelIterator {
public:
HWAccelIterator(VideoAccelerationType va_type, bool isEncoder, AVDictionary *dict)
: hw_type_(AV_HWDEVICE_TYPE_NONE)
{
std::string accel_list;
if (va_type != VIDEO_ACCELERATION_NONE)
{
updateAccelList_(accel_list, va_type, isEncoder, dict);
}
if (va_type == VIDEO_ACCELERATION_ANY)
{
if (!accel_list.empty())
accel_list = ","; // add no-acceleration case to the end of the list
}
CV_LOG_DEBUG(NULL, "FFMPEG: allowed acceleration types (" << getVideoAccelerationName(va_type) << "): '" << accel_list << "'");
if (accel_list.empty() && va_type != VIDEO_ACCELERATION_NONE && va_type != VIDEO_ACCELERATION_ANY)
{
// broke stream
std::string tmp;
s_stream_ >> tmp;
}
else
{
s_stream_ = std::istringstream(accel_list);
}
if (va_type != VIDEO_ACCELERATION_NONE)
{
disabled_codecs_ = isEncoder
? getEncoderDisabledCodecs(dict)
: getDecoderDisabledCodecs(dict);
CV_LOG_DEBUG(NULL, "FFMPEG: disabled codecs: '" << disabled_codecs_ << "'");
}
}
bool good() const
{
return s_stream_.good();
}
void parse_next()
{
getline(s_stream_, hw_type_device_string_, ',');
size_t index = hw_type_device_string_.find('.');
if (index != std::string::npos) {
device_subname_ = hw_type_device_string_.substr(index + 1);
hw_type_string_ = hw_type_device_string_.substr(0, index);
} else {
device_subname_.clear();
hw_type_string_ = hw_type_device_string_;
}
hw_type_ = av_hwdevice_find_type_by_name(hw_type_string_.c_str());
}
const std::string& hw_type_device_string() const { return hw_type_device_string_; }
const std::string& hw_type_string() const { return hw_type_string_; }
AVHWDeviceType hw_type() const { return hw_type_; }
const std::string& device_subname() const { return device_subname_; }
const std::string& disabled_codecs() const { return disabled_codecs_; }
private:
bool updateAccelList_(std::string& accel_list, VideoAccelerationType va_type, bool isEncoder, AVDictionary *dict)
{
std::string new_accels = isEncoder
? getEncoderConfiguration(va_type, dict)
: getDecoderConfiguration(va_type, dict);
if (new_accels.empty())
return false;
if (accel_list.empty())
accel_list = new_accels;
else
accel_list = accel_list + "," + new_accels;
return true;
}
std::istringstream s_stream_;
std::string hw_type_device_string_;
std::string hw_type_string_;
AVHWDeviceType hw_type_;
std::string device_subname_;
std::string disabled_codecs_;
};
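
The candidate and disabled-codec lists above are read from an AVDictionary under keys such as hw_decoders_any, hw_encoders_vaapi, hw_disable_decoders and hw_disabled_encoders. A hedged sketch, assuming (as the writer path does with OPENCV_FFMPEG_WRITER_OPTIONS) that the capture-side dictionary is filled from the OPENCV_FFMPEG_CAPTURE_OPTIONS environment variable, with ';' separating key from value and '|' separating pairs:

    #include <opencv2/videoio.hpp>
    #include <cstdlib>

    int main()
    {
        // Assumption: override the default decoder candidates for VIDEO_ACCELERATION_ANY
        // and disable one codec/acceleration pair, without rebuilding OpenCV.
        setenv("OPENCV_FFMPEG_CAPTURE_OPTIONS",
               "hw_decoders_any;vaapi.iHD,d3d11va|hw_disable_decoders;av1.vaapi",
               1 /*overwrite*/);  // POSIX; use _putenv_s() on Windows

        cv::VideoCapture cap("input.mp4", cv::CAP_FFMPEG,
                             { cv::CAP_PROP_HW_ACCELERATION, cv::VIDEO_ACCELERATION_ANY });
        return cap.isOpened() ? 0 : 1;
    }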

@ -64,6 +64,9 @@ using namespace cv;
#ifdef __GNUC__ #ifdef __GNUC__
# pragma GCC diagnostic ignored "-Wdeprecated-declarations" # pragma GCC diagnostic ignored "-Wdeprecated-declarations"
#endif #endif
#ifdef _MSC_VER
#pragma warning(disable: 4996) // was declared deprecated
#endif
#ifndef CV_UNUSED // Required for standalone compilation mode (OpenCV defines this in base.hpp) #ifndef CV_UNUSED // Required for standalone compilation mode (OpenCV defines this in base.hpp)
#define CV_UNUSED(name) (void)name #define CV_UNUSED(name) (void)name
@ -90,6 +93,16 @@ extern "C" {
} }
#endif #endif
//#define USE_AV_HW_CODECS 0
#ifndef USE_AV_HW_CODECS
#if LIBAVUTIL_VERSION_MAJOR >= 56 // FFMPEG 4.0+
#define USE_AV_HW_CODECS 1
#include "cap_ffmpeg_hw.hpp"
#else
#define USE_AV_HW_CODECS 0
#endif
#endif
#if defined _MSC_VER && _MSC_VER >= 1200 #if defined _MSC_VER && _MSC_VER >= 1200
#pragma warning( default: 4244 4510 4610 ) #pragma warning( default: 4244 4510 4610 )
#endif #endif
@@ -237,7 +250,7 @@ inline void get_monotonic_time(timespec *tv)
     t.QuadPart -= offset.QuadPart;
     microseconds = (double)t.QuadPart / frequencyToMicroseconds;
-    t.QuadPart = microseconds;
+    t.QuadPart = (LONGLONG)microseconds;
     tv->tv_sec = t.QuadPart / 1000000;
     tv->tv_nsec = (t.QuadPart % 1000000) * 1000;
 }
@ -522,6 +535,8 @@ struct CvCapture_FFMPEG
#else #else
AVBitStreamFilterContext* bsfc; AVBitStreamFilterContext* bsfc;
#endif #endif
VideoAccelerationType va_type;
int hw_device;
}; };
void CvCapture_FFMPEG::init() void CvCapture_FFMPEG::init()
@ -557,6 +572,8 @@ void CvCapture_FFMPEG::init()
memset(&packet_filtered, 0, sizeof(packet_filtered)); memset(&packet_filtered, 0, sizeof(packet_filtered));
av_init_packet(&packet_filtered); av_init_packet(&packet_filtered);
bsfc = NULL; bsfc = NULL;
va_type = cv::VIDEO_ACCELERATION_ANY;
hw_device = -1;
} }
@ -882,9 +899,34 @@ bool CvCapture_FFMPEG::open(const char* _filename, const VideoCaptureParameters&
return false; return false;
} }
} }
if (params.has(CAP_PROP_HW_ACCELERATION))
{
va_type = params.get<VideoAccelerationType>(CAP_PROP_HW_ACCELERATION);
#if !USE_AV_HW_CODECS
if (va_type != VIDEO_ACCELERATION_NONE && va_type != VIDEO_ACCELERATION_ANY)
{
CV_LOG_ERROR(NULL, "VIDEOIO/FFMPEG: FFmpeg backend is build without acceleration support. Can't handle CAP_PROP_HW_ACCELERATION parameter. Bailout");
return false;
}
#endif
}
if (params.has(CAP_PROP_HW_DEVICE))
{
hw_device = params.get<int>(CAP_PROP_HW_DEVICE);
if (va_type == VIDEO_ACCELERATION_NONE && hw_device != -1)
{
CV_LOG_ERROR(NULL, "VIDEOIO/FFMPEG: Invalid usage of CAP_PROP_HW_DEVICE without requested H/W acceleration. Bailout");
return false;
}
if (va_type == VIDEO_ACCELERATION_ANY && hw_device != -1)
{
CV_LOG_ERROR(NULL, "VIDEOIO/FFMPEG: Invalid usage of CAP_PROP_HW_DEVICE with 'ANY' H/W acceleration. Bailout");
return false;
}
}
if (params.warnUnusedParameters()) if (params.warnUnusedParameters())
{ {
CV_LOG_ERROR(NULL, "VIDEOIO/FFMPEG: unsupported parameters in .open(), see logger INFO channel for details"); CV_LOG_ERROR(NULL, "VIDEOIO/FFMPEG: unsupported parameters in .open(), see logger INFO channel for details. Bailout");
return false; return false;
} }
} }
@@ -973,22 +1015,102 @@ bool CvCapture_FFMPEG::open(const char* _filename, const VideoCaptureParameters&
         if( AVMEDIA_TYPE_VIDEO == enc->codec_type && video_stream < 0)
         {
+            CV_LOG_DEBUG(NULL, "FFMPEG: stream[" << i << "] is video stream with codecID=" << (int)enc->codec_id
+                    << " width=" << enc->width
+                    << " height=" << enc->height
+            );
             // backup encoder' width/height
             int enc_width = enc->width;
             int enc_height = enc->height;
-            AVCodec *codec;
-            if(av_dict_get(dict, "video_codec", NULL, 0) == NULL) {
-                codec = avcodec_find_decoder(enc->codec_id);
-            } else {
-                codec = avcodec_find_decoder_by_name(av_dict_get(dict, "video_codec", NULL, 0)->value);
-            }
-            if (!codec || avcodec_open2(enc, codec, NULL) < 0)
+#if !USE_AV_HW_CODECS
+            va_type = VIDEO_ACCELERATION_NONE;
+#endif
+
+            // find and open decoder, try HW acceleration types specified in 'hw_acceleration' list (in order)
+            AVCodec *codec = NULL;
+            err = -1;
+#if USE_AV_HW_CODECS
+            HWAccelIterator accel_iter(va_type, false/*isEncoder*/, dict);
+            while (accel_iter.good())
+            {
+#else
+            do {
+#endif
+#if USE_AV_HW_CODECS
+                accel_iter.parse_next();
+                AVHWDeviceType hw_type = accel_iter.hw_type();
+                enc->get_format = avcodec_default_get_format;
+                if (enc->hw_device_ctx) {
+                    av_buffer_unref(&enc->hw_device_ctx);
+                }
+                if (hw_type != AV_HWDEVICE_TYPE_NONE)
+                {
+                    CV_LOG_DEBUG(NULL, "FFMPEG: trying to configure H/W acceleration: '" << accel_iter.hw_type_device_string() << "'");
+                    AVPixelFormat hw_pix_fmt = AV_PIX_FMT_NONE;
+                    codec = hw_find_codec(enc->codec_id, hw_type, av_codec_is_decoder, accel_iter.disabled_codecs().c_str(), &hw_pix_fmt);
+                    if (codec) {
+                        if (hw_pix_fmt != AV_PIX_FMT_NONE)
+                            enc->get_format = hw_get_format_callback; // set callback to select HW pixel format, not SW format
+                        enc->hw_device_ctx = hw_create_device(hw_type, hw_device, accel_iter.device_subname());
+                        if (!enc->hw_device_ctx)
+                        {
+                            CV_LOG_DEBUG(NULL, "FFMPEG: ... can't create H/W device: '" << accel_iter.hw_type_device_string() << "'");
+                            codec = NULL;
+                        }
+                    }
+                }
+                else if (hw_type == AV_HWDEVICE_TYPE_NONE)
+#endif // USE_AV_HW_CODECS
+                {
+                    AVDictionaryEntry* video_codec_param = av_dict_get(dict, "video_codec", NULL, 0);
+                    if (video_codec_param == NULL)
+                    {
+                        codec = avcodec_find_decoder(enc->codec_id);
+                        if (!codec)
+                        {
+                            CV_LOG_ERROR(NULL, "Could not find decoder for codec_id=" << (int)enc->codec_id);
+                        }
+                    }
+                    else
+                    {
+                        CV_LOG_DEBUG(NULL, "FFMPEG: Using video_codec='" << video_codec_param->value << "'");
+                        codec = avcodec_find_decoder_by_name(video_codec_param->value);
+                        if (!codec)
+                        {
+                            CV_LOG_ERROR(NULL, "Could not find decoder '" << video_codec_param->value << "'");
+                        }
+                    }
+                }
+                if (!codec)
+                    continue;
+                err = avcodec_open2(enc, codec, NULL);
+                if (err >= 0) {
+#if USE_AV_HW_CODECS
+                    va_type = hw_type_to_va_type(hw_type);
+                    if (hw_type != AV_HWDEVICE_TYPE_NONE && hw_device < 0)
+                        hw_device = 0;
+#endif
+                    break;
+                } else {
+                    CV_LOG_ERROR(NULL, "Could not open codec " << codec->name << ", error: " << err);
+                }
+#if USE_AV_HW_CODECS
+            }  // while (accel_iter.good())
+#else
+            } while (0);
+#endif
+            if (err < 0) {
+                CV_LOG_ERROR(NULL, "VIDEOIO/FFMPEG: Failed to initialize VideoCapture");
                 goto exit_func;
+            }
             // checking width/height (since decoder can sometimes alter it, eg. vp6f)
-            if (enc_width && (enc->width != enc_width)) { enc->width = enc_width; }
-            if (enc_height && (enc->height != enc_height)) { enc->height = enc_height; }
+            if (enc_width && (enc->width != enc_width))
+                enc->width = enc_width;
+            if (enc_height && (enc->height != enc_height))
+                enc->height = enc_height;
             video_stream = i;
             video_st = ic->streams[i];
@ -1009,7 +1131,8 @@ bool CvCapture_FFMPEG::open(const char* _filename, const VideoCaptureParameters&
} }
} }
if(video_stream >= 0) valid = true; if (video_stream >= 0)
valid = true;
exit_func: exit_func:
@ -1140,7 +1263,6 @@ bool CvCapture_FFMPEG::processRawPacket()
bool CvCapture_FFMPEG::grabFrame() bool CvCapture_FFMPEG::grabFrame()
{ {
bool valid = false; bool valid = false;
int got_picture;
int count_errs = 0; int count_errs = 0;
const int max_number_of_attempts = 1 << 9; const int max_number_of_attempts = 1 << 9;
@ -1159,6 +1281,11 @@ bool CvCapture_FFMPEG::grabFrame()
interrupt_metadata.timeout_after_ms = LIBAVFORMAT_INTERRUPT_READ_TIMEOUT_MS; interrupt_metadata.timeout_after_ms = LIBAVFORMAT_INTERRUPT_READ_TIMEOUT_MS;
#endif #endif
#if USE_AV_SEND_FRAME_API
// check if we can receive frame from previously decoded packet
valid = avcodec_receive_frame(video_st->codec, picture) >= 0;
#endif
// get the next frame // get the next frame
while (!valid) while (!valid)
{ {
@@ -1205,16 +1332,24 @@ bool CvCapture_FFMPEG::grabFrame()
         }
         // Decode video frame
+#if USE_AV_SEND_FRAME_API
+        if (avcodec_send_packet(video_st->codec, &packet) < 0) {
+            break;
+        }
+        ret = avcodec_receive_frame(video_st->codec, picture);
+#else
+        int got_picture = 0;
         avcodec_decode_video2(video_st->codec, picture, &got_picture, &packet);
+        ret = got_picture ? 0 : -1;
+#endif
-        // Did we get a video frame?
-        if(got_picture)
-        {
+        if (ret >= 0) {
             //picture_pts = picture->best_effort_timestamp;
             if( picture_pts == AV_NOPTS_VALUE_ )
                 picture_pts = picture->pkt_pts != AV_NOPTS_VALUE_ && picture->pkt_pts != 0 ? picture->pkt_pts : picture->pkt_dts;
             valid = true;
+        } else if (ret == AVERROR(EAGAIN)) {
+            continue;
         }
         else
         {
@@ -1255,7 +1390,20 @@ bool CvCapture_FFMPEG::retrieveFrame(int, unsigned char** data, int* step, int*
         return p.data != NULL;
     }
-    if (!picture->data[0])
+    AVFrame* sw_picture = picture;
+#if USE_AV_HW_CODECS
+    // if hardware frame, copy it to system memory
+    if (picture && picture->hw_frames_ctx) {
+        sw_picture = av_frame_alloc();
+        //if (av_hwframe_map(sw_picture, picture, AV_HWFRAME_MAP_READ) < 0) {
+        if (av_hwframe_transfer_data(sw_picture, picture, 0) < 0) {
+            CV_LOG_ERROR(NULL, "Error copying data from GPU to CPU (av_hwframe_transfer_data)");
+            return false;
+        }
+    }
+#endif
+    if (!sw_picture || !sw_picture->data[0])
         return false;
     if( img_convert_ctx == NULL ||
@@ -1270,7 +1418,7 @@ bool CvCapture_FFMPEG::retrieveFrame(int, unsigned char** data, int* step, int*
         img_convert_ctx = sws_getCachedContext(
                 img_convert_ctx,
                 buffer_width, buffer_height,
-                video_st->codec->pix_fmt,
+                (AVPixelFormat)sw_picture->format,
                 buffer_width, buffer_height,
                 AV_PIX_FMT_BGR24,
                 SWS_BICUBIC,
@@ -1308,8 +1456,8 @@ bool CvCapture_FFMPEG::retrieveFrame(int, unsigned char** data, int* step, int*
     sws_scale(
             img_convert_ctx,
-            picture->data,
-            picture->linesize,
+            sw_picture->data,
+            sw_picture->linesize,
             0, video_st->codec->coded_height,
             rgb_picture.data,
             rgb_picture.linesize
@@ -1321,6 +1469,9 @@ bool CvCapture_FFMPEG::retrieveFrame(int, unsigned char** data, int* step, int*
     *height = frame.height;
     *cn = frame.cn;
+    if (sw_picture != picture) {
+        av_frame_unref(sw_picture);
+    }
     return true;
 }
@ -1392,6 +1543,12 @@ double CvCapture_FFMPEG::getProperty( int property_id ) const
#else #else
return 0; return 0;
#endif #endif
#if USE_AV_HW_CODECS
case CAP_PROP_HW_ACCELERATION:
return static_cast<double>(va_type);
case CAP_PROP_HW_DEVICE:
return static_cast<double>(hw_device);
#endif // USE_AV_HW_CODECS
default: default:
break; break;
} }
@ -1573,7 +1730,7 @@ bool CvCapture_FFMPEG::setProperty( int property_id, double value )
return false; return false;
case CAP_PROP_ORIENTATION_AUTO: case CAP_PROP_ORIENTATION_AUTO:
#if LIBAVUTIL_BUILD >= CALC_FFMPEG_VERSION(52, 94, 100) #if LIBAVUTIL_BUILD >= CALC_FFMPEG_VERSION(52, 94, 100)
rotation_auto = static_cast<bool>(value); rotation_auto = value ? true : false;
return true; return true;
#else #else
rotation_auto = 0; rotation_auto = 0;
@ -1591,9 +1748,10 @@ bool CvCapture_FFMPEG::setProperty( int property_id, double value )
struct CvVideoWriter_FFMPEG struct CvVideoWriter_FFMPEG
{ {
bool open( const char* filename, int fourcc, bool open( const char* filename, int fourcc,
double fps, int width, int height, bool isColor ); double fps, int width, int height, const VideoWriterParameters& params );
void close(); void close();
bool writeFrame( const unsigned char* data, int step, int width, int height, int cn, int origin ); bool writeFrame( const unsigned char* data, int step, int width, int height, int cn, int origin );
double getProperty(int propId) const;
void init(); void init();
@ -1606,13 +1764,15 @@ struct CvVideoWriter_FFMPEG
AVFrame * input_picture; AVFrame * input_picture;
uint8_t * picbuf; uint8_t * picbuf;
AVStream * video_st; AVStream * video_st;
int input_pix_fmt; AVPixelFormat input_pix_fmt;
unsigned char * aligned_input; unsigned char * aligned_input;
size_t aligned_input_size; size_t aligned_input_size;
int frame_width, frame_height; int frame_width, frame_height;
int frame_idx; int frame_idx;
bool ok; bool ok;
struct SwsContext *img_convert_ctx; struct SwsContext *img_convert_ctx;
VideoAccelerationType va_type;
int hw_device;
}; };
static const char * icvFFMPEGErrStr(int err) static const char * icvFFMPEGErrStr(int err)
@ -1667,12 +1827,14 @@ void CvVideoWriter_FFMPEG::init()
input_picture = 0; input_picture = 0;
picbuf = 0; picbuf = 0;
video_st = 0; video_st = 0;
input_pix_fmt = 0; input_pix_fmt = AV_PIX_FMT_NONE;
aligned_input = NULL; aligned_input = NULL;
aligned_input_size = 0; aligned_input_size = 0;
img_convert_ctx = 0; img_convert_ctx = 0;
frame_width = frame_height = 0; frame_width = frame_height = 0;
frame_idx = 0; frame_idx = 0;
va_type = VIDEO_ACCELERATION_NONE;
hw_device = -1;
ok = false; ok = false;
} }
@@ -1714,35 +1876,17 @@ static AVFrame * icv_alloc_picture_FFMPEG(int pix_fmt, int width, int height, bo
     return picture;
 }
-/* add a video output stream to the container */
-static AVStream *icv_add_video_stream_FFMPEG(AVFormatContext *oc,
-                                             CV_CODEC_ID codec_id,
-                                             int w, int h, int bitrate,
-                                             double fps, int pixel_format)
+/* configure video stream */
+static bool icv_configure_video_stream_FFMPEG(AVFormatContext *oc,
+                                              AVStream *st,
+                                              const AVCodec* codec,
+                                              int w, int h, int bitrate,
+                                              double fps, AVPixelFormat pixel_format)
 {
-    AVCodecContext *c;
-    AVStream *st;
+    AVCodecContext *c = st->codec;
     int frame_rate, frame_rate_base;
-    AVCodec *codec;
-    st = avformat_new_stream(oc, 0);
-    if (!st) {
-        CV_WARN("Could not allocate stream");
-        return NULL;
-    }
-    c = st->codec;
-    c->codec_id = av_guess_codec(oc->oformat, NULL, oc->filename, NULL, AVMEDIA_TYPE_VIDEO);
-    if(codec_id != CV_CODEC(CODEC_ID_NONE)){
-        c->codec_id = codec_id;
-    }
-    //if(codec_tag) c->codec_tag=codec_tag;
-    codec = avcodec_find_encoder(c->codec_id);
-    c->codec_id = codec->id;
     c->codec_type = AVMEDIA_TYPE_VIDEO;
     // Set per-codec defaults
@@ -1792,13 +1936,13 @@ static AVStream *icv_add_video_stream_FFMPEG(AVFormatContext *oc,
         }
     }
     if (best == NULL)
-        return NULL;
+        return false;
     c->time_base.den= best->num;
     c->time_base.num= best->den;
     }
     c->gop_size = 12; /* emit one intra frame every twelve frames at most */
-    c->pix_fmt = (AVPixelFormat) pixel_format;
+    c->pix_fmt = pixel_format;
     if (c->codec_id == CV_CODEC(CODEC_ID_MPEG2VIDEO)) {
         c->max_b_frames = 2;
@@ -1845,14 +1989,14 @@ static AVStream *icv_add_video_stream_FFMPEG(AVFormatContext *oc,
     st->time_base = c->time_base;
 #endif
-    return st;
+    return true;
 }
 static const int OPENCV_NO_FRAMES_WRITTEN_CODE = 1000;
 static int icv_av_write_frame_FFMPEG( AVFormatContext * oc, AVStream * video_st,
                                       uint8_t *, uint32_t,
-                                      AVFrame * picture )
+                                      AVFrame * picture, int frame_idx)
 {
     AVCodecContext* c = video_st->codec;
     int ret = OPENCV_NO_FRAMES_WRITTEN_CODE;
@ -1877,7 +2021,13 @@ static int icv_av_write_frame_FFMPEG( AVFormatContext * oc, AVStream * video_st,
{ {
/* encode the image */ /* encode the image */
#if USE_AV_SEND_FRAME_API #if USE_AV_SEND_FRAME_API
ret = avcodec_send_frame(c, picture); if (picture == NULL && frame_idx == 0) {
ret = 0;
} else {
ret = avcodec_send_frame(c, picture);
if (ret < 0)
CV_LOG_ERROR(NULL, "Error sending frame to encoder (avcodec_send_frame)");
}
while (ret >= 0) while (ret >= 0)
{ {
AVPacket* pkt = av_packet_alloc(); AVPacket* pkt = av_packet_alloc();
@ -1896,6 +2046,7 @@ static int icv_av_write_frame_FFMPEG( AVFormatContext * oc, AVStream * video_st,
break; break;
} }
#else #else
CV_UNUSED(frame_idx);
AVPacket pkt; AVPacket pkt;
av_init_packet(&pkt); av_init_packet(&pkt);
int got_output = 0; int got_output = 0;
@ -1956,7 +2107,7 @@ bool CvVideoWriter_FFMPEG::writeFrame( const unsigned char* data, int step, int
// 2. (dataend - SIMD_SIZE) and (dataend + SIMD_SIZE) is from the same 4k page // 2. (dataend - SIMD_SIZE) and (dataend + SIMD_SIZE) is from the same 4k page
const int CV_STEP_ALIGNMENT = 32; const int CV_STEP_ALIGNMENT = 32;
const size_t CV_SIMD_SIZE = 32; const size_t CV_SIMD_SIZE = 32;
const size_t CV_PAGE_MASK = ~(4096 - 1); const size_t CV_PAGE_MASK = ~(size_t)(4096 - 1);
const unsigned char* dataend = data + ((size_t)height * step); const unsigned char* dataend = data + ((size_t)height * step);
if (step % CV_STEP_ALIGNMENT != 0 || if (step % CV_STEP_ALIGNMENT != 0 ||
(((size_t)dataend - CV_SIMD_SIZE) & CV_PAGE_MASK) != (((size_t)dataend + CV_SIMD_SIZE) & CV_PAGE_MASK)) (((size_t)dataend - CV_SIMD_SIZE) & CV_PAGE_MASK) != (((size_t)dataend + CV_SIMD_SIZE) & CV_PAGE_MASK))
@ -1984,7 +2135,12 @@ bool CvVideoWriter_FFMPEG::writeFrame( const unsigned char* data, int step, int
step = aligned_step; step = aligned_step;
} }
if ( c->pix_fmt != input_pix_fmt ) { AVPixelFormat sw_pix_fmt = c->pix_fmt;
#if USE_AV_HW_CODECS
if (c->hw_frames_ctx)
sw_pix_fmt = ((AVHWFramesContext*)c->hw_frames_ctx->data)->sw_format;
#endif
if ( sw_pix_fmt != input_pix_fmt ) {
assert( input_picture ); assert( input_picture );
// let input_picture point to the raw data buffer of 'image' // let input_picture point to the raw data buffer of 'image'
_opencv_ffmpeg_av_image_fill_arrays(input_picture, (uint8_t *) data, _opencv_ffmpeg_av_image_fill_arrays(input_picture, (uint8_t *) data,
@ -1998,7 +2154,7 @@ bool CvVideoWriter_FFMPEG::writeFrame( const unsigned char* data, int step, int
(AVPixelFormat)input_pix_fmt, (AVPixelFormat)input_pix_fmt,
c->width, c->width,
c->height, c->height,
c->pix_fmt, sw_pix_fmt,
SWS_BICUBIC, SWS_BICUBIC,
NULL, NULL, NULL); NULL, NULL, NULL);
if( !img_convert_ctx ) if( !img_convert_ctx )
@ -2017,13 +2173,58 @@ bool CvVideoWriter_FFMPEG::writeFrame( const unsigned char* data, int step, int
picture->linesize[0] = step; picture->linesize[0] = step;
} }
picture->pts = frame_idx; bool ret;
bool ret = icv_av_write_frame_FFMPEG( oc, video_st, outbuf, outbuf_size, picture) >= 0; #if USE_AV_HW_CODECS
if (video_st->codec->hw_device_ctx) {
// copy data to HW frame
AVFrame* hw_frame = av_frame_alloc();
if (!hw_frame) {
CV_LOG_ERROR(NULL, "Error allocating AVFrame (av_frame_alloc)");
return false;
}
if (av_hwframe_get_buffer(video_st->codec->hw_frames_ctx, hw_frame, 0) < 0) {
CV_LOG_ERROR(NULL, "Error obtaining HW frame (av_hwframe_get_buffer)");
av_frame_free(&hw_frame);
return false;
}
if (av_hwframe_transfer_data(hw_frame, picture, 0) < 0) {
CV_LOG_ERROR(NULL, "Error copying data from CPU to GPU (av_hwframe_transfer_data)");
av_frame_free(&hw_frame);
return false;
}
hw_frame->pts = frame_idx;
int ret_write = icv_av_write_frame_FFMPEG(oc, video_st, outbuf, outbuf_size, hw_frame, frame_idx);
ret = ret_write >= 0 ? true : false;
av_frame_free(&hw_frame);
} else
#endif
{
picture->pts = frame_idx;
int ret_write = icv_av_write_frame_FFMPEG(oc, video_st, outbuf, outbuf_size, picture, frame_idx);
ret = ret_write >= 0 ? true : false;
}
frame_idx++; frame_idx++;
return ret; return ret;
} }
double CvVideoWriter_FFMPEG::getProperty(int propId) const
{
CV_UNUSED(propId);
#if USE_AV_HW_CODECS
if (propId == VIDEOWRITER_PROP_HW_ACCELERATION)
{
return static_cast<double>(va_type);
}
else if (propId == VIDEOWRITER_PROP_HW_DEVICE)
{
return static_cast<double>(hw_device);
}
#endif
return 0;
}
/// close video output stream and free associated memory /// close video output stream and free associated memory
void CvVideoWriter_FFMPEG::close() void CvVideoWriter_FFMPEG::close()
{ {
@ -2045,7 +2246,7 @@ void CvVideoWriter_FFMPEG::close()
{ {
for(;;) for(;;)
{ {
int ret = icv_av_write_frame_FFMPEG( oc, video_st, outbuf, outbuf_size, NULL); int ret = icv_av_write_frame_FFMPEG( oc, video_st, outbuf, outbuf_size, NULL, frame_idx);
if( ret == OPENCV_NO_FRAMES_WRITTEN_CODE || ret < 0 ) if( ret == OPENCV_NO_FRAMES_WRITTEN_CODE || ret < 0 )
break; break;
} }
@ -2135,15 +2336,48 @@ static inline void cv_ff_codec_tag_dump(const AVCodecTag *const *tags)
/// Create a video writer object that uses FFMPEG /// Create a video writer object that uses FFMPEG
bool CvVideoWriter_FFMPEG::open( const char * filename, int fourcc, bool CvVideoWriter_FFMPEG::open( const char * filename, int fourcc,
double fps, int width, int height, bool is_color ) double fps, int width, int height, const VideoWriterParameters& params)
{ {
InternalFFMpegRegister::init(); InternalFFMpegRegister::init();
CV_CODEC_ID codec_id = CV_CODEC(CODEC_ID_NONE); CV_CODEC_ID codec_id = CV_CODEC(CODEC_ID_NONE);
int err, codec_pix_fmt; AVPixelFormat codec_pix_fmt;
double bitrate_scale = 1; double bitrate_scale = 1;
close(); close();
const bool is_color = params.get(VIDEOWRITER_PROP_IS_COLOR, true);
if (params.has(VIDEOWRITER_PROP_HW_ACCELERATION))
{
va_type = params.get<VideoAccelerationType>(VIDEOWRITER_PROP_HW_ACCELERATION, VIDEO_ACCELERATION_NONE);
#if !USE_AV_HW_CODECS
if (va_type != VIDEO_ACCELERATION_NONE && va_type != VIDEO_ACCELERATION_ANY)
{
CV_LOG_ERROR(NULL, "VIDEOIO/FFMPEG: FFmpeg backend is build without acceleration support. Can't handle VIDEOWRITER_PROP_HW_ACCELERATION parameter. Bailout");
return false;
}
#endif
}
if (params.has(VIDEOWRITER_PROP_HW_DEVICE))
{
hw_device = params.get<int>(VIDEOWRITER_PROP_HW_DEVICE, -1);
if (va_type == VIDEO_ACCELERATION_NONE && hw_device != -1)
{
CV_LOG_ERROR(NULL, "VIDEOIO/FFMPEG: Invalid usage of VIDEOWRITER_PROP_HW_DEVICE without requested H/W acceleration. Bailout");
return false;
}
if (va_type == VIDEO_ACCELERATION_ANY && hw_device != -1)
{
CV_LOG_ERROR(NULL, "VIDEOIO/FFMPEG: Invalid usage of VIDEOWRITER_PROP_HW_DEVICE with 'ANY' H/W acceleration. Bailout");
return false;
}
}
if (params.warnUnusedParameters())
{
CV_LOG_ERROR(NULL, "VIDEOIO/FFMPEG: unsupported parameters in VideoWriter, see logger INFO channel for details");
return false;
}
// check arguments
if( !filename )
return false;
@@ -2196,7 +2430,18 @@ bool CvVideoWriter_FFMPEG::open( const char * filename, int fourcc,
#endif
codec_bmp_tags, // fallback for avformat < 54.1
NULL };
if (codec_id == CV_CODEC(CODEC_ID_NONE)) {
codec_id = av_codec_get_id(fallback_tags, fourcc);
}
if (codec_id == CV_CODEC(CODEC_ID_NONE)) {
char *p = (char *) &fourcc;
char name[] = {(char)tolower(p[0]), (char)tolower(p[1]), (char)tolower(p[2]), (char)tolower(p[3]), 0};
const AVCodecDescriptor *desc = avcodec_descriptor_get_by_name(name);
if (desc)
codec_id = desc->id;
}
if (codec_id == CV_CODEC(CODEC_ID_NONE))
{
fflush(stdout);
fprintf(stderr, "OpenCV: FFMPEG: tag 0x%08x/'%c%c%c%c' is not found (format '%s / %s')'\n",
@@ -2344,45 +2589,133 @@ bool CvVideoWriter_FFMPEG::open( const char * filename, int fourcc,
double bitrate = std::min(bitrate_scale*fps*width*height, (double)INT_MAX/2);
if (codec_id == AV_CODEC_ID_NONE) {
codec_id = av_guess_codec(oc->oformat, NULL, oc->filename, NULL, AVMEDIA_TYPE_VIDEO);
}
// Add video stream to output file
video_st = avformat_new_stream(oc, 0);
if (!video_st) {
CV_WARN("Could not allocate stream");
return false;
}
AVDictionary *dict = NULL;
#if !defined(NO_GETENV) && (LIBAVUTIL_VERSION_MAJOR >= 53)
char* options = getenv("OPENCV_FFMPEG_WRITER_OPTIONS");
if (options) {
av_dict_parse_string(&dict, options, ";", "|", 0);
}
#endif
AVCodecContext *c = video_st->codec;
// find and open encoder, try HW acceleration types specified in 'hw_acceleration' list (in order)
int err = -1;
AVCodec* codec = NULL;
#if USE_AV_HW_CODECS
AVBufferRef* hw_device_ctx = NULL;
HWAccelIterator accel_iter(va_type, true/*isEncoder*/, dict);
while (accel_iter.good())
{
#else
do {
#endif
#if USE_AV_HW_CODECS
accel_iter.parse_next();
AVHWDeviceType hw_type = accel_iter.hw_type();
codec = NULL;
AVPixelFormat hw_format = AV_PIX_FMT_NONE;
if (hw_device_ctx)
av_buffer_unref(&hw_device_ctx);
if (hw_type != AV_HWDEVICE_TYPE_NONE)
{
codec = hw_find_codec(codec_id, hw_type, av_codec_is_encoder, accel_iter.disabled_codecs().c_str(), &hw_format);
if (!codec)
continue;
hw_device_ctx = hw_create_device(hw_type, hw_device, accel_iter.device_subname());
if (!hw_device_ctx)
continue;
}
else if (hw_type == AV_HWDEVICE_TYPE_NONE)
#endif
{
codec = avcodec_find_encoder(codec_id);
if (!codec) {
CV_LOG_ERROR(NULL, "Could not find encoder for codec_id=" << (int)codec_id << ", error: "
<< icvFFMPEGErrStr(AVERROR_ENCODER_NOT_FOUND));
}
}
if (!codec)
continue;
#if USE_AV_HW_CODECS
AVPixelFormat format = (hw_format != AV_PIX_FMT_NONE) ? hw_format : codec_pix_fmt;
#else
AVPixelFormat format = codec_pix_fmt;
#endif
if (!icv_configure_video_stream_FFMPEG(oc, video_st, codec,
width, height, (int) (bitrate + 0.5),
fps, format)) {
continue;
}
#if 0
#if FF_API_DUMP_FORMAT
dump_format(oc, 0, filename, 1);
#else
av_dump_format(oc, 0, filename, 1);
#endif
#endif
c->codec_tag = fourcc;
#if USE_AV_HW_CODECS
if (hw_device_ctx) {
c->hw_device_ctx = av_buffer_ref(hw_device_ctx);
if (hw_format != AV_PIX_FMT_NONE) {
c->hw_frames_ctx = hw_create_frames(NULL, hw_device_ctx, width, height, hw_format);
if (!c->hw_frames_ctx)
continue;
}
}
#endif
int64_t lbit_rate = (int64_t) c->bit_rate;
lbit_rate += (int64_t)(bitrate / 2);
lbit_rate = std::min(lbit_rate, (int64_t) INT_MAX);
c->bit_rate_tolerance = (int) lbit_rate;
c->bit_rate = (int) lbit_rate;
/* open the codec */
err = avcodec_open2(c, codec, NULL);
if (err >= 0) {
#if USE_AV_HW_CODECS
va_type = hw_type_to_va_type(hw_type);
if (hw_type != AV_HWDEVICE_TYPE_NONE && hw_device < 0)
hw_device = 0;
#endif
break;
} else {
CV_LOG_ERROR(NULL, "Could not open codec " << codec->name << ", error: " << icvFFMPEGErrStr(err));
}
#if USE_AV_HW_CODECS
} // while (accel_iter.good())
#else
} while (0);
#endif
#if USE_AV_HW_CODECS
if (hw_device_ctx)
av_buffer_unref(&hw_device_ctx);
#endif
if (dict != NULL)
av_dict_free(&dict);
if (err < 0) {
CV_LOG_ERROR(NULL, "VIDEOIO/FFMPEG: Failed to initialize VideoWriter");
return false;
}
@@ -2400,10 +2733,16 @@ bool CvVideoWriter_FFMPEG::open( const char * filename, int fourcc,
}
bool need_color_convert;
AVPixelFormat sw_pix_fmt = c->pix_fmt;
#if USE_AV_HW_CODECS
if (c->hw_frames_ctx)
sw_pix_fmt = ((AVHWFramesContext*)c->hw_frames_ctx->data)->sw_format;
#endif
need_color_convert = (sw_pix_fmt != input_pix_fmt);
/* allocate the encoded raw picture */
picture = icv_alloc_picture_FFMPEG(sw_pix_fmt, c->width, c->height, need_color_convert);
if (!picture) {
return false;
}
@@ -2493,20 +2832,28 @@ int cvRetrieveFrame_FFMPEG(CvCapture_FFMPEG* capture, unsigned char** data, int*
return capture->retrieveFrame(0, data, step, width, height, cn);
}
static CvVideoWriter_FFMPEG* cvCreateVideoWriterWithParams_FFMPEG( const char* filename, int fourcc, double fps,
                                                                   int width, int height, const VideoWriterParameters& params )
{
CvVideoWriter_FFMPEG* writer = (CvVideoWriter_FFMPEG*)malloc(sizeof(*writer));
if (!writer)
return 0;
writer->init();
if( writer->open( filename, fourcc, fps, width, height, params ))
return writer;
writer->close();
free(writer);
return 0;
}
CvVideoWriter_FFMPEG* cvCreateVideoWriter_FFMPEG( const char* filename, int fourcc, double fps,
int width, int height, int isColor )
{
VideoWriterParameters params;
params.add(VIDEOWRITER_PROP_IS_COLOR, isColor);
return cvCreateVideoWriterWithParams_FFMPEG(filename, fourcc, fps, width, height, params);
}
void cvReleaseVideoWriter_FFMPEG( CvVideoWriter_FFMPEG** writer )
{
if( writer && *writer )

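The FFmpeg writer above negotiates the encoder and acceleration context from the VideoWriterParameters passed to open(). Below is a minimal usage sketch of the public API built on top of it (not part of the patch; the file name and fourcc are placeholders, and it assumes an FFmpeg build with at least one H/W encoder plus the VideoWriter overload that accepts an apiPreference and a parameter list):

// Usage sketch only (not part of the patch): request any available H/W encoder via
// the FFmpeg backend; VIDEO_ACCELERATION_ANY falls back to software automatically.
#include <opencv2/core.hpp>
#include <opencv2/videoio.hpp>
#include <iostream>

int main()
{
    const cv::Size frameSize(640, 480);
    cv::VideoWriter writer("output.mp4", cv::CAP_FFMPEG,
                           cv::VideoWriter::fourcc('H', '2', '6', '4'), 30.0, frameSize,
                           {
                               cv::VIDEOWRITER_PROP_HW_ACCELERATION, static_cast<int>(cv::VIDEO_ACCELERATION_ANY),
                               cv::VIDEOWRITER_PROP_HW_DEVICE, -1   // -1 = let the backend pick a device
                           });
    if (!writer.isOpened())
    {
        std::cerr << "VideoWriter could not be opened" << std::endl;
        return 1;
    }
    // Reports the VideoAccelerationType actually selected (NONE on software fallback).
    std::cout << "VIDEOWRITER_PROP_HW_ACCELERATION = "
              << writer.get(cv::VIDEOWRITER_PROP_HW_ACCELERATION) << std::endl;
    writer.write(cv::Mat::zeros(frameSize, CV_8UC3));
    return 0;
}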
@@ -281,6 +281,22 @@ bool is_gst_element_exists(const std::string& name)
return (bool)testfac;
}
static void find_hw_element(const GValue *item, gpointer va_type)
{
GstElement *element = GST_ELEMENT(g_value_get_object(item));
const gchar *name = g_type_name(G_OBJECT_TYPE(element));
if (name) {
std::string name_lower = toLowerCase(name);
if (name_lower.find("vaapi") != std::string::npos) {
*(int*)va_type = VIDEO_ACCELERATION_VAAPI;
} else if (name_lower.find("mfx") != std::string::npos || name_lower.find("msdk") != std::string::npos) {
*(int*)va_type = VIDEO_ACCELERATION_MFX;
} else if (name_lower.find("d3d11") != std::string::npos) {
*(int*)va_type = VIDEO_ACCELERATION_D3D11;
}
}
}
//==================================================================================================
class GStreamerCapture CV_FINAL : public IVideoCapture
@@ -300,6 +316,8 @@ private:
bool isPosFramesEmulated;
gint64 emulatedFrameNumber;
VideoAccelerationType va_type;
int hw_device;
public:
GStreamerCapture();
virtual ~GStreamerCapture() CV_OVERRIDE;
@@ -309,8 +327,8 @@ public:
virtual bool setProperty(int propId, double value) CV_OVERRIDE;
virtual bool isOpened() const CV_OVERRIDE { return (bool)pipeline; }
virtual int getCaptureDomain() CV_OVERRIDE { return cv::CAP_GSTREAMER; }
bool open(int id, const cv::VideoCaptureParameters& params);
bool open(const String &filename_, const cv::VideoCaptureParameters& params);
static void newPad(GstElement * /*elem*/, GstPad *pad, gpointer data);
protected:
@@ -327,6 +345,8 @@ GStreamerCapture::GStreamerCapture() :
isPosFramesSupported(false),
isPosFramesEmulated(false),
emulatedFrameNumber(-1)
, va_type(VIDEO_ACCELERATION_NONE)
, hw_device(-1)
{
}
@@ -754,7 +774,7 @@ void GStreamerCapture::newPad(GstElement *, GstPad *pad, gpointer data)
* is really slow if we need to restart the pipeline over and over again.
*
*/
bool GStreamerCapture::open(int id, const cv::VideoCaptureParameters& params)
{
gst_initializer::init();
@@ -764,13 +784,37 @@ bool GStreamerCapture::open(int id)
desc << "v4l2src device=/dev/video" << id
     << " ! " << COLOR_ELEM
     << " ! appsink drop=true";
return open(desc.str(), params);
}
bool GStreamerCapture::open(const String &filename_, const cv::VideoCaptureParameters& params)
{
gst_initializer::init();
if (params.has(CAP_PROP_HW_ACCELERATION))
{
va_type = params.get<VideoAccelerationType>(CAP_PROP_HW_ACCELERATION);
}
if (params.has(CAP_PROP_HW_DEVICE))
{
hw_device = params.get<int>(CAP_PROP_HW_DEVICE);
if (va_type == VIDEO_ACCELERATION_NONE && hw_device != -1)
{
CV_LOG_ERROR(NULL, "VIDEOIO/GStreamer: Invalid usage of CAP_PROP_HW_DEVICE without requested H/W acceleration. Bailout");
return false;
}
if (va_type == VIDEO_ACCELERATION_ANY && hw_device != -1)
{
CV_LOG_ERROR(NULL, "VIDEOIO/GStreamer: Invalid usage of CAP_PROP_HW_DEVICE with 'ANY' H/W acceleration. Bailout");
return false;
}
if (hw_device != -1)
{
CV_LOG_ERROR(NULL, "VIDEOIO/GStreamer: CAP_PROP_HW_DEVICE is not supported. Specify -1 (auto) value. Bailout");
return false;
}
}
const gchar* filename = filename_.c_str();
bool file = false;
@@ -1046,6 +1090,35 @@ bool GStreamerCapture::open(const String &filename_)
GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline.get()), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline");
}
std::vector<int> unused_params = params.getUnused();
for (int key : unused_params) {
if (!setProperty(key, params.get<double>(key))) {
CV_LOG_ERROR(NULL, "VIDEOIO/GStreamer: can't set property " << key);
return false;
}
}
if (pipeline)
{
VideoAccelerationType actual_va_type = VIDEO_ACCELERATION_NONE;
GstIterator *iter = gst_bin_iterate_recurse(GST_BIN (pipeline.get()));
gst_iterator_foreach(iter, find_hw_element, (gpointer)&actual_va_type);
gst_iterator_free(iter);
if (va_type != VIDEO_ACCELERATION_NONE && va_type != VIDEO_ACCELERATION_ANY)
{
if (va_type != actual_va_type)
{
CV_LOG_ERROR(NULL, "VIDEOIO/GStreamer: Can't select requested video acceleration through CAP_PROP_HW_ACCELERATION: "
<< va_type << " (actual is " << actual_va_type << "). Bailout");
return false;
}
}
else
{
va_type = actual_va_type;
}
}
return true;
}
@@ -1129,6 +1202,10 @@ double GStreamerCapture::getProperty(int propId) const
}
}
break;
case CAP_PROP_HW_ACCELERATION:
return static_cast<double>(va_type);
case CAP_PROP_HW_DEVICE:
return static_cast<double>(hw_device);
case CV_CAP_GSTREAMER_QUEUE_LENGTH:
if(!sink)
{
@@ -1276,6 +1353,10 @@ bool GStreamerCapture::setProperty(int propId, double value)
case CV_CAP_PROP_GAIN:
case CV_CAP_PROP_CONVERT_RGB:
break;
case cv::CAP_PROP_HW_ACCELERATION:
return false; // open-only
case cv::CAP_PROP_HW_DEVICE:
return false; // open-only
case CV_CAP_GSTREAMER_QUEUE_LENGTH:
{
if(!sink)
@@ -1297,18 +1378,18 @@ bool GStreamerCapture::setProperty(int propId, double value)
}
Ptr<IVideoCapture> createGStreamerCapture_file(const String& filename, const cv::VideoCaptureParameters& params)
{
Ptr<GStreamerCapture> cap = makePtr<GStreamerCapture>();
if (cap && cap->open(filename, params))
return cap;
return Ptr<IVideoCapture>();
}
Ptr<IVideoCapture> createGStreamerCapture_cam(int index, const cv::VideoCaptureParameters& params)
{
Ptr<GStreamerCapture> cap = makePtr<GStreamerCapture>();
if (cap && cap->open(index, params))
return cap;
return Ptr<IVideoCapture>();
}
@@ -1325,6 +1406,7 @@ public:
CvVideoWriter_GStreamer()
    : ipl_depth(CV_8U)
    , input_pix_fmt(0), num_frames(0), framerate(0)
, va_type(VIDEO_ACCELERATION_NONE), hw_device(0)
{
}
virtual ~CvVideoWriter_GStreamer() CV_OVERRIDE
@@ -1346,11 +1428,14 @@ public:
int getCaptureDomain() const CV_OVERRIDE { return cv::CAP_GSTREAMER; }
bool open(const std::string &filename, int fourcc,
          double fps, const Size &frameSize, const VideoWriterParameters& params );
void close();
bool writeFrame( const IplImage* image ) CV_OVERRIDE;
int getIplDepth() const { return ipl_depth; }
virtual double getProperty(int) const CV_OVERRIDE;
protected:
const char* filenameToMimetype(const char* filename);
GSafePtr<GstElement> pipeline;
@@ -1360,6 +1445,9 @@ protected:
int num_frames;
double framerate;
VideoAccelerationType va_type;
int hw_device;
void close_();
};
@@ -1423,6 +1511,8 @@ void CvVideoWriter_GStreamer::close()
close_();
source.release();
pipeline.release();
va_type = VIDEO_ACCELERATION_NONE;
hw_device = -1;
}
/*!
@@ -1480,8 +1570,7 @@ const char* CvVideoWriter_GStreamer::filenameToMimetype(const char *filename)
* \param fourcc desired codec fourcc
* \param fps desired framerate
* \param frameSize the size of the expected frames
* \param params other parameters
* \return success
*
* We support 2 modes of operation. Either the user enters a filename and a fourcc
@@ -1495,13 +1584,46 @@ const char* CvVideoWriter_GStreamer::filenameToMimetype(const char *filename)
*/
bool CvVideoWriter_GStreamer::open( const std::string &filename, int fourcc,
                                    double fps, const cv::Size &frameSize,
                                    const VideoWriterParameters& params )
{
// check arguments
CV_Assert(!filename.empty());
CV_Assert(fps > 0);
CV_Assert(frameSize.width > 0 && frameSize.height > 0);
const bool is_color = params.get(VIDEOWRITER_PROP_IS_COLOR, true);
const int depth = params.get(VIDEOWRITER_PROP_DEPTH, CV_8U);
if (params.has(VIDEOWRITER_PROP_HW_ACCELERATION))
{
va_type = params.get<VideoAccelerationType>(VIDEOWRITER_PROP_HW_ACCELERATION);
}
if (params.has(VIDEOWRITER_PROP_HW_DEVICE))
{
hw_device = params.get<int>(VIDEOWRITER_PROP_HW_DEVICE);
if (va_type == VIDEO_ACCELERATION_NONE && hw_device != -1)
{
CV_LOG_ERROR(NULL, "VIDEOIO/GStreamer: Invalid usage of VIDEOWRITER_PROP_HW_DEVICE without requested H/W acceleration. Bailout");
return false;
}
if (va_type == VIDEO_ACCELERATION_ANY && hw_device != -1)
{
CV_LOG_ERROR(NULL, "VIDEOIO/GStreamer: Invalid usage of VIDEOWRITER_PROP_HW_DEVICE with 'ANY' H/W acceleration. Bailout");
return false;
}
if (hw_device != -1)
{
CV_LOG_ERROR(NULL, "VIDEOIO/GStreamer: VIDEOWRITER_PROP_HW_DEVICE is not supported. Specify -1 (auto) value. Bailout");
return false;
}
}
if (params.warnUnusedParameters())
{
CV_LOG_ERROR(NULL, "VIDEOIO/GStreamer: unsupported parameters in VideoWriter, see logger INFO channel for details");
return false;
}
// init gstreamer
gst_initializer::init();
@@ -1732,6 +1854,28 @@ bool CvVideoWriter_GStreamer::open( const std::string &filename, int fourcc,
handleMessage(pipeline);
if (pipeline)
{
VideoAccelerationType actual_va_type = VIDEO_ACCELERATION_NONE;
GstIterator *iter = gst_bin_iterate_recurse(GST_BIN (pipeline.get()));
gst_iterator_foreach(iter, find_hw_element, (gpointer)&actual_va_type);
gst_iterator_free(iter);
if (va_type != VIDEO_ACCELERATION_NONE && va_type != VIDEO_ACCELERATION_ANY)
{
if (va_type != actual_va_type)
{
CV_LOG_ERROR(NULL, "VIDEOIO/GStreamer: Can't select requested VideoWriter acceleration through VIDEOWRITER_PROP_HW_ACCELERATION: "
<< va_type << " (actual is " << actual_va_type << "). Bailout");
close();
return false;
}
}
else
{
va_type = actual_va_type;
}
}
return true;
}
@@ -1812,15 +1956,27 @@ bool CvVideoWriter_GStreamer::writeFrame( const IplImage * image )
return true;
}
double CvVideoWriter_GStreamer::getProperty(int propId) const
{
if (propId == VIDEOWRITER_PROP_HW_ACCELERATION)
{
return static_cast<double>(va_type);
}
else if (propId == VIDEOWRITER_PROP_HW_DEVICE)
{
return static_cast<double>(hw_device);
}
return 0;
}
Ptr<IVideoWriter> create_GStreamer_writer(const std::string& filename, int fourcc, double fps,
                                          const cv::Size& frameSize, const VideoWriterParameters& params)
{
CvVideoWriter_GStreamer* wrt = new CvVideoWriter_GStreamer;
try
{
if (wrt->open(filename, fourcc, fps, frameSize, params))
return makePtr<LegacyWriter>(wrt);
delete wrt;
}
@@ -1923,7 +2079,7 @@ void handleMessage(GstElement * pipeline)
#if defined(BUILD_PLUGIN)
#define CAPTURE_ABI_VERSION 1
#define CAPTURE_API_VERSION 1
#include "plugin_capture_api.hpp"
#define WRITER_ABI_VERSION 1
#define WRITER_API_VERSION 1
@@ -1932,7 +2088,11 @@ void handleMessage(GstElement * pipeline)
namespace cv {
static
CvResult CV_API_CALL cv_capture_open_with_params(
const char* filename, int camera_index,
int* params, unsigned n_params,
CV_OUT CvPluginCapture* handle
)
{
if (!handle)
return CV_ERROR_FAIL;
@@ -1942,12 +2102,13 @@ CvResult CV_API_CALL cv_capture_open(const char* filename, int camera_index, CV_
GStreamerCapture *cap = 0;
try
{
cv::VideoCaptureParameters parameters(params, n_params);
cap = new GStreamerCapture();
bool res;
if (filename)
res = cap->open(std::string(filename), parameters);
else
res = cap->open(camera_index, parameters);
if (res)
{
*handle = (CvPluginCapture)cap;
@@ -1967,6 +2128,12 @@ CvResult CV_API_CALL cv_capture_open(const char* filename, int camera_index, CV_
return CV_ERROR_FAIL;
}
static
CvResult CV_API_CALL cv_capture_open(const char* filename, int camera_index, CV_OUT CvPluginCapture* handle)
{
return cv_capture_open_with_params(filename, camera_index, NULL, 0, handle);
}
static
CvResult CV_API_CALL cv_capture_release(CvPluginCapture handle)
{
@@ -2083,31 +2250,9 @@ CvResult CV_API_CALL cv_writer_open_with_params(
try
{
CvSize sz = { width, height };
VideoWriterParameters parameters(params, n_params);
wrt = new CvVideoWriter_GStreamer();
if (wrt && wrt->open(filename, fourcc, fps, sz, parameters))
{
*handle = (CvPluginWriter)wrt;
return CV_ERROR_OK;
@@ -2145,11 +2290,25 @@ CvResult CV_API_CALL cv_writer_release(CvPluginWriter handle)
}
static
CvResult CV_API_CALL cv_writer_get_prop(CvPluginWriter handle, int prop, CV_OUT double* val)
{
if (!handle)
return CV_ERROR_FAIL;
if (!val)
return CV_ERROR_FAIL;
try
{
CvVideoWriter_GStreamer* instance = (CvVideoWriter_GStreamer*)handle;
*val = instance->getProperty(prop);
return CV_ERROR_OK;
}
catch (...)
{
return CV_ERROR_FAIL;
}
}
static
CvResult CV_API_CALL cv_writer_set_prop(CvPluginWriter /*handle*/, int /*prop*/, double /*val*/)
{
@@ -2197,6 +2356,9 @@ static const OpenCV_VideoIO_Capture_Plugin_API capture_api =
/* 5*/cv_capture_set_prop,
/* 6*/cv_capture_grab,
/* 7*/cv_capture_retrieve,
},
{
/* 8*/cv_capture_open_with_params,
} }
};

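The GStreamer changes above parse the same acceleration properties on open() and then walk the constructed pipeline to report which acceleration is actually in use. A minimal capture sketch against this API (not part of the patch; the file name is a placeholder and an installed VA-API GStreamer plugin is assumed):

// Usage sketch only (not part of the patch): require VA-API decoding through the
// GStreamer backend. A specific acceleration type does not fall back to software,
// so open() fails if no matching H/W element ends up in the pipeline.
#include <opencv2/core.hpp>
#include <opencv2/videoio.hpp>
#include <iostream>

int main()
{
    cv::VideoCapture cap("video.mp4", cv::CAP_GSTREAMER,
                         {
                             cv::CAP_PROP_HW_ACCELERATION, static_cast<int>(cv::VIDEO_ACCELERATION_VAAPI),
                             cv::CAP_PROP_HW_DEVICE, -1   // GStreamer accepts only -1 (auto) here
                         });
    if (!cap.isOpened())
    {
        std::cerr << "No VA-API decoding pipeline could be built" << std::endl;
        return 1;
    }
    std::cout << "CAP_PROP_HW_ACCELERATION = " << cap.get(cv::CAP_PROP_HW_ACCELERATION) << std::endl;
    cv::Mat frame;
    while (cap.read(frame))
    {
        // process the decoded frame here
    }
    return 0;
}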
@@ -29,6 +29,7 @@ struct CvVideoWriter
virtual ~CvVideoWriter() {}
virtual bool writeFrame(const IplImage*) { return false; }
virtual int getCaptureDomain() const { return cv::CAP_ANY; } // Return the type of the capture object: CAP_FFMPEG, etc...
virtual double getProperty(int) const { return 0; }
};
//===================================================
@@ -185,7 +186,7 @@ public:
{
found = true;
CV_LOG_INFO(NULL, "VIDEOIO: unused parameter: [" << param.key << "]=" <<
    cv::format("%lld / 0x%016llx", (long long)param.value, (long long)param.value));
}
}
return found;
@@ -312,8 +313,12 @@ public:
{
cvReleaseVideoWriter(&writer);
}
double getProperty(int propId) const CV_OVERRIDE
{
if (writer)
{
return writer->getProperty(propId);
}
return 0.;
}
bool setProperty(int, double) CV_OVERRIDE
@@ -337,13 +342,13 @@ public:
//==================================================================================================
Ptr<IVideoCapture> cvCreateFileCapture_FFMPEG_proxy(const std::string &filename, const VideoCaptureParameters& params);
Ptr<IVideoWriter> cvCreateVideoWriter_FFMPEG_proxy(const std::string& filename, int fourcc,
                                                   double fps, const Size& frameSize,
                                                   const VideoWriterParameters& params);
Ptr<IVideoCapture> createGStreamerCapture_file(const std::string& filename, const cv::VideoCaptureParameters& params);
Ptr<IVideoCapture> createGStreamerCapture_cam(int index, const cv::VideoCaptureParameters& params);
Ptr<IVideoWriter> create_GStreamer_writer(const std::string& filename, int fourcc,
                                          double fps, const Size& frameSize,
                                          const VideoWriterParameters& params);
@@ -361,8 +366,8 @@ Ptr<IVideoWriter> create_AVFoundation_writer(const std::string& filename, int fo
Ptr<IVideoCapture> create_WRT_capture(int device);
Ptr<IVideoCapture> cvCreateCapture_MSMF(int index, const VideoCaptureParameters& params);
Ptr<IVideoCapture> cvCreateCapture_MSMF(const std::string& filename, const VideoCaptureParameters& params);
Ptr<IVideoWriter> cvCreateVideoWriter_MSMF(const std::string& filename, int fourcc,
                                           double fps, const Size& frameSize,
                                           const VideoWriterParameters& params);
@@ -411,6 +416,21 @@ bool VideoCapture_V4L_waitAny(
    CV_OUT std::vector<int>& ready,
    int64 timeoutNs);
static inline
std::ostream& operator<<(std::ostream& out, const VideoAccelerationType& va_type)
{
switch (va_type)
{
case VIDEO_ACCELERATION_NONE: out << "NONE"; return out;
case VIDEO_ACCELERATION_ANY: out << "ANY"; return out;
case VIDEO_ACCELERATION_D3D11: out << "D3D11"; return out;
case VIDEO_ACCELERATION_VAAPI: out << "VAAPI"; return out;
case VIDEO_ACCELERATION_MFX: out << "MFX"; return out;
}
out << cv::format("UNKNOWN(0x%ux)", static_cast<unsigned int>(va_type));
return out;
}
} // cv::
#endif // CAP_INTERFACE_HPP

@@ -24,6 +24,7 @@
#include <mfobjects.h>
#include <tchar.h>
#include <strsafe.h>
#include <codecvt>
#include <mfreadwrite.h>
#ifdef HAVE_MSMF_DXVA
#include <d3d11.h>
@@ -45,6 +46,7 @@
#pragma comment(lib, "mfuuid")
#pragma comment(lib, "Strmiids")
#pragma comment(lib, "Mfreadwrite")
#pragma comment(lib, "dxgi")
#ifdef HAVE_MSMF_DXVA
#pragma comment(lib, "d3d11")
// MFCreateDXGIDeviceManager() is available since Win8 only.
@@ -82,6 +84,8 @@ struct IMFAttributes;
#define CV_CAP_MODE_GRAY CV_FOURCC_MACRO('G','R','E','Y')
#define CV_CAP_MODE_YUYV CV_FOURCC_MACRO('Y', 'U', 'Y', 'V')
using namespace cv;
namespace
{
@@ -576,8 +580,9 @@ public:
} MSMFCapture_Mode;
CvCapture_MSMF();
virtual ~CvCapture_MSMF();
bool configureHW(const cv::VideoCaptureParameters& params);
virtual bool open(int, const cv::VideoCaptureParameters* params);
virtual bool open(const cv::String&, const cv::VideoCaptureParameters* params);
virtual void close();
virtual double getProperty(int) const CV_OVERRIDE;
virtual bool setProperty(int, double) CV_OVERRIDE;
@@ -597,10 +602,14 @@ protected:
_ComPtr<IMFAttributes> getDefaultSourceConfig(UINT32 num = 10);
bool initStream(DWORD streamID, const MediaType& mt);
bool openFinalize_(const VideoCaptureParameters* params);
Media_Foundation& MF;
cv::String filename;
int camid;
MSMFCapture_Mode captureMode;
VideoAccelerationType va_type;
int hwDeviceIndex;
#ifdef HAVE_MSMF_DXVA
_ComPtr<ID3D11Device> D3DDev;
_ComPtr<IMFDXGIDeviceManager> D3DMgr;
@@ -624,6 +633,8 @@ CvCapture_MSMF::CvCapture_MSMF():
filename(""),
camid(-1),
captureMode(MODE_SW),
va_type(VIDEO_ACCELERATION_NONE),
hwDeviceIndex(-1),
#ifdef HAVE_MSMF_DXVA
D3DDev(NULL),
D3DMgr(NULL),
@@ -635,7 +646,6 @@ CvCapture_MSMF::CvCapture_MSMF():
sampleTime(0),
isOpen(false)
{
}
CvCapture_MSMF::~CvCapture_MSMF()
@@ -732,10 +742,19 @@ bool CvCapture_MSMF::configureHW(bool enable)
close();
if (enable)
{
_ComPtr<IDXGIAdapter> pAdapter;
if (hwDeviceIndex >= 0) {
_ComPtr<IDXGIFactory2> pDXGIFactory;
if (FAILED(CreateDXGIFactory(__uuidof(IDXGIFactory2), (void**)& pDXGIFactory)) ||
FAILED(pDXGIFactory->EnumAdapters(hwDeviceIndex, &pAdapter))) {
return false;
}
}
D3D_FEATURE_LEVEL levels[] = { D3D_FEATURE_LEVEL_11_1, D3D_FEATURE_LEVEL_11_0,
                               D3D_FEATURE_LEVEL_10_1, D3D_FEATURE_LEVEL_10_0,
                               D3D_FEATURE_LEVEL_9_3, D3D_FEATURE_LEVEL_9_2, D3D_FEATURE_LEVEL_9_1 };
D3D_DRIVER_TYPE driverType = pAdapter ? D3D_DRIVER_TYPE_UNKNOWN : D3D_DRIVER_TYPE_HARDWARE;
if (SUCCEEDED(D3D11CreateDevice(pAdapter.Get(), driverType, NULL, D3D11_CREATE_DEVICE_BGRA_SUPPORT | D3D11_CREATE_DEVICE_VIDEO_SUPPORT,
                                levels, sizeof(levels) / sizeof(*levels), D3D11_SDK_VERSION, &D3DDev, NULL, NULL)))
{
// NOTE: Getting ready for multi-threaded operation
@@ -750,7 +769,23 @@ bool CvCapture_MSMF::configureHW(bool enable)
if (SUCCEEDED(D3DMgr->ResetDevice(D3DDev.Get(), mgrRToken)))
{
captureMode = MODE_HW;
if (hwDeviceIndex < 0)
hwDeviceIndex = 0;
// Log adapter description
_ComPtr<IDXGIDevice> dxgiDevice;
if (SUCCEEDED(D3DDev->QueryInterface(__uuidof(IDXGIDevice), reinterpret_cast<void**>(&dxgiDevice)))) {
_ComPtr<IDXGIAdapter> adapter;
if (SUCCEEDED(dxgiDevice->GetAdapter(&adapter))) {
DXGI_ADAPTER_DESC desc;
if (SUCCEEDED(adapter->GetDesc(&desc))) {
std::wstring name(desc.Description);
std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>> conv;
CV_LOG_INFO(NULL, "MSMF: Using D3D11 video acceleration on GPU device: " << conv.to_bytes(name));
}
}
}
// Reopen if needed
return reopen ? (prevcam >= 0 ? open(prevcam, NULL) : open(prevfile.c_str(), NULL)) : true;
}
D3DMgr.Release();
}
@@ -766,13 +801,26 @@ bool CvCapture_MSMF::configureHW(bool enable)
if (D3DDev)
D3DDev.Release();
captureMode = MODE_SW;
return reopen ? (prevcam >= 0 ? open(prevcam, NULL) : open(prevfile.c_str(), NULL)) : true;
}
#else
return !enable;
#endif
}
bool CvCapture_MSMF::configureHW(const VideoCaptureParameters& params)
{
va_type = params.get<VideoAccelerationType>(CAP_PROP_HW_ACCELERATION, VIDEO_ACCELERATION_ANY);
hwDeviceIndex = params.get<int>(CAP_PROP_HW_DEVICE, -1);
#ifndef HAVE_MSMF_DXVA
if (va_type != VIDEO_ACCELERATION_NONE && va_type != VIDEO_ACCELERATION_ANY)
{
CV_LOG_INFO(NULL, "VIDEOIO/MSMF: MSMF backend is build without DXVA acceleration support. Can't handle CAP_PROP_HW_ACCELERATION parameter: " << va_type);
}
#endif
return configureHW(va_type == VIDEO_ACCELERATION_D3D11 || va_type == VIDEO_ACCELERATION_ANY);
}
bool CvCapture_MSMF::configureOutput(MediaType newType, cv::uint32_t outFormat)
{
FormatStorage formats;
@@ -820,11 +868,17 @@ bool CvCapture_MSMF::configureOutput(MediaType newType, cv::uint32_t outFormat)
return initStream(dwStreamIndex, newFormat);
}
bool CvCapture_MSMF::open(int index, const cv::VideoCaptureParameters* params)
{
close();
if (index < 0)
return false;
if (params)
{
configureHW(*params);
}
DeviceList devices;
UINT32 count = devices.read();
if (count == 0 || static_cast<UINT32>(index) > count)
@@ -850,15 +904,27 @@ bool CvCapture_MSMF::open(int index)
{
frameStep = captureFormat.getFrameStep();
}
if (isOpen && !openFinalize_(params))
{
close();
return false;
}
return isOpen;
}
bool CvCapture_MSMF::open(const cv::String& _filename, const cv::VideoCaptureParameters* params)
{
close();
if (_filename.empty())
return false;
if (params)
{
configureHW(*params);
}
// Set source reader parameters
_ComPtr<IMFAttributes> attr = getDefaultSourceConfig();
cv::AutoBuffer<wchar_t> unicodeFileName(_filename.length() + 1);
@@ -884,9 +950,48 @@ bool CvCapture_MSMF::open(const cv::String& _filename)
}
}
if (isOpen && !openFinalize_(params))
{
close();
return false;
}
return isOpen;
}
bool CvCapture_MSMF::openFinalize_(const VideoCaptureParameters* params)
{
if (params)
{
std::vector<int> unused_params = params->getUnused();
for (int key : unused_params)
{
if (!setProperty(key, params->get<double>(key)))
{
CV_LOG_ERROR(NULL, "VIDEOIO/MSMF: can't set property " << key);
return false;
}
}
}
VideoAccelerationType actual_va_type = (captureMode == MODE_HW) ? VIDEO_ACCELERATION_D3D11 : VIDEO_ACCELERATION_NONE;
if (va_type != VIDEO_ACCELERATION_NONE && va_type != VIDEO_ACCELERATION_ANY)
{
if (va_type != actual_va_type)
{
CV_LOG_ERROR(NULL, "VIDEOIO/MSMF: Can't select requested video acceleration through CAP_PROP_HW_ACCELERATION: "
<< va_type << " (actual is " << actual_va_type << "). Bailout");
return false;
}
}
else
{
va_type = actual_va_type;
}
return true;
}
bool CvCapture_MSMF::grabFrame()
{
CV_TRACE_FUNCTION();
@@ -1151,7 +1256,11 @@ double CvCapture_MSMF::getProperty( int property_id ) const
switch (property_id)
{
case CV_CAP_PROP_MODE:
return captureMode;
case cv::CAP_PROP_HW_DEVICE:
return hwDeviceIndex;
case cv::CAP_PROP_HW_ACCELERATION:
return static_cast<double>(va_type);
case CV_CAP_PROP_CONVERT_RGB:
return convertFormat ? 1 : 0;
case CV_CAP_PROP_SAR_NUM:
@@ -1415,24 +1524,24 @@ bool CvCapture_MSMF::setProperty( int property_id, double value )
return false;
}
cv::Ptr<cv::IVideoCapture> cv::cvCreateCapture_MSMF( int index, const cv::VideoCaptureParameters& params)
{
cv::Ptr<CvCapture_MSMF> capture = cv::makePtr<CvCapture_MSMF>();
if (capture)
{
capture->open(index, &params);
if (capture->isOpened())
return capture;
}
return cv::Ptr<cv::IVideoCapture>();
}
cv::Ptr<cv::IVideoCapture> cv::cvCreateCapture_MSMF (const cv::String& filename, const cv::VideoCaptureParameters& params)
{
cv::Ptr<CvCapture_MSMF> capture = cv::makePtr<CvCapture_MSMF>();
if (capture)
{
capture->open(filename, &params);
if (capture->isOpened())
return capture;
}
@@ -1451,17 +1560,20 @@ public:
CvVideoWriter_MSMF();
virtual ~CvVideoWriter_MSMF();
virtual bool open(const cv::String& filename, int fourcc,
                  double fps, cv::Size frameSize, const cv::VideoWriterParameters& params);
virtual void close();
virtual void write(cv::InputArray);
virtual double getProperty(int) const override;
virtual bool setProperty(int, double) { return false; }
virtual bool isOpened() const { return initiated; }
int getCaptureDomain() const CV_OVERRIDE { return cv::CAP_MSMF; }
private:
Media_Foundation& MF;
VideoAccelerationType va_type;
int va_device;
UINT32 videoWidth;
UINT32 videoHeight;
double fps;
@@ -1483,6 +1595,8 @@ private:
CvVideoWriter_MSMF::CvVideoWriter_MSMF():
MF(Media_Foundation::getInstance()),
va_type(VIDEO_ACCELERATION_NONE),
va_device(-1),
videoWidth(0),
videoHeight(0),
fps(0),
@@ -1556,10 +1670,40 @@ const GUID CvVideoWriter_MSMF::FourCC2GUID(int fourcc)
}
bool CvVideoWriter_MSMF::open( const cv::String& filename, int fourcc,
                               double _fps, cv::Size _frameSize, const cv::VideoWriterParameters& params)
{
if (initiated)
close();
if (params.has(VIDEOWRITER_PROP_HW_ACCELERATION))
{
va_type = params.get<VideoAccelerationType>(VIDEOWRITER_PROP_HW_ACCELERATION);
if (va_type != VIDEO_ACCELERATION_NONE && va_type != VIDEO_ACCELERATION_ANY)
{
CV_LOG_ERROR(NULL, "VIDEOIO/MSMF: MSMF backend doesn't support writer acceleration support. Can't handle VIDEOWRITER_PROP_HW_ACCELERATION parameter. Bailout");
return false;
}
}
if (params.has(VIDEOWRITER_PROP_HW_DEVICE))
{
va_device = params.get<int>(VIDEOWRITER_PROP_HW_DEVICE);
if (va_type == VIDEO_ACCELERATION_NONE && va_device != -1)
{
CV_LOG_ERROR(NULL, "VIDEOIO/MSMF: Invalid usage of VIDEOWRITER_PROP_HW_DEVICE without requested H/W acceleration. Bailout");
return false;
}
if (va_type == VIDEO_ACCELERATION_ANY && va_device != -1)
{
CV_LOG_ERROR(NULL, "VIDEOIO/MSMF: Invalid usage of VIDEOWRITER_PROP_HW_DEVICE with 'ANY' H/W acceleration. Bailout");
return false;
}
if (va_device != -1)
{
CV_LOG_ERROR(NULL, "VIDEOIO/MSMF: VIDEOWRITER_PROP_HW_DEVICE is not supported. Specify -1 (auto) value. Bailout");
return false;
}
}
videoWidth = _frameSize.width;
videoHeight = _frameSize.height;
fps = _fps;
@@ -1608,6 +1752,23 @@ bool CvVideoWriter_MSMF::open( const cv::String& filename, int fourcc,
initiated = true;
rtStart = 0;
MFFrameRateToAverageTimePerFrame((UINT32)(fps * 1000), 1000, &rtDuration);
VideoAccelerationType actual_va_type = VIDEO_ACCELERATION_NONE;
if (va_type != VIDEO_ACCELERATION_NONE && va_type != VIDEO_ACCELERATION_ANY)
{
if (va_type != actual_va_type)
{
CV_LOG_ERROR(NULL, "VIDEOIO/MSMF: Can't select requested video acceleration through VIDEOWRITER_PROP_HW_ACCELERATION: "
<< va_type << " (actual is " << actual_va_type << "). Bailout");
close();
return false;
}
}
else
{
va_type = actual_va_type;
}
return true;
}
}
@@ -1663,6 +1824,20 @@ void CvVideoWriter_MSMF::write(cv::InputArray img)
}
}
double CvVideoWriter_MSMF::getProperty(int propId) const
{
if (propId == VIDEOWRITER_PROP_HW_ACCELERATION)
{
return static_cast<double>(va_type);
}
else if (propId == VIDEOWRITER_PROP_HW_DEVICE)
{
return static_cast<double>(va_device);
}
return 0;
}
cv::Ptr<cv::IVideoWriter> cv::cvCreateVideoWriter_MSMF( const std::string& filename, int fourcc,
                                                        double fps, const cv::Size& frameSize,
                                                        const VideoWriterParameters& params)
@@ -1670,8 +1845,7 @@ cv::Ptr<cv::IVideoWriter> cv::cvCreateVideoWriter_MSMF( const std::string& filen
cv::Ptr<CvVideoWriter_MSMF> writer = cv::makePtr<CvVideoWriter_MSMF>();
if (writer)
{
writer->open(filename, fourcc, fps, frameSize, params);
if (writer->isOpened())
return writer;
}
@@ -1680,9 +1854,20 @@ cv::Ptr<cv::IVideoWriter> cv::cvCreateVideoWriter_MSMF( const std::string& filen
#if defined(BUILD_PLUGIN)
#define NEW_PLUGIN
#ifndef NEW_PLUGIN
#define ABI_VERSION 0
#define API_VERSION 0
#include "plugin_api.hpp"
#else
#define CAPTURE_ABI_VERSION 1
#define CAPTURE_API_VERSION 1
#include "plugin_capture_api.hpp"
#define WRITER_ABI_VERSION 1
#define WRITER_API_VERSION 1
#include "plugin_writer_api.hpp"
#endif
namespace cv {
@@ -1690,7 +1875,11 @@ typedef CvCapture_MSMF CaptureT;
typedef CvVideoWriter_MSMF WriterT;
static
CvResult CV_API_CALL cv_capture_open_with_params(
const char* filename, int camera_index,
int* params, unsigned n_params,
CV_OUT CvPluginCapture* handle
)
{
if (!handle)
return CV_ERROR_FAIL;
@@ -1700,12 +1889,13 @@ CvResult CV_API_CALL cv_capture_open(const char* filename, int camera_index, CV_
CaptureT* cap = 0;
try
{
cv::VideoCaptureParameters parameters(params, n_params);
cap = new CaptureT();
bool res;
if (filename)
res = cap->open(std::string(filename), &parameters);
else
res = cap->open(camera_index, &parameters);
if (res)
{
*handle = (CvPluginCapture)cap;
@@ -1725,6 +1915,12 @@ CvResult CV_API_CALL cv_capture_open(const char* filename, int camera_index, CV_
return CV_ERROR_FAIL;
}
static
CvResult CV_API_CALL cv_capture_open(const char* filename, int camera_index, CV_OUT CvPluginCapture* handle)
{
return cv_capture_open_with_params(filename, camera_index, NULL, 0, handle);
}
static
CvResult CV_API_CALL cv_capture_release(CvPluginCapture handle)
{
@@ -1806,7 +2002,7 @@ CvResult CV_API_CALL cv_capture_grab(CvPluginCapture handle)
}
static
CvResult CV_API_CALL cv_capture_retrieve(CvPluginCapture handle, int stream_idx, cv_videoio_capture_retrieve_cb_t callback, void* userdata)
{
if (!handle)
return CV_ERROR_FAIL;
@@ -1815,7 +2011,11 @@ CvResult CV_API_CALL cv_capture_retrieve(CvPluginCapture handle, int stream_idx,
CaptureT* instance = (CaptureT*)handle;
Mat img;
if (instance->retrieveFrame(stream_idx, img))
#ifndef NEW_PLUGIN
return callback(stream_idx, img.data, (int)img.step, img.cols, img.rows, img.channels(), userdata);
#else
return callback(stream_idx, img.data, (int)img.step, img.cols, img.rows, img.type(), userdata);
#endif
return CV_ERROR_FAIL;
}
catch (const std::exception& e)
@@ -1831,14 +2031,18 @@ CvResult CV_API_CALL cv_capture_retrieve(CvPluginCapture handle, int stream_idx,
}
static
CvResult CV_API_CALL cv_writer_open_with_params(
const char* filename, int fourcc, double fps, int width, int height,
int* params, unsigned n_params,
CV_OUT CvPluginWriter* handle)
{
WriterT* wrt = 0;
try
{
VideoWriterParameters parameters(params, n_params);
wrt = new WriterT();
Size sz(width, height);
if (wrt && wrt->open(filename, fourcc, fps, sz, parameters))
{
*handle = (CvPluginWriter)wrt;
return CV_ERROR_OK;
@@ -1857,6 +2061,14 @@ CvResult CV_API_CALL cv_writer_open(const char* filename, int fourcc, double fps
return CV_ERROR_FAIL;
}
static
CvResult CV_API_CALL cv_writer_open(const char* filename, int fourcc, double fps, int width, int height, int isColor,
CV_OUT CvPluginWriter* handle)
{
int params[2] = { VIDEOWRITER_PROP_IS_COLOR, isColor };
return cv_writer_open_with_params(filename, fourcc, fps, width, height, params, 1, handle);
}
static
CvResult CV_API_CALL cv_writer_release(CvPluginWriter handle)
{
@@ -1868,9 +2080,22 @@ CvResult CV_API_CALL cv_writer_release(CvPluginWriter handle)
}
static
CvResult CV_API_CALL cv_writer_get_prop(CvPluginWriter handle, int prop, CV_OUT double* val)
{
if (!handle)
return CV_ERROR_FAIL;
if (!val)
return CV_ERROR_FAIL;
try
{
WriterT* instance = (WriterT*)handle;
*val = instance->getProperty(prop);
return CV_ERROR_OK;
}
catch (...)
{
return CV_ERROR_FAIL;
}
}
static
@@ -1905,6 +2130,10 @@ CvResult CV_API_CALL cv_writer_write(CvPluginWriter handle, const unsigned char*
}
}
} // namespace
#ifndef NEW_PLUGIN
static const OpenCV_VideoIO_Plugin_API_preview plugin_api =
{
{
@@ -1913,28 +2142,85 @@ static const OpenCV_VideoIO_Plugin_API_preview plugin_api =
"Microsoft Media Foundation OpenCV Video I/O plugin"
},
{
/* 1*/cv::CAP_MSMF,
/* 2*/cv::cv_capture_open,
/* 3*/cv::cv_capture_release,
/* 4*/cv::cv_capture_get_prop,
/* 5*/cv::cv_capture_set_prop,
/* 6*/cv::cv_capture_grab,
/* 7*/cv::cv_capture_retrieve,
/* 8*/cv::cv_writer_open,
/* 9*/cv::cv_writer_release,
/* 10*/cv::cv_writer_get_prop,
/* 11*/cv::cv_writer_set_prop,
/* 12*/cv::cv_writer_write
}
} // namespace
const OpenCV_VideoIO_Plugin_API_preview* opencv_videoio_plugin_init_v0(int requested_abi_version, int requested_api_version, void* /*reserved=NULL*/) CV_NOEXCEPT const OpenCV_VideoIO_Plugin_API_preview* opencv_videoio_plugin_init_v0(int requested_abi_version, int requested_api_version, void* /*reserved=NULL*/) CV_NOEXCEPT
{ {
if (requested_abi_version == ABI_VERSION && requested_api_version <= API_VERSION) if (requested_abi_version == ABI_VERSION && requested_api_version <= API_VERSION)
return &cv::plugin_api; return &plugin_api;
return NULL;
}
#else // NEW_PLUGIN
static const OpenCV_VideoIO_Capture_Plugin_API capture_plugin_api =
{
{
sizeof(OpenCV_VideoIO_Capture_Plugin_API), CAPTURE_ABI_VERSION, CAPTURE_API_VERSION,
CV_VERSION_MAJOR, CV_VERSION_MINOR, CV_VERSION_REVISION, CV_VERSION_STATUS,
"Microsoft Media Foundation OpenCV Video I/O plugin"
},
{
/* 1*/cv::CAP_MSMF,
/* 2*/cv::cv_capture_open,
/* 3*/cv::cv_capture_release,
/* 4*/cv::cv_capture_get_prop,
/* 5*/cv::cv_capture_set_prop,
/* 6*/cv::cv_capture_grab,
/* 7*/cv::cv_capture_retrieve,
},
{
/* 8*/cv::cv_capture_open_with_params,
}
};
const OpenCV_VideoIO_Capture_Plugin_API* opencv_videoio_capture_plugin_init_v1(int requested_abi_version, int requested_api_version, void* /*reserved=NULL*/) CV_NOEXCEPT
{
if (requested_abi_version == CAPTURE_ABI_VERSION && requested_api_version <= CAPTURE_API_VERSION)
return &capture_plugin_api;
return NULL; return NULL;
} }
static const OpenCV_VideoIO_Writer_Plugin_API writer_plugin_api =
{
{
sizeof(OpenCV_VideoIO_Writer_Plugin_API), WRITER_ABI_VERSION, WRITER_API_VERSION,
CV_VERSION_MAJOR, CV_VERSION_MINOR, CV_VERSION_REVISION, CV_VERSION_STATUS,
"Microsoft Media Foundation OpenCV Video I/O plugin"
},
{
/* 1*/cv::CAP_MSMF,
/* 2*/cv::cv_writer_open,
/* 3*/cv::cv_writer_release,
/* 4*/cv::cv_writer_get_prop,
/* 5*/cv::cv_writer_set_prop,
/* 6*/cv::cv_writer_write
},
{
/* 7*/cv::cv_writer_open_with_params
}
};
const OpenCV_VideoIO_Writer_Plugin_API* opencv_videoio_writer_plugin_init_v1(int requested_abi_version, int requested_api_version, void* /*reserved=NULL*/) CV_NOEXCEPT
{
if (requested_abi_version == WRITER_ABI_VERSION && requested_api_version <= WRITER_API_VERSION)
return &writer_plugin_api;
return NULL;
}
#endif // NEW_PLUGIN
#endif // BUILD_PLUGIN #endif // BUILD_PLUGIN

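The MSMF capture path above maps CAP_PROP_HW_ACCELERATION to its D3D11/DXVA mode and CAP_PROP_HW_DEVICE to a DXGI adapter index. A minimal Windows-only sketch against this API (not part of the patch; the file name is a placeholder):

// Usage sketch only (not part of the patch): request D3D11 decoding through the
// MSMF backend on Windows and read back the negotiated acceleration properties.
#include <opencv2/core.hpp>
#include <opencv2/videoio.hpp>
#include <iostream>

int main()
{
    cv::VideoCapture cap("video.mp4", cv::CAP_MSMF,
                         {
                             cv::CAP_PROP_HW_ACCELERATION, static_cast<int>(cv::VIDEO_ACCELERATION_D3D11),
                             cv::CAP_PROP_HW_DEVICE, 0   // DXGI adapter index; -1 means default adapter
                         });
    if (!cap.isOpened())
    {
        std::cerr << "D3D11 decoding is not available for this stream" << std::endl;
        return 1;
    }
    // Returns VIDEO_ACCELERATION_D3D11 when the DXVA path is active, VIDEO_ACCELERATION_NONE otherwise.
    std::cout << "CAP_PROP_HW_ACCELERATION = " << cap.get(cv::CAP_PROP_HW_ACCELERATION) << std::endl;
    std::cout << "CAP_PROP_HW_DEVICE = " << cap.get(cv::CAP_PROP_HW_DEVICE) << std::endl;
    return 0;
}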
@@ -44,6 +44,7 @@
#if defined(__OPENCV_BUILD) && defined(BUILD_PLUGIN)
#undef __OPENCV_BUILD  // allow public API only
#define OPENCV_HAVE_CVCONFIG_H 1  // but we still have access to cvconfig.h (TODO remove this)
#include <opencv2/core.hpp>
#include <opencv2/core/utils/trace.hpp>
#endif

@@ -15,11 +15,35 @@
namespace cv {
static inline
std::ostream& operator<<(std::ostream& out, const VideoCaptureAPIs& api)
{
out << cv::videoio_registry::getBackendName(api); return out;
}
static inline
std::ostream& operator<<(std::ostream& out, const VideoAccelerationType& va_type)
{
struct {
VideoAccelerationType va_type;
const char* str;
} va_types[] = {
{VIDEO_ACCELERATION_ANY, "ANY"},
{VIDEO_ACCELERATION_NONE, "NONE"},
{VIDEO_ACCELERATION_D3D11, "D3D11"},
{VIDEO_ACCELERATION_VAAPI, "VAAPI"},
{VIDEO_ACCELERATION_MFX, "MFX"},
};
for (const auto& va : va_types) {
if (va_type == va.va_type) {
out << va.str;
return out;
}
}
out << cv::format("UNKNOWN(0x%ux)", static_cast<unsigned int>(va_type));
return out;
}
static inline void PrintTo(const cv::VideoCaptureAPIs& api, std::ostream* os)
{
*os << cv::videoio_registry::getBackendName(api);

@@ -431,11 +431,11 @@ static Ext_Fourcc_PSNR synthetic_params[] = {
{"mkv", "MPEG", 30.f, CAP_FFMPEG},
{"mkv", "MJPG", 30.f, CAP_FFMPEG},
{"avi", "MPEG", 28.f, CAP_GSTREAMER},
{"avi", "MJPG", 30.f, CAP_GSTREAMER},
{"avi", "H264", 30.f, CAP_GSTREAMER},
{"mkv", "MPEG", 28.f, CAP_GSTREAMER},
{"mkv", "MJPG", 30.f, CAP_GSTREAMER},
{"mkv", "H264", 30.f, CAP_GSTREAMER},
@@ -649,4 +649,332 @@ TEST_P(safe_capture, frames_independency)
static VideoCaptureAPIs safe_apis[] = {CAP_FFMPEG, CAP_GSTREAMER, CAP_MSMF,CAP_AVFOUNDATION};
INSTANTIATE_TEST_CASE_P(videoio, safe_capture, testing::ValuesIn(safe_apis));
//==================================================================================================
// TEST_P(videocapture_acceleration, ...)
struct VideoCaptureAccelerationInput
{
const char* filename;
double psnr_threshold;
};
static inline
std::ostream& operator<<(std::ostream& out, const VideoCaptureAccelerationInput& p)
{
out << p.filename;
return out;
}
typedef testing::TestWithParam<tuple<VideoCaptureAccelerationInput, VideoCaptureAPIs, VideoAccelerationType, bool>> videocapture_acceleration;
TEST_P(videocapture_acceleration, read)
{
auto param = GetParam();
std::string filename = get<0>(param).filename;
double psnr_threshold = get<0>(param).psnr_threshold;
VideoCaptureAPIs backend = get<1>(param);
VideoAccelerationType va_type = get<2>(param);
bool use_umat = get<3>(param);
int device_idx = -1;
const int frameNum = 15;
std::string filepath = cvtest::findDataFile("video/" + filename);
if (backend == CAP_MSMF && (
filename == "sample_322x242_15frames.yuv420p.mjpeg.mp4" ||
filename == "sample_322x242_15frames.yuv420p.libx265.mp4" ||
filename == "sample_322x242_15frames.yuv420p.libaom-av1.mp4" ||
filename == "sample_322x242_15frames.yuv420p.mpeg2video.mp4"
))
throw SkipTestException("Format/codec is not supported");
std::string backend_name = cv::videoio_registry::getBackendName(backend);
if (!videoio_registry::hasBackend(backend))
throw SkipTestException(cv::String("Backend is not available/disabled: ") + backend_name);
// HW reader
VideoCapture hw_reader(filepath, backend, {
CAP_PROP_HW_ACCELERATION, static_cast<int>(va_type),
CAP_PROP_HW_DEVICE, device_idx
});
if (!hw_reader.isOpened())
{
if (va_type == VIDEO_ACCELERATION_ANY || va_type == VIDEO_ACCELERATION_NONE)
{
// ANY HW acceleration should have fallback to SW codecs
VideoCapture sw_reader(filepath, backend, {
CAP_PROP_HW_ACCELERATION, VIDEO_ACCELERATION_NONE
});
if (!sw_reader.isOpened())
throw SkipTestException(backend_name + " VideoCapture on " + filename + " not supported, skipping");
ASSERT_TRUE(hw_reader.isOpened()) << "ANY HW acceleration should have fallback to SW codecs";
}
else
{
throw SkipTestException(backend_name + " VideoCapture on " + filename + " not supported with HW acceleration, skipping");
}
}
VideoAccelerationType actual_va = static_cast<VideoAccelerationType>(static_cast<int>(hw_reader.get(CAP_PROP_HW_ACCELERATION)));
if (va_type != VIDEO_ACCELERATION_ANY && va_type != VIDEO_ACCELERATION_NONE)
{
#ifdef _WIN32 // FIXIT FFmpeg wrapper upgrade is required
if (actual_va == static_cast<VideoAccelerationType>(0))
throw SkipTestException(backend_name + " VideoCapture on " + filename + " not supported with HW acceleration (legacy FFmpeg wrapper), skipping");
#endif
ASSERT_EQ((int)actual_va, (int)va_type) << "actual_va=" << actual_va << ", va_type=" << va_type;
}
std::cout << "VideoCapture " << backend_name << ":" << actual_va << std::endl << std::flush;
double min_psnr_original = 1000;
for (int i = 0; i < frameNum; i++)
{
SCOPED_TRACE(cv::format("frame=%d", i));
Mat frame;
if (use_umat)
{
UMat umat;
EXPECT_TRUE(hw_reader.read(umat));
ASSERT_FALSE(umat.empty());
umat.copyTo(frame);
}
else
{
EXPECT_TRUE(hw_reader.read(frame));
}
ASSERT_FALSE(frame.empty());
if (cvtest::debugLevel > 0)
{
imwrite(cv::format("test_frame%03d.png", i), frame);
}
Mat original(frame.size(), CV_8UC3, Scalar::all(0));
generateFrame(i, frameNum, original);
double psnr = cvtest::PSNR(frame, original);
if (psnr < min_psnr_original)
min_psnr_original = psnr;
}
std::ostringstream ss; ss << actual_va;
std::string actual_va_str = ss.str();
std::cout << "VideoCapture with acceleration = " << cv::format("%-6s @ %-10s", actual_va_str.c_str(), backend_name.c_str())
<< " on " << filename
<< " with PSNR-original = " << min_psnr_original
<< std::endl << std::flush;
EXPECT_GE(min_psnr_original, psnr_threshold);
}
static const VideoCaptureAccelerationInput hw_filename[] = {
{ "sample_322x242_15frames.yuv420p.libxvid.mp4", 28.0 },
{ "sample_322x242_15frames.yuv420p.mjpeg.mp4", 20.0 },
{ "sample_322x242_15frames.yuv420p.mpeg2video.mp4", 24.0 }, // GSTREAMER on Ubuntu 18.04
{ "sample_322x242_15frames.yuv420p.libx264.mp4", 24.0 }, // GSTREAMER on Ubuntu 18.04
{ "sample_322x242_15frames.yuv420p.libx265.mp4", 30.0 },
{ "sample_322x242_15frames.yuv420p.libvpx-vp9.mp4", 30.0 },
{ "sample_322x242_15frames.yuv420p.libaom-av1.mp4", 30.0 }
};
static const VideoCaptureAPIs hw_backends[] = {
CAP_FFMPEG,
CAP_GSTREAMER,
#ifdef _WIN32
CAP_MSMF,
#endif
};
static const VideoAccelerationType hw_types[] = {
VIDEO_ACCELERATION_NONE,
VIDEO_ACCELERATION_ANY,
VIDEO_ACCELERATION_MFX,
#ifdef _WIN32
VIDEO_ACCELERATION_D3D11,
#else
VIDEO_ACCELERATION_VAAPI,
#endif
};
static bool hw_use_umat[] = {
false,
//true
};
INSTANTIATE_TEST_CASE_P(videoio, videocapture_acceleration, testing::Combine(
testing::ValuesIn(hw_filename),
testing::ValuesIn(hw_backends),
testing::ValuesIn(hw_types),
testing::ValuesIn(hw_use_umat)
));
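Outside the test harness, the same capture-side properties are passed the same way; a minimal sketch, assuming a placeholder file name "video.mp4" and an FFmpeg build with HW support:

cv::VideoCapture cap("video.mp4", cv::CAP_FFMPEG, {
    cv::CAP_PROP_HW_ACCELERATION, cv::VIDEO_ACCELERATION_ANY,  // '_ANY' may fall back to SW decoding
    cv::CAP_PROP_HW_DEVICE, -1                                 // -1 selects the default device
});
cv::Mat frame;
while (cap.read(frame))
{
    // process the decoded frame
}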
////////////////////////////////////////// TEST_P(video_acceleration, write_read)
typedef tuple<Ext_Fourcc_PSNR, VideoAccelerationType, bool> VATestParams;
typedef testing::TestWithParam<VATestParams> videowriter_acceleration;
TEST_P(videowriter_acceleration, write)
{
auto param = GetParam();
VideoCaptureAPIs backend = get<0>(param).api;
std::string codecid = get<0>(param).fourcc;
std::string extension = get<0>(param).ext;
double psnr_threshold = get<0>(param).PSNR;
VideoAccelerationType va_type = get<1>(param);
int device_idx = -1;
bool use_umat = get<2>(param);
std::string backend_name = cv::videoio_registry::getBackendName(backend);
if (!videoio_registry::hasBackend(backend))
throw SkipTestException(cv::String("Backend is not available/disabled: ") + backend_name);
const Size sz(640, 480);
const int frameNum = 15;
const double fps = 25;
std::string filename = tempfile("videowriter_acceleration.") + extension;
// Write video
VideoAccelerationType actual_va;
{
VideoWriter hw_writer(
filename,
backend,
VideoWriter::fourcc(codecid[0], codecid[1], codecid[2], codecid[3]),
fps,
sz,
{
VIDEOWRITER_PROP_HW_ACCELERATION, static_cast<int>(va_type),
VIDEOWRITER_PROP_HW_DEVICE, device_idx
}
);
if (!hw_writer.isOpened()) {
if (va_type == VIDEO_ACCELERATION_ANY || va_type == VIDEO_ACCELERATION_NONE)
{
// ANY HW acceleration should have fallback to SW codecs
{
VideoWriter sw_writer(
filename,
backend,
VideoWriter::fourcc(codecid[0], codecid[1], codecid[2], codecid[3]),
fps,
sz,
{
VIDEOWRITER_PROP_HW_ACCELERATION, VIDEO_ACCELERATION_NONE,
}
);
if (!sw_writer.isOpened()) {
remove(filename.c_str());
throw SkipTestException(backend_name + " VideoWriter on codec " + codecid + " not supported, skipping");
}
}
remove(filename.c_str());
ASSERT_TRUE(hw_writer.isOpened()) << "ANY HW acceleration should have fallback to SW codecs";
} else {
throw SkipTestException(backend_name + " VideoWriter on " + filename + " not supported with HW acceleration, skipping");
}
}
actual_va = static_cast<VideoAccelerationType>(static_cast<int>(hw_writer.get(VIDEOWRITER_PROP_HW_ACCELERATION)));
if (va_type != VIDEO_ACCELERATION_ANY && va_type != VIDEO_ACCELERATION_NONE)
{
#ifdef _WIN32 // FIXIT FFmpeg wrapper upgrade is required
if (actual_va == static_cast<VideoAccelerationType>(-1))
throw SkipTestException(backend_name + " VideoWriter on " + filename + " not supported with HW acceleration (legacy FFmpeg wrapper), skipping");
#endif
ASSERT_EQ((int)actual_va, (int)va_type) << "actual_va=" << actual_va << ", va_type=" << va_type;
}
std::cout << "VideoWriter " << backend_name << ":" << actual_va << std::endl << std::flush;
Mat frame(sz, CV_8UC3);
for (int i = 0; i < frameNum; ++i) {
generateFrame(i, frameNum, frame);
if (use_umat) {
UMat umat;
frame.copyTo(umat);
hw_writer.write(umat);
}
else {
hw_writer.write(frame);
}
}
}
std::ifstream ofile(filename, std::ios::binary);
ofile.seekg(0, std::ios::end);
int64 fileSize = (int64)ofile.tellg();
ASSERT_GT(fileSize, 0);
std::cout << "File size: " << fileSize << std::endl;
// Read video and check PSNR on every frame
{
VideoCapture reader(
filename,
CAP_ANY /*backend*/,
{ CAP_PROP_HW_ACCELERATION, VIDEO_ACCELERATION_NONE }
);
ASSERT_TRUE(reader.isOpened());
double min_psnr = 1000;
Mat reference(sz, CV_8UC3);
for (int i = 0; i < frameNum; ++i) {
Mat actual;
if (use_umat) {
UMat umat;
EXPECT_TRUE(reader.read(umat));
umat.copyTo(actual);
}
else {
EXPECT_TRUE(reader.read(actual));
}
EXPECT_FALSE(actual.empty());
generateFrame(i, frameNum, reference);
EXPECT_EQ(reference.size(), actual.size());
EXPECT_EQ(reference.depth(), actual.depth());
EXPECT_EQ(reference.channels(), actual.channels());
double psnr = cvtest::PSNR(actual, reference);
EXPECT_GE(psnr, psnr_threshold) << " frame " << i;
if (psnr < min_psnr)
min_psnr = psnr;
}
Mat actual;
EXPECT_FALSE(reader.read(actual));
{
std::ostringstream ss; ss << actual_va;
std::string actual_va_str = ss.str();
std::cout << "VideoWriter with acceleration = " << cv::format("%-6s @ %-10s", actual_va_str.c_str(), backend_name.c_str())
<< " on codec=" << codecid << " (." << extension << ")"
<< ", bitrate = " << fileSize / (frameNum / fps)
<< ", with PSNR-original = " << min_psnr
<< std::endl << std::flush;
}
remove(filename.c_str());
}
}
static Ext_Fourcc_PSNR hw_codecs[] = {
{"mp4", "MPEG", 29.f, CAP_FFMPEG},
{"mp4", "H264", 29.f, CAP_FFMPEG},
{"mp4", "HEVC", 29.f, CAP_FFMPEG},
{"avi", "MJPG", 29.f, CAP_FFMPEG},
{"avi", "XVID", 29.f, CAP_FFMPEG},
//{"webm", "VP8", 29.f, CAP_FFMPEG},
//{"webm", "VP9", 29.f, CAP_FFMPEG},
{"mkv", "MPEG", 29.f, CAP_GSTREAMER},
{"mkv", "H264", 29.f, CAP_GSTREAMER},
#ifdef _WIN32
{"mp4", "MPEG", 29.f, CAP_MSMF},
{"mp4", "H264", 29.f, CAP_MSMF},
#endif
};
INSTANTIATE_TEST_CASE_P(videoio, videowriter_acceleration, testing::Combine(
testing::ValuesIn(hw_codecs),
testing::ValuesIn(hw_types),
testing::ValuesIn(hw_use_umat)
));
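The writer-side properties mirror this usage; a minimal sketch for application code, with the file name, codec, frame rate and size as placeholders:

cv::VideoWriter writer("out.mp4", cv::CAP_FFMPEG,
                       cv::VideoWriter::fourcc('H', '2', '6', '4'), 30.0, cv::Size(640, 480), {
    cv::VIDEOWRITER_PROP_HW_ACCELERATION, cv::VIDEO_ACCELERATION_ANY,  // '_ANY' may fall back to SW encoding
    cv::VIDEOWRITER_PROP_HW_DEVICE, -1                                 // -1 selects the default device
});
// writer.isOpened() stays false if no suitable HW or SW encoder is available for the codec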
} // namespace

@@ -0,0 +1,211 @@
#include <iostream>
#include <chrono>
#include "opencv2/core.hpp"
#include "opencv2/core/ocl.hpp"
#include "opencv2/core/utility.hpp"
#include "opencv2/imgproc.hpp"
#include "opencv2/videoio.hpp"
#include "opencv2/highgui.hpp"
using namespace cv;
using namespace std;
const char* keys =
"{ i input | | input video file }"
"{ o output | | output video file, or specify 'null' to measure decoding without rendering to screen}"
"{ backend | any | VideoCapture and VideoWriter backend, valid values: 'any', 'ffmpeg', 'msmf', 'gstreamer' }"
"{ accel | any | GPU Video Acceleration, valid values: 'none', 'any', 'd3d11', 'vaapi', 'mfx' }"
"{ device | -1 | Video Acceleration device (GPU) index (-1 means default device) }"
"{ out_w | | output width (resize by calling cv::resize) }"
"{ out_h | | output height (resize by calling cv::resize) }"
"{ bitwise_not| false | apply simple image processing - bitwise_not pixels by calling cv::bitwise_not }"
"{ opencl | true | use OpenCL (inside VideoCapture/VideoWriter and for image processing) }"
"{ codec | H264 | codec id (four characters string) of output file encoder }"
"{ h help | | print help message }";
struct {
cv::VideoCaptureAPIs backend;
const char* str;
} backend_strings[] = {
{ cv::CAP_ANY, "any" },
{ cv::CAP_FFMPEG, "ffmpeg" },
{ cv::CAP_MSMF, "msmf" },
{ cv::CAP_GSTREAMER, "gstreamer" },
};
struct {
VideoAccelerationType acceleration;
const char* str;
} acceleration_strings[] = {
{ VIDEO_ACCELERATION_NONE, "none" },
{ VIDEO_ACCELERATION_ANY, "any" },
{ VIDEO_ACCELERATION_D3D11, "d3d11" },
{ VIDEO_ACCELERATION_VAAPI, "vaapi" },
{ VIDEO_ACCELERATION_MFX, "mfx" },
};
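// Helper: prints the measured FPS roughly once per 'interval' seconds and once more on destruction.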
class FPSCounter {
public:
FPSCounter(double _interval) : interval(_interval) {
}
~FPSCounter() {
NewFrame(true);
}
void NewFrame(bool last_frame = false) {
num_frames++;
auto now = std::chrono::high_resolution_clock::now();
if (!last_time.time_since_epoch().count()) {
last_time = now;
}
double sec = std::chrono::duration_cast<std::chrono::duration<double>>(now - last_time).count();
if (sec >= interval || last_frame) {
printf("FPS(last %.2f sec) = %.2f\n", sec, num_frames / sec);
fflush(stdout);
num_frames = 0;
last_time = now;
}
}
private:
double interval = 1;
std::chrono::time_point<std::chrono::high_resolution_clock> last_time;
int num_frames = 0;
};
int main(int argc, char** argv)
{
cv::CommandLineParser cmd(argc, argv, keys);
if (cmd.has("help"))
{
cout << "Usage : video_acceleration [options]" << endl;
cout << "Available options:" << endl;
cmd.printMessage();
return EXIT_SUCCESS;
}
string infile = cmd.get<string>("i");
string outfile = cmd.get<string>("o");
string codec = cmd.get<string>("codec");
int device = cmd.get<int>("device");
int out_w = cmd.get<int>("out_w");
int out_h = cmd.get<int>("out_h");
bool use_opencl = cmd.get<bool>("opencl");
bool bitwise_not = cmd.get<bool>("bitwise_not");
cv::VideoCaptureAPIs backend = cv::CAP_ANY;
string backend_str = cmd.get<string>("backend");
for (size_t i = 0; i < sizeof(backend_strings)/sizeof(backend_strings[0]); i++) {
if (backend_str == backend_strings[i].str) {
backend = backend_strings[i].backend;
break;
}
}
VideoAccelerationType accel = VIDEO_ACCELERATION_ANY;
string accel_str = cmd.get<string>("accel");
for (size_t i = 0; i < sizeof(acceleration_strings) / sizeof(acceleration_strings[0]); i++) {
if (accel_str == acceleration_strings[i].str) {
accel = acceleration_strings[i].acceleration;
break;
}
}
ocl::setUseOpenCL(use_opencl);
VideoCapture capture(infile, backend, {
CAP_PROP_HW_ACCELERATION, (int)accel,
CAP_PROP_HW_DEVICE, device
});
if (!capture.isOpened()) {
cerr << "Failed to open VideoCapture" << endl;
return 1;
}
cout << "VideoCapture backend = " << capture.getBackendName() << endl;
VideoAccelerationType actual_accel = static_cast<VideoAccelerationType>(static_cast<int>(capture.get(CAP_PROP_HW_ACCELERATION)));
for (size_t i = 0; i < sizeof(acceleration_strings) / sizeof(acceleration_strings[0]); i++) {
if (actual_accel == acceleration_strings[i].acceleration) {
cout << "VideoCapture acceleration = " << acceleration_strings[i].str << endl;
cout << "VideoCapture acceleration device = " << (int)capture.get(CAP_PROP_HW_DEVICE) << endl;
break;
}
}
VideoWriter writer;
if (!outfile.empty() && outfile != "null") {
const char* codec_str = codec.c_str();
int fourcc = VideoWriter::fourcc(codec_str[0], codec_str[1], codec_str[2], codec_str[3]);
double fps = capture.get(CAP_PROP_FPS);
Size frameSize = { out_w, out_h };
if (!out_w || !out_h) {
frameSize = { (int)capture.get(CAP_PROP_FRAME_WIDTH), (int)capture.get(CAP_PROP_FRAME_HEIGHT) };
}
writer = VideoWriter(outfile, backend, fourcc, fps, frameSize, {
VIDEOWRITER_PROP_HW_ACCELERATION, (int)accel,
VIDEOWRITER_PROP_HW_DEVICE, device
});
if (!writer.isOpened()) {
cerr << "Failed to open VideoWriter" << endl;
return 1;
}
cout << "VideoWriter backend = " << writer.getBackendName() << endl;
actual_accel = static_cast<VideoAccelerationType>(static_cast<int>(writer.get(VIDEOWRITER_PROP_HW_ACCELERATION)));
for (size_t i = 0; i < sizeof(acceleration_strings) / sizeof(acceleration_strings[0]); i++) {
if (actual_accel == acceleration_strings[i].acceleration) {
cout << "VideoWriter acceleration = " << acceleration_strings[i].str << endl;
cout << "VideoWriter acceleration device = " << (int)writer.get(CAP_PROP_HW_DEVICE) << endl;
break;
}
}
}
cout << "\nStarting frame loop. Press ESC to exit\n";
FPSCounter fps_counter(0.5); // print FPS every 0.5 seconds
UMat frame, frame2, frame3;
for (;;)
{
capture.read(frame);
if (frame.empty()) {
cout << "End of stream" << endl;
break;
}
if (out_w && out_h) {
cv::resize(frame, frame2, cv::Size(out_w, out_h));
//cv::cvtColor(frame, outframe, COLOR_BGRA2RGBA);
}
else {
frame2 = frame;
}
if (bitwise_not) {
cv::bitwise_not(frame2, frame3);
}
else {
frame3 = frame2;
}
if (writer.isOpened()) {
writer.write(frame3);
}
if (outfile.empty()) {
imshow("output", frame3);
char key = (char) waitKey(1);
if (key == 27)
break;
else if (key == 'm') {
ocl::setUseOpenCL(!cv::ocl::useOpenCL());
cout << "Switched to " << (ocl::useOpenCL() ? "OpenCL enabled" : "CPU") << " mode\n";
}
}
fps_counter.NewFrame();
}
return EXIT_SUCCESS;
}
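A hypothetical invocation of this sample, matching the options declared in 'keys' above (binary name and file paths are placeholders; '-o=null' measures decoding without rendering):

video_acceleration -i=input.mp4 -o=null --backend=ffmpeg --accel=vaapi --device=0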