qsv: Merge libav implementation

Merged as of libav commit 398f015, and therefore includes the
previously skipped merges 04b17ff and 130e1f1.

All features not present in libav are preserved, and no options change.
pull/240/head
Mark Thompson 8 years ago
parent 309fe16a12
commit 1f26a231bb
  1. libavcodec/qsv.c (333)
  2. libavcodec/qsv_internal.h (40)
  3. libavcodec/qsvdec.c (568)
  4. libavcodec/qsvdec.h (30)
  5. libavcodec/qsvdec_h2645.c (29)
  6. libavcodec/qsvdec_mpeg2.c (85)
  7. libavcodec/qsvdec_vc1.c (89)
  8. libavcodec/qsvenc.c (160)
  9. libavcodec/qsvenc.h (6)
  10. libavcodec/qsvenc_h264.c (3)

libavcodec/qsv.c

@@ -25,7 +25,10 @@
 #include <string.h>

 #include "libavutil/avstring.h"
+#include "libavutil/common.h"
 #include "libavutil/error.h"
+#include "libavutil/hwcontext.h"
+#include "libavutil/hwcontext_qsv.h"

 #include "avcodec.h"
 #include "qsv_internal.h"
@@ -51,6 +54,22 @@ int ff_qsv_codec_id_to_mfx(enum AVCodecID codec_id)
     return AVERROR(ENOSYS);
 }

+int ff_qsv_profile_to_mfx(enum AVCodecID codec_id, int profile)
+{
+    if (profile == FF_PROFILE_UNKNOWN)
+        return MFX_PROFILE_UNKNOWN;
+    switch (codec_id) {
+    case AV_CODEC_ID_H264:
+    case AV_CODEC_ID_HEVC:
+        return profile;
+    case AV_CODEC_ID_VC1:
+        return 4 * profile + 1;
+    case AV_CODEC_ID_MPEG2VIDEO:
+        return 0x10 * profile;
+    }
+    return MFX_PROFILE_UNKNOWN;
+}
+
 int ff_qsv_error(int mfx_err)
 {
     switch (mfx_err) {
@@ -85,90 +104,58 @@ int ff_qsv_error(int mfx_err)
         return AVERROR_UNKNOWN;
     }
 }

-static int ff_qsv_set_display_handle(AVCodecContext *avctx, QSVSession *qs)
-{
-    // this code is only required for Linux. It searches for a valid
-    // display handle. First in /dev/dri/renderD then in /dev/dri/card
-#ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
-    // VAAPI display handle
-    int ret = 0;
-    VADisplay va_dpy = NULL;
-    VAStatus va_res = VA_STATUS_SUCCESS;
-    int major_version = 0, minor_version = 0;
-    int fd = -1;
-    char adapterpath[256];
-    int adapter_num;
-
-    qs->fd_display = -1;
-    qs->va_display = NULL;
-
-    //search for valid graphics device
-    for (adapter_num = 0; adapter_num < 6; adapter_num++) {
-        if (adapter_num < 3) {
-            snprintf(adapterpath, sizeof(adapterpath),
-                     "/dev/dri/renderD%d", adapter_num + 128);
-        } else {
-            snprintf(adapterpath, sizeof(adapterpath),
-                     "/dev/dri/card%d", adapter_num - 3);
-        }
-        fd = open(adapterpath, O_RDWR);
-        if (fd < 0) {
-            av_log(avctx, AV_LOG_ERROR,
-                   "mfx init: %s fd open failed\n", adapterpath);
-            continue;
-        }
-        va_dpy = vaGetDisplayDRM(fd);
-        if (!va_dpy) {
-            av_log(avctx, AV_LOG_ERROR,
-                   "mfx init: %s vaGetDisplayDRM failed\n", adapterpath);
-            close(fd);
-            continue;
-        }
-        va_res = vaInitialize(va_dpy, &major_version, &minor_version);
-        if (VA_STATUS_SUCCESS != va_res) {
-            av_log(avctx, AV_LOG_ERROR,
-                   "mfx init: %s vaInitialize failed\n", adapterpath);
-            close(fd);
-            fd = -1;
-            continue;
-        } else {
-            av_log(avctx, AV_LOG_VERBOSE,
                   "mfx initialization: %s vaInitialize successful\n", adapterpath);
-            qs->fd_display = fd;
-            qs->va_display = va_dpy;
-            ret = MFXVideoCORE_SetHandle(qs->session,
                                         (mfxHandleType)MFX_HANDLE_VA_DISPLAY, (mfxHDL)va_dpy);
-            if (ret < 0) {
-                av_log(avctx, AV_LOG_ERROR,
                       "Error %d during set display handle\n", ret);
-                return ff_qsv_error(ret);
-            }
-            break;
-        }
-    }
-#endif //AVCODEC_QSV_LINUX_SESSION_HANDLE
-    return 0;
-}
+static int qsv_load_plugins(mfxSession session, const char *load_plugins,
+                            void *logctx)
+{
+    if (!load_plugins || !*load_plugins)
+        return 0;
+
+    while (*load_plugins) {
+        mfxPluginUID uid;
+        mfxStatus ret;
+        int i, err = 0;
+
+        char *plugin = av_get_token(&load_plugins, ":");
+        if (!plugin)
+            return AVERROR(ENOMEM);
+        if (strlen(plugin) != 2 * sizeof(uid.Data)) {
+            av_log(logctx, AV_LOG_ERROR, "Invalid plugin UID length\n");
+            err = AVERROR(EINVAL);
+            goto load_plugin_fail;
+        }
+
+        for (i = 0; i < sizeof(uid.Data); i++) {
+            err = sscanf(plugin + 2 * i, "%2hhx", uid.Data + i);
+            if (err != 1) {
+                av_log(logctx, AV_LOG_ERROR, "Invalid plugin UID\n");
+                err = AVERROR(EINVAL);
+                goto load_plugin_fail;
+            }
+        }
+
+        ret = MFXVideoUSER_Load(session, &uid, 1);
+        if (ret < 0) {
+            av_log(logctx, AV_LOG_ERROR, "Could not load the requested plugin: %s\n",
+                   plugin);
+            err = ff_qsv_error(ret);
+            goto load_plugin_fail;
+        }
+
+        if (*load_plugins)
+            load_plugins++;
+
+load_plugin_fail:
+        av_freep(&plugin);
+        if (err < 0)
+            return err;
+    }
+
+    return 0;
+}

-/**
- * @brief Initialize a MSDK session
- *
- * Media SDK is based on sessions, so this is the prerequisite
- * initialization for HW acceleration. For Windows the session is
- * complete and ready to use, for Linux a display handle is
- * required. For releases of Media Server Studio >= 2015 R4 the
- * render nodes interface is preferred (/dev/dri/renderD).
- * Using Media Server Studio 2015 R4 or newer is recommended
- * but the older /dev/dri/card interface is also searched
- * for broader compatibility.
- *
- * @param avctx ffmpeg metadata for this codec context
- * @param session the MSDK session used
- */
-int ff_qsv_init_internal_session(AVCodecContext *avctx, QSVSession *qs,
+int ff_qsv_init_internal_session(AVCodecContext *avctx, mfxSession *session,
                                  const char *load_plugins)
 {
     mfxIMPL impl = MFX_IMPL_AUTO_ANY;
@@ -177,58 +164,19 @@ int ff_qsv_init_internal_session(AVCodecContext *avctx, QSVSession *qs,
     const char *desc;
     int ret;

-    ret = MFXInit(impl, &ver, &qs->session);
+    ret = MFXInit(impl, &ver, session);
     if (ret < 0) {
         av_log(avctx, AV_LOG_ERROR, "Error initializing an internal MFX session\n");
         return ff_qsv_error(ret);
     }

-    ret = ff_qsv_set_display_handle(avctx, qs);
-    if (ret < 0)
-        return ret;
-
-    if (load_plugins && *load_plugins) {
-        while (*load_plugins) {
-            mfxPluginUID uid;
-            int i, err = 0;
-
-            char *plugin = av_get_token(&load_plugins, ":");
-            if (!plugin)
-                return AVERROR(ENOMEM);
-            if (strlen(plugin) != 2 * sizeof(uid.Data)) {
-                av_log(avctx, AV_LOG_ERROR, "Invalid plugin UID length\n");
-                err = AVERROR(EINVAL);
-                goto load_plugin_fail;
-            }
-
-            for (i = 0; i < sizeof(uid.Data); i++) {
-                err = sscanf(plugin + 2 * i, "%2hhx", uid.Data + i);
-                if (err != 1) {
-                    av_log(avctx, AV_LOG_ERROR, "Invalid plugin UID\n");
-                    err = AVERROR(EINVAL);
-                    goto load_plugin_fail;
-                }
-            }
-
-            ret = MFXVideoUSER_Load(qs->session, &uid, 1);
-            if (ret < 0) {
-                av_log(avctx, AV_LOG_ERROR, "Could not load the requested plugin: %s\n",
-                       plugin);
-                err = ff_qsv_error(ret);
-                goto load_plugin_fail;
-            }
-
-            if (*load_plugins)
-                load_plugins++;
-
-load_plugin_fail:
-            av_freep(&plugin);
-            if (err < 0)
-                return err;
-        }
-    }
+    ret = qsv_load_plugins(*session, load_plugins, avctx);
+    if (ret < 0) {
+        av_log(avctx, AV_LOG_ERROR, "Error loading plugins\n");
+        return ret;
+    }

-    MFXQueryIMPL(qs->session, &impl);
+    MFXQueryIMPL(*session, &impl);

     switch (MFX_IMPL_BASETYPE(impl)) {
     case MFX_IMPL_SOFTWARE:
@@ -251,21 +199,146 @@ load_plugin_fail:
     return 0;
 }

-int ff_qsv_close_internal_session(QSVSession *qs)
-{
-    if (qs->session) {
-        MFXClose(qs->session);
-        qs->session = NULL;
-    }
-#ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
-    if (qs->va_display) {
-        vaTerminate(qs->va_display);
-        qs->va_display = NULL;
-    }
-    if (qs->fd_display > 0) {
-        close(qs->fd_display);
-        qs->fd_display = -1;
-    }
-#endif
-    return 0;
-}
+static mfxStatus qsv_frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
+                                 mfxFrameAllocResponse *resp)
+{
+    QSVFramesContext *ctx = pthis;
+    mfxFrameInfo *i  = &req->Info;
+    mfxFrameInfo *i1 = &ctx->info;
+
+    if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET) ||
+        !(req->Type & (MFX_MEMTYPE_FROM_DECODE | MFX_MEMTYPE_FROM_ENCODE)) ||
+        !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
+        return MFX_ERR_UNSUPPORTED;
+    if (i->Width != i1->Width || i->Height != i1->Height ||
+        i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
+        av_log(ctx, AV_LOG_ERROR, "Mismatching surface properties in an "
+               "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
+               i->Width,  i->Height,  i->FourCC,  i->ChromaFormat,
+               i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
+        return MFX_ERR_UNSUPPORTED;
+    }
+
+    resp->mids           = ctx->mids;
+    resp->NumFrameActual = ctx->nb_mids;
+
+    return MFX_ERR_NONE;
+}
+
+static mfxStatus qsv_frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
+{
+    return MFX_ERR_NONE;
+}
+
+static mfxStatus qsv_frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
+{
+    return MFX_ERR_UNSUPPORTED;
+}
+
+static mfxStatus qsv_frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
+{
+    return MFX_ERR_UNSUPPORTED;
+}
+
+static mfxStatus qsv_frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
+{
+    *hdl = mid;
+    return MFX_ERR_NONE;
+}
+
+int ff_qsv_init_session_hwcontext(AVCodecContext *avctx, mfxSession *psession,
+                                  QSVFramesContext *qsv_frames_ctx,
+                                  const char *load_plugins, int opaque)
+{
+    static const mfxHandleType handle_types[] = {
+        MFX_HANDLE_VA_DISPLAY,
+        MFX_HANDLE_D3D9_DEVICE_MANAGER,
+        MFX_HANDLE_D3D11_DEVICE,
+    };
+    mfxFrameAllocator frame_allocator = {
+        .pthis  = qsv_frames_ctx,
+        .Alloc  = qsv_frame_alloc,
+        .Lock   = qsv_frame_lock,
+        .Unlock = qsv_frame_unlock,
+        .GetHDL = qsv_frame_get_hdl,
+        .Free   = qsv_frame_free,
+    };
+
+    AVHWFramesContext *frames_ctx    = (AVHWFramesContext*)qsv_frames_ctx->hw_frames_ctx->data;
+    AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
+    AVQSVDeviceContext *device_hwctx = frames_ctx->device_ctx->hwctx;
+    mfxSession parent_session        = device_hwctx->session;
+
+    mfxSession    session;
+    mfxVersion    ver;
+    mfxIMPL       impl;
+    mfxHDL        handle = NULL;
+    mfxHandleType handle_type;
+    mfxStatus     err;
+
+    int i, ret;
+
+    err = MFXQueryIMPL(parent_session, &impl);
+    if (err == MFX_ERR_NONE)
+        err = MFXQueryVersion(parent_session, &ver);
+    if (err != MFX_ERR_NONE) {
+        av_log(avctx, AV_LOG_ERROR, "Error querying the session attributes\n");
+        return ff_qsv_error(err);
+    }
+
+    for (i = 0; i < FF_ARRAY_ELEMS(handle_types); i++) {
+        err = MFXVideoCORE_GetHandle(parent_session, handle_types[i], &handle);
+        if (err == MFX_ERR_NONE) {
+            handle_type = handle_types[i];
+            break;
+        }
+        handle = NULL;
+    }
+    if (!handle) {
+        av_log(avctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
+               "from the session\n");
+    }
+
+    err = MFXInit(impl, &ver, &session);
+    if (err != MFX_ERR_NONE) {
+        av_log(avctx, AV_LOG_ERROR,
+               "Error initializing a child MFX session: %d\n", err);
+        return ff_qsv_error(err);
+    }
+
+    if (handle) {
+        err = MFXVideoCORE_SetHandle(session, handle_type, handle);
+        if (err != MFX_ERR_NONE) {
+            av_log(avctx, AV_LOG_ERROR, "Error setting a HW handle: %d\n", err);
+            return ff_qsv_error(err);
+        }
+    }
+
+    ret = qsv_load_plugins(session, load_plugins, avctx);
+    if (ret < 0) {
+        av_log(avctx, AV_LOG_ERROR, "Error loading plugins\n");
+        return ret;
+    }
+
+    if (!opaque) {
+        av_freep(&qsv_frames_ctx->mids);
+        qsv_frames_ctx->mids = av_mallocz_array(frames_hwctx->nb_surfaces,
+                                                sizeof(*qsv_frames_ctx->mids));
+        if (!qsv_frames_ctx->mids)
+            return AVERROR(ENOMEM);
+
+        qsv_frames_ctx->info    = frames_hwctx->surfaces[0].Info;
+        qsv_frames_ctx->nb_mids = frames_hwctx->nb_surfaces;
+        for (i = 0; i < frames_hwctx->nb_surfaces; i++)
+            qsv_frames_ctx->mids[i] = frames_hwctx->surfaces[i].Data.MemId;
+
+        err = MFXVideoCORE_SetFrameAllocator(session, &frame_allocator);
+        if (err != MFX_ERR_NONE) {
+            av_log(avctx, AV_LOG_ERROR, "Error setting a frame allocator: %d\n", err);
+            return ff_qsv_error(err);
+        }
+    }
+
+    *psession = session;
+    return 0;
+}

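The plugin loader above turns each ':'-separated token of load_plugins into an mfxPluginUID by decoding a 32-character hex string into its 16 raw bytes, two digits at a time. A minimal standalone sketch of just that decoding step follows; parse_plugin_uid is a hypothetical helper name, and the snippet is plain C with no libmfx calls:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

static int parse_plugin_uid(const char *s, uint8_t uid[16])
{
    size_t i;
    if (strlen(s) != 32)   /* 2 hex digits per byte, 16 bytes, as in qsv_load_plugins() */
        return -1;
    for (i = 0; i < 16; i++) {
        /* "%2hhx" consumes exactly two hex digits into one unsigned byte */
        if (sscanf(s + 2 * i, "%2hhx", &uid[i]) != 1)
            return -1;
    }
    return 0;
}

int main(void)
{
    uint8_t uid[16];
    /* the HEVC software decoder plugin UID used in qsvdec_h2645.c */
    if (parse_plugin_uid("15dd936825ad475ea34e35f3f54217a6", uid) == 0)
        printf("first byte: 0x%02x\n", uid[0]);   /* prints 0x15 */
    return 0;
}
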
@ -21,21 +21,6 @@
#ifndef AVCODEC_QSV_INTERNAL_H #ifndef AVCODEC_QSV_INTERNAL_H
#define AVCODEC_QSV_INTERNAL_H #define AVCODEC_QSV_INTERNAL_H
#if CONFIG_VAAPI
#define AVCODEC_QSV_LINUX_SESSION_HANDLE
#endif //CONFIG_VAAPI
#ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE
#include <stdio.h>
#include <string.h>
#if HAVE_UNISTD_H
#include <unistd.h>
#endif
#include <fcntl.h>
#include <va/va.h>
#include <va/va_drm.h>
#endif
#include <mfx/mfxvideo.h> #include <mfx/mfxvideo.h>
#include "libavutil/frame.h" #include "libavutil/frame.h"
@ -43,7 +28,7 @@
#include "avcodec.h" #include "avcodec.h"
#define QSV_VERSION_MAJOR 1 #define QSV_VERSION_MAJOR 1
#define QSV_VERSION_MINOR 9 #define QSV_VERSION_MINOR 1
#define ASYNC_DEPTH_DEFAULT 4 // internal parallelism #define ASYNC_DEPTH_DEFAULT 4 // internal parallelism
@ -65,23 +50,26 @@ typedef struct QSVFrame {
struct QSVFrame *next; struct QSVFrame *next;
} QSVFrame; } QSVFrame;
typedef struct QSVSession { typedef struct QSVFramesContext {
mfxSession session; AVBufferRef *hw_frames_ctx;
#ifdef AVCODEC_QSV_LINUX_SESSION_HANDLE mfxFrameInfo info;
int fd_display; mfxMemId *mids;
VADisplay va_display; int nb_mids;
#endif } QSVFramesContext;
} QSVSession;
/** /**
* Convert a libmfx error code into a ffmpeg error code. * Convert a libmfx error code into an ffmpeg error code.
*/ */
int ff_qsv_error(int mfx_err); int ff_qsv_error(int mfx_err);
int ff_qsv_codec_id_to_mfx(enum AVCodecID codec_id); int ff_qsv_codec_id_to_mfx(enum AVCodecID codec_id);
int ff_qsv_profile_to_mfx(enum AVCodecID codec_id, int profile);
int ff_qsv_init_internal_session(AVCodecContext *avctx, QSVSession *qs, int ff_qsv_init_internal_session(AVCodecContext *avctx, mfxSession *session,
const char *load_plugins); const char *load_plugins);
int ff_qsv_close_internal_session(QSVSession *qs);
int ff_qsv_init_session_hwcontext(AVCodecContext *avctx, mfxSession *session,
QSVFramesContext *qsv_frames_ctx,
const char *load_plugins, int opaque);
#endif /* AVCODEC_QSV_INTERNAL_H */ #endif /* AVCODEC_QSV_INTERNAL_H */

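The new ff_qsv_profile_to_mfx() declared above maps lavc profile constants onto libmfx ones purely arithmetically: H.264 and HEVC values pass through unchanged, VC-1 values are spaced four apart with an offset of one, and MPEG-2 values are shifted into the high nibble. A worked sketch, assuming the usual FF_PROFILE_* values from avcodec.h (VC-1 main = 1, advanced = 3; MPEG-2 main = 4, simple = 5):

#include <stdio.h>

int main(void)
{
    /* VC-1: mfx profiles are spaced 4 apart, offset by 1 */
    printf("VC1 main     (1) -> 0x%x\n", 4 * 1 + 1);   /* 0x5  */
    printf("VC1 advanced (3) -> 0x%x\n", 4 * 3 + 1);   /* 0xd  */
    /* MPEG-2: the lavc value is shifted into the high nibble */
    printf("MPEG2 main   (4) -> 0x%x\n", 0x10 * 4);    /* 0x40 */
    printf("MPEG2 simple (5) -> 0x%x\n", 0x10 * 5);    /* 0x50 */
    return 0;
}
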
libavcodec/qsvdec.c

@@ -27,6 +27,8 @@
 #include <mfx/mfxvideo.h>

 #include "libavutil/common.h"
+#include "libavutil/hwcontext.h"
+#include "libavutil/hwcontext_qsv.h"
 #include "libavutil/mem.h"
 #include "libavutil/log.h"
 #include "libavutil/pixfmt.h"
@@ -49,88 +51,129 @@ int ff_qsv_map_pixfmt(enum AVPixelFormat format)
     }
 }

-static int qsv_init_session(AVCodecContext *avctx, QSVContext *q, mfxSession session)
+static int qsv_init_session(AVCodecContext *avctx, QSVContext *q, mfxSession session,
+                            AVBufferRef *hw_frames_ref)
 {
-    if (!session) {
-        if (!q->internal_qs.session) {
-            int ret = ff_qsv_init_internal_session(avctx, &q->internal_qs,
-                                                   q->load_plugins);
+    int ret;
+
+    if (session) {
+        q->session = session;
+    } else if (hw_frames_ref) {
+        if (q->internal_session) {
+            MFXClose(q->internal_session);
+            q->internal_session = NULL;
+        }
+        av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
+
+        q->frames_ctx.hw_frames_ctx = av_buffer_ref(hw_frames_ref);
+        if (!q->frames_ctx.hw_frames_ctx)
+            return AVERROR(ENOMEM);
+
+        ret = ff_qsv_init_session_hwcontext(avctx, &q->internal_session,
+                                            &q->frames_ctx, q->load_plugins,
+                                            q->iopattern == MFX_IOPATTERN_OUT_OPAQUE_MEMORY);
+        if (ret < 0) {
+            av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
+            return ret;
+        }
+
+        q->session = q->internal_session;
+    } else {
+        if (!q->internal_session) {
+            ret = ff_qsv_init_internal_session(avctx, &q->internal_session,
+                                               q->load_plugins);
             if (ret < 0)
                 return ret;
         }

-        q->session = q->internal_qs.session;
-    } else {
-        q->session = session;
+        q->session = q->internal_session;
     }

+    /* make sure the decoder is uninitialized */
+    MFXVideoDECODE_Close(q->session);
+
     return 0;
 }

-static int qsv_decode_init(AVCodecContext *avctx, QSVContext *q, AVPacket *avpkt)
+static int qsv_decode_init(AVCodecContext *avctx, QSVContext *q)
 {
     mfxSession session = NULL;
+    int iopattern = 0;
     mfxVideoParam param = { { 0 } };
-    mfxBitstream bs = { { { 0 } } };
     int ret;
-    enum AVPixelFormat pix_fmts[3] = { AV_PIX_FMT_QSV,
-                                       AV_PIX_FMT_NV12,
-                                       AV_PIX_FMT_NONE };
-
-    ret = ff_get_format(avctx, pix_fmts);
-    if (ret < 0)
-        return ret;
-
-    avctx->pix_fmt = ret;

-    q->iopattern = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
+    if (!q->async_fifo) {
+        q->async_fifo = av_fifo_alloc((1 + q->async_depth) *
+                                      (sizeof(mfxSyncPoint*) + sizeof(QSVFrame*)));
+        if (!q->async_fifo)
+            return AVERROR(ENOMEM);
+    }
+
     if (avctx->hwaccel_context) {
-        AVQSVContext *qsv = avctx->hwaccel_context;
+        AVQSVContext *user_ctx = avctx->hwaccel_context;

-        session           = qsv->session;
-        q->iopattern      = qsv->iopattern;
-        q->ext_buffers    = qsv->ext_buffers;
-        q->nb_ext_buffers = qsv->nb_ext_buffers;
+        session           = user_ctx->session;
+        iopattern         = user_ctx->iopattern;
+        q->ext_buffers    = user_ctx->ext_buffers;
+        q->nb_ext_buffers = user_ctx->nb_ext_buffers;
+    }
+
+    if (avctx->hw_frames_ctx) {
+        AVHWFramesContext *frames_ctx    = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
+        AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
+
+        if (!iopattern) {
+            if (frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)
+                iopattern = MFX_IOPATTERN_OUT_OPAQUE_MEMORY;
+            else if (frames_hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
+                iopattern = MFX_IOPATTERN_OUT_VIDEO_MEMORY;
+        }
     }

-    ret = qsv_init_session(avctx, q, session);
+    if (!iopattern)
+        iopattern = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
+    q->iopattern = iopattern;
+
+    ret = qsv_init_session(avctx, q, session, avctx->hw_frames_ctx);
     if (ret < 0) {
         av_log(avctx, AV_LOG_ERROR, "Error initializing an MFX session\n");
         return ret;
     }

-    if (avpkt->size) {
-        bs.Data       = avpkt->data;
-        bs.DataLength = avpkt->size;
-        bs.MaxLength  = bs.DataLength;
-        bs.TimeStamp  = avpkt->pts;
-    } else
-        return AVERROR_INVALIDDATA;
-
     ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
-    if (ret < 0) {
-        av_log(avctx, AV_LOG_ERROR, "Unsupported codec_id %08x\n", avctx->codec_id);
+    if (ret < 0)
         return ret;
-    }

     param.mfx.CodecId = ret;
+    param.mfx.CodecProfile = ff_qsv_profile_to_mfx(avctx->codec_id, avctx->profile);
+    param.mfx.CodecLevel = avctx->level == FF_LEVEL_UNKNOWN ? MFX_LEVEL_UNKNOWN : avctx->level;

-    ret = MFXVideoDECODE_DecodeHeader(q->session, &bs, &param);
-    if (MFX_ERR_MORE_DATA==ret) {
-        /* this code means that header not found so we return packet size to skip
-           a current packet
-         */
-        return avpkt->size;
-    } else if (ret < 0) {
-        av_log(avctx, AV_LOG_ERROR, "Decode header error %d\n", ret);
-        return ff_qsv_error(ret);
+    param.mfx.FrameInfo.BitDepthLuma   = 8;
+    param.mfx.FrameInfo.BitDepthChroma = 8;
+    param.mfx.FrameInfo.Shift          = 0;
+    param.mfx.FrameInfo.FourCC         = MFX_FOURCC_NV12;
+    param.mfx.FrameInfo.Width          = avctx->coded_width;
+    param.mfx.FrameInfo.Height         = avctx->coded_height;
+    param.mfx.FrameInfo.ChromaFormat   = MFX_CHROMAFORMAT_YUV420;
+
+    switch (avctx->field_order) {
+    case AV_FIELD_PROGRESSIVE:
+        param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
+        break;
+    case AV_FIELD_TT:
+        param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_FIELD_TFF;
+        break;
+    case AV_FIELD_BB:
+        param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_FIELD_BFF;
+        break;
+    default:
+        param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_UNKNOWN;
+        break;
     }
+
     param.IOPattern   = q->iopattern;
     param.AsyncDepth  = q->async_depth;
     param.ExtParam    = q->ext_buffers;
     param.NumExtParam = q->nb_ext_buffers;
-    param.mfx.FrameInfo.BitDepthLuma   = 8;
-    param.mfx.FrameInfo.BitDepthChroma = 8;

     ret = MFXVideoDECODE_Init(q->session, &param);
     if (ret < 0) {
@@ -144,37 +187,6 @@ static int qsv_decode_init(AVCodecContext *avctx, QSVContext *q, AVPacket *avpkt
         return ff_qsv_error(ret);
     }

-    avctx->profile      = param.mfx.CodecProfile;
-    avctx->level        = param.mfx.CodecLevel;
-    avctx->coded_width  = param.mfx.FrameInfo.Width;
-    avctx->coded_height = param.mfx.FrameInfo.Height;
-    avctx->width        = param.mfx.FrameInfo.CropW - param.mfx.FrameInfo.CropX;
-    avctx->height       = param.mfx.FrameInfo.CropH - param.mfx.FrameInfo.CropY;
-
-    /* maximum decoder latency should be not exceed max DPB size for h.264 and
-       HEVC which is 16 for both cases.
-       So weare pre-allocating fifo big enough for 17 elements:
-     */
-    if (!q->async_fifo) {
-        q->async_fifo = av_fifo_alloc((1 + 16) *
-                                      (sizeof(mfxSyncPoint) + sizeof(QSVFrame*)));
-        if (!q->async_fifo)
-            return AVERROR(ENOMEM);
-    }
-
-    if (!q->input_fifo) {
-        q->input_fifo = av_fifo_alloc(1024*16);
-        if (!q->input_fifo)
-            return AVERROR(ENOMEM);
-    }
-
-    if (!q->pkt_fifo) {
-        q->pkt_fifo = av_fifo_alloc(sizeof(AVPacket) * (1 + 16));
-        if (!q->pkt_fifo)
-            return AVERROR(ENOMEM);
-    }
-
-    q->engine_ready = 1;
-
     return 0;
 }
@@ -270,161 +282,77 @@ static QSVFrame *find_frame(QSVContext *q, mfxFrameSurface1 *surf)
     return NULL;
 }

-/*  This function uses for 'smart' releasing of consumed data
-    from the input bitstream fifo.
-    Since the input fifo mapped to mfxBitstream which does not understand
-    a wrapping of data over fifo end, we should also to relocate a possible
-    data rest to fifo begin. If rest of data is absent then we just reset fifo's
-    pointers to initial positions.
-    NOTE the case when fifo does contain unconsumed data is rare and typical
-    amount of such data is 1..4 bytes.
-*/
-static void qsv_fifo_relocate(AVFifoBuffer *f, int bytes_to_free)
-{
-    int data_size;
-    int data_rest = 0;
-
-    av_fifo_drain(f, bytes_to_free);
-
-    data_size = av_fifo_size(f);
-    if (data_size > 0) {
-        if (f->buffer != f->rptr) {
-            if ((f->end - f->rptr) < data_size) {
-                data_rest = data_size - (f->end - f->rptr);
-                data_size -= data_rest;
-                memmove(f->buffer + data_size, f->buffer, data_rest);
-            }
-            memmove(f->buffer, f->rptr, data_size);
-            data_size += data_rest;
-        }
-    }
-    f->rptr = f->buffer;
-    f->wptr = f->buffer + data_size;
-    f->wndx = data_size;
-    f->rndx = 0;
-}
-
-static void close_decoder(QSVContext *q)
-{
-    QSVFrame *cur;
-
-    if (q->session)
-        MFXVideoDECODE_Close(q->session);
-
-    cur = q->work_frames;
-    while (cur) {
-        q->work_frames = cur->next;
-        av_frame_free(&cur->frame);
-        av_freep(&cur);
-        cur = q->work_frames;
-    }
-
-    q->engine_ready   = 0;
-    q->reinit_pending = 0;
-}
-
-static int do_qsv_decode(AVCodecContext *avctx, QSVContext *q,
-                         AVFrame *frame, int *got_frame,
-                         AVPacket *avpkt)
+static int qsv_decode(AVCodecContext *avctx, QSVContext *q,
+                      AVFrame *frame, int *got_frame,
+                      AVPacket *avpkt)
 {
     QSVFrame *out_frame;
     mfxFrameSurface1 *insurf;
     mfxFrameSurface1 *outsurf;
-    mfxSyncPoint sync;
+    mfxSyncPoint *sync;
     mfxBitstream bs = { { { 0 } } };
     int ret;
-    int n_out_frames;
-    int buffered = 0;
-    int flush    = !avpkt->size || q->reinit_pending;
-
-    if (!q->engine_ready) {
-        ret = qsv_decode_init(avctx, q, avpkt);
-        if (ret)
-            return ret;
-    }

-    if (!flush) {
-        if (av_fifo_size(q->input_fifo)) {
-            /* we have got rest of previous packet into buffer */
-            if (av_fifo_space(q->input_fifo) < avpkt->size) {
-                ret = av_fifo_grow(q->input_fifo, avpkt->size);
-                if (ret < 0)
-                    return ret;
-            }
-            av_fifo_generic_write(q->input_fifo, avpkt->data, avpkt->size, NULL);
-            bs.Data       = q->input_fifo->rptr;
-            bs.DataLength = av_fifo_size(q->input_fifo);
-            buffered = 1;
-        } else {
-            bs.Data       = avpkt->data;
-            bs.DataLength = avpkt->size;
-        }
+    if (avpkt->size) {
+        bs.Data       = avpkt->data;
+        bs.DataLength = avpkt->size;
         bs.MaxLength  = bs.DataLength;
         bs.TimeStamp  = avpkt->pts;
     }

-    while (1) {
+    sync = av_mallocz(sizeof(*sync));
+    if (!sync) {
+        av_freep(&sync);
+        return AVERROR(ENOMEM);
+    }
+
+    do {
         ret = get_surface(avctx, q, &insurf);
         if (ret < 0)
             return ret;
-        do {
-            ret = MFXVideoDECODE_DecodeFrameAsync(q->session, flush ? NULL : &bs,
-                                                  insurf, &outsurf, &sync);
-            if (ret != MFX_WRN_DEVICE_BUSY)
-                break;
-            av_usleep(500);
-        } while (1);
-
-        if (MFX_WRN_VIDEO_PARAM_CHANGED==ret) {
-            /* TODO: handle here minor sequence header changing */
-        } else if (MFX_ERR_INCOMPATIBLE_VIDEO_PARAM==ret) {
-            av_fifo_reset(q->input_fifo);
-            flush = q->reinit_pending = 1;
-            continue;
-        }
-
-        if (sync) {
-            QSVFrame *out_frame = find_frame(q, outsurf);
-
-            if (!out_frame) {
-                av_log(avctx, AV_LOG_ERROR,
-                       "The returned surface does not correspond to any frame\n");
-                return AVERROR_BUG;
-            }
-
-            out_frame->queued = 1;
-            av_fifo_generic_write(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
-            av_fifo_generic_write(q->async_fifo, &sync, sizeof(sync), NULL);
-
-            continue;
-        }
-
-        if (MFX_ERR_MORE_SURFACE != ret && ret < 0)
-            break;
-    }
+
+        ret = MFXVideoDECODE_DecodeFrameAsync(q->session, avpkt->size ? &bs : NULL,
+                                              insurf, &outsurf, sync);
+        if (ret == MFX_WRN_DEVICE_BUSY)
+            av_usleep(500);
+
+    } while (ret == MFX_WRN_DEVICE_BUSY || ret == MFX_ERR_MORE_SURFACE);
+
+    if (ret != MFX_ERR_NONE &&
+        ret != MFX_ERR_MORE_DATA &&
+        ret != MFX_WRN_VIDEO_PARAM_CHANGED &&
+        ret != MFX_ERR_MORE_SURFACE) {
+        av_log(avctx, AV_LOG_ERROR, "Error during QSV decoding.\n");
+        av_freep(&sync);
+        return ff_qsv_error(ret);
+    }

     /* make sure we do not enter an infinite loop if the SDK
      * did not consume any data and did not return anything */
-    if (!sync && !bs.DataOffset && !flush) {
+    if (!*sync && !bs.DataOffset) {
         av_log(avctx, AV_LOG_WARNING, "A decode call did not consume any data\n");
         bs.DataOffset = avpkt->size;
     }

-    if (buffered) {
-        qsv_fifo_relocate(q->input_fifo, bs.DataOffset);
-    } else if (bs.DataOffset != avpkt->size) {
-        /* some data of packet was not consumed. store it to local buffer */
-        av_fifo_generic_write(q->input_fifo, avpkt->data + bs.DataOffset,
-                              avpkt->size - bs.DataOffset, NULL);
-    }
-
-    if (MFX_ERR_MORE_DATA!=ret && ret < 0) {
-        av_log(avctx, AV_LOG_ERROR, "Error %d during QSV decoding.\n", ret);
-        return ff_qsv_error(ret);
+    if (*sync) {
+        QSVFrame *out_frame = find_frame(q, outsurf);
+
+        if (!out_frame) {
+            av_log(avctx, AV_LOG_ERROR,
+                   "The returned surface does not correspond to any frame\n");
+            av_freep(&sync);
+            return AVERROR_BUG;
+        }
+
+        out_frame->queued = 1;
+        av_fifo_generic_write(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
+        av_fifo_generic_write(q->async_fifo, &sync, sizeof(sync), NULL);
+    } else {
+        av_freep(&sync);
     }
-    n_out_frames = av_fifo_size(q->async_fifo) / (sizeof(out_frame)+sizeof(sync));

-    if (n_out_frames > q->async_depth || (flush && n_out_frames)) {
+    if (!av_fifo_space(q->async_fifo) ||
+        (!avpkt->size && av_fifo_size(q->async_fifo))) {
         AVFrame *src_frame;

         av_fifo_generic_read(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
@@ -432,9 +360,11 @@ static int do_qsv_decode(AVCodecContext *avctx, QSVContext *q,
         out_frame->queued = 0;

         do {
-            ret = MFXVideoCORE_SyncOperation(q->session, sync, 1000);
+            ret = MFXVideoCORE_SyncOperation(q->session, *sync, 1000);
         } while (ret == MFX_WRN_IN_EXECUTION);

+        av_freep(&sync);
+
         src_frame = out_frame->frame;

         ret = av_frame_ref(frame, src_frame);
@@ -462,149 +392,125 @@ FF_ENABLE_DEPRECATION_WARNINGS
         *got_frame = 1;
     }

-    return avpkt->size;
+    return bs.DataOffset;
 }

-/*
-  This function inserts a packet at fifo front.
-*/
-static void qsv_packet_push_front(QSVContext *q, AVPacket *avpkt)
-{
-    int fifo_size = av_fifo_size(q->pkt_fifo);
-    if (!fifo_size) {
-        /* easy case fifo is empty */
-        av_fifo_generic_write(q->pkt_fifo, avpkt, sizeof(*avpkt), NULL);
-    } else {
-        /* realloc necessary */
-        AVPacket pkt;
-        AVFifoBuffer *fifo = av_fifo_alloc(fifo_size + av_fifo_space(q->pkt_fifo));
-
-        av_fifo_generic_write(fifo, avpkt, sizeof(*avpkt), NULL);
-
-        while (av_fifo_size(q->pkt_fifo)) {
-            av_fifo_generic_read(q->pkt_fifo, &pkt, sizeof(pkt), NULL);
-            av_fifo_generic_write(fifo, &pkt, sizeof(pkt), NULL);
-        }
-        av_fifo_free(q->pkt_fifo);
-        q->pkt_fifo = fifo;
-    }
-}
-
-int ff_qsv_decode(AVCodecContext *avctx, QSVContext *q,
-                  AVFrame *frame, int *got_frame,
-                  AVPacket *avpkt)
-{
-    AVPacket pkt_ref = { 0 };
-    int ret = 0;
-
-    if (q->pkt_fifo && av_fifo_size(q->pkt_fifo) >= sizeof(AVPacket)) {
-        /* we already have got some buffered packets. so add new to tail */
-        ret = av_packet_ref(&pkt_ref, avpkt);
-        if (ret < 0)
-            return ret;
-        av_fifo_generic_write(q->pkt_fifo, &pkt_ref, sizeof(pkt_ref), NULL);
-    }
-    if (q->reinit_pending) {
-        ret = do_qsv_decode(avctx, q, frame, got_frame, avpkt);
-        if (!*got_frame) {
-            /* Flushing complete, no more frames */
-            close_decoder(q);
-            //return ff_qsv_decode(avctx, q, frame, got_frame, avpkt);
-        }
-    }
-    if (!q->reinit_pending) {
-        if (q->pkt_fifo && av_fifo_size(q->pkt_fifo) >= sizeof(AVPacket)) {
-            /* process buffered packets */
-            while (!*got_frame && av_fifo_size(q->pkt_fifo) >= sizeof(AVPacket)) {
-                av_fifo_generic_read(q->pkt_fifo, &pkt_ref, sizeof(pkt_ref), NULL);
-                ret = do_qsv_decode(avctx, q, frame, got_frame, &pkt_ref);
-                if (q->reinit_pending) {
-                    /*
-                       A rare case: new reinit pending when buffering existing.
-                       We should to return the pkt_ref back to same place of fifo
-                    */
-                    qsv_packet_push_front(q, &pkt_ref);
-                } else {
-                    av_packet_unref(&pkt_ref);
-                }
-            }
-        } else {
-            /* general decoding */
-            ret = do_qsv_decode(avctx, q, frame, got_frame, avpkt);
-            if (q->reinit_pending) {
-                ret = av_packet_ref(&pkt_ref, avpkt);
-                if (ret < 0)
-                    return ret;
-                av_fifo_generic_write(q->pkt_fifo, &pkt_ref, sizeof(pkt_ref), NULL);
-            }
-        }
-    }
-
-    return ret;
-}
-
-/*
-  This function resets decoder and corresponded buffers before seek operation
-*/
-void ff_qsv_decode_reset(AVCodecContext *avctx, QSVContext *q)
-{
-    QSVFrame *cur;
-    AVPacket pkt;
-    int ret = 0;
-    mfxVideoParam param = { { 0 } };
-
-    if (q->reinit_pending) {
-        close_decoder(q);
-    } else if (q->engine_ready) {
-        ret = MFXVideoDECODE_GetVideoParam(q->session, &param);
-        if (ret < 0) {
-            av_log(avctx, AV_LOG_ERROR, "MFX decode get param error %d\n", ret);
-        }
-
-        ret = MFXVideoDECODE_Reset(q->session, &param);
-        if (ret < 0) {
-            av_log(avctx, AV_LOG_ERROR, "MFX decode reset error %d\n", ret);
-        }
-
-        /* Free all frames*/
-        cur = q->work_frames;
-        while (cur) {
-            q->work_frames = cur->next;
-            av_frame_free(&cur->frame);
-            av_freep(&cur);
-            cur = q->work_frames;
-        }
-    }
-
-    /* Reset output surfaces */
-    if (q->async_fifo)
-        av_fifo_reset(q->async_fifo);
-
-    /* Reset input packets fifo */
-    while (q->pkt_fifo && av_fifo_size(q->pkt_fifo)) {
-        av_fifo_generic_read(q->pkt_fifo, &pkt, sizeof(pkt), NULL);
-        av_packet_unref(&pkt);
-    }
-
-    /* Reset input bitstream fifo */
-    if (q->input_fifo)
-        av_fifo_reset(q->input_fifo);
-}
-
 int ff_qsv_decode_close(QSVContext *q)
 {
-    close_decoder(q);
+    QSVFrame *cur = q->work_frames;

-    q->session = NULL;
+    if (q->session)
+        MFXVideoDECODE_Close(q->session);

-    ff_qsv_close_internal_session(&q->internal_qs);
+    while (q->async_fifo && av_fifo_size(q->async_fifo)) {
+        QSVFrame *out_frame;
+        mfxSyncPoint *sync;
+
+        av_fifo_generic_read(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
+        av_fifo_generic_read(q->async_fifo, &sync, sizeof(sync), NULL);
+
+        av_freep(&sync);
+    }
+
+    while (cur) {
+        q->work_frames = cur->next;
+        av_frame_free(&cur->frame);
+        av_freep(&cur);
+        cur = q->work_frames;
+    }

     av_fifo_free(q->async_fifo);
     q->async_fifo = NULL;

-    av_fifo_free(q->input_fifo);
-    q->input_fifo = NULL;
+    av_parser_close(q->parser);
+    avcodec_free_context(&q->avctx_internal);

-    av_fifo_free(q->pkt_fifo);
-    q->pkt_fifo = NULL;
+    if (q->internal_session)
+        MFXClose(q->internal_session);
+
+    av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
+    av_freep(&q->frames_ctx.mids);
+    q->frames_ctx.nb_mids = 0;

     return 0;
 }
+
+int ff_qsv_process_data(AVCodecContext *avctx, QSVContext *q,
+                        AVFrame *frame, int *got_frame, AVPacket *pkt)
+{
+    uint8_t *dummy_data;
+    int dummy_size;
+    int ret;
+
+    if (!q->avctx_internal) {
+        q->avctx_internal = avcodec_alloc_context3(NULL);
+        if (!q->avctx_internal)
+            return AVERROR(ENOMEM);
+
+        q->parser = av_parser_init(avctx->codec_id);
+        if (!q->parser)
+            return AVERROR(ENOMEM);
+
+        q->parser->flags |= PARSER_FLAG_COMPLETE_FRAMES;
+        q->orig_pix_fmt   = AV_PIX_FMT_NONE;
+    }
+
+    if (!pkt->size)
+        return qsv_decode(avctx, q, frame, got_frame, pkt);
+
+    /* we assume the packets are already split properly and want
+     * just the codec parameters here */
+    av_parser_parse2(q->parser, q->avctx_internal,
+                     &dummy_data, &dummy_size,
+                     pkt->data, pkt->size, pkt->pts, pkt->dts,
+                     pkt->pos);
+
+    /* TODO: flush delayed frames on reinit */
+    if (q->parser->format       != q->orig_pix_fmt    ||
+        q->parser->coded_width  != avctx->coded_width ||
+        q->parser->coded_height != avctx->coded_height) {
+        enum AVPixelFormat pix_fmts[3] = { AV_PIX_FMT_QSV,
+                                           AV_PIX_FMT_NONE,
+                                           AV_PIX_FMT_NONE };
+        enum AVPixelFormat qsv_format;
+
+        qsv_format = ff_qsv_map_pixfmt(q->parser->format);
+        if (qsv_format < 0) {
+            av_log(avctx, AV_LOG_ERROR,
+                   "Only 8-bit YUV420 streams are supported.\n");
+            ret = AVERROR(ENOSYS);
+            goto reinit_fail;
+        }
+
+        q->orig_pix_fmt     = q->parser->format;
+        avctx->pix_fmt      = pix_fmts[1] = qsv_format;
+        avctx->width        = q->parser->width;
+        avctx->height       = q->parser->height;
+        avctx->coded_width  = q->parser->coded_width;
+        avctx->coded_height = q->parser->coded_height;
+        avctx->field_order  = q->parser->field_order;
+        avctx->level        = q->avctx_internal->level;
+        avctx->profile      = q->avctx_internal->profile;
+
+        ret = ff_get_format(avctx, pix_fmts);
+        if (ret < 0)
+            goto reinit_fail;
+
+        avctx->pix_fmt = ret;
+
+        ret = qsv_decode_init(avctx, q);
+        if (ret < 0)
+            goto reinit_fail;
+    }
+
+    return qsv_decode(avctx, q, frame, got_frame, pkt);
+
+reinit_fail:
+    q->orig_pix_fmt = q->parser->format = avctx->pix_fmt = AV_PIX_FMT_NONE;
+    return ret;
+}
+
+void ff_qsv_decode_flush(AVCodecContext *avctx, QSVContext *q)
+{
+    q->orig_pix_fmt = AV_PIX_FMT_NONE;
+}

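The reworked decoder keeps one (QSVFrame*, mfxSyncPoint*) pair per in-flight decode in async_fifo, sized for async_depth + 1 pairs, and drains a frame once the fifo is full or once it is non-empty while flushing. A sketch of just that fifo discipline, using libavutil's AVFifoBuffer API of this era; DummyFrame and DummySync are stand-ins so it builds without libmfx:

#include "libavutil/fifo.h"

typedef struct DummyFrame { int queued; } DummyFrame;  /* stands in for QSVFrame */
typedef void *DummySync;                               /* stands in for mfxSyncPoint */

static AVFifoBuffer *alloc_async_fifo(int async_depth)
{
    /* same sizing as qsv_decode_init(): one pointer pair per slot */
    return av_fifo_alloc((1 + async_depth) *
                         (sizeof(DummySync*) + sizeof(DummyFrame*)));
}

/* queue one completed decode call, mirroring qsv_decode() */
static void push_pair(AVFifoBuffer *f, DummyFrame *frame, DummySync *sync)
{
    av_fifo_generic_write(f, &frame, sizeof(frame), NULL);
    av_fifo_generic_write(f, &sync,  sizeof(sync),  NULL);
}

/* drain condition from qsv_decode(): fifo full, or flushing with output pending */
static int should_drain(AVFifoBuffer *f, int pkt_size)
{
    return !av_fifo_space(f) || (!pkt_size && av_fifo_size(f));
}
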
libavcodec/qsvdec.h

@@ -41,7 +41,9 @@ typedef struct QSVContext {
     // the session we allocated internally, in case the caller did not provide
     // one
-    QSVSession internal_qs;
+    mfxSession internal_session;
+
+    QSVFramesContext frames_ctx;

     /**
      * a linked list of frames currently being used by QSV
@@ -49,22 +51,11 @@ typedef struct QSVContext {
     QSVFrame *work_frames;

     AVFifoBuffer *async_fifo;
-    AVFifoBuffer *input_fifo;
-
-    // we should to buffer input packets at some cases
-    // else it is not possible to handle dynamic stream changes correctly
-    // this fifo uses for input packets buffering
-    AVFifoBuffer *pkt_fifo;
-
-    // this flag indicates that header parsed,
-    // decoder instance created and ready to general decoding
-    int engine_ready;
-
-    // we can not just re-init decoder if different sequence header arrived
-    // we should to deliver all buffered frames but we can not decode new packets
-    // this time. So when reinit_pending is non-zero we flushing decoder and
-    // accumulate new arrived packets into pkt_fifo
-    int reinit_pending;
+
+    // the internal parser and codec context for parsing the data
+    AVCodecParserContext *parser;
+    AVCodecContext *avctx_internal;
+    enum AVPixelFormat orig_pix_fmt;

     // options set by the caller
     int async_depth;
@@ -78,11 +69,10 @@ typedef struct QSVContext {
 int ff_qsv_map_pixfmt(enum AVPixelFormat format);

-int ff_qsv_decode(AVCodecContext *s, QSVContext *q,
-                  AVFrame *frame, int *got_frame,
-                  AVPacket *avpkt);
+int ff_qsv_process_data(AVCodecContext *avctx, QSVContext *q,
+                        AVFrame *frame, int *got_frame, AVPacket *pkt);

-void ff_qsv_decode_reset(AVCodecContext *avctx, QSVContext *q);
+void ff_qsv_decode_flush(AVCodecContext *avctx, QSVContext *q);

 int ff_qsv_decode_close(QSVContext *q);

libavcodec/qsvdec_h2645.c

@@ -33,11 +33,14 @@
 #include "avcodec.h"
 #include "internal.h"
+#include "qsv_internal.h"
 #include "qsvdec.h"
+#include "qsv.h"

 enum LoadPlugin {
     LOAD_PLUGIN_NONE,
     LOAD_PLUGIN_HEVC_SW,
+    LOAD_PLUGIN_HEVC_HW,
 };

 typedef struct QSVH2645Context {
@@ -86,7 +89,8 @@ static av_cold int qsv_decode_init(AVCodecContext *avctx)
     int ret;

     if (avctx->codec_id == AV_CODEC_ID_HEVC && s->load_plugin != LOAD_PLUGIN_NONE) {
-        static const char *uid_hevcenc_sw = "15dd936825ad475ea34e35f3f54217a6";
+        static const char *uid_hevcdec_sw = "15dd936825ad475ea34e35f3f54217a6";
+        static const char *uid_hevcdec_hw = "33a61c0b4c27454ca8d85dde757c6f8e";

         if (s->qsv.load_plugins[0]) {
             av_log(avctx, AV_LOG_WARNING,
@@ -94,22 +98,22 @@ static av_cold int qsv_decode_init(AVCodecContext *avctx)
                    "The load_plugin value will be ignored.\n");
         } else {
             av_freep(&s->qsv.load_plugins);
-            s->qsv.load_plugins = av_strdup(uid_hevcenc_sw);
+
+            if (s->load_plugin == LOAD_PLUGIN_HEVC_SW)
+                s->qsv.load_plugins = av_strdup(uid_hevcdec_sw);
+            else
+                s->qsv.load_plugins = av_strdup(uid_hevcdec_hw);
             if (!s->qsv.load_plugins)
                 return AVERROR(ENOMEM);
         }
     }

     s->packet_fifo = av_fifo_alloc(sizeof(AVPacket));
     if (!s->packet_fifo) {
         ret = AVERROR(ENOMEM);
         goto fail;
     }

-    if (avctx->codec_id == AV_CODEC_ID_H264) {
-        //regarding ticks_per_frame description, should be 2 for h.264:
-        avctx->ticks_per_frame = 2;
-    }
-
     return 0;
 fail:
     qsv_decode_close(avctx);
@@ -184,7 +188,7 @@ static int qsv_decode_frame(AVCodecContext *avctx, void *data,
             /* no more data */
             if (av_fifo_size(s->packet_fifo) < sizeof(AVPacket))
-                return avpkt->size ? avpkt->size : ff_qsv_decode(avctx, &s->qsv, frame, got_frame, avpkt);
+                return avpkt->size ? avpkt->size : ff_qsv_process_data(avctx, &s->qsv, frame, got_frame, avpkt);

             av_packet_unref(&s->pkt_filtered);
@@ -202,7 +206,7 @@ static int qsv_decode_frame(AVCodecContext *avctx, void *data,
             av_packet_unref(&input_ref);
         }

-        ret = ff_qsv_decode(avctx, &s->qsv, frame, got_frame, &s->pkt_filtered);
+        ret = ff_qsv_process_data(avctx, &s->qsv, frame, got_frame, &s->pkt_filtered);
         if (ret < 0)
             return ret;
@@ -216,7 +220,9 @@ static int qsv_decode_frame(AVCodecContext *avctx, void *data,
 static void qsv_decode_flush(AVCodecContext *avctx)
 {
     QSVH2645Context *s = avctx->priv_data;
-    ff_qsv_decode_reset(avctx, &s->qsv);
+
+    qsv_clear_buffers(s);
+    ff_qsv_decode_flush(avctx, &s->qsv);
 }

 #define OFFSET(x) offsetof(QSVH2645Context, x)
@@ -233,9 +239,10 @@ AVHWAccel ff_hevc_qsv_hwaccel = {
 static const AVOption hevc_options[] = {
     { "async_depth", "Internal parallelization depth, the higher the value the higher the latency.", OFFSET(qsv.async_depth), AV_OPT_TYPE_INT, { .i64 = ASYNC_DEPTH_DEFAULT }, 0, INT_MAX, VD },

-    { "load_plugin", "A user plugin to load in an internal session", OFFSET(load_plugin), AV_OPT_TYPE_INT, { .i64 = LOAD_PLUGIN_HEVC_SW }, LOAD_PLUGIN_NONE, LOAD_PLUGIN_HEVC_SW, VD, "load_plugin" },
+    { "load_plugin", "A user plugin to load in an internal session", OFFSET(load_plugin), AV_OPT_TYPE_INT, { .i64 = LOAD_PLUGIN_HEVC_SW }, LOAD_PLUGIN_NONE, LOAD_PLUGIN_HEVC_HW, VD, "load_plugin" },
     { "none", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = LOAD_PLUGIN_NONE }, 0, 0, VD, "load_plugin" },
     { "hevc_sw", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = LOAD_PLUGIN_HEVC_SW }, 0, 0, VD, "load_plugin" },
+    { "hevc_hw", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = LOAD_PLUGIN_HEVC_HW }, 0, 0, VD, "load_plugin" },

     { "load_plugins", "A :-separate list of hexadecimal plugin UIDs to load in an internal session",
         OFFSET(qsv.load_plugins), AV_OPT_TYPE_STRING, { .str = "" }, 0, 0, VD },

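With the new hevc_hw option value, a caller can pick the hardware HEVC plugin through the public AVOption API. One plausible usage sketch (not from this commit; error handling trimmed, and the exact open sequence is up to the application):

#include "libavcodec/avcodec.h"
#include "libavutil/opt.h"

static AVCodecContext *open_hevc_qsv_hw(void)
{
    AVCodec *codec = avcodec_find_decoder_by_name("hevc_qsv");
    AVCodecContext *avctx = codec ? avcodec_alloc_context3(codec) : NULL;
    if (!avctx)
        return NULL;
    /* "load_plugin" / "hevc_hw" are the option name and value added above */
    av_opt_set(avctx->priv_data, "load_plugin", "hevc_hw", 0);
    if (avcodec_open2(avctx, codec, NULL) < 0)
        avcodec_free_context(&avctx);
    return avctx;
}
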
libavcodec/qsvdec_mpeg2.c

@@ -1,5 +1,7 @@
 /*
- * Intel MediaSDK QSV based MPEG-2 video decoder
+ * Intel MediaSDK QSV based MPEG-2 decoder
+ *
+ * copyright (c) 2015 Anton Khirnov
  *
  * This file is part of FFmpeg.
  *
@@ -18,32 +20,70 @@
  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  */

 #include <stdint.h>
 #include <string.h>

-#include <mfx/mfxvideo.h>
-
 #include "libavutil/common.h"
+#include "libavutil/fifo.h"
 #include "libavutil/opt.h"

 #include "avcodec.h"
+#include "internal.h"
+#include "qsv_internal.h"
 #include "qsvdec.h"
+#include "qsv.h"

 typedef struct QSVMPEG2Context {
     AVClass *class;
     QSVContext qsv;
+
+    AVFifoBuffer *packet_fifo;
+
+    AVPacket input_ref;
 } QSVMPEG2Context;

+static void qsv_clear_buffers(QSVMPEG2Context *s)
+{
+    AVPacket pkt;
+    while (av_fifo_size(s->packet_fifo) >= sizeof(pkt)) {
+        av_fifo_generic_read(s->packet_fifo, &pkt, sizeof(pkt), NULL);
+        av_packet_unref(&pkt);
+    }
+
+    av_packet_unref(&s->input_ref);
+}
+
 static av_cold int qsv_decode_close(AVCodecContext *avctx)
 {
     QSVMPEG2Context *s = avctx->priv_data;

     ff_qsv_decode_close(&s->qsv);

+    qsv_clear_buffers(s);
+
+    av_fifo_free(s->packet_fifo);
+
     return 0;
 }

 static av_cold int qsv_decode_init(AVCodecContext *avctx)
 {
+    QSVMPEG2Context *s = avctx->priv_data;
+    int ret;
+
+    s->packet_fifo = av_fifo_alloc(sizeof(AVPacket));
+    if (!s->packet_fifo) {
+        ret = AVERROR(ENOMEM);
+        goto fail;
+    }
+
     return 0;
+fail:
+    qsv_decode_close(avctx);
+    return ret;
 }

 static int qsv_decode_frame(AVCodecContext *avctx, void *data,
@@ -51,14 +91,53 @@ static int qsv_decode_frame(AVCodecContext *avctx, void *data,
 {
     QSVMPEG2Context *s = avctx->priv_data;
     AVFrame *frame = data;
+    int ret;
+
+    /* buffer the input packet */
+    if (avpkt->size) {
+        AVPacket input_ref = { 0 };
+
+        if (av_fifo_space(s->packet_fifo) < sizeof(input_ref)) {
+            ret = av_fifo_realloc2(s->packet_fifo,
+                                   av_fifo_size(s->packet_fifo) + sizeof(input_ref));
+            if (ret < 0)
+                return ret;
+        }
+
+        ret = av_packet_ref(&input_ref, avpkt);
+        if (ret < 0)
+            return ret;
+        av_fifo_generic_write(s->packet_fifo, &input_ref, sizeof(input_ref), NULL);
+    }

-    return ff_qsv_decode(avctx, &s->qsv, frame, got_frame, avpkt);
+    /* process buffered data */
+    while (!*got_frame) {
+        if (s->input_ref.size <= 0) {
+            /* no more data */
+            if (av_fifo_size(s->packet_fifo) < sizeof(AVPacket))
+                return avpkt->size ? avpkt->size : ff_qsv_process_data(avctx, &s->qsv, frame, got_frame, avpkt);
+
+            av_packet_unref(&s->input_ref);
+            av_fifo_generic_read(s->packet_fifo, &s->input_ref, sizeof(s->input_ref), NULL);
+        }
+
+        ret = ff_qsv_process_data(avctx, &s->qsv, frame, got_frame, &s->input_ref);
+        if (ret < 0)
+            return ret;
+
+        s->input_ref.size -= ret;
+        s->input_ref.data += ret;
+    }
+
+    return avpkt->size;
 }

 static void qsv_decode_flush(AVCodecContext *avctx)
 {
     QSVMPEG2Context *s = avctx->priv_data;
-    ff_qsv_decode_reset(avctx, &s->qsv);
+
+    qsv_clear_buffers(s);
+    ff_qsv_decode_flush(avctx, &s->qsv);
 }

 AVHWAccel ff_mpeg2_qsv_hwaccel = {

libavcodec/qsvdec_vc1.c

@@ -21,18 +21,37 @@
 #include <stdint.h>
 #include <string.h>

-#include <mfx/mfxvideo.h>
-
 #include "libavutil/common.h"
 #include "libavutil/fifo.h"
 #include "libavutil/opt.h"

 #include "avcodec.h"
+#include "internal.h"
+#include "qsv_internal.h"
 #include "qsvdec.h"
+#include "qsv.h"

 typedef struct QSVVC1Context {
     AVClass *class;
     QSVContext qsv;
+
+    AVFifoBuffer *packet_fifo;
+
+    AVPacket input_ref;
 } QSVVC1Context;

+static void qsv_clear_buffers(QSVVC1Context *s)
+{
+    AVPacket pkt;
+    while (av_fifo_size(s->packet_fifo) >= sizeof(pkt)) {
+        av_fifo_generic_read(s->packet_fifo, &pkt, sizeof(pkt), NULL);
+        av_packet_unref(&pkt);
+    }
+
+    av_packet_unref(&s->input_ref);
+}
+
 static av_cold int qsv_decode_close(AVCodecContext *avctx)
 {
@@ -40,22 +59,82 @@ static av_cold int qsv_decode_close(AVCodecContext *avctx)
     ff_qsv_decode_close(&s->qsv);

+    qsv_clear_buffers(s);
+
+    av_fifo_free(s->packet_fifo);
+
     return 0;
 }

+static av_cold int qsv_decode_init(AVCodecContext *avctx)
+{
+    QSVVC1Context *s = avctx->priv_data;
+    int ret;
+
+    s->packet_fifo = av_fifo_alloc(sizeof(AVPacket));
+    if (!s->packet_fifo) {
+        ret = AVERROR(ENOMEM);
+        goto fail;
+    }
+
+    return 0;
+fail:
+    qsv_decode_close(avctx);
+    return ret;
+}
+
 static int qsv_decode_frame(AVCodecContext *avctx, void *data,
                             int *got_frame, AVPacket *avpkt)
 {
     QSVVC1Context *s = avctx->priv_data;
     AVFrame *frame = data;
+    int ret;
+
+    /* buffer the input packet */
+    if (avpkt->size) {
+        AVPacket input_ref = { 0 };
+
+        if (av_fifo_space(s->packet_fifo) < sizeof(input_ref)) {
+            ret = av_fifo_realloc2(s->packet_fifo,
+                                   av_fifo_size(s->packet_fifo) + sizeof(input_ref));
+            if (ret < 0)
+                return ret;
+        }
+
+        ret = av_packet_ref(&input_ref, avpkt);
+        if (ret < 0)
+            return ret;
+        av_fifo_generic_write(s->packet_fifo, &input_ref, sizeof(input_ref), NULL);
+    }

-    return ff_qsv_decode(avctx, &s->qsv, frame, got_frame, avpkt);
+    /* process buffered data */
+    while (!*got_frame) {
+        if (s->input_ref.size <= 0) {
+            /* no more data */
+            if (av_fifo_size(s->packet_fifo) < sizeof(AVPacket))
+                return avpkt->size ? avpkt->size : ff_qsv_process_data(avctx, &s->qsv, frame, got_frame, avpkt);
+
+            av_packet_unref(&s->input_ref);
+            av_fifo_generic_read(s->packet_fifo, &s->input_ref, sizeof(s->input_ref), NULL);
+        }
+
+        ret = ff_qsv_process_data(avctx, &s->qsv, frame, got_frame, &s->input_ref);
+        if (ret < 0)
+            return ret;
+
+        s->input_ref.size -= ret;
+        s->input_ref.data += ret;
+    }
+
+    return avpkt->size;
 }

 static void qsv_decode_flush(AVCodecContext *avctx)
 {
     QSVVC1Context *s = avctx->priv_data;
-    ff_qsv_decode_reset(avctx, &s->qsv);
+
+    qsv_clear_buffers(s);
+    ff_qsv_decode_flush(avctx, &s->qsv);
 }

 AVHWAccel ff_vc1_qsv_hwaccel = {
@@ -85,11 +164,11 @@ AVCodec ff_vc1_qsv_decoder = {
     .priv_data_size = sizeof(QSVVC1Context),
     .type           = AVMEDIA_TYPE_VIDEO,
     .id             = AV_CODEC_ID_VC1,
-    .init           = NULL,
+    .init           = qsv_decode_init,
     .decode         = qsv_decode_frame,
     .flush          = qsv_decode_flush,
     .close          = qsv_decode_close,
-    .capabilities   = AV_CODEC_CAP_DELAY,
+    .capabilities   = AV_CODEC_CAP_DELAY | AV_CODEC_CAP_DR1,
     .priv_class     = &class,
     .pix_fmts       = (const enum AVPixelFormat[]){ AV_PIX_FMT_NV12,
                                                     AV_PIX_FMT_QSV,

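The wrapper loop shared by the MPEG-2 and VC-1 decoders relies on ff_qsv_process_data() returning how many bytes it consumed (bs.DataOffset from qsv_decode()), and advances the buffered packet by that amount until a frame appears. A self-contained sketch of just that consume-and-advance arithmetic; stub_process is a stand-in for the real call:

#include <stdio.h>

static int stub_process(const unsigned char *data, int size, int *got_frame)
{
    int consumed = size < 4 ? size : 4;  /* pretend 4 bytes are eaten per call */
    *got_frame   = (size <= 4);          /* ...and a frame pops out at the end */
    return consumed;
}

int main(void)
{
    unsigned char buf[10] = { 0 };
    unsigned char *data = buf;
    int size = sizeof(buf), got_frame = 0;

    while (!got_frame && size > 0) {
        int ret = stub_process(data, size, &got_frame);
        size -= ret;   /* mirrors s->input_ref.size -= ret; */
        data += ret;   /* mirrors s->input_ref.data += ret; */
    }
    printf("frame after consuming %d bytes\n", (int)(data - buf));
    return 0;
}
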
libavcodec/qsvenc.c

@@ -26,6 +26,8 @@
 #include <mfx/mfxvideo.h>

 #include "libavutil/common.h"
+#include "libavutil/hwcontext.h"
+#include "libavutil/hwcontext_qsv.h"
 #include "libavutil/mem.h"
 #include "libavutil/log.h"
 #include "libavutil/time.h"
@@ -379,31 +381,25 @@ static int init_video_param(AVCodecContext *avctx, QSVEncContext *q)
     q->param.mfx.EncodedOrder   = 0;
     q->param.mfx.BufferSizeInKB = 0;

-    q->param.mfx.FrameInfo.FourCC         = MFX_FOURCC_NV12;
-    q->param.mfx.FrameInfo.CropX          = 0;
-    q->param.mfx.FrameInfo.CropY          = 0;
-    q->param.mfx.FrameInfo.CropW          = avctx->width;
-    q->param.mfx.FrameInfo.CropH          = avctx->height;
-    q->param.mfx.FrameInfo.AspectRatioW   = avctx->sample_aspect_ratio.num;
-    q->param.mfx.FrameInfo.AspectRatioH   = avctx->sample_aspect_ratio.den;
-    q->param.mfx.FrameInfo.ChromaFormat   = MFX_CHROMAFORMAT_YUV420;
-    q->param.mfx.FrameInfo.BitDepthLuma   = 8;
-    q->param.mfx.FrameInfo.BitDepthChroma = 8;
-    q->param.mfx.FrameInfo.Width          = FFALIGN(avctx->width, q->width_align);
-
-    if (avctx->flags & AV_CODEC_FLAG_INTERLACED_DCT) {
-        /* A true field layout (TFF or BFF) is not important here,
-           it will specified later during frame encoding. But it is important
-           to specify is frame progressive or not because allowed heigh alignment
-           does depend by this.
-        */
-        q->param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_FIELD_TFF;
-        q->height_align = 32;
+    if (avctx->hw_frames_ctx) {
+        AVHWFramesContext *frames_ctx    = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
+        AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
+        q->param.mfx.FrameInfo = frames_hwctx->surfaces[0].Info;
     } else {
-        q->param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
-        q->height_align = 16;
+        q->param.mfx.FrameInfo.FourCC         = MFX_FOURCC_NV12;
+        q->param.mfx.FrameInfo.Width          = FFALIGN(avctx->width, q->width_align);
+        q->param.mfx.FrameInfo.Height         = FFALIGN(avctx->height, 32);
+        q->param.mfx.FrameInfo.CropX          = 0;
+        q->param.mfx.FrameInfo.CropY          = 0;
+        q->param.mfx.FrameInfo.CropW          = avctx->width;
+        q->param.mfx.FrameInfo.CropH          = avctx->height;
+        q->param.mfx.FrameInfo.AspectRatioW   = avctx->sample_aspect_ratio.num;
+        q->param.mfx.FrameInfo.AspectRatioH   = avctx->sample_aspect_ratio.den;
+        q->param.mfx.FrameInfo.PicStruct      = MFX_PICSTRUCT_PROGRESSIVE;
+        q->param.mfx.FrameInfo.ChromaFormat   = MFX_CHROMAFORMAT_YUV420;
+        q->param.mfx.FrameInfo.BitDepthLuma   = 8;
+        q->param.mfx.FrameInfo.BitDepthChroma = 8;
     }
-    q->param.mfx.FrameInfo.Height = FFALIGN(avctx->height, q->height_align);

     if (avctx->framerate.den > 0 && avctx->framerate.num > 0) {
         q->param.mfx.FrameInfo.FrameRateExtN = avctx->framerate.num;
@@ -536,7 +532,7 @@ FF_ENABLE_DEPRECATION_WARNINGS
         q->extparam_internal[q->nb_extparam_internal++] = (mfxExtBuffer *)&q->extco2;

-#if QSV_VERSION_ATLEAST(1,8)
+#if QSV_HAVE_LA_DS
         q->extco2.LookAheadDS = q->look_ahead_downsampling;
 #endif
     }
@@ -673,12 +669,45 @@ static int qsv_init_opaque_alloc(AVCodecContext *avctx, QSVEncContext *q)
     return 0;
 }

+static int qsvenc_init_session(AVCodecContext *avctx, QSVEncContext *q)
+{
+    int ret;
+
+    if (avctx->hwaccel_context) {
+        AVQSVContext *qsv = avctx->hwaccel_context;
+        q->session = qsv->session;
+    } else if (avctx->hw_frames_ctx) {
+        q->frames_ctx.hw_frames_ctx = av_buffer_ref(avctx->hw_frames_ctx);
+        if (!q->frames_ctx.hw_frames_ctx)
+            return AVERROR(ENOMEM);
+
+        ret = ff_qsv_init_session_hwcontext(avctx, &q->internal_session,
+                                            &q->frames_ctx, q->load_plugins,
+                                            q->param.IOPattern == MFX_IOPATTERN_IN_OPAQUE_MEMORY);
+        if (ret < 0) {
+            av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
+            return ret;
+        }
+
+        q->session = q->internal_session;
+    } else {
+        ret = ff_qsv_init_internal_session(avctx, &q->internal_session,
+                                           q->load_plugins);
+        if (ret < 0)
+            return ret;
+
+        q->session = q->internal_session;
+    }
+
+    return 0;
+}
+
 int ff_qsv_enc_init(AVCodecContext *avctx, QSVEncContext *q)
 {
+    int iopattern = 0;
     int opaque_alloc = 0;
     int ret;

-    q->param.IOPattern  = MFX_IOPATTERN_IN_SYSTEM_MEMORY;
     q->param.AsyncDepth = q->async_depth;

     q->async_fifo = av_fifo_alloc((1 + q->async_depth) *
@@ -689,32 +718,34 @@ int ff_qsv_enc_init(AVCodecContext *avctx, QSVEncContext *q)
     if (avctx->hwaccel_context) {
         AVQSVContext *qsv = avctx->hwaccel_context;

-        q->session         = qsv->session;
-        q->param.IOPattern = qsv->iopattern;
-
+        iopattern    = qsv->iopattern;
         opaque_alloc = qsv->opaque_alloc;
     }

-    if (!q->session) {
-        ret = ff_qsv_init_internal_session(avctx, &q->internal_qs,
-                                           q->load_plugins);
-        if (ret < 0)
-            return ret;
+    if (avctx->hw_frames_ctx) {
+        AVHWFramesContext *frames_ctx    = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
+        AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;

-        q->session = q->internal_qs.session;
+        if (!iopattern) {
+            if (frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)
+                iopattern = MFX_IOPATTERN_IN_OPAQUE_MEMORY;
+            else if (frames_hwctx->frame_type &
+                     (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))
+                iopattern = MFX_IOPATTERN_IN_VIDEO_MEMORY;
+        }
     }

-    ret = init_video_param(avctx, q);
+    if (!iopattern)
+        iopattern = MFX_IOPATTERN_IN_SYSTEM_MEMORY;
+    q->param.IOPattern = iopattern;
+
+    ret = qsvenc_init_session(avctx, q);
     if (ret < 0)
         return ret;

-    ret = MFXVideoENCODE_Query(q->session, &q->param, &q->param);
-    if (MFX_WRN_PARTIAL_ACCELERATION==ret) {
-        av_log(avctx, AV_LOG_WARNING, "Encoder will work with partial HW acceleration\n");
-    } else if (ret < 0) {
-        av_log(avctx, AV_LOG_ERROR, "Error %d querying encoder params\n", ret);
-        return ff_qsv_error(ret);
-    }
+    ret = init_video_param(avctx, q);
+    if (ret < 0)
+        return ret;

     ret = MFXVideoENCODE_QueryIOSurf(q->session, &q->param, &q->req);
     if (ret < 0) {
@@ -758,7 +789,7 @@ int ff_qsv_enc_init(AVCodecContext *avctx, QSVEncContext *q)
     }

     ret = MFXVideoENCODE_Init(q->session, &q->param);
-    if (MFX_WRN_PARTIAL_ACCELERATION==ret) {
+    if (ret == MFX_WRN_PARTIAL_ACCELERATION) {
         av_log(avctx, AV_LOG_WARNING, "Encoder will work with partial HW acceleration\n");
     } else if (ret < 0) {
         av_log(avctx, AV_LOG_ERROR, "Error initializing the encoder\n");
@@ -856,9 +887,8 @@ static int submit_frame(QSVEncContext *q, const AVFrame *frame,
         qf->surface = (mfxFrameSurface1*)qf->frame->data[3];
     } else {
         /* make a copy if the input is not padded as libmfx requires */
-        if ( frame->height & (q->height_align - 1) ||
-             frame->linesize[0] & (q->width_align - 1)) {
-            qf->frame->height = FFALIGN(frame->height, q->height_align);
+        if (frame->height & 31 || frame->linesize[0] & (q->width_align - 1)) {
+            qf->frame->height = FFALIGN(frame->height, 32);
             qf->frame->width  = FFALIGN(frame->width, q->width_align);

             ret = ff_get_buffer(q->avctx, qf->frame, AV_GET_BUFFER_FLAG_REF);
@@ -924,7 +954,7 @@ static int encode_frame(AVCodecContext *avctx, QSVEncContext *q,
     mfxBitstream *bs;

     mfxFrameSurface1 *surf = NULL;
-    mfxSyncPoint *sync = NULL;
+    mfxSyncPoint *sync     = NULL;
     QSVFrame *qsv_frame = NULL;
     mfxEncodeCtrl* enc_ctrl = NULL;
     int ret;
@@ -968,30 +998,21 @@ static int encode_frame(AVCodecContext *avctx, QSVEncContext *q,
     do {
         ret = MFXVideoENCODE_EncodeFrameAsync(q->session, enc_ctrl, surf, bs, sync);
-        if (ret == MFX_WRN_DEVICE_BUSY) {
+        if (ret == MFX_WRN_DEVICE_BUSY)
             av_usleep(500);
-            continue;
-        }
-        break;
-    } while ( 1 );
+    } while (ret > 0);

     if (ret < 0) {
         av_packet_unref(&new_pkt);
         av_freep(&bs);
-        if (ret == MFX_ERR_MORE_DATA)
-            return 0;
-        av_log(avctx, AV_LOG_ERROR, "EncodeFrameAsync returned %d\n", ret);
-        return ff_qsv_error(ret);
+        av_freep(&sync);
+        return (ret == MFX_ERR_MORE_DATA) ? 0 : ff_qsv_error(ret);
     }

-    if (ret == MFX_WRN_INCOMPATIBLE_VIDEO_PARAM) {
-        if (frame->interlaced_frame)
-            print_interlace_msg(avctx, q);
-        else if (*sync) {
-            av_log(avctx, AV_LOG_WARNING,
-                   "EncodeFrameAsync returned 'incompatible param' code\n");
-        }
-    }
-    if (sync) {
+    if (ret == MFX_WRN_INCOMPATIBLE_VIDEO_PARAM && frame->interlaced_frame)
+        print_interlace_msg(avctx, q);
+
     av_fifo_generic_write(q->async_fifo, &new_pkt, sizeof(new_pkt), NULL);
     av_fifo_generic_write(q->async_fifo, &sync, sizeof(sync), NULL);
     av_fifo_generic_write(q->async_fifo, &bs, sizeof(bs), NULL);
@@ -1079,9 +1100,14 @@ int ff_qsv_enc_close(AVCodecContext *avctx, QSVEncContext *q)
     if (q->session)
         MFXVideoENCODE_Close(q->session);
-    q->session = NULL;
+    if (q->internal_session)
+        MFXClose(q->internal_session);

-    ff_qsv_close_internal_session(&q->internal_qs);
+    q->session          = NULL;
+    q->internal_session = NULL;
+
+    av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
+    av_freep(&q->frames_ctx.mids);
+    q->frames_ctx.nb_mids = 0;

     cur = q->work_frames;
     while (cur) {

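The encoder now derives its IOPattern the same way the decoder does: an explicit hwaccel_context value wins, otherwise the frame_type bits of the supplied hw frames context select opaque or video memory, with system memory as the fallback. A compact sketch of that selection logic, assuming only the libmfx headers (the constants are standard mfxstructures.h values):

#include <mfx/mfxvideo.h>

static int pick_enc_iopattern(int user_iopattern, int have_hw_frames,
                              unsigned frame_type)
{
    int iopattern = user_iopattern;      /* from AVQSVContext, may be 0 */

    if (!iopattern && have_hw_frames) {
        if (frame_type & MFX_MEMTYPE_OPAQUE_FRAME)
            iopattern = MFX_IOPATTERN_IN_OPAQUE_MEMORY;
        else if (frame_type & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET |
                               MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))
            iopattern = MFX_IOPATTERN_IN_VIDEO_MEMORY;
    }
    if (!iopattern)                      /* default, as in ff_qsv_enc_init() */
        iopattern = MFX_IOPATTERN_IN_SYSTEM_MEMORY;
    return iopattern;
}
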
libavcodec/qsvenc.h

@@ -42,6 +42,7 @@
 #define QSV_HAVE_BREF_TYPE      QSV_VERSION_ATLEAST(1, 8)

 #define QSV_HAVE_LA             QSV_VERSION_ATLEAST(1, 7)
+#define QSV_HAVE_LA_DS          QSV_VERSION_ATLEAST(1, 8)
 #define QSV_HAVE_LA_HRD         QSV_VERSION_ATLEAST(1, 11)

 #define QSV_HAVE_ICQ            QSV_VERSION_ATLEAST(1, 8)
 #define QSV_HAVE_VCM            QSV_VERSION_ATLEAST(1, 8)
@@ -79,11 +80,10 @@ typedef struct QSVEncContext {
     QSVFrame *work_frames;

     mfxSession session;
-    QSVSession internal_qs;
+    mfxSession internal_session;

     int packet_size;
     int width_align;
-    int height_align;

     mfxVideoParam param;
     mfxFrameAllocRequest req;
@@ -104,6 +104,8 @@ typedef struct QSVEncContext {
     AVFifoBuffer *async_fifo;

+    QSVFramesContext frames_ctx;
+
     // options set by the caller
     int async_depth;
     int idr_interval;

libavcodec/qsvenc_h264.c

@@ -111,8 +111,7 @@ static const AVOption options[] = {
     { "look_ahead", "Use VBR algorithm with look ahead", OFFSET(qsv.look_ahead), AV_OPT_TYPE_INT, { .i64 = 1 }, 0, 1, VE },
     { "look_ahead_depth", "Depth of look ahead in number frames", OFFSET(qsv.look_ahead_depth), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 100, VE },
 #endif
-
-#if QSV_VERSION_ATLEAST(1,8)
+#if QSV_HAVE_LA_DS
     { "look_ahead_downsampling", NULL, OFFSET(qsv.look_ahead_downsampling), AV_OPT_TYPE_INT, { .i64 = MFX_LOOKAHEAD_DS_UNKNOWN }, MFX_LOOKAHEAD_DS_UNKNOWN, MFX_LOOKAHEAD_DS_2x, VE, "look_ahead_downsampling" },
     { "unknown" , NULL, 0, AV_OPT_TYPE_CONST, { .i64 = MFX_LOOKAHEAD_DS_UNKNOWN }, INT_MIN, INT_MAX, VE, "look_ahead_downsampling" },
     { "off"     , NULL, 0, AV_OPT_TYPE_CONST, { .i64 = MFX_LOOKAHEAD_DS_OFF }, INT_MIN, INT_MAX, VE, "look_ahead_downsampling" },