From b43b95f4789b6e60f9684918fd3c0a5f3f18aef6 Mon Sep 17 00:00:00 2001 From: Mark Thompson Date: Wed, 26 Apr 2017 21:39:54 +0100 Subject: [PATCH 01/10] vp9_raw_reorder_bsf: Remove a redundant allocation This was left over from an earlier version which created the new packet inside the current frame structure. Now it just leaks an unused packet, so remove the allocation entirely. --- libavcodec/vp9_raw_reorder_bsf.c | 4 ---- 1 file changed, 4 deletions(-) diff --git a/libavcodec/vp9_raw_reorder_bsf.c b/libavcodec/vp9_raw_reorder_bsf.c index 7f00f5f103..f5a5e49eda 100644 --- a/libavcodec/vp9_raw_reorder_bsf.c +++ b/libavcodec/vp9_raw_reorder_bsf.c @@ -241,10 +241,6 @@ static int vp9_raw_reorder_make_output(AVBSFContext *bsf, "(%"PRId64") from slot %d.\n", frame->sequence, frame->pts, s); - frame->packet = av_packet_alloc(); - if (!frame->packet) - return AVERROR(ENOMEM); - err = av_new_packet(out, 2); if (err < 0) return err; From 9203aac22874c7259e155b7d00f1f33bb1355129 Mon Sep 17 00:00:00 2001 From: Mark Thompson Date: Sun, 30 Apr 2017 14:11:51 +0100 Subject: [PATCH 02/10] avconv_hw: Add implicit device creation with default parameters If -hwaccel foo is supplied without any other device options, and the foo hwaccel is meant to have a device, try to make such a device with default parameters for the hwaccel to use. --- avtools/avconv_hw.c | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/avtools/avconv_hw.c b/avtools/avconv_hw.c index fd1618389b..94be723304 100644 --- a/avtools/avconv_hw.c +++ b/avtools/avconv_hw.c @@ -287,6 +287,10 @@ int hw_device_setup_for_decode(InputStream *ist) type = hw_device_match_type_in_name(ist->dec->name); if (type != AV_HWDEVICE_TYPE_NONE) { dev = hw_device_get_by_type(type); + if (!dev) { + hw_device_init_from_string(av_hwdevice_get_type_name(type), + &dev); + } } else { // No device required. return 0; From e669db76108de8d7a36c2274c99da82cc94d1dd1 Mon Sep 17 00:00:00 2001 From: Mark Thompson Date: Sat, 4 Mar 2017 23:57:38 +0000 Subject: [PATCH 03/10] avconv: Support setting the hardware device to use when filtering This only supports one device globally, but more can be used by passing them with input streams in hw_frames_ctx or by deriving new devices inside a filter graph with hwmap. --- avtools/avconv.h | 1 + avtools/avconv_filter.c | 10 ++++++++-- avtools/avconv_opt.c | 17 +++++++++++++++++ doc/avconv.texi | 14 ++++++++++++++ 4 files changed, 40 insertions(+), 2 deletions(-) diff --git a/avtools/avconv.h b/avtools/avconv.h index 9415b208be..3354c50444 100644 --- a/avtools/avconv.h +++ b/avtools/avconv.h @@ -489,6 +489,7 @@ extern const OptionDef options[]; extern const HWAccel hwaccels[]; extern int hwaccel_lax_profile_check; extern AVBufferRef *hw_device_ctx; +extern HWDevice *filter_hw_device; void reset_options(OptionsContext *o); void show_usage(void); diff --git a/avtools/avconv_filter.c b/avtools/avconv_filter.c index e53dcd271c..884478da27 100644 --- a/avtools/avconv_filter.c +++ b/avtools/avconv_filter.c @@ -711,9 +711,15 @@ int configure_filtergraph(FilterGraph *fg) if ((ret = avfilter_graph_parse2(fg->graph, graph_desc, &inputs, &outputs)) < 0) goto fail; - if (hw_device_ctx) { + if (filter_hw_device || hw_device_ctx) { + AVBufferRef *device = filter_hw_device ? 
filter_hw_device->device_ref + : hw_device_ctx; for (i = 0; i < fg->graph->nb_filters; i++) { - fg->graph->filters[i]->hw_device_ctx = av_buffer_ref(hw_device_ctx); + fg->graph->filters[i]->hw_device_ctx = av_buffer_ref(device); + if (!fg->graph->filters[i]->hw_device_ctx) { + ret = AVERROR(ENOMEM); + goto fail; + } } } diff --git a/avtools/avconv_opt.c b/avtools/avconv_opt.c index e970c8e46c..9839a2269e 100644 --- a/avtools/avconv_opt.c +++ b/avtools/avconv_opt.c @@ -80,6 +80,7 @@ const HWAccel hwaccels[] = { }; int hwaccel_lax_profile_check = 0; AVBufferRef *hw_device_ctx; +HWDevice *filter_hw_device; char *vstats_filename; @@ -369,6 +370,20 @@ static int opt_init_hw_device(void *optctx, const char *opt, const char *arg) } } +static int opt_filter_hw_device(void *optctx, const char *opt, const char *arg) +{ + if (filter_hw_device) { + av_log(NULL, AV_LOG_ERROR, "Only one filter device can be used.\n"); + return AVERROR(EINVAL); + } + filter_hw_device = hw_device_get_by_name(arg); + if (!filter_hw_device) { + av_log(NULL, AV_LOG_ERROR, "Invalid filter device %s.\n", arg); + return AVERROR(EINVAL); + } + return 0; +} + /** * Parse a metadata specifier passed as 'arg' parameter. * @param arg metadata string to parse @@ -2775,6 +2790,8 @@ const OptionDef options[] = { { "init_hw_device", HAS_ARG | OPT_EXPERT, { .func_arg = opt_init_hw_device }, "initialise hardware device", "args" }, + { "filter_hw_device", HAS_ARG | OPT_EXPERT, { .func_arg = opt_filter_hw_device }, + "set hardware device used when filtering", "device" }, { NULL, }, }; diff --git a/doc/avconv.texi b/doc/avconv.texi index 7bcb787979..d8eb44864b 100644 --- a/doc/avconv.texi +++ b/doc/avconv.texi @@ -644,6 +644,20 @@ deriving it from the existing device with the name @var{source}. @item -init_hw_device list List all hardware device types supported in this build of avconv. +@item -filter_hw_device @var{name} +Pass the hardware device called @var{name} to all filters in any filter graph. +This can be used to set the device to upload to with the @code{hwupload} filter, +or the device to map to with the @code{hwmap} filter. Other filters may also +make use of this parameter when they require a hardware device. Note that this +is typically only required when the input is not already in hardware frames - +when it is, filters will derive the device they require from the context of the +frames they receive as input. + +This is a global setting, so all filters will receive the same device. + +Do not use this option in scripts that should remain functional in future +avconv versions. + @item -hwaccel[:@var{stream_specifier}] @var{hwaccel} (@emph{input,per-stream}) Use hardware acceleration to decode the matching stream(s). 
The allowed values of @var{hwaccel} are: From aa51bb3d2756ed912ee40645efccf5f4a9609696 Mon Sep 17 00:00:00 2001 From: Mark Thompson Date: Sat, 4 Mar 2017 23:57:43 +0000 Subject: [PATCH 04/10] hwcontext_qsv: Support derivation from child devices --- libavutil/hwcontext_qsv.c | 113 ++++++++++++++++++++++++++++---------- 1 file changed, 84 insertions(+), 29 deletions(-) diff --git a/libavutil/hwcontext_qsv.c b/libavutil/hwcontext_qsv.c index 3409e9b976..46636ef9eb 100644 --- a/libavutil/hwcontext_qsv.c +++ b/libavutil/hwcontext_qsv.c @@ -792,21 +792,96 @@ static mfxIMPL choose_implementation(const char *device) return impl; } -static int qsv_device_create(AVHWDeviceContext *ctx, const char *device, - AVDictionary *opts, int flags) +static int qsv_device_derive_from_child(AVHWDeviceContext *ctx, + mfxIMPL implementation, + AVHWDeviceContext *child_device_ctx, + int flags) { AVQSVDeviceContext *hwctx = ctx->hwctx; - QSVDevicePriv *priv; - enum AVHWDeviceType child_device_type; - AVDictionaryEntry *e; + QSVDeviceContext *s = ctx->internal->priv; mfxVersion ver = { { 3, 1 } }; - mfxIMPL impl; mfxHDL handle; mfxHandleType handle_type; mfxStatus err; int ret; + switch (child_device_ctx->type) { +#if CONFIG_VAAPI + case AV_HWDEVICE_TYPE_VAAPI: + { + AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx; + handle_type = MFX_HANDLE_VA_DISPLAY; + handle = (mfxHDL)child_device_hwctx->display; + } + break; +#endif +#if CONFIG_DXVA2 + case AV_HWDEVICE_TYPE_DXVA2: + { + AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx; + handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER; + handle = (mfxHDL)child_device_hwctx->devmgr; + } + break; +#endif + default: + ret = AVERROR(ENOSYS); + goto fail; + } + + err = MFXInit(implementation, &ver, &hwctx->session); + if (err != MFX_ERR_NONE) { + av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: " + "%d.\n", err); + ret = AVERROR_UNKNOWN; + goto fail; + } + + err = MFXVideoCORE_SetHandle(hwctx->session, handle_type, handle); + if (err != MFX_ERR_NONE) { + av_log(ctx, AV_LOG_ERROR, "Error setting child device handle: " + "%d\n", err); + ret = AVERROR_UNKNOWN; + goto fail; + } + + ret = qsv_device_init(ctx); + if (ret < 0) + goto fail; + if (s->handle_type != handle_type) { + av_log(ctx, AV_LOG_ERROR, "Error in child device handle setup: " + "type mismatch (%d != %d).\n", s->handle_type, handle_type); + err = AVERROR_UNKNOWN; + goto fail; + } + + return 0; + +fail: + if (hwctx->session) + MFXClose(hwctx->session); + return ret; +} + +static int qsv_device_derive(AVHWDeviceContext *ctx, + AVHWDeviceContext *child_device_ctx, int flags) +{ + return qsv_device_derive_from_child(ctx, MFX_IMPL_HARDWARE_ANY, + child_device_ctx, flags); +} + +static int qsv_device_create(AVHWDeviceContext *ctx, const char *device, + AVDictionary *opts, int flags) +{ + QSVDevicePriv *priv; + enum AVHWDeviceType child_device_type; + AVHWDeviceContext *child_device; + AVDictionaryEntry *e; + + mfxIMPL impl; + int ret; + priv = av_mallocz(sizeof(*priv)); if (!priv) return AVERROR(ENOMEM); @@ -830,32 +905,11 @@ static int qsv_device_create(AVHWDeviceContext *ctx, const char *device, if (ret < 0) return ret; - { - AVHWDeviceContext *child_device_ctx = (AVHWDeviceContext*)priv->child_device_ctx->data; -#if CONFIG_VAAPI - AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx; - handle_type = MFX_HANDLE_VA_DISPLAY; - handle = (mfxHDL)child_device_hwctx->display; -#elif CONFIG_DXVA2 - AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx; - 
handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER; - handle = (mfxHDL)child_device_hwctx->devmgr; -#endif - } + child_device = (AVHWDeviceContext*)priv->child_device_ctx->data; impl = choose_implementation(device); - err = MFXInit(impl, &ver, &hwctx->session); - if (err != MFX_ERR_NONE) { - av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session\n"); - return AVERROR_UNKNOWN; - } - - err = MFXVideoCORE_SetHandle(hwctx->session, handle_type, handle); - if (err != MFX_ERR_NONE) - return AVERROR_UNKNOWN; - - return 0; + return qsv_device_derive_from_child(ctx, impl, child_device, 0); } const HWContextType ff_hwcontext_type_qsv = { @@ -868,6 +922,7 @@ const HWContextType ff_hwcontext_type_qsv = { .frames_priv_size = sizeof(QSVFramesContext), .device_create = qsv_device_create, + .device_derive = qsv_device_derive, .device_init = qsv_device_init, .frames_get_constraints = qsv_frames_get_constraints, .frames_init = qsv_frames_init, From 27978155bc661eec9f22bcf82c9cfc099cff4365 Mon Sep 17 00:00:00 2001 From: Mark Thompson Date: Sat, 4 Mar 2017 23:57:39 +0000 Subject: [PATCH 05/10] hwcontext: Add frame context mapping for nontrivial contexts Some frames contexts are not usable without additional format-specific state in hwctx. This change adds new functions frames_derive_from and frames_derive_to to initialise this state appropriately when deriving a frames context which will require it to be set. --- libavutil/hwcontext.c | 9 ++++++++- libavutil/hwcontext_internal.h | 5 +++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/libavutil/hwcontext.c b/libavutil/hwcontext.c index ff9fe99aba..a6d88421d8 100644 --- a/libavutil/hwcontext.c +++ b/libavutil/hwcontext.c @@ -816,7 +816,14 @@ int av_hwframe_ctx_create_derived(AVBufferRef **derived_frame_ctx, goto fail; } - ret = av_hwframe_ctx_init(dst_ref); + ret = AVERROR(ENOSYS); + if (src->internal->hw_type->frames_derive_from) + ret = src->internal->hw_type->frames_derive_from(dst, src, flags); + if (ret == AVERROR(ENOSYS) && + dst->internal->hw_type->frames_derive_to) + ret = dst->internal->hw_type->frames_derive_to(dst, src, flags); + if (ret == AVERROR(ENOSYS)) + ret = 0; if (ret) goto fail; diff --git a/libavutil/hwcontext_internal.h b/libavutil/hwcontext_internal.h index 66f54142e8..87b32e191e 100644 --- a/libavutil/hwcontext_internal.h +++ b/libavutil/hwcontext_internal.h @@ -92,6 +92,11 @@ typedef struct HWContextType { const AVFrame *src, int flags); int (*map_from)(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src, int flags); + + int (*frames_derive_to)(AVHWFramesContext *dst_ctx, + AVHWFramesContext *src_ctx, int flags); + int (*frames_derive_from)(AVHWFramesContext *dst_ctx, + AVHWFramesContext *src_ctx, int flags); } HWContextType; struct AVHWDeviceInternal { From eaa5e0710496db50fc164806e5f49eaaccc83bb5 Mon Sep 17 00:00:00 2001 From: Mark Thompson Date: Sat, 4 Mar 2017 23:57:44 +0000 Subject: [PATCH 06/10] hwcontext_qsv: Implement mapping frames from the child device type Factorises out existing surface initialisation code to reuse. 
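
As a rough illustration (not part of this patch), the new mapping can be
driven from the public API along the following lines; error handling is
omitted, and "vaapi_frame" and "vaapi_device_ref" are assumed to exist
already:

    AVBufferRef *qsv_device_ref = NULL, *qsv_frames_ref = NULL;
    AVFrame *qsv_frame = av_frame_alloc();

    /* Derive a QSV device from the existing VAAPI device (see the
     * earlier hwcontext_qsv device-derivation patch in this series). */
    av_hwdevice_ctx_create_derived(&qsv_device_ref, AV_HWDEVICE_TYPE_QSV,
                                   vaapi_device_ref, 0);

    /* Derive a QSV frames context wrapping the existing VAAPI surfaces. */
    av_hwframe_ctx_create_derived(&qsv_frames_ref, AV_PIX_FMT_QSV,
                                  qsv_device_ref,
                                  vaapi_frame->hw_frames_ctx, 0);

    /* Map the VAAPI frame to the corresponding QSV surface in place. */
    qsv_frame->format        = AV_PIX_FMT_QSV;
    qsv_frame->hw_frames_ctx = av_buffer_ref(qsv_frames_ref);
    av_hwframe_map(qsv_frame, vaapi_frame, 0);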
--- libavutil/hwcontext_qsv.c | 174 +++++++++++++++++++++++++++++++------- 1 file changed, 142 insertions(+), 32 deletions(-) diff --git a/libavutil/hwcontext_qsv.c b/libavutil/hwcontext_qsv.c index 46636ef9eb..ca50bcd948 100644 --- a/libavutil/hwcontext_qsv.c +++ b/libavutil/hwcontext_qsv.c @@ -94,6 +94,16 @@ static const struct { { AV_PIX_FMT_PAL8, MFX_FOURCC_P8 }, }; +static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt) +{ + int i; + for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) { + if (supported_pixel_formats[i].pix_fmt == pix_fmt) + return supported_pixel_formats[i].fourcc; + } + return 0; +} + static int qsv_device_init(AVHWDeviceContext *ctx) { AVQSVDeviceContext *hwctx = ctx->hwctx; @@ -272,18 +282,48 @@ fail: return ret; } +static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf) +{ + const AVPixFmtDescriptor *desc; + uint32_t fourcc; + + desc = av_pix_fmt_desc_get(ctx->sw_format); + if (!desc) + return AVERROR(EINVAL); + + fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format); + if (!fourcc) + return AVERROR(EINVAL); + + surf->Info.BitDepthLuma = desc->comp[0].depth; + surf->Info.BitDepthChroma = desc->comp[0].depth; + surf->Info.Shift = desc->comp[0].depth > 8; + + if (desc->log2_chroma_w && desc->log2_chroma_h) + surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420; + else if (desc->log2_chroma_w) + surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV422; + else + surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV444; + + surf->Info.FourCC = fourcc; + surf->Info.Width = ctx->width; + surf->Info.CropW = ctx->width; + surf->Info.Height = ctx->height; + surf->Info.CropH = ctx->height; + surf->Info.FrameRateExtN = 25; + surf->Info.FrameRateExtD = 1; + + return 0; +} + static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc) { QSVFramesContext *s = ctx->internal->priv; AVQSVFramesContext *frames_hwctx = ctx->hwctx; - const AVPixFmtDescriptor *desc; int i, ret = 0; - desc = av_pix_fmt_desc_get(ctx->sw_format); - if (!desc) - return AVERROR_BUG; - if (ctx->initial_pool_size <= 0) { av_log(ctx, AV_LOG_ERROR, "QSV requires a fixed frame pool size\n"); return AVERROR(EINVAL); @@ -295,26 +335,9 @@ static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc) return AVERROR(ENOMEM); for (i = 0; i < ctx->initial_pool_size; i++) { - mfxFrameSurface1 *surf = &s->surfaces_internal[i]; - - surf->Info.BitDepthLuma = desc->comp[0].depth; - surf->Info.BitDepthChroma = desc->comp[0].depth; - surf->Info.Shift = desc->comp[0].depth > 8; - - if (desc->log2_chroma_w && desc->log2_chroma_h) - surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420; - else if (desc->log2_chroma_w) - surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV422; - else - surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV444; - - surf->Info.FourCC = fourcc; - surf->Info.Width = ctx->width; - surf->Info.CropW = ctx->width; - surf->Info.Height = ctx->height; - surf->Info.CropH = ctx->height; - surf->Info.FrameRateExtN = 25; - surf->Info.FrameRateExtD = 1; + ret = qsv_init_surface(ctx, &s->surfaces_internal[i]); + if (ret < 0) + return ret; } if (!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)) { @@ -466,15 +489,10 @@ static int qsv_frames_init(AVHWFramesContext *ctx) int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME); - uint32_t fourcc = 0; + uint32_t fourcc; int i, ret; - for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) { - if (supported_pixel_formats[i].pix_fmt == ctx->sw_format) { - fourcc = supported_pixel_formats[i].fourcc; - break; - } - } + fourcc = 
qsv_fourcc_from_pix_fmt(ctx->sw_format); if (!fourcc) { av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format\n"); return AVERROR(ENOSYS); @@ -723,6 +741,96 @@ static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst, return 0; } +static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx, + AVHWFramesContext *src_ctx, int flags) +{ + QSVFramesContext *s = dst_ctx->internal->priv; + AVQSVFramesContext *dst_hwctx = dst_ctx->hwctx; + int i; + + switch (src_ctx->device_ctx->type) { +#if CONFIG_VAAPI + case AV_HWDEVICE_TYPE_VAAPI: + { + AVVAAPIFramesContext *src_hwctx = src_ctx->hwctx; + s->surfaces_internal = av_mallocz_array(src_hwctx->nb_surfaces, + sizeof(*s->surfaces_internal)); + if (!s->surfaces_internal) + return AVERROR(ENOMEM); + for (i = 0; i < src_hwctx->nb_surfaces; i++) { + qsv_init_surface(dst_ctx, &s->surfaces_internal[i]); + s->surfaces_internal[i].Data.MemId = src_hwctx->surface_ids + i; + } + dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces; + dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET; + } + break; +#endif +#if CONFIG_DXVA2 + case AV_HWDEVICE_TYPE_DXVA2: + { + AVDXVA2FramesContext *src_hwctx = src_ctx->hwctx; + s->surfaces_internal = av_mallocz_array(src_hwctx->nb_surfaces, + sizeof(*s->surfaces_internal)); + if (!s->surfaces_internal) + return AVERROR(ENOMEM); + for (i = 0; i < src_hwctx->nb_surfaces; i++) { + qsv_init_surface(dst_ctx, &s->surfaces_internal[i]); + s->surfaces_internal[i].Data.MemId = (mfxMemId)src_hwctx->surfaces[i]; + } + dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces; + if (src_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget) + dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET; + else + dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET; + } + break; +#endif + default: + return AVERROR(ENOSYS); + } + + dst_hwctx->surfaces = s->surfaces_internal; + + return 0; +} + +static int qsv_map_to(AVHWFramesContext *dst_ctx, + AVFrame *dst, const AVFrame *src, int flags) +{ + AVQSVFramesContext *hwctx = dst_ctx->hwctx; + int i, err; + + for (i = 0; i < hwctx->nb_surfaces; i++) { +#if CONFIG_VAAPI + if (*(VASurfaceID*)hwctx->surfaces[i].Data.MemId == + (VASurfaceID)(uintptr_t)src->data[3]) + break; +#endif +#if CONFIG_DXVA2 + if ((IDirect3DSurface9*)hwctx->surfaces[i].Data.MemId == + (IDirect3DSurface9*)(uintptr_t)src->data[3]) + break; +#endif + } + if (i >= hwctx->nb_surfaces) { + av_log(dst_ctx, AV_LOG_ERROR, "Trying to map from a surface which " + "is not in the mapped frames context.\n"); + return AVERROR(EINVAL); + } + + err = ff_hwframe_map_create(dst->hw_frames_ctx, + dst, src, NULL, NULL); + if (err) + return err; + + dst->width = src->width; + dst->height = src->height; + dst->data[3] = (uint8_t*)&hwctx->surfaces[i]; + + return 0; +} + static int qsv_frames_get_constraints(AVHWDeviceContext *ctx, const void *hwconfig, AVHWFramesConstraints *constraints) @@ -931,7 +1039,9 @@ const HWContextType ff_hwcontext_type_qsv = { .transfer_get_formats = qsv_transfer_get_formats, .transfer_data_to = qsv_transfer_data_to, .transfer_data_from = qsv_transfer_data_from, + .map_to = qsv_map_to, .map_from = qsv_map_from, + .frames_derive_to = qsv_frames_derive_to, .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_QSV, AV_PIX_FMT_NONE }, }; From e1c5d56b18b82e3fb42382b1b1f972e8b371fc38 Mon Sep 17 00:00:00 2001 From: Mark Thompson Date: Sat, 4 Mar 2017 23:57:45 +0000 Subject: [PATCH 07/10] hwcontext_qsv: Implement mapping frames to the child device type --- libavutil/hwcontext_qsv.c | 88 
++++++++++++++++++++++++++++++++++++++- 1 file changed, 86 insertions(+), 2 deletions(-) diff --git a/libavutil/hwcontext_qsv.c b/libavutil/hwcontext_qsv.c index ca50bcd948..e0f6875b40 100644 --- a/libavutil/hwcontext_qsv.c +++ b/libavutil/hwcontext_qsv.c @@ -577,13 +577,62 @@ static int qsv_transfer_get_formats(AVHWFramesContext *ctx, return 0; } +static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx, + AVHWFramesContext *src_ctx, int flags) +{ + AVQSVFramesContext *src_hwctx = src_ctx->hwctx; + int i; + + switch (dst_ctx->device_ctx->type) { +#if CONFIG_VAAPI + case AV_HWDEVICE_TYPE_VAAPI: + { + AVVAAPIFramesContext *dst_hwctx = dst_ctx->hwctx; + dst_hwctx->surface_ids = av_mallocz_array(src_hwctx->nb_surfaces, + sizeof(*dst_hwctx->surface_ids)); + if (!dst_hwctx->surface_ids) + return AVERROR(ENOMEM); + for (i = 0; i < src_hwctx->nb_surfaces; i++) + dst_hwctx->surface_ids[i] = + *(VASurfaceID*)src_hwctx->surfaces[i].Data.MemId; + dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces; + } + break; +#endif +#if CONFIG_DXVA2 + case AV_HWDEVICE_TYPE_DXVA2: + { + AVDXVA2FramesContext *dst_hwctx = dst_ctx->hwctx; + dst_hwctx->surfaces = av_mallocz_array(src_hwctx->nb_surfaces, + sizeof(*dst_hwctx->surfaces)); + if (!dst_hwctx->surfaces) + return AVERROR(ENOMEM); + for (i = 0; i < src_hwctx->nb_surfaces; i++) + dst_hwctx->surfaces[i] = + (IDirect3DSurface9*)src_hwctx->surfaces[i].Data.MemId; + dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces; + if (src_hwctx->frame_type == MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET) + dst_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget; + else + dst_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget; + } + break; +#endif + default: + return AVERROR(ENOSYS); + } + + return 0; +} + static int qsv_map_from(AVHWFramesContext *ctx, AVFrame *dst, const AVFrame *src, int flags) { QSVFramesContext *s = ctx->internal->priv; mfxFrameSurface1 *surf = (mfxFrameSurface1*)src->data[3]; AVHWFramesContext *child_frames_ctx; - + const AVPixFmtDescriptor *desc; + uint8_t *child_data; AVFrame *dummy; int ret = 0; @@ -591,6 +640,40 @@ static int qsv_map_from(AVHWFramesContext *ctx, return AVERROR(ENOSYS); child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data; + switch (child_frames_ctx->device_ctx->type) { +#if CONFIG_VAAPI + case AV_HWDEVICE_TYPE_VAAPI: + child_data = (uint8_t*)(intptr_t)*(VASurfaceID*)surf->Data.MemId; + break; +#endif +#if CONFIG_DXVA2 + case AV_HWDEVICE_TYPE_DXVA2: + child_data = surf->Data.MemId; + break; +#endif + default: + return AVERROR(ENOSYS); + } + + if (dst->format == child_frames_ctx->format) { + ret = ff_hwframe_map_create(s->child_frames_ref, + dst, src, NULL, NULL); + if (ret < 0) + return ret; + + dst->width = src->width; + dst->height = src->height; + dst->data[3] = child_data; + + return 0; + } + + desc = av_pix_fmt_desc_get(dst->format); + if (desc && desc->flags & AV_PIX_FMT_FLAG_HWACCEL) { + // This only supports mapping to software. 
+ return AVERROR(ENOSYS); + } + dummy = av_frame_alloc(); if (!dummy) return AVERROR(ENOMEM); @@ -603,7 +686,7 @@ static int qsv_map_from(AVHWFramesContext *ctx, dummy->format = child_frames_ctx->format; dummy->width = src->width; dummy->height = src->height; - dummy->data[3] = surf->Data.MemId; + dummy->data[3] = child_data; ret = av_hwframe_map(dst, dummy, flags); @@ -1042,6 +1125,7 @@ const HWContextType ff_hwcontext_type_qsv = { .map_to = qsv_map_to, .map_from = qsv_map_from, .frames_derive_to = qsv_frames_derive_to, + .frames_derive_from = qsv_frames_derive_from, .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_QSV, AV_PIX_FMT_NONE }, }; From c5714b51aad41fef56dddac1d542e7fc6b984627 Mon Sep 17 00:00:00 2001 From: Mark Thompson Date: Sat, 4 Mar 2017 23:57:40 +0000 Subject: [PATCH 08/10] hwcontext: Improve allocation in derived contexts Use the flags argument of av_hwframe_ctx_create_derived() to pass the mapping flags which will be used on allocation. Also, set the format and hardware context on the allocated frame automatically - the user should not be required to do this themselves. --- doc/APIchanges | 4 ++++ libavutil/hwcontext.c | 14 +++++++++++++- libavutil/hwcontext.h | 4 +++- libavutil/hwcontext_internal.h | 5 +++++ libavutil/version.h | 2 +- 5 files changed, 26 insertions(+), 3 deletions(-) diff --git a/doc/APIchanges b/doc/APIchanges index 34d788a706..a251c4ca82 100644 --- a/doc/APIchanges +++ b/doc/APIchanges @@ -13,6 +13,10 @@ libavutil: 2017-03-23 API changes, most recent first: +2017-04-30 - xxxxxxx - lavu 56.1.1 - hwcontext.h + av_hwframe_ctx_create_derived() now takes some AV_HWFRAME_MAP_* combination + as its flags argument (which was previously unused). + 2017-04-xx - xxxxxxx - lavu 56.1.0 - spherical.h Add av_spherical_projection_name() and av_spherical_from_name(). diff --git a/libavutil/hwcontext.c b/libavutil/hwcontext.c index a6d88421d8..360b01205c 100644 --- a/libavutil/hwcontext.c +++ b/libavutil/hwcontext.c @@ -455,6 +455,11 @@ int av_hwframe_get_buffer(AVBufferRef *hwframe_ref, AVFrame *frame, int flags) // and map the frame immediately. AVFrame *src_frame; + frame->format = ctx->format; + frame->hw_frames_ctx = av_buffer_ref(hwframe_ref); + if (!frame->hw_frames_ctx) + return AVERROR(ENOMEM); + src_frame = av_frame_alloc(); if (!src_frame) return AVERROR(ENOMEM); @@ -464,7 +469,8 @@ int av_hwframe_get_buffer(AVBufferRef *hwframe_ref, AVFrame *frame, int flags) if (ret < 0) return ret; - ret = av_hwframe_map(frame, src_frame, 0); + ret = av_hwframe_map(frame, src_frame, + ctx->internal->source_allocation_map_flags); if (ret) { av_log(ctx, AV_LOG_ERROR, "Failed to map frame into derived " "frame context: %d.\n", ret); @@ -816,6 +822,12 @@ int av_hwframe_ctx_create_derived(AVBufferRef **derived_frame_ctx, goto fail; } + dst->internal->source_allocation_map_flags = + flags & (AV_HWFRAME_MAP_READ | + AV_HWFRAME_MAP_WRITE | + AV_HWFRAME_MAP_OVERWRITE | + AV_HWFRAME_MAP_DIRECT); + ret = AVERROR(ENOSYS); if (src->internal->hw_type->frames_derive_from) ret = src->internal->hw_type->frames_derive_from(dst, src, flags); diff --git a/libavutil/hwcontext.h b/libavutil/hwcontext.h index 889e30365e..eaf40c46f1 100644 --- a/libavutil/hwcontext.h +++ b/libavutil/hwcontext.h @@ -564,7 +564,9 @@ int av_hwframe_map(AVFrame *dst, const AVFrame *src, int flags); * AVHWFramesContext on. * @param source_frame_ctx A reference to an existing AVHWFramesContext * which will be mapped to the derived context. - * @param flags Currently unused; should be set to zero. 
+ * @param flags Some combination of AV_HWFRAME_MAP_* flags, defining the + * mapping parameters to apply to frames which are allocated + * in the derived device. * @return Zero on success, negative AVERROR code on failure. */ int av_hwframe_ctx_create_derived(AVBufferRef **derived_frame_ctx, diff --git a/libavutil/hwcontext_internal.h b/libavutil/hwcontext_internal.h index 87b32e191e..7cf6cb07c7 100644 --- a/libavutil/hwcontext_internal.h +++ b/libavutil/hwcontext_internal.h @@ -121,6 +121,11 @@ struct AVHWFramesInternal { * context it was derived from. */ AVBufferRef *source_frames; + /** + * Flags to apply to the mapping from the source to the derived + * frame context when trying to allocate in the derived context. + */ + int source_allocation_map_flags; }; typedef struct HWMapDescriptor { diff --git a/libavutil/version.h b/libavutil/version.h index fd72ff431d..7779755870 100644 --- a/libavutil/version.h +++ b/libavutil/version.h @@ -55,7 +55,7 @@ #define LIBAVUTIL_VERSION_MAJOR 56 #define LIBAVUTIL_VERSION_MINOR 1 -#define LIBAVUTIL_VERSION_MICRO 0 +#define LIBAVUTIL_VERSION_MICRO 1 #define LIBAVUTIL_VERSION_INT AV_VERSION_INT(LIBAVUTIL_VERSION_MAJOR, \ LIBAVUTIL_VERSION_MINOR, \ From 38cb05f1c89cae1862b360d4e7e3f0cd2b5bbb67 Mon Sep 17 00:00:00 2001 From: Mark Thompson Date: Sat, 4 Mar 2017 23:57:41 +0000 Subject: [PATCH 09/10] vf_hwmap: Add device derivation Also refactor a little and improve error messages to make failure cases easier to understand. --- libavfilter/vf_hwmap.c | 67 ++++++++++++++++++++++++++++++------------ 1 file changed, 49 insertions(+), 18 deletions(-) diff --git a/libavfilter/vf_hwmap.c b/libavfilter/vf_hwmap.c index 11595bd6ca..f19d10f2c2 100644 --- a/libavfilter/vf_hwmap.c +++ b/libavfilter/vf_hwmap.c @@ -30,10 +30,10 @@ typedef struct HWMapContext { const AVClass *class; - AVBufferRef *hwdevice_ref; AVBufferRef *hwframes_ref; int mode; + char *derive_device_type; int map_backwards; } HWMapContext; @@ -52,6 +52,7 @@ static int hwmap_config_output(AVFilterLink *outlink) HWMapContext *ctx = avctx->priv; AVFilterLink *inlink = avctx->inputs[0]; AVHWFramesContext *hwfc; + AVBufferRef *device; const AVPixFmtDescriptor *desc; int err; @@ -59,30 +60,58 @@ static int hwmap_config_output(AVFilterLink *outlink) av_get_pix_fmt_name(inlink->format), av_get_pix_fmt_name(outlink->format)); + av_buffer_unref(&ctx->hwframes_ref); + + device = avctx->hw_device_ctx; + if (inlink->hw_frames_ctx) { hwfc = (AVHWFramesContext*)inlink->hw_frames_ctx->data; + if (ctx->derive_device_type) { + enum AVHWDeviceType type; + + type = av_hwdevice_find_type_by_name(ctx->derive_device_type); + if (type == AV_HWDEVICE_TYPE_NONE) { + av_log(avctx, AV_LOG_ERROR, "Invalid device type.\n"); + goto fail; + } + + err = av_hwdevice_ctx_create_derived(&device, type, + hwfc->device_ref, 0); + if (err < 0) { + av_log(avctx, AV_LOG_ERROR, "Failed to created derived " + "device context: %d.\n", err); + goto fail; + } + } + desc = av_pix_fmt_desc_get(outlink->format); - if (!desc) - return AVERROR(EINVAL); + if (!desc) { + err = AVERROR(EINVAL); + goto fail; + } if (inlink->format == hwfc->format && (desc->flags & AV_PIX_FMT_FLAG_HWACCEL)) { // Map between two hardware formats (including the case of // undoing an existing mapping). 
- ctx->hwdevice_ref = av_buffer_ref(avctx->hw_device_ctx); - if (!ctx->hwdevice_ref) { - err = AVERROR(ENOMEM); + if (!device) { + av_log(avctx, AV_LOG_ERROR, "A device reference is " + "required to map to a hardware format.\n"); + err = AVERROR(EINVAL); goto fail; } err = av_hwframe_ctx_create_derived(&ctx->hwframes_ref, outlink->format, - ctx->hwdevice_ref, + device, inlink->hw_frames_ctx, 0); - if (err < 0) + if (err < 0) { + av_log(avctx, AV_LOG_ERROR, "Failed to create derived " + "frames context: %d.\n", err); goto fail; + } } else if ((outlink->format == hwfc->format && inlink->format == hwfc->sw_format) || @@ -90,8 +119,6 @@ static int hwmap_config_output(AVFilterLink *outlink) // Map from a hardware format to a software format, or // undo an existing such mapping. - ctx->hwdevice_ref = NULL; - ctx->hwframes_ref = av_buffer_ref(inlink->hw_frames_ctx); if (!ctx->hwframes_ref) { err = AVERROR(ENOMEM); @@ -115,15 +142,17 @@ static int hwmap_config_output(AVFilterLink *outlink) // returns frames mapped from that to the previous link in // order to fill them without an additional copy. - ctx->map_backwards = 1; - - ctx->hwdevice_ref = av_buffer_ref(avctx->hw_device_ctx); - if (!ctx->hwdevice_ref) { - err = AVERROR(ENOMEM); + if (!device) { + av_log(avctx, AV_LOG_ERROR, "A device reference is " + "required to create new frames with backwards " + "mapping.\n"); + err = AVERROR(EINVAL); goto fail; } - ctx->hwframes_ref = av_hwframe_ctx_alloc(ctx->hwdevice_ref); + ctx->map_backwards = 1; + + ctx->hwframes_ref = av_hwframe_ctx_alloc(device); if (!ctx->hwframes_ref) { err = AVERROR(ENOMEM); goto fail; @@ -161,7 +190,6 @@ static int hwmap_config_output(AVFilterLink *outlink) fail: av_buffer_unref(&ctx->hwframes_ref); - av_buffer_unref(&ctx->hwdevice_ref); return err; } @@ -269,7 +297,6 @@ static av_cold void hwmap_uninit(AVFilterContext *avctx) HWMapContext *ctx = avctx->priv; av_buffer_unref(&ctx->hwframes_ref); - av_buffer_unref(&ctx->hwdevice_ref); } #define OFFSET(x) offsetof(HWMapContext, x) @@ -293,6 +320,10 @@ static const AVOption hwmap_options[] = { 0, AV_OPT_TYPE_CONST, { .i64 = AV_HWFRAME_MAP_DIRECT }, INT_MIN, INT_MAX, FLAGS, "mode" }, + { "derive_device", "Derive a new device of this type", + OFFSET(derive_device_type), AV_OPT_TYPE_STRING, + { .str = NULL }, 0, 0, FLAGS }, + { NULL }, }; From 81a4cb8e58636d4efd200c2b4fec786a7e948d8b Mon Sep 17 00:00:00 2001 From: Mark Thompson Date: Sat, 4 Mar 2017 23:57:42 +0000 Subject: [PATCH 10/10] vf_hwmap: Add reverse mapping for hardware frames This is something of a hack. It allocates a new hwframe context for the target format, then maps it back to the source link and overwrites the input link hw_frames_ctx so that the previous filter will receive the frames we want from ff_get_video_buffer(). It may fail if the previous filter imposes any additional constraints on the frames it wants to use as output. 
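
As a sketch of the intended use (not part of this patch, and assuming a
build with the scale_vaapi filter and working QSV/VAAPI devices), a
full-hardware QSV pipeline can borrow a VAAPI filter with a chain along
the lines of:

    hwmap=derive_device=vaapi,scale_vaapi=w=1280:h=720,hwmap=derive_device=qsv:reverse=1,format=qsv

Here the final hwmap allocates its frames in the derived QSV context and
maps them back onto the VAAPI link, so scale_vaapi writes directly into
surfaces which the QSV encoder can then consume without an extra copy.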
--- libavfilter/vf_hwmap.c | 68 +++++++++++++++++++++++++++++++++++++----- 1 file changed, 61 insertions(+), 7 deletions(-) diff --git a/libavfilter/vf_hwmap.c b/libavfilter/vf_hwmap.c index f19d10f2c2..2983528ea0 100644 --- a/libavfilter/vf_hwmap.c +++ b/libavfilter/vf_hwmap.c @@ -34,7 +34,7 @@ typedef struct HWMapContext { int mode; char *derive_device_type; - int map_backwards; + int reverse; } HWMapContext; static int hwmap_query_formats(AVFilterContext *avctx) @@ -92,7 +92,8 @@ static int hwmap_config_output(AVFilterLink *outlink) } if (inlink->format == hwfc->format && - (desc->flags & AV_PIX_FMT_FLAG_HWACCEL)) { + (desc->flags & AV_PIX_FMT_FLAG_HWACCEL) && + !ctx->reverse) { // Map between two hardware formats (including the case of // undoing an existing mapping). @@ -113,6 +114,56 @@ static int hwmap_config_output(AVFilterLink *outlink) goto fail; } + } else if (inlink->format == hwfc->format && + (desc->flags & AV_PIX_FMT_FLAG_HWACCEL) && + ctx->reverse) { + // Map between two hardware formats, but do it in reverse. + // Make a new hwframe context for the target type, and then + // overwrite the input hwframe context with a derived context + // mapped from that back to the source type. + AVBufferRef *source; + AVHWFramesContext *frames; + + ctx->hwframes_ref = av_hwframe_ctx_alloc(device); + if (!ctx->hwframes_ref) { + err = AVERROR(ENOMEM); + goto fail; + } + frames = (AVHWFramesContext*)ctx->hwframes_ref->data; + + frames->format = outlink->format; + frames->sw_format = hwfc->sw_format; + frames->width = hwfc->width; + frames->height = hwfc->height; + frames->initial_pool_size = 64; + + err = av_hwframe_ctx_init(ctx->hwframes_ref); + if (err < 0) { + av_log(avctx, AV_LOG_ERROR, "Failed to initialise " + "target frames context: %d.\n", err); + goto fail; + } + + err = av_hwframe_ctx_create_derived(&source, + inlink->format, + hwfc->device_ref, + ctx->hwframes_ref, + ctx->mode); + if (err < 0) { + av_log(avctx, AV_LOG_ERROR, "Failed to create " + "derived source frames context: %d.\n", err); + goto fail; + } + + // Here is the naughty bit. This overwriting changes what + // ff_get_video_buffer() in the previous filter returns - + // it will now give a frame allocated here mapped back to + // the format it expects. If there were any additional + // constraints on the output frames there then this may + // break nastily. 
+ av_buffer_unref(&inlink->hw_frames_ctx); + inlink->hw_frames_ctx = source; + } else if ((outlink->format == hwfc->format && inlink->format == hwfc->sw_format) || inlink->format == hwfc->format) { @@ -144,13 +195,13 @@ static int hwmap_config_output(AVFilterLink *outlink) if (!device) { av_log(avctx, AV_LOG_ERROR, "A device reference is " - "required to create new frames with backwards " + "required to create new frames with reverse " "mapping.\n"); err = AVERROR(EINVAL); goto fail; } - ctx->map_backwards = 1; + ctx->reverse = 1; ctx->hwframes_ref = av_hwframe_ctx_alloc(device); if (!ctx->hwframes_ref) { @@ -167,7 +218,7 @@ static int hwmap_config_output(AVFilterLink *outlink) err = av_hwframe_ctx_init(ctx->hwframes_ref); if (err < 0) { av_log(avctx, AV_LOG_ERROR, "Failed to create frame " - "context for backward mapping: %d.\n", err); + "context for reverse mapping: %d.\n", err); goto fail; } @@ -199,7 +250,7 @@ static AVFrame *hwmap_get_buffer(AVFilterLink *inlink, int w, int h) AVFilterLink *outlink = avctx->outputs[0]; HWMapContext *ctx = avctx->priv; - if (ctx->map_backwards) { + if (ctx->reverse && !inlink->hw_frames_ctx) { AVFrame *src, *dst; int err; @@ -257,7 +308,7 @@ static int hwmap_filter_frame(AVFilterLink *link, AVFrame *input) goto fail; } - if (ctx->map_backwards && !input->hw_frames_ctx) { + if (ctx->reverse && !input->hw_frames_ctx) { // If we mapped backwards from hardware to software, we need // to attach the hardware frame context to the input frame to // make the mapping visible to av_hwframe_map(). @@ -323,6 +374,9 @@ static const AVOption hwmap_options[] = { { "derive_device", "Derive a new device of this type", OFFSET(derive_device_type), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, FLAGS }, + { "reverse", "Map in reverse (create and allocate in the sink)", + OFFSET(reverse), AV_OPT_TYPE_INT, + { .i64 = 0 }, 0, 1, FLAGS }, { NULL }, };