avfilter/scale_amf: Add AMF VPP & super resolution filters

This commit adds two AMF filters: vpp_amf & sr_amf.
Both filters use AMF hardware acceleration.
vpp_amf supports simple scaling algorithms & color conversion.
sr_amf supports advanced scaling algorithms such as FSR & can
be used for upscaling only.
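
Example command lines (illustrative values only; an FFmpeg build configured
with --enable-amf and an AMD GPU are assumed):

    ffmpeg -i input.mp4 -vf "vpp_amf=w=1280:h=720:format=nv12:scale_type=bicubic" -c:v h264_amf output.mp4
    ffmpeg -i input.mp4 -vf "sr_amf=w=iw*2:h=ih*2:algorithm=sr1-1" -c:v hevc_amf output.mp4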
---
Author: Evgeny Pavlov
Committed-by: Dmitrii Ovchinnikov
Commit: 4b77a0a681 (parent: fbfde33230)

 configure                   |   2 +
 libavfilter/Makefile        |   2 +
 libavfilter/allfilters.c    |   2 +
 libavfilter/vf_amf_common.c | 515 +
 libavfilter/vf_amf_common.h |  73 +
 libavfilter/vf_sr_amf.c     | 185 +
 libavfilter/vf_vpp_amf.c    | 259 +
 7 files changed, 1038 insertions(+)

configure
@@ -3966,6 +3966,8 @@ rubberband_filter_deps="librubberband"
sab_filter_deps="gpl swscale"
scale2ref_filter_deps="swscale"
scale_filter_deps="swscale"
sr_amf_filter_deps="amf"
vpp_amf_filter_deps="amf"
scale_qsv_filter_deps="libmfx"
scale_qsv_filter_select="qsvvpp"
scdet_filter_select="scene_sad"

libavfilter/Makefile
@@ -504,6 +504,7 @@ OBJS-$(CONFIG_SITI_FILTER) += vf_siti.o
OBJS-$(CONFIG_SPLIT_FILTER) += split.o
OBJS-$(CONFIG_SPP_FILTER) += vf_spp.o qp_table.o
OBJS-$(CONFIG_SR_FILTER) += vf_sr.o
OBJS-$(CONFIG_SR_AMF_FILTER) += vf_sr_amf.o scale_eval.o vf_amf_common.o
OBJS-$(CONFIG_SSIM_FILTER) += vf_ssim.o framesync.o
OBJS-$(CONFIG_SSIM360_FILTER) += vf_ssim360.o framesync.o
OBJS-$(CONFIG_STEREO3D_FILTER) += vf_stereo3d.o
@@ -557,6 +558,7 @@ OBJS-$(CONFIG_VIDSTABTRANSFORM_FILTER) += vidstabutils.o vf_vidstabtransfo
OBJS-$(CONFIG_VIF_FILTER) += vf_vif.o framesync.o
OBJS-$(CONFIG_VIGNETTE_FILTER) += vf_vignette.o
OBJS-$(CONFIG_VMAFMOTION_FILTER) += vf_vmafmotion.o framesync.o
OBJS-$(CONFIG_VPP_AMF_FILTER) += vf_vpp_amf.o scale_eval.o vf_amf_common.o
OBJS-$(CONFIG_VPP_QSV_FILTER) += vf_vpp_qsv.o
OBJS-$(CONFIG_VSTACK_FILTER) += vf_stack.o framesync.o
OBJS-$(CONFIG_W3FDIF_FILTER) += vf_w3fdif.o

libavfilter/allfilters.c
@@ -432,6 +432,8 @@ extern const FFFilter ff_vf_roberts_opencl;
extern const FFFilter ff_vf_rotate;
extern const FFFilter ff_vf_sab;
extern const FFFilter ff_vf_scale;
extern const FFFilter ff_vf_vpp_amf;
extern const FFFilter ff_vf_sr_amf;
extern const FFFilter ff_vf_scale_cuda;
extern const FFFilter ff_vf_scale_npp;
extern const FFFilter ff_vf_scale_qsv;

libavfilter/vf_amf_common.c (new file)
@@ -0,0 +1,515 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "vf_amf_common.h"
#include "libavutil/avassert.h"
#include "avfilter.h"
#include "avfilter_internal.h"
#include "formats.h"
#include "libavutil/mem.h"
#include "libavutil/imgutils.h"
#include "libavutil/hwcontext_amf.h"
#include "libavutil/hwcontext_amf_internal.h"
#include "AMF/components/ColorSpace.h"
#include "scale_eval.h"
#if CONFIG_DXVA2
#include <d3d9.h>
#endif
#if CONFIG_D3D11VA
#include <d3d11.h>
#endif
int amf_filter_init(AVFilterContext *avctx)
{
AMFFilterContext *ctx = avctx->priv;
if (!strcmp(ctx->format_str, "same")) {
ctx->format = AV_PIX_FMT_NONE;
} else {
ctx->format = av_get_pix_fmt(ctx->format_str);
if (ctx->format == AV_PIX_FMT_NONE) {
av_log(avctx, AV_LOG_ERROR, "Unrecognized pixel format: %s\n", ctx->format_str);
return AVERROR(EINVAL);
}
}
return 0;
}
void amf_filter_uninit(AVFilterContext *avctx)
{
AMFFilterContext *ctx = avctx->priv;
if (ctx->component) {
ctx->component->pVtbl->Terminate(ctx->component);
ctx->component->pVtbl->Release(ctx->component);
ctx->component = NULL;
}
av_buffer_unref(&ctx->amf_device_ref);
av_buffer_unref(&ctx->hwdevice_ref);
av_buffer_unref(&ctx->hwframes_in_ref);
av_buffer_unref(&ctx->hwframes_out_ref);
}
int amf_filter_filter_frame(AVFilterLink *inlink, AVFrame *in)
{
AVFilterContext *avctx = inlink->dst;
AMFFilterContext *ctx = avctx->priv;
AVFilterLink *outlink = avctx->outputs[0];
AMF_RESULT res;
AMFSurface *surface_in;
AMFSurface *surface_out = NULL;
AMFData *data_out = NULL;
enum AVColorSpace out_colorspace;
enum AVColorRange out_color_range;
AVFrame *out = NULL;
int ret = 0;
if (!ctx->component)
return AVERROR(EINVAL);
ret = amf_avframe_to_amfsurface(avctx, in, &surface_in);
if (ret < 0)
goto fail;
res = ctx->component->pVtbl->SubmitInput(ctx->component, (AMFData*)surface_in);
surface_in->pVtbl->Release(surface_in); // release surface after use
AMF_GOTO_FAIL_IF_FALSE(avctx, res == AMF_OK, AVERROR_UNKNOWN, "SubmitInput() failed with error %d\n", res);
res = ctx->component->pVtbl->QueryOutput(ctx->component, &data_out);
AMF_GOTO_FAIL_IF_FALSE(avctx, res == AMF_OK, AVERROR_UNKNOWN, "QueryOutput() failed with error %d\n", res);
if (data_out) {
AMFGuid guid = IID_AMFSurface();
data_out->pVtbl->QueryInterface(data_out, &guid, (void**)&surface_out); // query for surface interface
data_out->pVtbl->Release(data_out);
}
out = amf_amfsurface_to_avframe(avctx, surface_out);
if (!out) {
ret = AVERROR(ENOMEM);
goto fail;
}
ret = av_frame_copy_props(out, in);
av_frame_unref(in);
out_colorspace = AVCOL_SPC_UNSPECIFIED;
if (ctx->color_profile != AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN) {
switch(ctx->color_profile) {
case AMF_VIDEO_CONVERTER_COLOR_PROFILE_601:
out_colorspace = AVCOL_SPC_SMPTE170M;
break;
case AMF_VIDEO_CONVERTER_COLOR_PROFILE_709:
out_colorspace = AVCOL_SPC_BT709;
break;
case AMF_VIDEO_CONVERTER_COLOR_PROFILE_2020:
out_colorspace = AVCOL_SPC_BT2020_NCL;
break;
case AMF_VIDEO_CONVERTER_COLOR_PROFILE_JPEG:
out_colorspace = AVCOL_SPC_RGB;
break;
default:
out_colorspace = AVCOL_SPC_UNSPECIFIED;
break;
}
out->colorspace = out_colorspace;
}
out_color_range = AVCOL_RANGE_UNSPECIFIED;
if (ctx->color_range == AMF_COLOR_RANGE_FULL)
out_color_range = AVCOL_RANGE_JPEG;
else if (ctx->color_range == AMF_COLOR_RANGE_STUDIO)
out_color_range = AVCOL_RANGE_MPEG;
if (ctx->color_range != AMF_COLOR_RANGE_UNDEFINED)
out->color_range = out_color_range;
if (ctx->primaries != AMF_COLOR_PRIMARIES_UNDEFINED)
out->color_primaries = ctx->primaries;
if (ctx->trc != AMF_COLOR_TRANSFER_CHARACTERISTIC_UNDEFINED)
out->color_trc = ctx->trc;
if (ret < 0)
goto fail;
out->hw_frames_ctx = av_buffer_ref(ctx->hwframes_out_ref);
if (!out->hw_frames_ctx) {
ret = AVERROR(ENOMEM);
goto fail;
}
if (inlink->sample_aspect_ratio.num) {
outlink->sample_aspect_ratio = av_mul_q((AVRational){outlink->h * inlink->w, outlink->w * inlink->h}, inlink->sample_aspect_ratio);
} else
outlink->sample_aspect_ratio = inlink->sample_aspect_ratio;
av_frame_free(&in);
return ff_filter_frame(outlink, out);
fail:
av_frame_free(&in);
av_frame_free(&out);
return ret;
}
int amf_setup_input_output_formats(AVFilterContext *avctx,
const enum AVPixelFormat *input_pix_fmts,
const enum AVPixelFormat *output_pix_fmts)
{
int err;
AVFilterFormats *input_formats;
AVFilterFormats *output_formats;
// If hw_device_ctx is set to D3D11VA or DXVA2, restrict the pixel format list to the
// corresponding hardware format so that it is chosen by default.
// This choice is ignored if hw_frames_ctx is not NULL at the config_output stage.
if (avctx->hw_device_ctx) {
AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)avctx->hw_device_ctx->data;
switch (device_ctx->type) {
#if CONFIG_D3D11VA
case AV_HWDEVICE_TYPE_D3D11VA:
{
static const enum AVPixelFormat output_pix_fmts_d3d11[] = {
AV_PIX_FMT_D3D11,
AV_PIX_FMT_NONE,
};
output_pix_fmts = output_pix_fmts_d3d11;
}
break;
#endif
#if CONFIG_DXVA2
case AV_HWDEVICE_TYPE_DXVA2:
{
static const enum AVPixelFormat output_pix_fmts_dxva2[] = {
AV_PIX_FMT_DXVA2_VLD,
AV_PIX_FMT_NONE,
};
output_pix_fmts = output_pix_fmts_dxva2;
}
break;
#endif
case AV_HWDEVICE_TYPE_AMF:
break;
default:
{
av_log(avctx, AV_LOG_ERROR, "Unsupported device : %s\n", av_hwdevice_get_type_name(device_ctx->type));
return AVERROR(EINVAL);
}
break;
}
}
input_formats = ff_make_format_list(output_pix_fmts);
if (!input_formats) {
return AVERROR(ENOMEM);
}
output_formats = ff_make_format_list(output_pix_fmts);
if (!output_formats) {
return AVERROR(ENOMEM);
}
if ((err = ff_formats_ref(input_formats, &avctx->inputs[0]->outcfg.formats)) < 0)
return err;
if ((err = ff_formats_ref(output_formats, &avctx->outputs[0]->incfg.formats)) < 0)
return err;
return 0;
}
int amf_copy_surface(AVFilterContext *avctx, const AVFrame *frame,
AMFSurface* surface)
{
AMFPlane *plane;
uint8_t *dst_data[4];
int dst_linesize[4];
int planes;
int i;
planes = (int)surface->pVtbl->GetPlanesCount(surface);
av_assert0(planes < FF_ARRAY_ELEMS(dst_data));
for (i = 0; i < planes; i++) {
plane = surface->pVtbl->GetPlaneAt(surface, i);
dst_data[i] = plane->pVtbl->GetNative(plane);
dst_linesize[i] = plane->pVtbl->GetHPitch(plane);
}
av_image_copy(dst_data, dst_linesize,
(const uint8_t**)frame->data, frame->linesize, frame->format,
frame->width, frame->height);
return 0;
}
int amf_init_filter_config(AVFilterLink *outlink, enum AVPixelFormat *in_format)
{
int err;
AMF_RESULT res;
AVFilterContext *avctx = outlink->src;
AVFilterLink *inlink = avctx->inputs[0];
AMFFilterContext *ctx = avctx->priv;
AVHWFramesContext *hwframes_out;
AVHWDeviceContext *hwdev_ctx;
enum AVPixelFormat in_sw_format = inlink->format;
enum AVPixelFormat out_sw_format = ctx->format;
FilterLink *inl = ff_filter_link(inlink);
FilterLink *outl = ff_filter_link(outlink);
if ((err = ff_scale_eval_dimensions(avctx,
ctx->w_expr, ctx->h_expr,
inlink, outlink,
&ctx->width, &ctx->height)) < 0)
return err;
ff_scale_adjust_dimensions(inlink, &ctx->width, &ctx->height,
ctx->force_original_aspect_ratio, ctx->force_divisible_by);
av_buffer_unref(&ctx->amf_device_ref);
av_buffer_unref(&ctx->hwframes_in_ref);
av_buffer_unref(&ctx->hwframes_out_ref);
ctx->local_context = 0;
if (inl->hw_frames_ctx) {
AVHWFramesContext *frames_ctx = (AVHWFramesContext*)inl->hw_frames_ctx->data;
if (av_av_to_amf_format(frames_ctx->sw_format) == AMF_SURFACE_UNKNOWN) {
av_log(avctx, AV_LOG_ERROR, "Format of input frames context (%s) is not supported by AMF.\n",
av_get_pix_fmt_name(frames_ctx->sw_format));
return AVERROR(EINVAL);
}
err = av_hwdevice_ctx_create_derived(&ctx->amf_device_ref, AV_HWDEVICE_TYPE_AMF, frames_ctx->device_ref, 0);
if (err < 0)
return err;
ctx->hwframes_in_ref = av_buffer_ref(inl->hw_frames_ctx);
if (!ctx->hwframes_in_ref)
return AVERROR(ENOMEM);
in_sw_format = frames_ctx->sw_format;
} else if (avctx->hw_device_ctx) {
err = av_hwdevice_ctx_create_derived(&ctx->amf_device_ref, AV_HWDEVICE_TYPE_AMF, avctx->hw_device_ctx, 0);
if (err < 0)
return err;
ctx->hwdevice_ref = av_buffer_ref(avctx->hw_device_ctx);
if (!ctx->hwdevice_ref)
return AVERROR(ENOMEM);
} else {
res = av_hwdevice_ctx_create(&ctx->amf_device_ref, AV_HWDEVICE_TYPE_AMF, NULL, NULL, 0);
AMF_RETURN_IF_FALSE(avctx, res == 0, res, "Failed to create hardware device context (AMF) : %s\n", av_err2str(res));
}
if(out_sw_format == AV_PIX_FMT_NONE){
if(outlink->format == AV_PIX_FMT_AMF_SURFACE)
out_sw_format = in_sw_format;
else
out_sw_format = outlink->format;
}
ctx->hwframes_out_ref = av_hwframe_ctx_alloc(ctx->amf_device_ref);
if (!ctx->hwframes_out_ref)
return AVERROR(ENOMEM);
hwframes_out = (AVHWFramesContext*)ctx->hwframes_out_ref->data;
hwdev_ctx = (AVHWDeviceContext*)ctx->amf_device_ref->data;
if (hwdev_ctx->type == AV_HWDEVICE_TYPE_AMF)
{
ctx->amf_device_ctx = hwdev_ctx->hwctx;
}
hwframes_out->format = AV_PIX_FMT_AMF_SURFACE;
hwframes_out->sw_format = out_sw_format;
if (inlink->format == AV_PIX_FMT_AMF_SURFACE) {
*in_format = in_sw_format;
} else {
*in_format = inlink->format;
}
outlink->w = ctx->width;
outlink->h = ctx->height;
hwframes_out->width = outlink->w;
hwframes_out->height = outlink->h;
err = av_hwframe_ctx_init(ctx->hwframes_out_ref);
if (err < 0)
return err;
outl->hw_frames_ctx = av_buffer_ref(ctx->hwframes_out_ref);
if (!outl->hw_frames_ctx) {
return AVERROR(ENOMEM);
}
return 0;
}
void amf_free_amfsurface(void *opaque, uint8_t *data)
{
AMFSurface *surface = (AMFSurface*)data;
surface->pVtbl->Release(surface);
}
AVFrame *amf_amfsurface_to_avframe(AVFilterContext *avctx, AMFSurface* pSurface)
{
AVFrame *frame = av_frame_alloc();
AMFFilterContext *ctx = avctx->priv;
if (!frame)
return NULL;
if (ctx->hwframes_out_ref) {
AVHWFramesContext *hwframes_out = (AVHWFramesContext *)ctx->hwframes_out_ref->data;
if (hwframes_out->format == AV_PIX_FMT_AMF_SURFACE) {
int ret = av_hwframe_get_buffer(ctx->hwframes_out_ref, frame, 0);
if (ret < 0) {
av_log(avctx, AV_LOG_ERROR, "Get hw frame failed.\n");
av_frame_free(&frame);
return NULL;
}
frame->data[0] = (uint8_t *)pSurface;
frame->buf[1] = av_buffer_create((uint8_t *)pSurface, sizeof(AMFSurface),
amf_free_amfsurface,
(void*)avctx,
AV_BUFFER_FLAG_READONLY);
} else { // FIXME: add processing of other hw formats
av_log(ctx, AV_LOG_ERROR, "Unknown pixel format\n");
av_frame_free(&frame);
return NULL;
}
} else {
switch (pSurface->pVtbl->GetMemoryType(pSurface))
{
#if CONFIG_D3D11VA
case AMF_MEMORY_DX11:
{
AMFPlane *plane0 = pSurface->pVtbl->GetPlaneAt(pSurface, 0);
frame->data[0] = plane0->pVtbl->GetNative(plane0);
frame->data[1] = (uint8_t*)(intptr_t)0;
frame->buf[0] = av_buffer_create(NULL,
0,
amf_free_amfsurface,
pSurface,
AV_BUFFER_FLAG_READONLY);
}
break;
#endif
#if CONFIG_DXVA2
case AMF_MEMORY_DX9:
{
AMFPlane *plane0 = pSurface->pVtbl->GetPlaneAt(pSurface, 0);
frame->data[3] = plane0->pVtbl->GetNative(plane0);
frame->buf[0] = av_buffer_create(NULL,
0,
amf_free_amfsurface,
pSurface,
AV_BUFFER_FLAG_READONLY);
}
break;
#endif
default:
{
av_log(avctx, AV_LOG_ERROR, "Unsupported memory type : %d\n", pSurface->pVtbl->GetMemoryType(pSurface));
return NULL;
}
}
}
return frame;
}
int amf_avframe_to_amfsurface(AVFilterContext *avctx, const AVFrame *frame, AMFSurface** ppSurface)
{
AMFFilterContext *ctx = avctx->priv;
AMFSurface *surface;
AMF_RESULT res;
int hw_surface = 0;
switch (frame->format) {
#if CONFIG_D3D11VA
case AV_PIX_FMT_D3D11:
{
static const GUID AMFTextureArrayIndexGUID = { 0x28115527, 0xe7c3, 0x4b66, { 0x99, 0xd3, 0x4f, 0x2a, 0xe6, 0xb4, 0x7f, 0xaf } };
ID3D11Texture2D *texture = (ID3D11Texture2D*)frame->data[0]; // actual texture
int index = (intptr_t)frame->data[1]; // index of the slice in the texture array - tells AMF which slice to use
texture->lpVtbl->SetPrivateData(texture, &AMFTextureArrayIndexGUID, sizeof(index), &index);
res = ctx->amf_device_ctx->context->pVtbl->CreateSurfaceFromDX11Native(ctx->amf_device_ctx->context, texture, &surface, NULL); // wrap to AMF surface
AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX11Native() failed with error %d\n", res);
hw_surface = 1;
}
break;
#endif
case AV_PIX_FMT_AMF_SURFACE:
{
surface = (AMFSurface*)frame->data[0]; // actual surface
surface->pVtbl->Acquire(surface); // the returned surface has to be ref-counted (ref++)
hw_surface = 1;
}
break;
#if CONFIG_DXVA2
case AV_PIX_FMT_DXVA2_VLD:
{
IDirect3DSurface9 *texture = (IDirect3DSurface9 *)frame->data[3]; // actual texture
res = ctx->amf_device_ctx->context->pVtbl->CreateSurfaceFromDX9Native(ctx->amf_device_ctx->context, texture, &surface, NULL); // wrap to AMF surface
AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX9Native() failed with error %d\n", res);
hw_surface = 1;
}
break;
#endif
default:
{
AMF_SURFACE_FORMAT amf_fmt = av_av_to_amf_format(frame->format);
res = ctx->amf_device_ctx->context->pVtbl->AllocSurface(ctx->amf_device_ctx->context, AMF_MEMORY_HOST, amf_fmt, frame->width, frame->height, &surface);
AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR(ENOMEM), "AllocSurface() failed with error %d\n", res);
amf_copy_surface(avctx, frame, surface);
}
break;
}
if (frame->crop_left || frame->crop_right || frame->crop_top || frame->crop_bottom) {
size_t crop_x = frame->crop_left;
size_t crop_y = frame->crop_top;
size_t crop_w = frame->width - (frame->crop_left + frame->crop_right);
size_t crop_h = frame->height - (frame->crop_top + frame->crop_bottom);
AVFilterLink *outlink = avctx->outputs[0];
if (crop_x || crop_y) {
if (crop_w == outlink->w && crop_h == outlink->h) {
AMFData *cropped_buffer = NULL;
res = surface->pVtbl->Duplicate(surface, surface->pVtbl->GetMemoryType(surface), &cropped_buffer);
AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR(ENOMEM), "Duplicate() failed with error %d\n", res);
surface->pVtbl->Release(surface);
surface = (AMFSurface*)cropped_buffer;
}
else
surface->pVtbl->SetCrop(surface, (amf_int32)crop_x, (amf_int32)crop_y, (amf_int32)crop_w, (amf_int32)crop_h);
}
else
surface->pVtbl->SetCrop(surface, (amf_int32)crop_x, (amf_int32)crop_y, (amf_int32)crop_w, (amf_int32)crop_h);
}
else if (hw_surface) {
// input HW surfaces can be vertically aligned by 16; tell AMF the real size
surface->pVtbl->SetCrop(surface, 0, 0, frame->width, frame->height);
}
surface->pVtbl->SetPts(surface, frame->pts);
*ppSurface = surface;
return 0;
}
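
The wrapping code above also accepts frames coming straight from a D3D11VA or DXVA2 decoder, so the filters can be fed GPU surfaces without a round trip through system memory. Purely as an illustration (device-selection flags such as -filter_hw_device may be needed depending on the setup), a zero-copy D3D11 path could look like:

    ffmpeg -hwaccel d3d11va -hwaccel_output_format d3d11 -i input.mp4 -vf "vpp_amf=w=1920:h=1080" -c:v h264_amf output.mp4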

libavfilter/vf_amf_common.h (new file)
@@ -0,0 +1,73 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVFILTER_AMF_COMMON_H
#define AVFILTER_AMF_COMMON_H
#include "avfilter.h"
#include "AMF/core/Surface.h"
#include "AMF/components/Component.h"
#include "libavutil/hwcontext_amf.h"
typedef struct AMFFilterContext {
const AVClass *class;
int width, height;
enum AVPixelFormat format;
int scale_type;
int color_profile;
int color_range;
int primaries;
int trc;
int fill;
int fill_color;
int keep_ratio;
// HQScaler properties
int algorithm;
float sharpness;
char *w_expr;
char *h_expr;
char *format_str;
int force_original_aspect_ratio;
int force_divisible_by;
AMFComponent *component;
AVBufferRef *amf_device_ref;
AVBufferRef *hwframes_in_ref;
AVBufferRef *hwframes_out_ref;
AVBufferRef *hwdevice_ref;
AVAMFDeviceContext *amf_device_ctx;
int local_context;
} AMFFilterContext;
int amf_filter_init(AVFilterContext *avctx);
void amf_filter_uninit(AVFilterContext *avctx);
int amf_init_filter_config(AVFilterLink *outlink, enum AVPixelFormat *in_format);
int amf_copy_surface(AVFilterContext *avctx, const AVFrame *frame, AMFSurface* surface);
void amf_free_amfsurface(void *opaque, uint8_t *data);
AVFrame *amf_amfsurface_to_avframe(AVFilterContext *avctx, AMFSurface* pSurface);
int amf_avframe_to_amfsurface(AVFilterContext *avctx, const AVFrame *frame, AMFSurface** ppSurface);
int amf_setup_input_output_formats(AVFilterContext *avctx, const enum AVPixelFormat *input_pix_fmts, const enum AVPixelFormat *output_pix_fmts);
int amf_filter_filter_frame(AVFilterLink *inlink, AVFrame *in);
#endif /* AVFILTER_AMF_COMMON_H */
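
For orientation, here is a minimal, hypothetical skeleton of a filter built on the helpers above; vf_sr_amf.c and vf_vpp_amf.c below follow this shape. The "example" names are placeholders, not part of this commit, and component creation is only sketched in comments:

    #include "avfilter.h"
    #include "video.h"
    #include "vf_amf_common.h"

    static int example_config_output(AVFilterLink *outlink)
    {
        AMFFilterContext *ctx = outlink->src->priv;
        enum AVPixelFormat in_format;
        int err = amf_init_filter_config(outlink, &in_format); // derive/create AMF device, set up output frames ctx
        if (err < 0)
            return err;
        // Create the AMF component on ctx->amf_device_ctx->factory, set its properties,
        // then Init() it with av_av_to_amf_format(in_format) and the input dimensions.
        return 0;
    }

    static const AVFilterPad example_inputs[] = {
        { .name = "default", .type = AVMEDIA_TYPE_VIDEO, .filter_frame = amf_filter_filter_frame },
    };

    static const AVFilterPad example_outputs[] = {
        { .name = "default", .type = AVMEDIA_TYPE_VIDEO, .config_props = example_config_output },
    };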

libavfilter/vf_sr_amf.c (new file)
@@ -0,0 +1,185 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* Super resolution video filter with AMF hardware acceleration
*/
#include <stdio.h>
#include <string.h>
#include "libavutil/avassert.h"
#include "libavutil/imgutils.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "libavutil/time.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_amf.h"
#include "libavutil/hwcontext_amf_internal.h"
#include "AMF/components/HQScaler.h"
#include "AMF/components/ColorSpace.h"
#include "vf_amf_common.h"
#include "avfilter.h"
#include "avfilter_internal.h"
#include "formats.h"
#include "video.h"
#if CONFIG_DXVA2
#include <d3d9.h>
#endif
#if CONFIG_D3D11VA
#include <d3d11.h>
#endif
static int amf_filter_query_formats(AVFilterContext *avctx)
{
const enum AVPixelFormat *output_pix_fmts;
static const enum AVPixelFormat input_pix_fmts[] = {
AV_PIX_FMT_NV12,
AV_PIX_FMT_P010,
AV_PIX_FMT_BGRA,
AV_PIX_FMT_RGBA,
AV_PIX_FMT_AMF_SURFACE,
AV_PIX_FMT_RGBAF16,
AV_PIX_FMT_NONE,
};
static const enum AVPixelFormat output_pix_fmts_default[] = {
AV_PIX_FMT_NV12,
AV_PIX_FMT_P010,
AV_PIX_FMT_BGRA,
AV_PIX_FMT_RGBA,
AV_PIX_FMT_AMF_SURFACE,
AV_PIX_FMT_D3D11,
AV_PIX_FMT_DXVA2_VLD,
AV_PIX_FMT_RGBAF16,
AV_PIX_FMT_NONE,
};
output_pix_fmts = output_pix_fmts_default;
return amf_setup_input_output_formats(avctx, input_pix_fmts, output_pix_fmts);
}
static int amf_filter_config_output(AVFilterLink *outlink)
{
AVFilterContext *avctx = outlink->src;
AVFilterLink *inlink = avctx->inputs[0];
AMFFilterContext *ctx = avctx->priv;
AMFSize out_size;
int err;
AMF_RESULT res;
enum AVPixelFormat in_format;
err = amf_init_filter_config(outlink, &in_format);
if (err < 0)
return err;
// HQ scaler should be used for upscaling only
if (inlink->w > outlink->w || inlink->h > outlink->h) {
av_log(avctx, AV_LOG_ERROR, "AMF HQ scaler should be used for upscaling only.\n");
return AVERROR_UNKNOWN;
}
// FIXME: add checks whether we have HW context
res = ctx->amf_device_ctx->factory->pVtbl->CreateComponent(ctx->amf_device_ctx->factory, ctx->amf_device_ctx->context, AMFHQScaler, &ctx->component);
AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_FILTER_NOT_FOUND, "CreateComponent(%ls) failed with error %d\n", AMFHQScaler, res);
out_size.width = outlink->w;
out_size.height = outlink->h;
AMF_ASSIGN_PROPERTY_SIZE(res, ctx->component, AMF_HQ_SCALER_OUTPUT_SIZE, out_size);
AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR_UNKNOWN, "AMFHQScaler-SetProperty() failed with error %d\n", res);
if (ctx->algorithm != -1) {
AMF_ASSIGN_PROPERTY_INT64(res, ctx->component, AMF_HQ_SCALER_ALGORITHM, ctx->algorithm);
}
if (ctx->sharpness != -1) {
AMF_ASSIGN_PROPERTY_DOUBLE(res, ctx->component, AMF_HQ_SCALER_SHARPNESS, ctx->sharpness);
}
AMF_ASSIGN_PROPERTY_BOOL(res, ctx->component, AMF_HQ_SCALER_FILL, ctx->fill);
AMF_ASSIGN_PROPERTY_BOOL(res, ctx->component, AMF_HQ_SCALER_KEEP_ASPECT_RATIO, ctx->keep_ratio);
// Setup default options to skip color conversion
ctx->color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN;
ctx->color_range = AMF_COLOR_RANGE_UNDEFINED;
ctx->primaries = AMF_COLOR_PRIMARIES_UNDEFINED;
ctx->trc = AMF_COLOR_TRANSFER_CHARACTERISTIC_UNDEFINED;
res = ctx->component->pVtbl->Init(ctx->component, av_av_to_amf_format(in_format), inlink->w, inlink->h);
AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR_UNKNOWN, "AMFHQScaler-Init() failed with error %d\n", res);
return 0;
}
#define OFFSET(x) offsetof(AMFFilterContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
static const AVOption sr_amf_options[] = {
{ "w", "Output video width", OFFSET(w_expr), AV_OPT_TYPE_STRING, { .str = "iw" }, .flags = FLAGS },
{ "h", "Output video height", OFFSET(h_expr), AV_OPT_TYPE_STRING, { .str = "ih" }, .flags = FLAGS },
{ "format", "Output pixel format", OFFSET(format_str), AV_OPT_TYPE_STRING, { .str = "same" }, .flags = FLAGS },
{ "sharpness", "Sharpness", OFFSET(sharpness), AV_OPT_TYPE_FLOAT, { .dbl = -1 }, -1, 2., FLAGS, "sharpness" },
{ "keep-ratio", "Keep aspect ratio", OFFSET(keep_ratio), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS, "keep_ration" },
{ "fill", "Fill", OFFSET(fill), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS, "fill" },
{ "algorithm", "Scaling algorithm", OFFSET(algorithm), AV_OPT_TYPE_INT, { .i64 = -1 }, -1, AMF_HQ_SCALER_ALGORITHM_VIDEOSR1_1, FLAGS, "algorithm" },
{ "bilinear", "Bilinear", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_HQ_SCALER_ALGORITHM_BILINEAR }, 0, 0, FLAGS, "algorithm" },
{ "bicubic", "Bicubic", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_HQ_SCALER_ALGORITHM_BICUBIC }, 0, 0, FLAGS, "algorithm" },
{ "sr1-0", "Video SR1.0", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_HQ_SCALER_ALGORITHM_VIDEOSR1_0 }, 0, 0, FLAGS, "algorithm" },
{ "point", "Point", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_HQ_SCALER_ALGORITHM_POINT }, 0, 0, FLAGS, "algorithm" },
{ "sr1-1", "Video SR1.1", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_HQ_SCALER_ALGORITHM_VIDEOSR1_1 }, 0, 0, FLAGS, "algorithm" },
{ NULL },
};
AVFILTER_DEFINE_CLASS(sr_amf);
static const AVFilterPad amf_filter_inputs[] = {
{
.name = "default",
.type = AVMEDIA_TYPE_VIDEO,
.filter_frame = amf_filter_filter_frame,
}
};
static const AVFilterPad amf_filter_outputs[] = {
{
.name = "default",
.type = AVMEDIA_TYPE_VIDEO,
.config_props = amf_filter_config_output,
}
};
FFFilter ff_vf_sr_amf = {
.p.name = "sr_amf",
.p.description = NULL_IF_CONFIG_SMALL("AMF HQ video upscaling"),
.p.priv_class = &sr_amf_class,
.p.flags = AVFILTER_FLAG_HWDEVICE,
.priv_size = sizeof(AMFFilterContext),
.init = amf_filter_init,
.uninit = amf_filter_uninit,
FILTER_INPUTS(amf_filter_inputs),
FILTER_OUTPUTS(amf_filter_outputs),
FILTER_QUERY_FUNC(amf_filter_query_formats),
.flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
};
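
As a usage sketch for the public libavfilter API (not part of this commit), the filter can also be instantiated programmatically; buffer source/sink setup, graph linking and hardware device attachment are omitted, and the instance name "upscale" and option values are placeholders:

    #include <libavfilter/avfilter.h>
    #include <libavutil/error.h>

    static int create_sr_amf(AVFilterGraph *graph, AVFilterContext **out_ctx)
    {
        const AVFilter *sr = avfilter_get_by_name("sr_amf");
        if (!sr)
            return AVERROR_FILTER_NOT_FOUND; // filter not compiled in (requires --enable-amf)
        // 2x upscale with the VideoSR1.1 algorithm and mild sharpening
        return avfilter_graph_create_filter(out_ctx, sr, "upscale",
                                            "w=iw*2:h=ih*2:algorithm=sr1-1:sharpness=0.5",
                                            NULL, graph);
    }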

libavfilter/vf_vpp_amf.c (new file)
@@ -0,0 +1,259 @@
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* VPP video filter with AMF hardware acceleration
*/
#include <stdio.h>
#include <string.h>
#include "libavutil/avassert.h"
#include "libavutil/imgutils.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "libavutil/time.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_amf.h"
#include "libavutil/hwcontext_amf_internal.h"
#include "AMF/components/VideoConverter.h"
#include "vf_amf_common.h"
#include "avfilter.h"
#include "formats.h"
#include "video.h"
#include "scale_eval.h"
#include "avfilter_internal.h"
#if CONFIG_DXVA2
#include <d3d9.h>
#endif
#if CONFIG_D3D11VA
#include <d3d11.h>
#endif
static int amf_filter_query_formats(AVFilterContext *avctx)
{
const enum AVPixelFormat *output_pix_fmts;
static const enum AVPixelFormat input_pix_fmts[] = {
AV_PIX_FMT_AMF_SURFACE,
AV_PIX_FMT_NV12,
AV_PIX_FMT_P010,
AV_PIX_FMT_0RGB,
AV_PIX_FMT_BGR0,
AV_PIX_FMT_BGRA,
AV_PIX_FMT_RGB0,
AV_PIX_FMT_RGBA,
AV_PIX_FMT_GRAY8,
AV_PIX_FMT_YUV420P,
AV_PIX_FMT_YUV420P10,
AV_PIX_FMT_YUYV422,
AV_PIX_FMT_NONE,
};
static const enum AVPixelFormat output_pix_fmts_default[] = {
AV_PIX_FMT_AMF_SURFACE,
AV_PIX_FMT_D3D11,
AV_PIX_FMT_DXVA2_VLD,
AV_PIX_FMT_NV12,
AV_PIX_FMT_BGRA,
AV_PIX_FMT_YUV420P,
AV_PIX_FMT_NONE,
};
output_pix_fmts = output_pix_fmts_default;
return amf_setup_input_output_formats(avctx, input_pix_fmts, output_pix_fmts);
}
static int amf_filter_config_output(AVFilterLink *outlink)
{
AVFilterContext *avctx = outlink->src;
AVFilterLink *inlink = avctx->inputs[0];
AMFFilterContext *ctx = avctx->priv;
AVHWFramesContext *hwframes_out = NULL;
AMFSize out_size;
int err;
AMF_RESULT res;
enum AMF_VIDEO_CONVERTER_COLOR_PROFILE_ENUM amf_color_profile;
enum AVPixelFormat in_format;
err = amf_init_filter_config(outlink, &in_format);
if (err < 0)
return err;
// FIXME: add checks whether we have HW context
hwframes_out = (AVHWFramesContext*)ctx->hwframes_out_ref->data;
res = ctx->amf_device_ctx->factory->pVtbl->CreateComponent(ctx->amf_device_ctx->factory, ctx->amf_device_ctx->context, AMFVideoConverter, &ctx->component);
AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_FILTER_NOT_FOUND, "CreateComponent(%ls) failed with error %d\n", AMFVideoConverter, res);
// FIXME: add checks whether we have HW context
AMF_ASSIGN_PROPERTY_INT64(res, ctx->component, AMF_VIDEO_CONVERTER_OUTPUT_FORMAT, (amf_int32)av_av_to_amf_format(hwframes_out->sw_format));
AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR_UNKNOWN, "AMFConverter-SetProperty() failed with error %d\n", res);
out_size.width = outlink->w;
out_size.height = outlink->h;
AMF_ASSIGN_PROPERTY_SIZE(res, ctx->component, AMF_VIDEO_CONVERTER_OUTPUT_SIZE, out_size);
AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR_UNKNOWN, "AMFConverter-SetProperty() failed with error %d\n", res);
AMF_ASSIGN_PROPERTY_INT64(res, ctx->component, AMF_VIDEO_CONVERTER_SCALE, (amf_int32)ctx->scale_type);
AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR_UNKNOWN, "AMFConverter-SetProperty() failed with error %d\n", res);
amf_color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN;
switch(ctx->color_profile) {
case AMF_VIDEO_CONVERTER_COLOR_PROFILE_601:
if (ctx->color_range == AMF_COLOR_RANGE_FULL) {
amf_color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_601;
} else {
amf_color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_601;
}
break;
case AMF_VIDEO_CONVERTER_COLOR_PROFILE_709:
if (ctx->color_range == AMF_COLOR_RANGE_FULL) {
amf_color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_709;
} else {
amf_color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_709;
}
break;
case AMF_VIDEO_CONVERTER_COLOR_PROFILE_2020:
if (ctx->color_range == AMF_COLOR_RANGE_FULL) {
amf_color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_2020;
} else {
amf_color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_2020;
}
break;
default:
amf_color_profile = AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN;
break;
}
if (amf_color_profile != AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN) {
AMF_ASSIGN_PROPERTY_INT64(res, ctx->component, AMF_VIDEO_CONVERTER_COLOR_PROFILE, amf_color_profile);
}
if (ctx->color_range != AMF_COLOR_RANGE_UNDEFINED) {
AMF_ASSIGN_PROPERTY_INT64(res, ctx->component, AMF_VIDEO_CONVERTER_OUTPUT_COLOR_RANGE, ctx->color_range);
}
if (ctx->primaries != AMF_COLOR_PRIMARIES_UNDEFINED) {
AMF_ASSIGN_PROPERTY_INT64(res, ctx->component, AMF_VIDEO_CONVERTER_OUTPUT_COLOR_PRIMARIES, ctx->primaries);
}
if (ctx->trc != AMF_COLOR_TRANSFER_CHARACTERISTIC_UNDEFINED) {
AMF_ASSIGN_PROPERTY_INT64(res, ctx->component, AMF_VIDEO_CONVERTER_OUTPUT_TRANSFER_CHARACTERISTIC, ctx->trc);
}
res = ctx->component->pVtbl->Init(ctx->component, av_av_to_amf_format(in_format), inlink->w, inlink->h);
AMF_RETURN_IF_FALSE(avctx, res == AMF_OK, AVERROR_UNKNOWN, "AMFConverter-Init() failed with error %d\n", res);
return 0;
}
#define OFFSET(x) offsetof(AMFFilterContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
static const AVOption vpp_amf_options[] = {
{ "w", "Output video width", OFFSET(w_expr), AV_OPT_TYPE_STRING, { .str = "iw" }, .flags = FLAGS },
{ "h", "Output video height", OFFSET(h_expr), AV_OPT_TYPE_STRING, { .str = "ih" }, .flags = FLAGS },
{ "format", "Output pixel format", OFFSET(format_str), AV_OPT_TYPE_STRING, { .str = "same" }, .flags = FLAGS },
{ "scale_type", "Scale type", OFFSET(scale_type), AV_OPT_TYPE_INT, { .i64 = AMF_VIDEO_CONVERTER_SCALE_BILINEAR }, AMF_VIDEO_CONVERTER_SCALE_BILINEAR, AMF_VIDEO_CONVERTER_SCALE_BICUBIC, FLAGS, "scale_type" },
{ "bilinear", "Bilinear", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_VIDEO_CONVERTER_SCALE_BILINEAR }, 0, 0, FLAGS, "scale_type" },
{ "bicubic", "Bicubic", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_VIDEO_CONVERTER_SCALE_BICUBIC }, 0, 0, FLAGS, "scale_type" },
{ "color_profile", "Color profile", OFFSET(color_profile), AV_OPT_TYPE_INT, { .i64 = AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN }, AMF_VIDEO_CONVERTER_COLOR_PROFILE_UNKNOWN, AMF_VIDEO_CONVERTER_COLOR_PROFILE_FULL_2020, FLAGS, "color_profile" },
{ "bt601", "BT.601", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_VIDEO_CONVERTER_COLOR_PROFILE_601 }, 0, 0, FLAGS, "color_profile" },
{ "bt709", "BT.709", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_VIDEO_CONVERTER_COLOR_PROFILE_709 }, 0, 0, FLAGS, "color_profile" },
{ "bt2020", "BT.2020", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_VIDEO_CONVERTER_COLOR_PROFILE_2020 }, 0, 0, FLAGS, "color_profile" },
{ "color_range", "Color range", OFFSET(color_range), AV_OPT_TYPE_INT, { .i64 = AMF_COLOR_RANGE_UNDEFINED }, AMF_COLOR_RANGE_UNDEFINED, AMF_COLOR_RANGE_FULL, FLAGS, "color_range" },
{ "studio", "Studio", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_RANGE_STUDIO }, 0, 0, FLAGS, "color_range" },
{ "full", "Full", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_RANGE_FULL }, 0, 0, FLAGS, "color_range" },
{ "primaries", "Output color primaries", OFFSET(primaries), AV_OPT_TYPE_INT, { .i64 = AMF_COLOR_PRIMARIES_UNDEFINED }, AMF_COLOR_PRIMARIES_UNDEFINED, AMF_COLOR_PRIMARIES_JEDEC_P22, FLAGS, "primaries" },
{ "bt709", "BT.709", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_PRIMARIES_BT709 }, 0, 0, FLAGS, "primaries" },
{ "bt470m", "BT.470M", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_PRIMARIES_BT470M }, 0, 0, FLAGS, "primaries" },
{ "bt470bg", "BT.470BG", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_PRIMARIES_BT470BG }, 0, 0, FLAGS, "primaries" },
{ "smpte170m", "SMPTE170M", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_PRIMARIES_SMPTE170M }, 0, 0, FLAGS, "primaries" },
{ "smpte240m", "SMPTE240M", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_PRIMARIES_SMPTE240M }, 0, 0, FLAGS, "primaries" },
{ "film", "FILM", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_PRIMARIES_FILM }, 0, 0, FLAGS, "primaries" },
{ "bt2020", "BT2020", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_PRIMARIES_BT2020 }, 0, 0, FLAGS, "primaries" },
{ "smpte428", "SMPTE428", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_PRIMARIES_SMPTE428 }, 0, 0, FLAGS, "primaries" },
{ "smpte431", "SMPTE431", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_PRIMARIES_SMPTE431 }, 0, 0, FLAGS, "primaries" },
{ "smpte432", "SMPTE432", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_PRIMARIES_SMPTE432 }, 0, 0, FLAGS, "primaries" },
{ "jedec-p22", "JEDEC_P22", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_PRIMARIES_JEDEC_P22 }, 0, 0, FLAGS, "primaries" },
{ "trc", "Output transfer characteristics", OFFSET(trc), AV_OPT_TYPE_INT, { .i64 = AMF_COLOR_TRANSFER_CHARACTERISTIC_UNDEFINED }, AMF_COLOR_TRANSFER_CHARACTERISTIC_UNDEFINED, AMF_COLOR_TRANSFER_CHARACTERISTIC_ARIB_STD_B67, FLAGS, "trc" },
{ "bt709", "BT.709", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_TRANSFER_CHARACTERISTIC_BT709 }, 0, 0, FLAGS, "trc" },
{ "gamma22", "GAMMA22", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_TRANSFER_CHARACTERISTIC_GAMMA22 }, 0, 0, FLAGS, "trc" },
{ "gamma28", "GAMMA28", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_TRANSFER_CHARACTERISTIC_GAMMA28 }, 0, 0, FLAGS, "trc" },
{ "smpte170m", "SMPTE170M", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_TRANSFER_CHARACTERISTIC_SMPTE170M }, 0, 0, FLAGS, "trc" },
{ "smpte240m", "SMPTE240M", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_TRANSFER_CHARACTERISTIC_SMPTE240M }, 0, 0, FLAGS, "trc" },
{ "linear", "Linear", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_TRANSFER_CHARACTERISTIC_LINEAR }, 0, 0, FLAGS, "trc" },
{ "log", "LOG", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_TRANSFER_CHARACTERISTIC_LOG }, 0, 0, FLAGS, "trc" },
{ "log-sqrt", "LOG_SQRT", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_TRANSFER_CHARACTERISTIC_LOG_SQRT }, 0, 0, FLAGS, "trc" },
{ "iec61966-2-4", "IEC61966_2_4", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_TRANSFER_CHARACTERISTIC_IEC61966_2_4 }, 0, 0, FLAGS, "trc" },
{ "bt1361-ecg", "BT1361_ECG", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_TRANSFER_CHARACTERISTIC_BT1361_ECG }, 0, 0, FLAGS, "trc" },
{ "iec61966-2-1", "IEC61966_2_1", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_TRANSFER_CHARACTERISTIC_IEC61966_2_1 }, 0, 0, FLAGS, "trc" },
{ "bt2020-10", "BT.2020_10", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_TRANSFER_CHARACTERISTIC_BT2020_10 }, 0, 0, FLAGS, "trc" },
{ "bt2020-12", "BT.2020-12", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_TRANSFER_CHARACTERISTIC_BT2020_12 }, 0, 0, FLAGS, "trc" },
{ "smpte2084", "SMPTE2084", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_TRANSFER_CHARACTERISTIC_SMPTE2084 }, 0, 0, FLAGS, "trc" },
{ "smpte428", "SMPTE428", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_TRANSFER_CHARACTERISTIC_SMPTE428 }, 0, 0, FLAGS, "trc" },
{ "arib-std-b67", "ARIB_STD_B67", 0, AV_OPT_TYPE_CONST, { .i64 = AMF_COLOR_TRANSFER_CHARACTERISTIC_ARIB_STD_B67 }, 0, 0, FLAGS, "trc" },
{ "force_original_aspect_ratio", "decrease or increase w/h if necessary to keep the original AR", OFFSET(force_original_aspect_ratio), AV_OPT_TYPE_INT, { .i64 = 0}, 0, 2, FLAGS, "force_oar" },
{ "disable", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 0 }, 0, 0, FLAGS, "force_oar" },
{ "decrease", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 1 }, 0, 0, FLAGS, "force_oar" },
{ "increase", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = 2 }, 0, 0, FLAGS, "force_oar" },
{ "force_divisible_by", "enforce that the output resolution is divisible by a defined integer when force_original_aspect_ratio is used", OFFSET(force_divisible_by), AV_OPT_TYPE_INT, { .i64 = 1}, 1, 256, FLAGS },
{ NULL },
};
AVFILTER_DEFINE_CLASS(vpp_amf);
static const AVFilterPad amf_filter_inputs[] = {
{
.name = "default",
.type = AVMEDIA_TYPE_VIDEO,
.filter_frame = amf_filter_filter_frame,
}
};
static const AVFilterPad amf_filter_outputs[] = {
{
.name = "default",
.type = AVMEDIA_TYPE_VIDEO,
.config_props = amf_filter_config_output,
}
};
FFFilter ff_vf_vpp_amf = {
.p.name = "vpp_amf",
.p.description = NULL_IF_CONFIG_SMALL("AMF video scaling and format conversion"),
.p.priv_class = &vpp_amf_class,
.p.flags = AVFILTER_FLAG_HWDEVICE,
.priv_size = sizeof(AMFFilterContext),
.init = amf_filter_init,
.uninit = amf_filter_uninit,
FILTER_INPUTS(amf_filter_inputs),
FILTER_OUTPUTS(amf_filter_outputs),
FILTER_QUERY_FUNC(amf_filter_query_formats),
.flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
};
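
Beyond scaling, the option table above exposes the converter's colorimetry controls. An illustrative invocation that requests NV12 output tagged as BT.709 with full range (values are placeholders; width and height default to the input size):

    ffmpeg -i input.mp4 -vf "vpp_amf=format=nv12:color_profile=bt709:color_range=full" -c:v h264_amf output.mp4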