avcodec: add MediaCodec encoder

Signed-off-by: Zhao Zhili <zhilizhao@tencent.com>
parent 3a59446967
commit 0ff18a7d6d
  1. Changelog (1 line changed)
  2. configure (4 lines changed)
  3. libavcodec/Makefile (2 lines changed)
  4. libavcodec/allcodecs.c (2 lines changed)
  5. libavcodec/mediacodec_wrapper.c (101 lines changed)
  6. libavcodec/mediacodec_wrapper.h (8 lines changed)
  7. libavcodec/mediacodecenc.c (504 lines changed)
  8. libavcodec/version.h (2 lines changed)

@@ -21,6 +21,7 @@ version <next>:
- backgroundkey filter
- nvenc AV1 encoding support
- MediaCodec decoder via NDKMediaCodec
- MediaCodec encoder
version 5.1:


@@ -3183,6 +3183,8 @@ h264_cuvid_decoder_select="h264_mp4toannexb_bsf"
h264_mediacodec_decoder_deps="mediacodec"
h264_mediacodec_decoder_extralibs="-landroid"
h264_mediacodec_decoder_select="h264_mp4toannexb_bsf h264_parser"
h264_mediacodec_encoder_deps="mediacodec"
h264_mediacodec_encoder_extralibs="-landroid"
h264_mf_encoder_deps="mediafoundation"
h264_mmal_decoder_deps="mmal"
h264_nvenc_encoder_deps="nvenc"
@@ -3202,6 +3204,8 @@ hevc_cuvid_decoder_select="hevc_mp4toannexb_bsf"
hevc_mediacodec_decoder_deps="mediacodec"
hevc_mediacodec_decoder_extralibs="-landroid"
hevc_mediacodec_decoder_select="hevc_mp4toannexb_bsf hevc_parser"
hevc_mediacodec_encoder_deps="mediacodec"
hevc_mediacodec_encoder_extralibs="-landroid"
hevc_mf_encoder_deps="mediafoundation"
hevc_nvenc_encoder_deps="nvenc"
hevc_nvenc_encoder_select="atsc_a53"

@@ -396,6 +396,7 @@ OBJS-$(CONFIG_H264_DECODER) += h264dec.o h264_cabac.o h264_cavlc.o \
OBJS-$(CONFIG_H264_AMF_ENCODER) += amfenc_h264.o
OBJS-$(CONFIG_H264_CUVID_DECODER) += cuviddec.o
OBJS-$(CONFIG_H264_MEDIACODEC_DECODER) += mediacodecdec.o
OBJS-$(CONFIG_H264_MEDIACODEC_ENCODER) += mediacodecenc.o
OBJS-$(CONFIG_H264_MF_ENCODER) += mfenc.o mf_utils.o
OBJS-$(CONFIG_H264_MMAL_DECODER) += mmaldec.o
OBJS-$(CONFIG_H264_NVENC_ENCODER) += nvenc_h264.o nvenc.o
@@ -420,6 +421,7 @@ OBJS-$(CONFIG_HEVC_DECODER) += hevcdec.o hevc_mvs.o \
OBJS-$(CONFIG_HEVC_AMF_ENCODER) += amfenc_hevc.o
OBJS-$(CONFIG_HEVC_CUVID_DECODER) += cuviddec.o
OBJS-$(CONFIG_HEVC_MEDIACODEC_DECODER) += mediacodecdec.o
OBJS-$(CONFIG_HEVC_MEDIACODEC_ENCODER) += mediacodecenc.o
OBJS-$(CONFIG_HEVC_MF_ENCODER) += mfenc.o mf_utils.o
OBJS-$(CONFIG_HEVC_NVENC_ENCODER) += nvenc_hevc.o nvenc.o
OBJS-$(CONFIG_HEVC_QSV_DECODER) += qsvdec.o

@@ -154,6 +154,7 @@ extern const FFCodec ff_h264_decoder;
extern const FFCodec ff_h264_crystalhd_decoder;
extern const FFCodec ff_h264_v4l2m2m_decoder;
extern const FFCodec ff_h264_mediacodec_decoder;
extern const FFCodec ff_h264_mediacodec_encoder;
extern const FFCodec ff_h264_mmal_decoder;
extern const FFCodec ff_h264_qsv_decoder;
extern const FFCodec ff_h264_rkmpp_decoder;
@@ -844,6 +845,7 @@ extern const FFCodec ff_h264_videotoolbox_encoder;
extern const FFCodec ff_hevc_amf_encoder;
extern const FFCodec ff_hevc_cuvid_decoder;
extern const FFCodec ff_hevc_mediacodec_decoder;
extern const FFCodec ff_hevc_mediacodec_encoder;
extern const FFCodec ff_hevc_mf_encoder;
extern const FFCodec ff_hevc_nvenc_encoder;
extern const FFCodec ff_hevc_qsv_encoder;

@@ -212,6 +212,9 @@ struct JNIAMediaCodecFields {
jmethodID release_output_buffer_id;
jmethodID release_output_buffer_at_time_id;
jmethodID set_input_surface_id;
jmethodID signal_end_of_input_stream_id;
jclass mediainfo_class;
jmethodID init_id;
@@ -261,6 +264,9 @@ static const struct FFJniField jni_amediacodec_mapping[] = {
{ "android/media/MediaCodec", "releaseOutputBuffer", "(IZ)V", FF_JNI_METHOD, offsetof(struct JNIAMediaCodecFields, release_output_buffer_id), 1 },
{ "android/media/MediaCodec", "releaseOutputBuffer", "(IJ)V", FF_JNI_METHOD, offsetof(struct JNIAMediaCodecFields, release_output_buffer_at_time_id), 0 },
{ "android/media/MediaCodec", "setInputSurface", "(Landroid/view/Surface;)V", FF_JNI_METHOD, offsetof(struct JNIAMediaCodecFields, set_input_surface_id), 0 },
{ "android/media/MediaCodec", "signalEndOfInputStream", "()V", FF_JNI_METHOD, offsetof(struct JNIAMediaCodecFields, signal_end_of_input_stream_id), 0 },
{ "android/media/MediaCodec$BufferInfo", NULL, NULL, FF_JNI_CLASS, offsetof(struct JNIAMediaCodecFields, mediainfo_class), 1 },
{ "android/media/MediaCodec.BufferInfo", "<init>", "()V", FF_JNI_METHOD, offsetof(struct JNIAMediaCodecFields, init_id), 1 },
@@ -1385,7 +1391,26 @@ static int mediacodec_jni_configure(FFAMediaCodec *ctx,
JNI_GET_ENV_OR_RETURN(env, codec, AVERROR_EXTERNAL);
(*env)->CallVoidMethod(env, codec->object, codec->jfields.configure_id, format->object, surface, NULL, flags);
if (flags & codec->CONFIGURE_FLAG_ENCODE) {
if (surface && !codec->jfields.set_input_surface_id) {
av_log(ctx, AV_LOG_ERROR, "System doesn't support setInputSurface\n");
return AVERROR_EXTERNAL;
}
(*env)->CallVoidMethod(env, codec->object, codec->jfields.configure_id, format->object, NULL, NULL, flags);
if (ff_jni_exception_check(env, 1, codec) < 0)
return AVERROR_EXTERNAL;
if (!surface)
return 0;
(*env)->CallVoidMethod(env, codec->object, codec->jfields.set_input_surface_id, surface);
if (ff_jni_exception_check(env, 1, codec) < 0)
return AVERROR_EXTERNAL;
return 0;
} else {
(*env)->CallVoidMethod(env, codec->object, codec->jfields.configure_id, format->object, surface, NULL, flags);
}
if (ff_jni_exception_check(env, 1, codec) < 0) {
ret = AVERROR_EXTERNAL;
goto fail;
@@ -1743,6 +1768,21 @@ fail:
return ret;
}
static int mediacodec_jni_signalEndOfInputStream(FFAMediaCodec *ctx)
{
JNIEnv *env = NULL;
FFAMediaCodecJni *codec = (FFAMediaCodecJni *)ctx;
JNI_GET_ENV_OR_RETURN(env, codec, AVERROR_EXTERNAL);
(*env)->CallVoidMethod(env, codec->object, codec->jfields.signal_end_of_input_stream_id);
if (ff_jni_exception_check(env, 1, codec) < 0) {
return AVERROR_EXTERNAL;
}
return 0;
}
static const FFAMediaFormat media_format_jni = {
.class = &amediaformat_class,
@@ -1801,6 +1841,7 @@ static const FFAMediaCodec media_codec_jni = {
.getConfigureFlagEncode = mediacodec_jni_getConfigureFlagEncode,
.cleanOutputBuffers = mediacodec_jni_cleanOutputBuffers,
.signalEndOfInputStream = mediacodec_jni_signalEndOfInputStream,
};
typedef struct FFAMediaFormatNdk {
@@ -1866,6 +1907,10 @@ typedef struct FFAMediaCodecNdk {
// Available since API level 28.
media_status_t (*getName)(AMediaCodec*, char** out_name);
void (*releaseName)(AMediaCodec*, char* name);
// Available since API level 26.
media_status_t (*setInputSurface)(AMediaCodec*, ANativeWindow *);
media_status_t (*signalEndOfInputStream)(AMediaCodec *);
} FFAMediaCodecNdk;
static const FFAMediaFormat media_format_ndk;
@@ -2098,6 +2143,9 @@ static inline FFAMediaCodec *ndk_codec_create(int method, const char *arg) {
GET_SYMBOL(getName, 0)
GET_SYMBOL(releaseName, 0)
GET_SYMBOL(setInputSurface, 0)
GET_SYMBOL(signalEndOfInputStream, 0)
#undef GET_SYMBOL
switch (method) {
@@ -2184,10 +2232,32 @@ static int mediacodec_ndk_configure(FFAMediaCodec* ctx,
return AVERROR(EINVAL);
}
status = codec->configure(codec->impl, format->impl, native_window, NULL, flags);
if (status != AMEDIA_OK) {
av_log(codec, AV_LOG_ERROR, "configure failed, %d\n", status);
return AVERROR_EXTERNAL;
if (flags & AMEDIACODEC_CONFIGURE_FLAG_ENCODE) {
if (native_window && !codec->setInputSurface) {
av_log(ctx, AV_LOG_ERROR, "System doesn't support setInputSurface\n");
return AVERROR_EXTERNAL;
}
status = codec->configure(codec->impl, format->impl, NULL, NULL, flags);
if (status != AMEDIA_OK) {
av_log(codec, AV_LOG_ERROR, "Encoder configure failed, %d\n", status);
return AVERROR_EXTERNAL;
}
if (!native_window)
return 0;
status = codec->setInputSurface(codec->impl, native_window);
if (status != AMEDIA_OK) {
av_log(codec, AV_LOG_ERROR, "Encoder set input surface failed, %d\n", status);
return AVERROR_EXTERNAL;
}
} else {
status = codec->configure(codec->impl, format->impl, native_window, NULL, flags);
if (status != AMEDIA_OK) {
av_log(codec, AV_LOG_ERROR, "Decoder configure failed, %d\n", status);
return AVERROR_EXTERNAL;
}
}
return 0;
@@ -2330,6 +2400,26 @@ static int mediacodec_ndk_cleanOutputBuffers(FFAMediaCodec *ctx)
return 0;
}
static int mediacodec_ndk_signalEndOfInputStream(FFAMediaCodec *ctx)
{
FFAMediaCodecNdk *codec = (FFAMediaCodecNdk *)ctx;
media_status_t status;
if (!codec->signalEndOfInputStream) {
av_log(codec, AV_LOG_ERROR, "signalEndOfInputStream unavailable\n");
return AVERROR_EXTERNAL;
}
status = codec->signalEndOfInputStream(codec->impl);
if (status != AMEDIA_OK) {
av_log(codec, AV_LOG_ERROR, "signalEndOfInputStream failed, %d\n", status);
return AVERROR_EXTERNAL;
}
av_log(codec, AV_LOG_DEBUG, "signalEndOfInputStream success\n");
return 0;
}
static const FFAMediaFormat media_format_ndk = {
.class = &amediaformat_ndk_class,
@@ -2388,6 +2478,7 @@ static const FFAMediaCodec media_codec_ndk = {
.getConfigureFlagEncode = mediacodec_ndk_getConfigureFlagEncode,
.cleanOutputBuffers = mediacodec_ndk_cleanOutputBuffers,
.signalEndOfInputStream = mediacodec_ndk_signalEndOfInputStream,
};
FFAMediaFormat *ff_AMediaFormat_new(int ndk)
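For readers less familiar with the NDK API, the underlying call order that the NDK path above produces for an encoder fed through a Surface can be sketched as follows. This is an illustration only, not part of the patch: the helper name and its arguments are made up, and error handling is reduced to early returns.

#include <media/NdkMediaCodec.h>

static int configure_surface_encoder(AMediaCodec *codec, AMediaFormat *format,
                                     ANativeWindow *window)
{
    // The encoder is configured with a NULL surface first ...
    if (AMediaCodec_configure(codec, format, NULL, NULL,
                              AMEDIACODEC_CONFIGURE_FLAG_ENCODE) != AMEDIA_OK)
        return -1;
    // ... then the input Surface is attached separately (API level 26+).
    if (AMediaCodec_setInputSurface(codec, window) != AMEDIA_OK)
        return -1;
    if (AMediaCodec_start(codec) != AMEDIA_OK)
        return -1;
    // At end of stream no empty input buffer is queued; the caller invokes
    // AMediaCodec_signalEndOfInputStream(codec) instead (API level 26+).
    return 0;
}

Because setInputSurface and signalEndOfInputStream are looked up as optional symbols, the wrapper fails early with "System doesn't support setInputSurface" or "signalEndOfInputStream unavailable" on systems where they are missing instead of crashing.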

@@ -192,6 +192,9 @@ struct FFAMediaCodec {
int (*getConfigureFlagEncode)(FFAMediaCodec *codec);
int (*cleanOutputBuffers)(FFAMediaCodec *codec);
// For encoder with FFANativeWindow as input.
int (*signalEndOfInputStream)(FFAMediaCodec *);
};
static inline char *ff_AMediaCodec_getName(FFAMediaCodec *codec)
@@ -311,6 +314,11 @@ static inline int ff_AMediaCodec_cleanOutputBuffers(FFAMediaCodec *codec)
return codec->cleanOutputBuffers(codec);
}
static inline int ff_AMediaCodec_signalEndOfInputStream(FFAMediaCodec *codec)
{
return codec->signalEndOfInputStream(codec);
}
int ff_Build_SDK_INT(AVCodecContext *avctx);
#endif /* AVCODEC_MEDIACODEC_WRAPPER_H */
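The header change above adds signalEndOfInputStream as the wrapper's only new entry point; it is needed because a Surface-fed encoder has no input buffer to mark with the end-of-stream flag. A rough sketch of the two resulting EOF paths (illustrative only, mirroring mediacodec_send() in mediacodecenc.c further down; the helper name is made up and buf_index is assumed to have been dequeued already on the buffer path):

#include "mediacodec_wrapper.h"

static int send_eof(FFAMediaCodec *codec, int surface_input, ssize_t buf_index)
{
    if (surface_input)
        // Surface input: there is no accessible input buffer, so tell the codec directly.
        return ff_AMediaCodec_signalEndOfInputStream(codec);
    // Buffer input: queue an empty buffer carrying the end-of-stream flag.
    return ff_AMediaCodec_queueInputBuffer(codec, buf_index, 0, 0, 0,
               ff_AMediaCodec_getBufferFlagEndOfStream(codec));
}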

@@ -0,0 +1,504 @@
/*
* Android MediaCodec encoders
*
* Copyright (c) 2022 Zhao Zhili <zhilizhao@tencent.com>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "config_components.h"
#include "libavutil/avassert.h"
#include "libavutil/hwcontext_mediacodec.h"
#include "libavutil/imgutils.h"
#include "libavutil/opt.h"
#include "avcodec.h"
#include "codec_internal.h"
#include "encode.h"
#include "hwconfig.h"
#include "jni.h"
#include "mediacodec.h"
#include "mediacodec_wrapper.h"
#include "mediacodecdec_common.h"
#define INPUT_DEQUEUE_TIMEOUT_US 8000
#define OUTPUT_DEQUEUE_TIMEOUT_US 8000
typedef struct MediaCodecEncContext {
AVClass *avclass;
FFAMediaCodec *codec;
int use_ndk_codec;
FFANativeWindow *window;
int fps;
int width;
int height;
uint8_t *extradata;
int extradata_size;
// Since MediaCodec doesn't output DTS, use a timestamp queue to save pts
// of AVFrame and generate DTS for AVPacket.
//
// This doesn't work when a Surface is used as input; in that case frames can be
// sent to the encoder without our notice. One exception is frames that come from
// our MediaCodec decoder wrapper, since we can control their rendering via
// av_mediacodec_release_buffer.
int64_t timestamps[32];
int ts_head;
int ts_tail;
int eof_sent;
AVFrame *frame;
} MediaCodecEncContext;
enum {
COLOR_FormatYUV420Planar = 0x13,
COLOR_FormatYUV420SemiPlanar = 0x15,
COLOR_FormatSurface = 0x7F000789,
};
static const struct {
int color_format;
enum AVPixelFormat pix_fmt;
} color_formats[] = {
{ COLOR_FormatYUV420Planar, AV_PIX_FMT_YUV420P },
{ COLOR_FormatYUV420SemiPlanar, AV_PIX_FMT_NV12 },
{ COLOR_FormatSurface, AV_PIX_FMT_MEDIACODEC },
};
static const enum AVPixelFormat avc_pix_fmts[] = {
AV_PIX_FMT_MEDIACODEC,
AV_PIX_FMT_YUV420P,
AV_PIX_FMT_NV12,
AV_PIX_FMT_NONE
};
static void mediacodec_output_format(AVCodecContext *avctx)
{
MediaCodecEncContext *s = avctx->priv_data;
char *name = ff_AMediaCodec_getName(s->codec);
FFAMediaFormat *out_format = ff_AMediaCodec_getOutputFormat(s->codec);
char *str = ff_AMediaFormat_toString(out_format);
av_log(avctx, AV_LOG_DEBUG, "MediaCodec encoder %s output format %s\n",
name ? name : "unknown", str);
av_free(name);
av_free(str);
ff_AMediaFormat_delete(out_format);
}
static av_cold int mediacodec_init(AVCodecContext *avctx)
{
const char *codec_mime = NULL;
MediaCodecEncContext *s = avctx->priv_data;
FFAMediaFormat *format = NULL;
int ret;
int gop;
if (s->use_ndk_codec < 0)
s->use_ndk_codec = !av_jni_get_java_vm(avctx);
switch (avctx->codec_id) {
case AV_CODEC_ID_H264:
codec_mime = "video/avc";
break;
case AV_CODEC_ID_HEVC:
codec_mime = "video/hevc";
break;
default:
av_assert0(0);
}
s->codec = ff_AMediaCodec_createEncoderByType(codec_mime, s->use_ndk_codec);
if (!s->codec) {
av_log(avctx, AV_LOG_ERROR, "Failed to create encoder for type %s\n",
codec_mime);
return AVERROR_EXTERNAL;
}
format = ff_AMediaFormat_new(s->use_ndk_codec);
if (!format) {
av_log(avctx, AV_LOG_ERROR, "Failed to create media format\n");
return AVERROR_EXTERNAL;
}
ff_AMediaFormat_setString(format, "mime", codec_mime);
s->width = FFALIGN(avctx->width, 16);
s->height = avctx->height;
ff_AMediaFormat_setInt32(format, "width", s->width);
ff_AMediaFormat_setInt32(format, "height", s->height);
if (avctx->pix_fmt == AV_PIX_FMT_MEDIACODEC) {
AVMediaCodecContext *user_ctx = avctx->hwaccel_context;
if (avctx->hw_device_ctx) {
AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)(avctx->hw_device_ctx->data);
AVMediaCodecDeviceContext *dev_ctx;
if (device_ctx->type != AV_HWDEVICE_TYPE_MEDIACODEC || !device_ctx->hwctx) {
ret = AVERROR(EINVAL);
goto bailout;
}
dev_ctx = device_ctx->hwctx;
s->window = ff_mediacodec_surface_ref(dev_ctx->surface, dev_ctx->native_window, avctx);
}
if (!s->window && user_ctx && user_ctx->surface)
s->window = ff_mediacodec_surface_ref(user_ctx->surface, NULL, avctx);
if (!s->window) {
ret = AVERROR(EINVAL);
av_log(avctx, AV_LOG_ERROR, "Missing hw_device_ctx or hwaccel_context for AV_PIX_FMT_MEDIACODEC\n");
goto bailout;
}
}
for (int i = 0; i < FF_ARRAY_ELEMS(color_formats); i++) {
if (avctx->pix_fmt == color_formats[i].pix_fmt) {
ff_AMediaFormat_setInt32(format, "color-format",
color_formats[i].color_format);
break;
}
}
if (avctx->bit_rate)
ff_AMediaFormat_setInt32(format, "bitrate", avctx->bit_rate);
// frame-rate and i-frame-interval are required to configure the codec
if (avctx->framerate.num >= avctx->framerate.den && avctx->framerate.den > 0) {
s->fps = avctx->framerate.num / avctx->framerate.den;
} else {
s->fps = 30;
av_log(avctx, AV_LOG_INFO, "Use %d as the default MediaFormat frame-rate\n", s->fps);
}
gop = round(avctx->gop_size / s->fps);
if (gop == 0) {
gop = 1;
av_log(avctx, AV_LOG_INFO,
"Use %d as the default MediaFormat i-frame-interval, "
"please set gop_size properly (>= fps)\n", gop);
} else {
av_log(avctx, AV_LOG_DEBUG, "Set i-frame-interval to %d\n", gop);
}
ff_AMediaFormat_setInt32(format, "frame-rate", s->fps);
ff_AMediaFormat_setInt32(format, "i-frame-interval", gop);
ret = ff_AMediaCodec_getConfigureFlagEncode(s->codec);
ret = ff_AMediaCodec_configure(s->codec, format, s->window, NULL, ret);
if (ret) {
av_log(avctx, AV_LOG_ERROR, "MediaCodec configure failed, %s\n", av_err2str(ret));
goto bailout;
}
ret = ff_AMediaCodec_start(s->codec);
if (ret) {
av_log(avctx, AV_LOG_ERROR, "MediaCodec failed to start, %s\n", av_err2str(ret));
goto bailout;
}
mediacodec_output_format(avctx);
s->frame = av_frame_alloc();
if (!s->frame)
ret = AVERROR(ENOMEM);
bailout:
if (format)
ff_AMediaFormat_delete(format);
return ret;
}
static int mediacodec_receive(AVCodecContext *avctx,
AVPacket *pkt,
int *got_packet)
{
MediaCodecEncContext *s = avctx->priv_data;
FFAMediaCodec *codec = s->codec;
FFAMediaCodecBufferInfo out_info = {0};
uint8_t *out_buf;
size_t out_size = 0;
int ret;
int extradata_size = 0;
int64_t timeout_us = s->eof_sent ? OUTPUT_DEQUEUE_TIMEOUT_US : 0;
ssize_t index = ff_AMediaCodec_dequeueOutputBuffer(codec, &out_info, timeout_us);
if (ff_AMediaCodec_infoTryAgainLater(codec, index))
return AVERROR(EAGAIN);
if (ff_AMediaCodec_infoOutputFormatChanged(codec, index)) {
mediacodec_output_format(avctx);
return AVERROR(EAGAIN);
}
if (ff_AMediaCodec_infoOutputBuffersChanged(codec, index)) {
ff_AMediaCodec_cleanOutputBuffers(codec);
return AVERROR(EAGAIN);
}
if (index < 0)
return AVERROR_EXTERNAL;
if (out_info.flags & ff_AMediaCodec_getBufferFlagEndOfStream(codec))
return AVERROR_EOF;
out_buf = ff_AMediaCodec_getOutputBuffer(codec, index, &out_size);
if (!out_buf) {
ret = AVERROR_EXTERNAL;
goto bailout;
}
if (out_info.flags & ff_AMediaCodec_getBufferFlagCodecConfig(codec)) {
ret = av_reallocp(&s->extradata, out_info.size);
if (ret)
goto bailout;
s->extradata_size = out_info.size;
memcpy(s->extradata, out_buf + out_info.offset, out_info.size);
ff_AMediaCodec_releaseOutputBuffer(codec, index, false);
// try immediately
return mediacodec_receive(avctx, pkt, got_packet);
}
ret = ff_get_encode_buffer(avctx, pkt, out_info.size + s->extradata_size, 0);
if (ret < 0)
goto bailout;
if (s->extradata_size) {
extradata_size = s->extradata_size;
s->extradata_size = 0;
memcpy(pkt->data, s->extradata, extradata_size);
}
memcpy(pkt->data + extradata_size, out_buf + out_info.offset, out_info.size);
pkt->pts = av_rescale_q(out_info.presentationTimeUs, AV_TIME_BASE_Q, avctx->time_base);
if (s->ts_tail != s->ts_head) {
pkt->dts = s->timestamps[s->ts_tail];
s->ts_tail = (s->ts_tail + 1) % FF_ARRAY_ELEMS(s->timestamps);
}
if (out_info.flags & ff_AMediaCodec_getBufferFlagKeyFrame(codec))
pkt->flags |= AV_PKT_FLAG_KEY;
ret = 0;
*got_packet = 1;
av_log(avctx, AV_LOG_TRACE, "receive packet pts %" PRId64 " dts %" PRId64
" flags %d extradata %d\n",
pkt->pts, pkt->dts, pkt->flags, extradata_size);
bailout:
ff_AMediaCodec_releaseOutputBuffer(codec, index, false);
return ret;
}
static void copy_frame_to_buffer(AVCodecContext *avctx, const AVFrame *frame, uint8_t *dst, size_t size)
{
MediaCodecEncContext *s = avctx->priv_data;
uint8_t *dst_data[4] = {};
int dst_linesize[4] = {};
const uint8_t *src_data[4] = {
frame->data[0], frame->data[1], frame->data[2], frame->data[3]
};
if (avctx->pix_fmt == AV_PIX_FMT_YUV420P) {
dst_data[0] = dst;
dst_data[1] = dst + s->width * s->height;
dst_data[2] = dst_data[1] + s->width * s->height / 4;
dst_linesize[0] = s->width;
dst_linesize[1] = dst_linesize[2] = s->width / 2;
} else if (avctx->pix_fmt == AV_PIX_FMT_NV12) {
dst_data[0] = dst;
dst_data[1] = dst + s->width * s->height;
dst_linesize[0] = s->width;
dst_linesize[1] = s->width;
} else {
av_assert0(0);
}
av_image_copy(dst_data, dst_linesize, src_data, frame->linesize,
avctx->pix_fmt, avctx->width, avctx->height);
}
static int mediacodec_send(AVCodecContext *avctx,
const AVFrame *frame) {
MediaCodecEncContext *s = avctx->priv_data;
FFAMediaCodec *codec = s->codec;
ssize_t index;
uint8_t *input_buf = NULL;
size_t input_size = 0;
int64_t pts = 0;
uint32_t flags = 0;
int64_t timeout_us;
if (s->eof_sent)
return 0;
if (s->window) {
if (!frame) {
s->eof_sent = 1;
return ff_AMediaCodec_signalEndOfInputStream(codec);
}
if (frame->data[3]) {
pts = av_rescale_q(frame->pts, avctx->time_base, AV_TIME_BASE_Q);
s->timestamps[s->ts_head] = frame->pts;
s->ts_head = (s->ts_head + 1) % FF_ARRAY_ELEMS(s->timestamps);
av_mediacodec_release_buffer((AVMediaCodecBuffer *)frame->data[3], 1);
}
return 0;
}
timeout_us = INPUT_DEQUEUE_TIMEOUT_US;
index = ff_AMediaCodec_dequeueInputBuffer(codec, timeout_us);
if (ff_AMediaCodec_infoTryAgainLater(codec, index))
return AVERROR(EAGAIN);
if (index < 0) {
av_log(avctx, AV_LOG_ERROR, "dequeue input buffer failed, %zd", index);
return AVERROR_EXTERNAL;
}
if (frame) {
input_buf = ff_AMediaCodec_getInputBuffer(codec, index, &input_size);
copy_frame_to_buffer(avctx, frame, input_buf, input_size);
pts = av_rescale_q(frame->pts, avctx->time_base, AV_TIME_BASE_Q);
s->timestamps[s->ts_head] = frame->pts;
s->ts_head = (s->ts_head + 1) % FF_ARRAY_ELEMS(s->timestamps);
} else {
flags |= ff_AMediaCodec_getBufferFlagEndOfStream(codec);
s->eof_sent = 1;
}
ff_AMediaCodec_queueInputBuffer(codec, index, 0, input_size, pts, flags);
return 0;
}
static int mediacodec_encode(AVCodecContext *avctx, AVPacket *pkt)
{
MediaCodecEncContext *s = avctx->priv_data;
int ret;
int got_packet = 0;
// Return in three cases:
// 1. Serious error
// 2. A packet was received successfully
// 3. No AVFrame is available yet (don't return if ff_encode_get_frame returns EOF)
while (1) {
ret = mediacodec_receive(avctx, pkt, &got_packet);
if (!ret)
return 0;
else if (ret != AVERROR(EAGAIN))
return ret;
if (!s->frame->buf[0]) {
ret = ff_encode_get_frame(avctx, s->frame);
if (ret && ret != AVERROR_EOF)
return ret;
}
ret = mediacodec_send(avctx, s->frame->buf[0] ? s->frame : NULL);
if (!ret)
av_frame_unref(s->frame);
else if (ret != AVERROR(EAGAIN))
return ret;
}
return 0;
}
static av_cold int mediacodec_close(AVCodecContext *avctx)
{
MediaCodecEncContext *s = avctx->priv_data;
if (s->codec) {
ff_AMediaCodec_stop(s->codec);
ff_AMediaCodec_delete(s->codec);
s->codec = NULL;
}
if (s->window) {
ff_mediacodec_surface_unref(s->window, avctx);
s->window = NULL;
}
av_frame_free(&s->frame);
return 0;
}
static const AVCodecHWConfigInternal *const mediacodec_hw_configs[] = {
&(const AVCodecHWConfigInternal) {
.public = {
.pix_fmt = AV_PIX_FMT_MEDIACODEC,
.methods = AV_CODEC_HW_CONFIG_METHOD_AD_HOC |
AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX,
.device_type = AV_HWDEVICE_TYPE_MEDIACODEC,
},
.hwaccel = NULL,
},
NULL
};
#define OFFSET(x) offsetof(MediaCodecEncContext, x)
#define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
static const AVOption common_options[] = {
{ "ndk_codec", "Use MediaCodec from NDK",
OFFSET(use_ndk_codec), AV_OPT_TYPE_BOOL, {.i64 = -1}, -1, 1, VE },
{ NULL },
};
#define MEDIACODEC_ENCODER_CLASS(name) \
static const AVClass name ## _mediacodec_class = { \
.class_name = #name "_mediacodec", \
.item_name = av_default_item_name, \
.option = common_options, \
.version = LIBAVUTIL_VERSION_INT, \
}; \
#define DECLARE_MEDIACODEC_ENCODER(short_name, long_name, codec_id) \
MEDIACODEC_ENCODER_CLASS(short_name) \
const FFCodec ff_ ## short_name ## _mediacodec_encoder = { \
.p.name = #short_name "_mediacodec", \
CODEC_LONG_NAME(long_name " Android MediaCodec encoder"), \
.p.type = AVMEDIA_TYPE_VIDEO, \
.p.id = codec_id, \
.p.capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_DELAY \
| AV_CODEC_CAP_HARDWARE, \
.priv_data_size = sizeof(MediaCodecEncContext), \
.p.pix_fmts = avc_pix_fmts, \
.init = mediacodec_init, \
FF_CODEC_RECEIVE_PACKET_CB(mediacodec_encode), \
.close = mediacodec_close, \
.p.priv_class = &short_name ## _mediacodec_class, \
.caps_internal = FF_CODEC_CAP_INIT_CLEANUP, \
.p.wrapper_name = "mediacodec", \
.hw_configs = mediacodec_hw_configs, \
}; \
#if CONFIG_H264_MEDIACODEC_ENCODER
DECLARE_MEDIACODEC_ENCODER(h264, "H.264", AV_CODEC_ID_H264)
#endif
#if CONFIG_HEVC_MEDIACODEC_ENCODER
DECLARE_MEDIACODEC_ENCODER(hevc, "H.265", AV_CODEC_ID_HEVC)
#endif
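For context, here is a minimal sketch of opening the new encoder through the public libavcodec API. It is not part of the patch: the helper name, resolution, bitrate and the choice of NV12 input are illustrative.

#include <libavcodec/avcodec.h>
#include <libavutil/opt.h>

static AVCodecContext *open_mediacodec_encoder(void)
{
    const AVCodec *codec = avcodec_find_encoder_by_name("h264_mediacodec");
    AVCodecContext *ctx;

    if (!codec)
        return NULL;
    ctx = avcodec_alloc_context3(codec);
    if (!ctx)
        return NULL;

    ctx->width     = 1280;
    ctx->height    = 720;
    ctx->pix_fmt   = AV_PIX_FMT_NV12;     // or AV_PIX_FMT_YUV420P / AV_PIX_FMT_MEDIACODEC
    ctx->time_base = (AVRational){1, 30};
    ctx->framerate = (AVRational){30, 1}; // mapped to the "frame-rate" key
    ctx->bit_rate  = 2000000;             // mapped to the "bitrate" key
    ctx->gop_size  = 60;                  // divided by fps to give "i-frame-interval"

    // Private option added by this patch: -1 auto, 0 JNI, 1 NDK MediaCodec.
    av_opt_set_int(ctx->priv_data, "ndk_codec", 1, 0);

    if (avcodec_open2(ctx, codec, NULL) < 0) {
        avcodec_free_context(&ctx);
        return NULL;
    }
    return ctx;
}

Frames are then fed with avcodec_send_frame() and packets drained with avcodec_receive_packet() as with any other encoder. On the JNI path a JavaVM must have been registered via av_jni_set_java_vm() beforehand, which is why ndk_codec defaults to auto-detection based on whether a VM is available.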

@@ -29,7 +29,7 @@
#include "version_major.h"
#define LIBAVCODEC_VERSION_MINOR 53
#define LIBAVCODEC_VERSION_MINOR 54
#define LIBAVCODEC_VERSION_MICRO 100
#define LIBAVCODEC_VERSION_INT AV_VERSION_INT(LIBAVCODEC_VERSION_MAJOR, \
