fftools/ffmpeg: attach bits_per_raw_sample information to frames

This avoids encoders having to reach into filters or decoders for this
information.
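
The sketch below is a minimal, self-contained illustration of the data flow this commit sets up. It uses simplified stand-in types rather than the real fftools structures (the real FrameData travels on frame->opaque_ref, and the helpers here are illustrative only): the decoder records its bits_per_raw_sample on the frame, the filtergraph clears the value unless it contains only metadata filters (is_meta), and the encoder clamps whatever survives against the depth of its output format.

    #include <stdio.h>

    #define FFMIN(a, b) ((a) > (b) ? (b) : (a))

    /* Stand-in for the fftools FrameData that is attached to each frame
     * through frame->opaque_ref. */
    typedef struct FrameData {
        int bits_per_raw_sample;
    } FrameData;

    typedef struct Frame {
        FrameData fd;   /* stand-in for the opaque_ref payload */
    } Frame;

    /* Decoder side: after receiving a frame, store the decoder's
     * bits_per_raw_sample on the frame itself. */
    static void decode_attach(Frame *frame, int dec_bits_per_raw_sample)
    {
        frame->fd.bits_per_raw_sample = dec_bits_per_raw_sample;
    }

    /* Filtergraph side: only keep the value if the graph contains nothing
     * but metadata filters that do not modify the frame data. */
    static void filtergraph_pass(Frame *frame, int graph_is_meta)
    {
        if (!graph_is_meta)
            frame->fd.bits_per_raw_sample = 0;
    }

    /* Encoder side: derive bits_per_raw_sample from the frame instead of
     * reaching into the decoder, clamped to the output format depth. */
    static int encoder_bits_per_raw_sample(const Frame *frame, int fmt_depth)
    {
        if (frame->fd.bits_per_raw_sample)
            return FFMIN(frame->fd.bits_per_raw_sample, fmt_depth);
        return 0;   /* unknown; leave it to the encoder */
    }

    int main(void)
    {
        Frame frame = {0};

        decode_attach(&frame, 10);    /* e.g. a 10-bit source */
        filtergraph_pass(&frame, 1);  /* metadata-only graph keeps the value */
        printf("meta graph:     %d\n", encoder_bits_per_raw_sample(&frame, 8));

        decode_attach(&frame, 10);
        filtergraph_pass(&frame, 0);  /* scaling/format conversion drops it */
        printf("non-meta graph: %d\n", encoder_bits_per_raw_sample(&frame, 8));
        return 0;
    }

With an 8-bit output format the first case reports 8 (the 10-bit input clamped to the output depth) and the second reports 0, i.e. unknown.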
Branch: pull/389/head
Author: Anton Khirnov (1 year ago)
Parent: 7d4e00ccf0
Commit: 5293adb1a7
5 changed files:

    fftools/ffmpeg.h          |  7
    fftools/ffmpeg_dec.c      |  5
    fftools/ffmpeg_enc.c      | 13
    fftools/ffmpeg_filter.c   | 17
    fftools/ffmpeg_mux_init.c |  6

fftools/ffmpeg.h:

@@ -315,9 +315,6 @@ typedef struct FilterGraph {
     int index;
     AVFilterGraph *graph;
-    // true when the filtergraph contains only meta filters
-    // that do not modify the frame data
-    int is_meta;
     InputFilter **inputs;
     int nb_inputs;
@@ -339,8 +336,6 @@ typedef struct InputStream {
     int decoding_needed;     /* non zero if the packets must be decoded in 'raw_fifo', see DECODING_FOR_* */
 #define DECODING_FOR_OST    1
 #define DECODING_FOR_FILTER 2
-    // should attach FrameData as opaque_ref after decoding
-    int want_frame_data;
     /**
      * Codec parameters - to be used by the decoding/streamcopy code.
@@ -653,6 +648,8 @@ typedef struct FrameData {
     AVRational tb;
     AVRational frame_rate_filter;
+    int bits_per_raw_sample;
 } FrameData;
 extern InputFile **input_files;

fftools/ffmpeg_dec.c:

@@ -472,6 +472,7 @@ int dec_packet(InputStream *ist, const AVPacket *pkt, int no_eof)
     while (1) {
         AVFrame *frame = d->frame;
+        FrameData *fd;
         update_benchmark(NULL);
         ret = avcodec_receive_frame(dec, frame);
@@ -508,8 +509,6 @@ int dec_packet(InputStream *ist, const AVPacket *pkt, int no_eof)
             exit_program(1);
         }
-        if (ist->want_frame_data) {
-            FrameData *fd;
         av_assert0(!frame->opaque_ref);
         fd = frame_data(frame);
@@ -520,7 +519,7 @@ int dec_packet(InputStream *ist, const AVPacket *pkt, int no_eof)
         fd->pts = frame->pts;
         fd->tb = dec->pkt_timebase;
         fd->idx = dec->frame_num - 1;
-        }
+        fd->bits_per_raw_sample = dec->bits_per_raw_sample;
         frame->time_base = dec->pkt_timebase;

fftools/ffmpeg_enc.c:

@@ -198,6 +198,7 @@ int enc_open(OutputStream *ost, AVFrame *frame)
     AVCodecContext *dec_ctx = NULL;
     const AVCodec *enc = enc_ctx->codec;
     OutputFile *of = output_files[ost->file_index];
+    FrameData *fd = frame ? frame_data(frame) : NULL;
     int ret;
     if (e->opened)
@@ -219,8 +220,8 @@ int enc_open(OutputStream *ost, AVFrame *frame)
         if (ost->bits_per_raw_sample)
             enc_ctx->bits_per_raw_sample = ost->bits_per_raw_sample;
-        else if (dec_ctx && ost->filter->graph->is_meta)
-            enc_ctx->bits_per_raw_sample = FFMIN(dec_ctx->bits_per_raw_sample,
+        else if (fd)
+            enc_ctx->bits_per_raw_sample = FFMIN(fd->bits_per_raw_sample,
                                                  av_get_bytes_per_sample(enc_ctx->sample_fmt) << 3);
         enc_ctx->time_base = ost->enc_timebase.num > 0 ? ost->enc_timebase :
@@ -230,10 +231,8 @@ int enc_open(OutputStream *ost, AVFrame *frame)
     case AVMEDIA_TYPE_VIDEO: {
         AVRational fr = ost->frame_rate;
-        if (!fr.num && frame) {
-            FrameData *fd = frame_data(frame);
+        if (!fr.num && fd)
             fr = fd->frame_rate_filter;
-        }
         if (!fr.num && !ost->max_frame_rate.num) {
             fr = (AVRational){25, 1};
             av_log(ost, AV_LOG_WARNING,
@@ -282,8 +281,8 @@ int enc_open(OutputStream *ost, AVFrame *frame)
         if (ost->bits_per_raw_sample)
             enc_ctx->bits_per_raw_sample = ost->bits_per_raw_sample;
-        else if (dec_ctx && ost->filter->graph->is_meta)
-            enc_ctx->bits_per_raw_sample = FFMIN(dec_ctx->bits_per_raw_sample,
+        else if (fd)
+            enc_ctx->bits_per_raw_sample = FFMIN(fd->bits_per_raw_sample,
                                                  av_pix_fmt_desc_get(enc_ctx->pix_fmt)->comp[0].depth);
     if (frame) {

fftools/ffmpeg_filter.c:

@@ -45,6 +45,9 @@ typedef struct FilterGraphPriv {
     char log_name[32];
     int is_simple;
+    // true when the filtergraph contains only meta filters
+    // that do not modify the frame data
+    int is_meta;
     const char *graph_desc;
@@ -1566,7 +1569,7 @@ static int configure_filtergraph(FilterGraph *fg)
     if ((ret = avfilter_graph_config(fg->graph, NULL)) < 0)
         goto fail;
-    fg->is_meta = graph_is_meta(fg->graph);
+    fgp->is_meta = graph_is_meta(fg->graph);
     /* limit the lists of allowed formats to the ones selected, to
      * make sure they stay the same if the filtergraph is reconfigured later */
@@ -1714,6 +1717,8 @@ int reap_filters(int flush)
         filtered_frame = fgp->frame;
         while (1) {
+            FrameData *fd;
             ret = av_buffersink_get_frame_flags(filter, filtered_frame,
                                                 AV_BUFFERSINK_FLAG_NO_REQUEST);
             if (ret < 0) {
@@ -1744,15 +1749,19 @@ int reap_filters(int flush)
                        tb.num, tb.den);
             }
-            if (ost->type == AVMEDIA_TYPE_VIDEO) {
-                FrameData *fd = frame_data(filtered_frame);
+            fd = frame_data(filtered_frame);
             if (!fd) {
                 av_frame_unref(filtered_frame);
                 report_and_exit(AVERROR(ENOMEM));
             }
+            // only use bits_per_raw_sample passed through from the decoder
+            // if the filtergraph did not touch the frame data
+            if (!fgp->is_meta)
+                fd->bits_per_raw_sample = 0;
+            if (ost->type == AVMEDIA_TYPE_VIDEO)
                 fd->frame_rate_filter = av_buffersink_get_frame_rate(filter);
-            }
             enc_frame(ost, filtered_frame);
             av_frame_unref(filtered_frame);

fftools/ffmpeg_mux_init.c:

@@ -365,16 +365,12 @@ static int enc_stats_init(OutputStream *ost, EncStats *es, int pre,
             c->type = fmt_specs[i].type;
-            if (fmt_specs[i].need_input_data) {
-                if (ost->ist)
-                    ost->ist->want_frame_data = 1;
-                else {
+            if (fmt_specs[i].need_input_data && !ost->ist) {
                 av_log(ost, AV_LOG_WARNING,
                        "Format directive '%s' is unavailable, because "
                        "this output stream has no associated input stream\n",
                        val);
-                }
             }
             break;
         }
