avconv: get output pixel format from lavfi.

This way we don't require a clearly defined corresponding input stream.

The result for the xwd test changes because rgb24 is now chosen instead
of bgra.
pull/5/head
Anton Khirnov 13 years ago
parent 7af99a01c4
commit b7327887ea
  1. 79
      avconv.c
  2. 3
      avplay.c
  3. 6
      cmdutils.c
  4. 2
      cmdutils.h
  5. 6
      tests/ref/lavf/xwd

@ -252,6 +252,8 @@ typedef struct OutputStream {
int stream_copy; int stream_copy;
const char *attachment_filename; const char *attachment_filename;
int copy_initial_nonkeyframes; int copy_initial_nonkeyframes;
enum PixelFormat pix_fmts[2];
} OutputStream; } OutputStream;
@ -543,13 +545,24 @@ static void filter_release_buffer(AVFilterBuffer *fb)
unref_buffer(buf->ist, buf); unref_buffer(buf->ist, buf);
} }
/**
 * Select the PIX_FMT_NONE-terminated list of pixel formats the filtergraph
 * sink may produce for this output stream.
 *
 * Priority: an explicitly requested stream format wins; otherwise the
 * encoder's supported-format list is used; NULL means "no constraint".
 *
 * @param ost output stream whose format constraints are queried
 * @return pointer to a PIX_FMT_NONE-terminated list, or NULL
 */
static const enum PixelFormat *choose_pixel_fmts(OutputStream *ost)
{
    enum PixelFormat requested = ost->st->codec->pix_fmt;

    if (requested != PIX_FMT_NONE) {
        /* build a one-element list in ost->pix_fmts; pix_fmts[1] is
         * presumably initialized to PIX_FMT_NONE elsewhere (terminator) —
         * see new_output_stream() */
        ost->pix_fmts[0] = requested;
        return ost->pix_fmts;
    }
    if (ost->enc->pix_fmts)
        return ost->enc->pix_fmts;
    return NULL;
}
static int configure_video_filters(InputStream *ist, OutputStream *ost) static int configure_video_filters(InputStream *ist, OutputStream *ost)
{ {
AVFilterContext *last_filter, *filter; AVFilterContext *last_filter, *filter;
/** filter graph containing all filters including input & output */ /** filter graph containing all filters including input & output */
AVCodecContext *codec = ost->st->codec; AVCodecContext *codec = ost->st->codec;
AVCodecContext *icodec = ist->st->codec; AVCodecContext *icodec = ist->st->codec;
SinkContext sink_ctx = { .pix_fmt = codec->pix_fmt }; SinkContext sink_ctx = { .pix_fmts = choose_pixel_fmts(ost) };
AVRational sample_aspect_ratio; AVRational sample_aspect_ratio;
char args[255]; char args[255];
int ret; int ret;
@ -621,6 +634,7 @@ static int configure_video_filters(InputStream *ist, OutputStream *ost)
ost->frame_aspect_ratio ? // overridden by the -aspect cli option ost->frame_aspect_ratio ? // overridden by the -aspect cli option
av_d2q(ost->frame_aspect_ratio * codec->height/codec->width, 255) : av_d2q(ost->frame_aspect_ratio * codec->height/codec->width, 255) :
ost->output_video_filter->inputs[0]->sample_aspect_ratio; ost->output_video_filter->inputs[0]->sample_aspect_ratio;
codec->pix_fmt = ost->output_video_filter->inputs[0]->format;
return 0; return 0;
} }
@ -833,34 +847,6 @@ static void choose_sample_rate(AVStream *st, AVCodec *codec)
} }
} }
/**
 * Check that the pixel format requested for stream st is supported by
 * encoder codec; if it is not, fall back to the encoder's first supported
 * format (logging a warning when a concrete format had been requested).
 *
 * In non-strict compliance mode, MJPEG/LJPEG are allowed a wider format
 * list than the encoder advertises (non-J YUV variants, BGRA for LJPEG).
 */
static void choose_pixel_fmt(AVStream *st, AVCodec *codec)
{
    if (codec && codec->pix_fmts) {
        const enum PixelFormat *p = codec->pix_fmts;
        if (st->codec->strict_std_compliance <= FF_COMPLIANCE_UNOFFICIAL) {
            /* relaxed compliance: substitute extended allow-lists for the
             * JPEG family instead of the encoder-advertised set */
            if (st->codec->codec_id == CODEC_ID_MJPEG) {
                p = (const enum PixelFormat[]) { PIX_FMT_YUVJ420P, PIX_FMT_YUVJ422P, PIX_FMT_YUV420P, PIX_FMT_YUV422P, PIX_FMT_NONE };
            } else if (st->codec->codec_id == CODEC_ID_LJPEG) {
                p = (const enum PixelFormat[]) { PIX_FMT_YUVJ420P, PIX_FMT_YUVJ422P, PIX_FMT_YUVJ444P, PIX_FMT_YUV420P,
                                                 PIX_FMT_YUV422P, PIX_FMT_YUV444P, PIX_FMT_BGRA, PIX_FMT_NONE };
            }
        }
        /* scan the allow-list for the requested format */
        for (; *p != PIX_FMT_NONE; p++) {
            if (*p == st->codec->pix_fmt)
                break;
        }
        if (*p == PIX_FMT_NONE) {
            /* requested format unsupported: warn (only if one was actually
             * requested) and auto-select the encoder's first format */
            if (st->codec->pix_fmt != PIX_FMT_NONE)
                av_log(NULL, AV_LOG_WARNING,
                       "Incompatible pixel format '%s' for codec '%s', auto-selecting format '%s'\n",
                       av_pix_fmt_descriptors[st->codec->pix_fmt].name,
                       codec->name,
                       av_pix_fmt_descriptors[codec->pix_fmts[0]].name);
            st->codec->pix_fmt = codec->pix_fmts[0];
        }
    }
}
static double static double
get_sync_ipts(const OutputStream *ost, int64_t pts) get_sync_ipts(const OutputStream *ost, int64_t pts)
{ {
@ -2401,31 +2387,11 @@ static int transcode_init(void)
ost->resample_channels = icodec->channels; ost->resample_channels = icodec->channels;
break; break;
case AVMEDIA_TYPE_VIDEO: case AVMEDIA_TYPE_VIDEO:
if (codec->pix_fmt == PIX_FMT_NONE)
codec->pix_fmt = icodec->pix_fmt;
choose_pixel_fmt(ost->st, ost->enc);
if (ost->st->codec->pix_fmt == PIX_FMT_NONE) {
av_log(NULL, AV_LOG_FATAL, "Video pixel format is unknown, stream cannot be encoded\n");
exit_program(1);
}
if (!codec->width || !codec->height) { if (!codec->width || !codec->height) {
codec->width = icodec->width; codec->width = icodec->width;
codec->height = icodec->height; codec->height = icodec->height;
} }
ost->video_resample = codec->width != icodec->width ||
codec->height != icodec->height ||
codec->pix_fmt != icodec->pix_fmt;
if (ost->video_resample) {
codec->bits_per_raw_sample = 0;
}
ost->resample_height = icodec->height;
ost->resample_width = icodec->width;
ost->resample_pix_fmt = icodec->pix_fmt;
/* /*
* We want CFR output if and only if one of those is true: * We want CFR output if and only if one of those is true:
* 1) user specified output framerate with -r * 1) user specified output framerate with -r
@ -2455,6 +2421,18 @@ static int transcode_init(void)
av_log(NULL, AV_LOG_FATAL, "Error opening filters!\n"); av_log(NULL, AV_LOG_FATAL, "Error opening filters!\n");
exit(1); exit(1);
} }
ost->video_resample = codec->width != icodec->width ||
codec->height != icodec->height ||
codec->pix_fmt != icodec->pix_fmt;
if (ost->video_resample) {
codec->bits_per_raw_sample = 0;
}
ost->resample_height = icodec->height;
ost->resample_width = icodec->width;
ost->resample_pix_fmt = icodec->pix_fmt;
break; break;
case AVMEDIA_TYPE_SUBTITLE: case AVMEDIA_TYPE_SUBTITLE:
codec->time_base = (AVRational){1, 1000}; codec->time_base = (AVRational){1, 1000};
@ -3535,6 +3513,9 @@ static OutputStream *new_output_stream(OptionsContext *o, AVFormatContext *oc, e
st->codec->flags |= CODEC_FLAG_GLOBAL_HEADER; st->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
av_opt_get_int(sws_opts, "sws_flags", 0, &ost->sws_flags); av_opt_get_int(sws_opts, "sws_flags", 0, &ost->sws_flags);
ost->pix_fmts[0] = ost->pix_fmts[1] = PIX_FMT_NONE;
return ost; return ost;
} }

@ -1701,9 +1701,10 @@ static AVFilter input_filter =
static int configure_video_filters(AVFilterGraph *graph, VideoState *is, const char *vfilters) static int configure_video_filters(AVFilterGraph *graph, VideoState *is, const char *vfilters)
{ {
static const enum PixelFormat pix_fmts[] = { PIX_FMT_YUV420P, PIX_FMT_NONE };
char sws_flags_str[128]; char sws_flags_str[128];
int ret; int ret;
SinkContext sink_ctx = { .pix_fmt = PIX_FMT_YUV420P }; SinkContext sink_ctx = { .pix_fmts = pix_fmts };
AVFilterContext *filt_src = NULL, *filt_out = NULL; AVFilterContext *filt_src = NULL, *filt_out = NULL;
snprintf(sws_flags_str, sizeof(sws_flags_str), "flags=%d", sws_flags); snprintf(sws_flags_str, sizeof(sws_flags_str), "flags=%d", sws_flags);
graph->scale_sws_opts = av_strdup(sws_flags_str); graph->scale_sws_opts = av_strdup(sws_flags_str);

@ -1036,9 +1036,11 @@ static void null_end_frame(AVFilterLink *inlink) { }
/**
 * query_formats callback for the sink filter.
 *
 * Defect fixed: the scraped side-by-side diff fused old and new source lines
 * into invalid C; this is the reconstructed post-commit implementation.
 *
 * If the sink was given an explicit PIX_FMT_NONE-terminated format list,
 * constrain the filtergraph to it; otherwise accept all default formats.
 *
 * @param ctx sink filter context (priv is a SinkContext)
 * @return 0 on success
 */
static int sink_query_formats(AVFilterContext *ctx)
{
    SinkContext *priv = ctx->priv;

    if (priv->pix_fmts)
        avfilter_set_common_formats(ctx, avfilter_make_format_list(priv->pix_fmts));
    else
        avfilter_default_query_formats(ctx);
    return 0;
}

@ -368,7 +368,7 @@ FILE *get_preset_file(char *filename, size_t filename_size,
const char *preset_name, int is_path, const char *codec_name); const char *preset_name, int is_path, const char *codec_name);
typedef struct {
    /* PIX_FMT_NONE-terminated list of acceptable output pixel formats,
     * or NULL to accept anything (see sink_query_formats()).
     * Reconstructed post-commit definition: the scraped side-by-side diff
     * fused the old single-format member with this replacement. */
    const enum PixelFormat *pix_fmts;
} SinkContext;
extern AVFilter sink; extern AVFilter sink;

@ -1,3 +1,3 @@
b838561f7df803ea14dd6307a9d3c5ec *./tests/data/images/xwd/02.xwd 50baa5560b7d1aa3188b19c1162bf7dc *./tests/data/images/xwd/02.xwd
./tests/data/images/xwd/%02d.xwd CRC=0x69b329cd ./tests/data/images/xwd/%02d.xwd CRC=0x6da01946
405615 ./tests/data/images/xwd/02.xwd 304239 ./tests/data/images/xwd/02.xwd

Loading…
Cancel
Save