Merge commit '716d413c13981da15323c7a3821860536eefdbbb'

* commit '716d413c13981da15323c7a3821860536eefdbbb':
  Replace PIX_FMT_* -> AV_PIX_FMT_*, PixelFormat -> AVPixelFormat
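
  The rename is mechanical: every public pixel-format name gains an AV_
  prefix and the enum itself becomes AVPixelFormat. A minimal before/after
  sketch of a typical call site (illustrative only, not part of the diff;
  the two declarations are alternative versions of the same line):

      /* before this merge */
      enum PixelFormat   fmt = PIX_FMT_YUV420P;

      /* after this merge */
      enum AVPixelFormat fmt = AV_PIX_FMT_YUV420P;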

Conflicts:
	doc/examples/muxing.c
	ffmpeg.h
	ffmpeg_filter.c
	ffmpeg_opt.c
	ffplay.c
	ffprobe.c
	libavcodec/8bps.c
	libavcodec/aasc.c
	libavcodec/aura.c
	libavcodec/avcodec.h
	libavcodec/avs.c
	libavcodec/bfi.c
	libavcodec/bmp.c
	libavcodec/bmpenc.c
	libavcodec/c93.c
	libavcodec/cscd.c
	libavcodec/cyuv.c
	libavcodec/dpx.c
	libavcodec/dpxenc.c
	libavcodec/eatgv.c
	libavcodec/escape124.c
	libavcodec/ffv1.c
	libavcodec/flashsv.c
	libavcodec/fraps.c
	libavcodec/h264.c
	libavcodec/huffyuv.c
	libavcodec/iff.c
	libavcodec/imgconvert.c
	libavcodec/indeo3.c
	libavcodec/kmvc.c
	libavcodec/libopenjpegdec.c
	libavcodec/libopenjpegenc.c
	libavcodec/libx264.c
	libavcodec/ljpegenc.c
	libavcodec/mjpegdec.c
	libavcodec/mjpegenc.c
	libavcodec/motionpixels.c
	libavcodec/mpeg12.c
	libavcodec/mpeg12enc.c
	libavcodec/mpeg4videodec.c
	libavcodec/mpegvideo_enc.c
	libavcodec/pamenc.c
	libavcodec/pcxenc.c
	libavcodec/pgssubdec.c
	libavcodec/pngdec.c
	libavcodec/pngenc.c
	libavcodec/pnm.c
	libavcodec/pnmdec.c
	libavcodec/pnmenc.c
	libavcodec/ptx.c
	libavcodec/qdrw.c
	libavcodec/qpeg.c
	libavcodec/qtrleenc.c
	libavcodec/raw.c
	libavcodec/rawdec.c
	libavcodec/rl2.c
	libavcodec/sgidec.c
	libavcodec/sgienc.c
	libavcodec/snowdec.c
	libavcodec/snowenc.c
	libavcodec/sunrast.c
	libavcodec/targa.c
	libavcodec/targaenc.c
	libavcodec/tiff.c
	libavcodec/tiffenc.c
	libavcodec/tmv.c
	libavcodec/truemotion2.c
	libavcodec/utils.c
	libavcodec/vb.c
	libavcodec/vp3.c
	libavcodec/wnv1.c
	libavcodec/xl.c
	libavcodec/xwddec.c
	libavcodec/xwdenc.c
	libavcodec/yop.c
	libavdevice/v4l2.c
	libavdevice/x11grab.c
	libavfilter/avfilter.c
	libavfilter/avfilter.h
	libavfilter/buffersrc.c
	libavfilter/drawutils.c
	libavfilter/formats.c
	libavfilter/src_movie.c
	libavfilter/vf_ass.c
	libavfilter/vf_drawtext.c
	libavfilter/vf_fade.c
	libavfilter/vf_format.c
	libavfilter/vf_hflip.c
	libavfilter/vf_lut.c
	libavfilter/vf_overlay.c
	libavfilter/vf_pad.c
	libavfilter/vf_scale.c
	libavfilter/vf_transpose.c
	libavfilter/vf_yadif.c
	libavfilter/video.c
	libavfilter/vsrc_testsrc.c
	libavformat/movenc.c
	libavformat/mxf.h
	libavformat/utils.c
	libavformat/yuv4mpeg.c
	libavutil/imgutils.c
	libavutil/pixdesc.c
	libswscale/input.c
	libswscale/output.c
	libswscale/swscale_internal.h
	libswscale/swscale_unscaled.c
	libswscale/utils.c
	libswscale/x86/swscale_template.c
	libswscale/x86/yuv2rgb.c
	libswscale/x86/yuv2rgb_template.c
	libswscale/yuv2rgb.c

Merged-by: Michael Niedermayer <michaelni@gmx.at>
commit ac627b3d38
 cmdutils.c                        |   8
 cmdutils.h                        |   2
 doc/examples/decoding_encoding.c  |   2
 doc/examples/filtering_video.c    |   2
 doc/examples/muxing.c             |  10
 doc/examples/scaling_video.c      |   2
 doc/filters.texi                  |   2
 ffmpeg.h                          |   2
 ffmpeg_filter.c                   |  42
 ffmpeg_opt.c                      |   2
 ffplay.c                          |   6
 libavcodec/4xm.c                  |   4
 libavcodec/8bps.c                 |   8
 libavcodec/a64multienc.c          |   4
 libavcodec/aasc.c                 |   8
 libavcodec/anm.c                  |   2
 libavcodec/ansi.c                 |   2
 libavcodec/asv1.c                 |   6
 libavcodec/aura.c                 |   2
 libavcodec/avcodec.h              |  73
 libavcodec/avrndec.c              |   2
 libavcodec/avs.c                  |   2
 libavcodec/avuidec.c              |   2
 libavcodec/avuienc.c              |   2
 libavcodec/bethsoftvideo.c        |   2
 libavcodec/bfi.c                  |   2
 libavcodec/bink.c                 |   2
 libavcodec/bintext.c              |   2
 libavcodec/bmp.c                  |  32
 libavcodec/bmpenc.c               |  54
 libavcodec/bmv.c                  |   2
 libavcodec/c93.c                  |   2
 libavcodec/cavs.c                 |   2
 libavcodec/cdgraphics.c           |   2
 libavcodec/cdxl.c                 |   4
 libavcodec/cinepak.c              |   4
 libavcodec/cljr.c                 |   6
 libavcodec/cllc.c                 |   4
 libavcodec/cpia.c                 |   2
 libavcodec/crystalhd.c            |  14
 libavcodec/cscd.c                 |   6
 libavcodec/cyuv.c                 |   4
 libavcodec/dfa.c                  |   2
 libavcodec/dirac.c                |   6
 libavcodec/dnxhddec.c             |   4
 libavcodec/dnxhdenc.c             |  10
 libavcodec/dpx.c                  |  14
 libavcodec/dpxenc.c               |  44
 libavcodec/dsicinav.c             |   2
 libavcodec/dv.c                   |  22
 libavcodec/dv_profile.c           |  20
 libavcodec/dv_profile.h           |   2
 libavcodec/dvdec.c                |  10
 libavcodec/dxa.c                  |   2
 libavcodec/dxtory.c               |   2
 libavcodec/dxva2_h264.c           |   2
 libavcodec/dxva2_mpeg2.c          |   2
 libavcodec/dxva2_vc1.c            |   4
 libavcodec/eacmv.c                |   2
 libavcodec/eamad.c                |   2
 libavcodec/eatgq.c                |   2
 libavcodec/eatgv.c                |   2
 libavcodec/eatqi.c                |   2
 libavcodec/escape124.c            |   2
 libavcodec/escape130.c            |   2
 libavcodec/exr.c                  |   4
 libavcodec/ffv1.c                 | 122
 libavcodec/flashsv.c              |   6
 libavcodec/flashsv2enc.c          |   2
 libavcodec/flashsvenc.c           |   2
 libavcodec/flicvideo.c            |  16
 libavcodec/flvenc.c               |   2
 libavcodec/fraps.c                |   4
 libavcodec/frwu.c                 |   2
 libavcodec/gif.c                  |   6
 libavcodec/gifdec.c               |   2
 libavcodec/h261dec.c              |   2
 libavcodec/h261enc.c              |   2
 libavcodec/h263dec.c              |   2
 libavcodec/h264.c                 |  58
 libavcodec/huffyuv.c              |  40
 libavcodec/idcinvideo.c           |   2
 libavcodec/iff.c                  |  40
 libavcodec/imgconvert.c           | 296
 libavcodec/indeo2.c               |   2
 libavcodec/indeo3.c               |   2
 libavcodec/indeo4.c               |   2
 libavcodec/indeo5.c               |   2
 libavcodec/internal.h             |   6
 libavcodec/interplayvideo.c       |   2
 libavcodec/j2kdec.c               |  10
 libavcodec/j2kenc.c               |  18
 libavcodec/jpeglsenc.c            |  22
 libavcodec/jvdec.c                |   2
 libavcodec/kgv1dec.c              |   2
 libavcodec/kmvc.c                 |   2
 libavcodec/lagarith.c             |  14
 libavcodec/lcldec.c               |  12
 libavcodec/lclenc.c               |   4
 libavcodec/libopenjpegdec.c       |  50
 (Some files were not shown because too many files have changed in this diff.)

@@ -851,8 +851,8 @@ static void print_codec(const AVCodec *c)
}
printf("\n");
}
PRINT_CODEC_SUPPORTED(c, pix_fmts, enum PixelFormat, "pixel formats",
PIX_FMT_NONE, GET_PIX_FMT_NAME);
PRINT_CODEC_SUPPORTED(c, pix_fmts, enum AVPixelFormat, "pixel formats",
AV_PIX_FMT_NONE, GET_PIX_FMT_NAME);
PRINT_CODEC_SUPPORTED(c, supported_samplerates, int, "sample rates", 0,
GET_SAMPLE_RATE_NAME);
PRINT_CODEC_SUPPORTED(c, sample_fmts, enum AVSampleFormat, "sample formats",
@@ -1094,7 +1094,7 @@ int show_filters(void *optctx, const char *opt, const char *arg)
int show_pix_fmts(void *optctx, const char *opt, const char *arg)
{
enum PixelFormat pix_fmt;
enum AVPixelFormat pix_fmt;
printf("Pixel formats:\n"
"I.... = Supported Input format for conversion\n"
@@ -1110,7 +1110,7 @@ int show_pix_fmts(void *optctx, const char *opt, const char *arg)
# define sws_isSupportedOutput(x) 0
#endif
for (pix_fmt = 0; pix_fmt < PIX_FMT_NB; pix_fmt++) {
for (pix_fmt = 0; pix_fmt < AV_PIX_FMT_NB; pix_fmt++) {
const AVPixFmtDescriptor *pix_desc = &av_pix_fmt_descriptors[pix_fmt];
if(!pix_desc->name)
continue;

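For reference, the enumeration pattern the hunk above switches to, rewritten as a
self-contained sketch (assumes the libavutil of this era, where the
av_pix_fmt_descriptors[] table was still exported):

    #include <stdio.h>
    #include "libavutil/pixdesc.h"

    int main(void)
    {
        enum AVPixelFormat pix_fmt;
        /* walk every known pixel format and print its name */
        for (pix_fmt = 0; pix_fmt < AV_PIX_FMT_NB; pix_fmt++) {
            const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];
            if (desc->name)
                printf("%s\n", desc->name);
        }
        return 0;
    }
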
@@ -426,7 +426,7 @@ typedef struct FrameBuffer {
int linesize[4];
int h, w;
enum PixelFormat pix_fmt;
enum AVPixelFormat pix_fmt;
int refcount;
struct FrameBuffer **pool; ///< head of the buffer pool

@@ -356,7 +356,7 @@ static void video_encode_example(const char *filename, int codec_id)
c->time_base= (AVRational){1,25};
c->gop_size = 10; /* emit one intra frame every ten frames */
c->max_b_frames=1;
c->pix_fmt = PIX_FMT_YUV420P;
c->pix_fmt = AV_PIX_FMT_YUV420P;
if(codec_id == AV_CODEC_ID_H264)
av_opt_set(c->priv_data, "preset", "slow", 0);

@@ -87,7 +87,7 @@ static int init_filters(const char *filters_descr)
AVFilter *buffersink = avfilter_get_by_name("ffbuffersink");
AVFilterInOut *outputs = avfilter_inout_alloc();
AVFilterInOut *inputs = avfilter_inout_alloc();
enum PixelFormat pix_fmts[] = { PIX_FMT_GRAY8, PIX_FMT_NONE };
enum AVPixelFormat pix_fmts[] = { AV_PIX_FMT_GRAY8, AV_PIX_FMT_NONE };
AVBufferSinkParams *buffersink_params;
filter_graph = avfilter_graph_alloc();

@@ -41,7 +41,7 @@
#define STREAM_DURATION 200.0
#define STREAM_FRAME_RATE 25 /* 25 images/s */
#define STREAM_NB_FRAMES ((int)(STREAM_DURATION * STREAM_FRAME_RATE))
#define STREAM_PIX_FMT PIX_FMT_YUV420P /* default pix_fmt */
#define STREAM_PIX_FMT AV_PIX_FMT_YUV420P /* default pix_fmt */
static int sws_flags = SWS_BICUBIC;
@@ -277,8 +277,8 @@ static void open_video(AVFormatContext *oc, AVCodec *codec, AVStream *st)
/* If the output format is not YUV420P, then a temporary YUV420P
* picture is needed too. It is then converted to the required
* output format. */
if (c->pix_fmt != PIX_FMT_YUV420P) {
ret = avpicture_alloc(&src_picture, PIX_FMT_YUV420P, c->width, c->height);
if (c->pix_fmt != AV_PIX_FMT_YUV420P) {
ret = avpicture_alloc(&src_picture, AV_PIX_FMT_YUV420P, c->width, c->height);
if (ret < 0) {
fprintf(stderr, "Could not allocate temporary picture\n");
exit(1);
@@ -322,11 +322,11 @@ static void write_video_frame(AVFormatContext *oc, AVStream *st)
* frames if using B-frames, so we get the last frames by
* passing the same picture again. */
} else {
if (c->pix_fmt != PIX_FMT_YUV420P) {
if (c->pix_fmt != AV_PIX_FMT_YUV420P) {
/* as we only generate a YUV420P picture, we must convert it
* to the codec pixel format if needed */
if (!sws_ctx) {
sws_ctx = sws_getContext(c->width, c->height, PIX_FMT_YUV420P,
sws_ctx = sws_getContext(c->width, c->height, AV_PIX_FMT_YUV420P,
c->width, c->height, c->pix_fmt,
sws_flags, NULL, NULL, NULL);
if (!sws_ctx) {

@@ -55,7 +55,7 @@ int main(int argc, char **argv)
uint8_t *src_data[4], *dst_data[4];
int src_linesize[4], dst_linesize[4];
int src_w = 320, src_h = 240, dst_w, dst_h;
enum PixelFormat src_pix_fmt = PIX_FMT_YUV420P, dst_pix_fmt = PIX_FMT_RGB24;
enum AVPixelFormat src_pix_fmt = AV_PIX_FMT_YUV420P, dst_pix_fmt = AV_PIX_FMT_RGB24;
const char *dst_size = NULL;
const char *dst_filename = NULL;
FILE *dst_file;

@@ -3738,7 +3738,7 @@ will instruct the source to accept video frames with size 320x240 and
with format "yuv410p", assuming 1/24 as the timestamps timebase and
square pixels (1:1 sample aspect ratio).
Since the pixel format with name "yuv410p" corresponds to the number 6
(check the enum PixelFormat definition in @file{libavutil/pixfmt.h}),
(check the enum AVPixelFormat definition in @file{libavutil/pixfmt.h}),
this example corresponds to:
@example
buffer=size=320x240:pixfmt=6:time_base=1/24:pixel_aspect=1/1

@@ -398,7 +398,7 @@ void assert_avoptions(AVDictionary *m);
int guess_input_channel_layout(InputStream *ist);
enum PixelFormat choose_pixel_fmt(AVStream *st, AVCodec *codec, enum PixelFormat target);
enum AVPixelFormat choose_pixel_fmt(AVStream *st, AVCodec *codec, enum AVPixelFormat target);
void choose_sample_fmt(AVStream *st, AVCodec *codec);
int configure_filtergraph(FilterGraph *fg);

@@ -33,27 +33,27 @@
#include "libavutil/imgutils.h"
#include "libavutil/samplefmt.h"
enum PixelFormat choose_pixel_fmt(AVStream *st, AVCodec *codec, enum PixelFormat target)
enum AVPixelFormat choose_pixel_fmt(AVStream *st, AVCodec *codec, enum AVPixelFormat target)
{
if (codec && codec->pix_fmts) {
const enum PixelFormat *p = codec->pix_fmts;
const enum AVPixelFormat *p = codec->pix_fmts;
int has_alpha= av_pix_fmt_descriptors[target].nb_components % 2 == 0;
enum PixelFormat best= PIX_FMT_NONE;
enum AVPixelFormat best= AV_PIX_FMT_NONE;
if (st->codec->strict_std_compliance <= FF_COMPLIANCE_UNOFFICIAL) {
if (st->codec->codec_id == AV_CODEC_ID_MJPEG) {
p = (const enum PixelFormat[]) { PIX_FMT_YUVJ420P, PIX_FMT_YUVJ422P, PIX_FMT_YUV420P, PIX_FMT_YUV422P, PIX_FMT_NONE };
p = (const enum AVPixelFormat[]) { AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_NONE };
} else if (st->codec->codec_id == AV_CODEC_ID_LJPEG) {
p = (const enum PixelFormat[]) { PIX_FMT_YUVJ420P, PIX_FMT_YUVJ422P, PIX_FMT_YUVJ444P, PIX_FMT_YUV420P,
PIX_FMT_YUV422P, PIX_FMT_YUV444P, PIX_FMT_BGRA, PIX_FMT_NONE };
p = (const enum AVPixelFormat[]) { AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUV420P,
AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV444P, AV_PIX_FMT_BGRA, AV_PIX_FMT_NONE };
}
}
for (; *p != PIX_FMT_NONE; p++) {
for (; *p != AV_PIX_FMT_NONE; p++) {
best= avcodec_find_best_pix_fmt_of_2(best, *p, target, has_alpha, NULL);
if (*p == target)
break;
}
if (*p == PIX_FMT_NONE) {
if (target != PIX_FMT_NONE)
if (*p == AV_PIX_FMT_NONE) {
if (target != AV_PIX_FMT_NONE)
av_log(NULL, AV_LOG_WARNING,
"Incompatible pixel format '%s' for codec '%s', auto-selecting format '%s'\n",
av_pix_fmt_descriptors[target].name,
@@ -93,14 +93,14 @@ static char *choose_pix_fmts(OutputStream *ost)
if (ost->filter)
avfilter_graph_set_auto_convert(ost->filter->graph->graph,
AVFILTER_AUTO_CONVERT_NONE);
if (ost->st->codec->pix_fmt == PIX_FMT_NONE)
if (ost->st->codec->pix_fmt == AV_PIX_FMT_NONE)
return NULL;
return av_strdup(av_get_pix_fmt_name(ost->st->codec->pix_fmt));
}
if (ost->st->codec->pix_fmt != PIX_FMT_NONE) {
if (ost->st->codec->pix_fmt != AV_PIX_FMT_NONE) {
return av_strdup(av_get_pix_fmt_name(choose_pixel_fmt(ost->st, ost->enc, ost->st->codec->pix_fmt)));
} else if (ost->enc && ost->enc->pix_fmts) {
const enum PixelFormat *p;
const enum AVPixelFormat *p;
AVIOContext *s = NULL;
uint8_t *ret;
int len;
@@ -111,14 +111,14 @@ static char *choose_pix_fmts(OutputStream *ost)
p = ost->enc->pix_fmts;
if (ost->st->codec->strict_std_compliance <= FF_COMPLIANCE_UNOFFICIAL) {
if (ost->st->codec->codec_id == AV_CODEC_ID_MJPEG) {
p = (const enum PixelFormat[]) { PIX_FMT_YUVJ420P, PIX_FMT_YUVJ422P, PIX_FMT_YUV420P, PIX_FMT_YUV422P, PIX_FMT_NONE };
p = (const enum AVPixelFormat[]) { AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_NONE };
} else if (ost->st->codec->codec_id == AV_CODEC_ID_LJPEG) {
p = (const enum PixelFormat[]) { PIX_FMT_YUVJ420P, PIX_FMT_YUVJ422P, PIX_FMT_YUVJ444P, PIX_FMT_YUV420P,
PIX_FMT_YUV422P, PIX_FMT_YUV444P, PIX_FMT_BGRA, PIX_FMT_NONE };
p = (const enum AVPixelFormat[]) { AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUV420P,
AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV444P, AV_PIX_FMT_BGRA, AV_PIX_FMT_NONE };
}
}
for (; *p != PIX_FMT_NONE; p++) {
for (; *p != AV_PIX_FMT_NONE; p++) {
const char *name = av_get_pix_fmt_name(*p);
avio_printf(s, "%s:", name);
}
@@ -159,7 +159,7 @@ static char *choose_ ## var ## s(OutputStream *ost) \
return NULL; \
}
// DEF_CHOOSE_FORMAT(enum PixelFormat, pix_fmt, pix_fmts, PIX_FMT_NONE,
// DEF_CHOOSE_FORMAT(enum AVPixelFormat, pix_fmt, pix_fmts, AV_PIX_FMT_NONE,
// GET_PIX_FMT_NAME, ":")
DEF_CHOOSE_FORMAT(enum AVSampleFormat, sample_fmt, sample_fmts,
@@ -526,17 +526,17 @@ static int sub2video_prepare(InputStream *ist)
ist->sub2video.w = ist->st->codec->width = w;
ist->sub2video.h = ist->st->codec->height = h;
/* rectangles are PIX_FMT_PAL8, but we have no guarantee that the
/* rectangles are AV_PIX_FMT_PAL8, but we have no guarantee that the
palettes for all rectangles are identical or compatible */
ist->st->codec->pix_fmt = PIX_FMT_RGB32;
ist->st->codec->pix_fmt = AV_PIX_FMT_RGB32;
ret = av_image_alloc(image, linesize, w, h, PIX_FMT_RGB32, 32);
ret = av_image_alloc(image, linesize, w, h, AV_PIX_FMT_RGB32, 32);
if (ret < 0)
return ret;
memset(image[0], 0, h * linesize[0]);
ist->sub2video.ref = avfilter_get_video_buffer_ref_from_arrays(
image, linesize, AV_PERM_READ | AV_PERM_PRESERVE,
w, h, PIX_FMT_RGB32);
w, h, AV_PIX_FMT_RGB32);
if (!ist->sub2video.ref) {
av_free(image[0]);
return AVERROR(ENOMEM);

@@ -1078,7 +1078,7 @@ static OutputStream *new_video_stream(OptionsContext *o, AVFormatContext *oc, in
if (!*++frame_pix_fmt)
frame_pix_fmt = NULL;
}
if (frame_pix_fmt && (video_enc->pix_fmt = av_get_pix_fmt(frame_pix_fmt)) == PIX_FMT_NONE) {
if (frame_pix_fmt && (video_enc->pix_fmt = av_get_pix_fmt(frame_pix_fmt)) == AV_PIX_FMT_NONE) {
av_log(NULL, AV_LOG_FATAL, "Unknown pixel format requested: %s.\n", frame_pix_fmt);
exit(1);
}

@@ -1463,7 +1463,7 @@ static int queue_picture(VideoState *is, AVFrame *src_frame, double pts1, int64_
sws_flags = av_get_int(sws_opts, "sws_flags", NULL);
is->img_convert_ctx = sws_getCachedContext(is->img_convert_ctx,
vp->width, vp->height, src_frame->format, vp->width, vp->height,
PIX_FMT_YUV420P, sws_flags, NULL, NULL, NULL);
AV_PIX_FMT_YUV420P, sws_flags, NULL, NULL, NULL);
if (is->img_convert_ctx == NULL) {
fprintf(stderr, "Cannot initialize the conversion context\n");
exit(1);
@@ -1599,7 +1599,7 @@ fail:
static int configure_video_filters(AVFilterGraph *graph, VideoState *is, const char *vfilters)
{
static const enum PixelFormat pix_fmts[] = { PIX_FMT_YUV420P, PIX_FMT_NONE };
static const enum AVPixelFormat pix_fmts[] = { AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE };
char sws_flags_str[128];
char buffersrc_args[256];
int ret;
@@ -1671,7 +1671,7 @@ static int video_thread(void *arg)
AVFilterContext *filt_out = NULL, *filt_in = NULL;
int last_w = 0;
int last_h = 0;
enum PixelFormat last_format = -2;
enum AVPixelFormat last_format = -2;
if (codec->codec->capabilities & CODEC_CAP_DR1) {
is->use_dr1 = 1;

@@ -950,9 +950,9 @@ static av_cold int decode_init(AVCodecContext *avctx)
init_vlcs(f);
if (f->version > 2)
avctx->pix_fmt = PIX_FMT_RGB565;
avctx->pix_fmt = AV_PIX_FMT_RGB565;
else
avctx->pix_fmt = PIX_FMT_BGR555;
avctx->pix_fmt = AV_PIX_FMT_BGR555;
return 0;
}

@@ -40,8 +40,8 @@
#include "avcodec.h"
static const enum PixelFormat pixfmt_rgb24[] = {
PIX_FMT_BGR24, PIX_FMT_RGB32, PIX_FMT_NONE };
static const enum AVPixelFormat pixfmt_rgb24[] = {
AV_PIX_FMT_BGR24, AV_PIX_FMT_RGB32, AV_PIX_FMT_NONE };
/*
* Decoder context
@@ -164,7 +164,7 @@ static av_cold int decode_init(AVCodecContext *avctx)
avcodec_get_frame_defaults(&c->pic);
switch (avctx->bits_per_coded_sample) {
case 8:
avctx->pix_fmt = PIX_FMT_PAL8;
avctx->pix_fmt = AV_PIX_FMT_PAL8;
c->planes = 1;
c->planemap[0] = 0; // 1st plane is palette indexes
break;
@@ -176,7 +176,7 @@ static av_cold int decode_init(AVCodecContext *avctx)
c->planemap[2] = 0; // 3rd plane is blue
break;
case 32:
avctx->pix_fmt = PIX_FMT_RGB32;
avctx->pix_fmt = AV_PIX_FMT_RGB32;
c->planes = 4;
#if HAVE_BIGENDIAN
c->planemap[0] = 1; // 1st plane is red

@@ -378,7 +378,7 @@ AVCodec ff_a64multi_encoder = {
.init = a64multi_init_encoder,
.encode2 = a64multi_encode_frame,
.close = a64multi_close_encoder,
.pix_fmts = (const enum PixelFormat[]) {PIX_FMT_GRAY8, PIX_FMT_NONE},
.pix_fmts = (const enum AVPixelFormat[]) {AV_PIX_FMT_GRAY8, AV_PIX_FMT_NONE},
.long_name = NULL_IF_CONFIG_SMALL("Multicolor charset for Commodore 64"),
.capabilities = CODEC_CAP_DELAY,
};
@@ -392,7 +392,7 @@ AVCodec ff_a64multi5_encoder = {
.init = a64multi_init_encoder,
.encode2 = a64multi_encode_frame,
.close = a64multi_close_encoder,
.pix_fmts = (const enum PixelFormat[]) {PIX_FMT_GRAY8, PIX_FMT_NONE},
.pix_fmts = (const enum AVPixelFormat[]) {AV_PIX_FMT_GRAY8, AV_PIX_FMT_NONE},
.long_name = NULL_IF_CONFIG_SMALL("Multicolor charset for Commodore 64, extended with 5th color (colram)"),
.capabilities = CODEC_CAP_DELAY,
};

@@ -50,7 +50,7 @@ static av_cold int aasc_decode_init(AVCodecContext *avctx)
s->avctx = avctx;
switch (avctx->bits_per_coded_sample) {
case 8:
avctx->pix_fmt = PIX_FMT_PAL8;
avctx->pix_fmt = AV_PIX_FMT_PAL8;
ptr = avctx->extradata;
s->palette_size = FFMIN(avctx->extradata_size, AVPALETTE_SIZE);
@@ -60,10 +60,10 @@ static av_cold int aasc_decode_init(AVCodecContext *avctx)
}
break;
case 16:
avctx->pix_fmt = PIX_FMT_RGB555;
avctx->pix_fmt = AV_PIX_FMT_RGB555;
break;
case 24:
avctx->pix_fmt = PIX_FMT_BGR24;
avctx->pix_fmt = AV_PIX_FMT_BGR24;
break;
default:
av_log(avctx, AV_LOG_ERROR, "Unsupported bit depth: %d\n", avctx->bits_per_coded_sample);
@@ -127,7 +127,7 @@ static int aasc_decode_frame(AVCodecContext *avctx,
return -1;
}
if (avctx->pix_fmt == PIX_FMT_PAL8)
if (avctx->pix_fmt == AV_PIX_FMT_PAL8)
memcpy(s->frame.data[1], s->palette, s->palette_size);
*data_size = sizeof(AVFrame);

@@ -39,7 +39,7 @@ static av_cold int decode_init(AVCodecContext *avctx)
AnmContext *s = avctx->priv_data;
int i;
avctx->pix_fmt = PIX_FMT_PAL8;
avctx->pix_fmt = AV_PIX_FMT_PAL8;
avcodec_get_frame_defaults(&s->frame);
s->frame.reference = 3;

@@ -76,7 +76,7 @@ typedef struct {
static av_cold int decode_init(AVCodecContext *avctx)
{
AnsiContext *s = avctx->priv_data;
avctx->pix_fmt = PIX_FMT_PAL8;
avctx->pix_fmt = AV_PIX_FMT_PAL8;
/* defaults */
s->font = avpriv_vga16_font;

@@ -548,7 +548,7 @@ static av_cold int decode_init(AVCodecContext *avctx){
common_init(avctx);
init_vlcs(a);
ff_init_scantable(a->dsp.idct_permutation, &a->scantable, scantab);
avctx->pix_fmt= PIX_FMT_YUV420P;
avctx->pix_fmt= AV_PIX_FMT_YUV420P;
if(avctx->extradata_size < 1 || (a->inv_qscale= avctx->extradata[0]) == 0){
av_log(avctx, AV_LOG_ERROR, "illegal qscale 0\n");
@@ -647,7 +647,7 @@ AVCodec ff_asv1_encoder = {
.priv_data_size = sizeof(ASV1Context),
.init = encode_init,
.encode2 = encode_frame,
.pix_fmts = (const enum PixelFormat[]){ PIX_FMT_YUV420P, PIX_FMT_NONE },
.pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE },
.long_name = NULL_IF_CONFIG_SMALL("ASUS V1"),
};
#endif
@@ -660,7 +660,7 @@ AVCodec ff_asv2_encoder = {
.priv_data_size = sizeof(ASV1Context),
.init = encode_init,
.encode2 = encode_frame,
.pix_fmts = (const enum PixelFormat[]){ PIX_FMT_YUV420P, PIX_FMT_NONE },
.pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE },
.long_name = NULL_IF_CONFIG_SMALL("ASUS V2"),
};
#endif

@@ -39,7 +39,7 @@ static av_cold int aura_decode_init(AVCodecContext *avctx)
/* width needs to be divisible by 4 for this codec to work */
if (avctx->width & 0x3)
return -1;
avctx->pix_fmt = PIX_FMT_YUV422P;
avctx->pix_fmt = AV_PIX_FMT_YUV422P;
avcodec_get_frame_defaults(&s->frame);
return 0;

@@ -1086,7 +1086,7 @@ typedef struct AVFrame {
/**
* format of the frame, -1 if unknown or unset
* Values correspond to enum PixelFormat for video frames,
* Values correspond to enum AVPixelFormat for video frames,
* enum AVSampleFormat for audio)
* - encoding: unused
* - decoding: Read by user.
@@ -1653,13 +1653,13 @@ typedef struct AVCodecContext {
int gop_size;
/**
* Pixel format, see PIX_FMT_xxx.
* Pixel format, see AV_PIX_FMT_xxx.
* May be set by the demuxer if known from headers.
* May be overridden by the decoder if it knows better.
* - encoding: Set by user.
* - decoding: Set by user if known, overridden by libavcodec if known
*/
enum PixelFormat pix_fmt;
enum AVPixelFormat pix_fmt;
/**
* Motion estimation algorithm used for video coding.
@@ -1706,7 +1706,7 @@ typedef struct AVCodecContext {
* - encoding: unused
* - decoding: Set by user, if not set the native format will be chosen.
*/
enum PixelFormat (*get_format)(struct AVCodecContext *s, const enum PixelFormat * fmt);
enum AVPixelFormat (*get_format)(struct AVCodecContext *s, const enum AVPixelFormat * fmt);
/**
* maximum number of B-frames between non-B-frames
@@ -3132,7 +3132,7 @@ typedef struct AVCodec {
*/
int capabilities;
const AVRational *supported_framerates; ///< array of supported framerates, or NULL if any, array is terminated by {0,0}
const enum PixelFormat *pix_fmts; ///< array of supported pixel formats, or NULL if unknown, array is terminated by -1
const enum AVPixelFormat *pix_fmts; ///< array of supported pixel formats, or NULL if unknown, array is terminated by -1
const int *supported_samplerates; ///< array of supported audio samplerates, or NULL if unknown, array is terminated by 0
const enum AVSampleFormat *sample_fmts; ///< array of supported sample formats, or NULL if unknown, array is terminated by -1
const uint64_t *channel_layouts; ///< array of support channel layouts, or NULL if unknown. array is terminated by 0
@@ -3233,7 +3233,7 @@ typedef struct AVHWAccel {
*
* Only hardware accelerated formats are supported here.
*/
enum PixelFormat pix_fmt;
enum AVPixelFormat pix_fmt;
/**
* Hardware accelerated codec capabilities.
@@ -4437,7 +4437,7 @@ void av_resample_close(struct AVResampleContext *c);
* @param height the height of the picture
* @return zero if successful, a negative value if not
*/
int avpicture_alloc(AVPicture *picture, enum PixelFormat pix_fmt, int width, int height);
int avpicture_alloc(AVPicture *picture, enum AVPixelFormat pix_fmt, int width, int height);
/**
* Free a picture previously allocated by avpicture_alloc().
@@ -4455,7 +4455,7 @@ void avpicture_free(AVPicture *picture);
* @see av_image_fill_arrays()
*/
int avpicture_fill(AVPicture *picture, uint8_t *ptr,
enum PixelFormat pix_fmt, int width, int height);
enum AVPixelFormat pix_fmt, int width, int height);
/**
* Copy pixel data from an AVPicture into a buffer, always assume a
@@ -4463,7 +4463,8 @@ int avpicture_fill(AVPicture *picture, uint8_t *ptr,
*
* @see av_image_copy_to_buffer()
*/
int avpicture_layout(const AVPicture* src, enum PixelFormat pix_fmt, int width, int height,
int avpicture_layout(const AVPicture* src, enum AVPixelFormat pix_fmt,
int width, int height,
unsigned char *dest, int dest_size);
/**
@@ -4473,29 +4474,29 @@ int avpicture_layout(const AVPicture* src, enum PixelFormat pix_fmt, int width,
*
* @see av_image_get_buffer_size().
*/
int avpicture_get_size(enum PixelFormat pix_fmt, int width, int height);
int avpicture_get_size(enum AVPixelFormat pix_fmt, int width, int height);
/**
* deinterlace - if not supported return -1
*/
int avpicture_deinterlace(AVPicture *dst, const AVPicture *src,
enum PixelFormat pix_fmt, int width, int height);
enum AVPixelFormat pix_fmt, int width, int height);
/**
* Copy image src to dst. Wraps av_image_copy().
*/
void av_picture_copy(AVPicture *dst, const AVPicture *src,
enum PixelFormat pix_fmt, int width, int height);
enum AVPixelFormat pix_fmt, int width, int height);
/**
* Crop image top and left side.
*/
int av_picture_crop(AVPicture *dst, const AVPicture *src,
enum PixelFormat pix_fmt, int top_band, int left_band);
enum AVPixelFormat pix_fmt, int top_band, int left_band);
/**
* Pad image.
*/
int av_picture_pad(AVPicture *dst, const AVPicture *src, int height, int width, enum PixelFormat pix_fmt,
int av_picture_pad(AVPicture *dst, const AVPicture *src, int height, int width, enum AVPixelFormat pix_fmt,
int padtop, int padbottom, int padleft, int padright, int *color);
/**
@@ -4518,14 +4519,14 @@ int av_picture_pad(AVPicture *dst, const AVPicture *src, int height, int width,
* @{
*/
void avcodec_get_chroma_sub_sample(enum PixelFormat pix_fmt, int *h_shift, int *v_shift);
void avcodec_get_chroma_sub_sample(enum AVPixelFormat pix_fmt, int *h_shift, int *v_shift);
/**
* Return a value representing the fourCC code associated to the
* pixel format pix_fmt, or 0 if no associated fourCC code can be
* found.
*/
unsigned int avcodec_pix_fmt_to_codec_tag(enum PixelFormat pix_fmt);
unsigned int avcodec_pix_fmt_to_codec_tag(enum AVPixelFormat pix_fmt);
#define FF_LOSS_RESOLUTION 0x0001 /**< loss due to resolution change */
#define FF_LOSS_DEPTH 0x0002 /**< loss due to color depth change */
@@ -4552,7 +4553,7 @@ unsigned int avcodec_pix_fmt_to_codec_tag(enum PixelFormat pix_fmt);
* @return Combination of flags informing you what kind of losses will occur
* (maximum loss for an invalid dst_pix_fmt).
*/
int avcodec_get_pix_fmt_loss(enum PixelFormat dst_pix_fmt, enum PixelFormat src_pix_fmt,
int avcodec_get_pix_fmt_loss(enum AVPixelFormat dst_pix_fmt, enum AVPixelFormat src_pix_fmt,
int has_alpha);
#if FF_API_FIND_BEST_PIX_FMT
@@ -4571,8 +4572,8 @@ int avcodec_get_pix_fmt_loss(enum PixelFormat dst_pix_fmt, enum PixelFormat src_
* Note, only the first 64 pixel formats will fit in pix_fmt_mask.
*
* @code
* src_pix_fmt = PIX_FMT_YUV420P;
* pix_fmt_mask = (1 << PIX_FMT_YUV422P) | (1 << PIX_FMT_RGB24);
* src_pix_fmt = AV_PIX_FMT_YUV420P;
* pix_fmt_mask = (1 << AV_PIX_FMT_YUV422P) | (1 << AV_PIX_FMT_RGB24);
* dst_pix_fmt = avcodec_find_best_pix_fmt(pix_fmt_mask, src_pix_fmt, alpha, &loss);
* @endcode
*
@@ -4583,7 +4584,7 @@ int avcodec_get_pix_fmt_loss(enum PixelFormat dst_pix_fmt, enum PixelFormat src_
* @return The best pixel format to convert to or -1 if none was found.
*/
attribute_deprecated
enum PixelFormat avcodec_find_best_pix_fmt(int64_t pix_fmt_mask, enum PixelFormat src_pix_fmt,
enum AVPixelFormat avcodec_find_best_pix_fmt(int64_t pix_fmt_mask, enum AVPixelFormat src_pix_fmt,
int has_alpha, int *loss_ptr);
#endif /* FF_API_FIND_BEST_PIX_FMT */
@@ -4598,14 +4599,14 @@ enum PixelFormat avcodec_find_best_pix_fmt(int64_t pix_fmt_mask, enum PixelForma
* pix_fmt_list parameter.
*
*
* @param[in] pix_fmt_list PIX_FMT_NONE terminated array of pixel formats to choose from
* @param[in] pix_fmt_list AV_PIX_FMT_NONE terminated array of pixel formats to choose from
* @param[in] src_pix_fmt source pixel format
* @param[in] has_alpha Whether the source pixel format alpha channel is used.
* @param[out] loss_ptr Combination of flags informing you what kind of losses will occur.
* @return The best pixel format to convert to or -1 if none was found.
*/
enum PixelFormat avcodec_find_best_pix_fmt_of_list(enum PixelFormat *pix_fmt_list,
enum PixelFormat src_pix_fmt,
enum AVPixelFormat avcodec_find_best_pix_fmt_of_list(enum AVPixelFormat *pix_fmt_list,
enum AVPixelFormat src_pix_fmt,
int has_alpha, int *loss_ptr);
/**
@@ -4616,14 +4617,14 @@ enum PixelFormat avcodec_find_best_pix_fmt_of_list(enum PixelFormat *pix_fmt_lis
* converting from some formats to other formats. avcodec_find_best_pix_fmt_of_2() selects which of
* the given pixel formats should be used to suffer the least amount of loss.
*
* If one of the destination formats is PIX_FMT_NONE the other pixel format (if valid) will be
* If one of the destination formats is AV_PIX_FMT_NONE the other pixel format (if valid) will be
* returned.
*
* @code
* src_pix_fmt = PIX_FMT_YUV420P;
* dst_pix_fmt1= PIX_FMT_RGB24;
* dst_pix_fmt2= PIX_FMT_GRAY8;
* dst_pix_fmt3= PIX_FMT_RGB8;
* src_pix_fmt = AV_PIX_FMT_YUV420P;
* dst_pix_fmt1= AV_PIX_FMT_RGB24;
* dst_pix_fmt2= AV_PIX_FMT_GRAY8;
* dst_pix_fmt3= AV_PIX_FMT_RGB8;
* loss= FF_LOSS_CHROMA; // don't care about chroma loss, so chroma loss will be ignored.
* dst_pix_fmt = avcodec_find_best_pix_fmt_of_2(dst_pix_fmt1, dst_pix_fmt2, src_pix_fmt, alpha, &loss);
* dst_pix_fmt = avcodec_find_best_pix_fmt_of_2(dst_pix_fmt, dst_pix_fmt3, src_pix_fmt, alpha, &loss);
@@ -4638,21 +4639,21 @@ enum PixelFormat avcodec_find_best_pix_fmt_of_list(enum PixelFormat *pix_fmt_lis
* that occurs when converting from src to selected dst pixel format.
* @return The best pixel format to convert to or -1 if none was found.
*/
enum PixelFormat avcodec_find_best_pix_fmt_of_2(enum PixelFormat dst_pix_fmt1, enum PixelFormat dst_pix_fmt2,
enum PixelFormat src_pix_fmt, int has_alpha, int *loss_ptr);
enum AVPixelFormat avcodec_find_best_pix_fmt_of_2(enum AVPixelFormat dst_pix_fmt1, enum AVPixelFormat dst_pix_fmt2,
enum AVPixelFormat src_pix_fmt, int has_alpha, int *loss_ptr);
attribute_deprecated
#if AV_HAVE_INCOMPATIBLE_FORK_ABI
enum PixelFormat avcodec_find_best_pix_fmt2(enum PixelFormat *pix_fmt_list,
enum PixelFormat src_pix_fmt,
int has_alpha, int *loss_ptr);
enum AVPixelFormat avcodec_find_best_pix_fmt2(enum AVPixelFormat *pix_fmt_list,
enum AVPixelFormat src_pix_fmt,
int has_alpha, int *loss_ptr);
#else
enum PixelFormat avcodec_find_best_pix_fmt2(enum PixelFormat dst_pix_fmt1, enum PixelFormat dst_pix_fmt2,
enum PixelFormat src_pix_fmt, int has_alpha, int *loss_ptr);
enum AVPixelFormat avcodec_find_best_pix_fmt2(enum AVPixelFormat dst_pix_fmt1, enum AVPixelFormat dst_pix_fmt2,
enum AVPixelFormat src_pix_fmt, int has_alpha, int *loss_ptr);
#endif
enum PixelFormat avcodec_default_get_format(struct AVCodecContext *s, const enum PixelFormat * fmt);
enum AVPixelFormat avcodec_default_get_format(struct AVCodecContext *s, const enum AVPixelFormat * fmt);
/**
* @}

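The selection helpers documented above keep their semantics under the new
names; a minimal usage sketch (the candidate list and source format here are
arbitrary examples, error handling elided):

    #include "libavcodec/avcodec.h"

    static enum AVPixelFormat pick_best_format(void)
    {
        /* candidate list must be terminated by AV_PIX_FMT_NONE */
        enum AVPixelFormat candidates[] = {
            AV_PIX_FMT_YUV422P, AV_PIX_FMT_RGB24, AV_PIX_FMT_NONE
        };
        int loss = 0;
        /* choose the candidate that loses the least information when
         * converting from YUV420P; alpha is not used here */
        return avcodec_find_best_pix_fmt_of_list(candidates,
                                                 AV_PIX_FMT_YUV420P,
                                                 0, &loss);
    }
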
@@ -45,7 +45,7 @@ static av_cold int init(AVCodecContext *avctx)
return -1;
avcodec_get_frame_defaults(&a->frame);
avctx->pix_fmt = PIX_FMT_UYVY422;
avctx->pix_fmt = AV_PIX_FMT_UYVY422;
if(avctx->extradata_size >= 9 && avctx->extradata[4]+28 < avctx->extradata_size) {
int ndx = avctx->extradata[4] + 4;

@@ -160,7 +160,7 @@ avs_decode_frame(AVCodecContext * avctx,
static av_cold int avs_decode_init(AVCodecContext * avctx)
{
AvsContext *const avs = avctx->priv_data;
avctx->pix_fmt = PIX_FMT_PAL8;
avctx->pix_fmt = AV_PIX_FMT_PAL8;
avcodec_get_frame_defaults(&avs->picture);
avcodec_set_dimensions(avctx, 318, 198);
return 0;

@@ -25,7 +25,7 @@
static av_cold int avui_decode_init(AVCodecContext *avctx)
{
avctx->pix_fmt = PIX_FMT_YUVA422P;
avctx->pix_fmt = AV_PIX_FMT_YUVA422P;
avctx->coded_frame = avcodec_alloc_frame();

@@ -108,6 +108,6 @@ AVCodec ff_avui_encoder = {
.encode2 = avui_encode_frame,
.close = avui_encode_close,
.capabilities = CODEC_CAP_EXPERIMENTAL,
.pix_fmts = (const enum PixelFormat[]){ PIX_FMT_UYVY422, PIX_FMT_NONE },
.pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_UYVY422, AV_PIX_FMT_NONE },
.long_name = NULL_IF_CONFIG_SMALL("Avid Meridien Uncompressed"),
};

@@ -44,7 +44,7 @@ static av_cold int bethsoftvid_decode_init(AVCodecContext *avctx)
vid->frame.reference = 3;
vid->frame.buffer_hints = FF_BUFFER_HINTS_VALID |
FF_BUFFER_HINTS_PRESERVE | FF_BUFFER_HINTS_REUSABLE;
avctx->pix_fmt = PIX_FMT_PAL8;
avctx->pix_fmt = AV_PIX_FMT_PAL8;
return 0;
}

@@ -40,7 +40,7 @@ typedef struct BFIContext {
static av_cold int bfi_decode_init(AVCodecContext *avctx)
{
BFIContext *bfi = avctx->priv_data;
avctx->pix_fmt = PIX_FMT_PAL8;
avctx->pix_fmt = AV_PIX_FMT_PAL8;
avcodec_get_frame_defaults(&bfi->frame);
bfi->dst = av_mallocz(avctx->width * avctx->height);
return 0;

@@ -1288,7 +1288,7 @@ static av_cold int decode_init(AVCodecContext *avctx)
return 1;
}
avctx->pix_fmt = c->has_alpha ? PIX_FMT_YUVA420P : PIX_FMT_YUV420P;
avctx->pix_fmt = c->has_alpha ? AV_PIX_FMT_YUVA420P : AV_PIX_FMT_YUV420P;
avctx->idct_algo = FF_IDCT_BINK;
ff_dsputil_init(&c->dsp, avctx);

@@ -49,7 +49,7 @@ static av_cold int decode_init(AVCodecContext *avctx)
uint8_t *p;
int i;
avctx->pix_fmt = PIX_FMT_PAL8;
avctx->pix_fmt = AV_PIX_FMT_PAL8;
p = avctx->extradata;
if (p) {
s->font_height = p[0];

@@ -140,40 +140,40 @@ static int bmp_decode_frame(AVCodecContext *avctx,
avctx->width = width;
avctx->height = height > 0? height: -height;
avctx->pix_fmt = PIX_FMT_NONE;
avctx->pix_fmt = AV_PIX_FMT_NONE;
switch(depth){
case 32:
if(comp == BMP_BITFIELDS){
if (rgb[0] == 0xFF000000 && rgb[1] == 0x00FF0000 && rgb[2] == 0x0000FF00)
avctx->pix_fmt = alpha ? PIX_FMT_ABGR : PIX_FMT_0BGR;
avctx->pix_fmt = alpha ? AV_PIX_FMT_ABGR : AV_PIX_FMT_0BGR;
else if (rgb[0] == 0x00FF0000 && rgb[1] == 0x0000FF00 && rgb[2] == 0x000000FF)
avctx->pix_fmt = alpha ? PIX_FMT_BGRA : PIX_FMT_BGR0;
avctx->pix_fmt = alpha ? AV_PIX_FMT_BGRA : AV_PIX_FMT_BGR0;
else if (rgb[0] == 0x0000FF00 && rgb[1] == 0x00FF0000 && rgb[2] == 0xFF000000)
avctx->pix_fmt = alpha ? PIX_FMT_ARGB : PIX_FMT_0RGB;
avctx->pix_fmt = alpha ? AV_PIX_FMT_ARGB : AV_PIX_FMT_0RGB;
else if (rgb[0] == 0x000000FF && rgb[1] == 0x0000FF00 && rgb[2] == 0x00FF0000)
avctx->pix_fmt = alpha ? PIX_FMT_RGBA : PIX_FMT_RGB0;
avctx->pix_fmt = alpha ? AV_PIX_FMT_RGBA : AV_PIX_FMT_RGB0;
else {
av_log(avctx, AV_LOG_ERROR, "Unknown bitfields %0X %0X %0X\n", rgb[0], rgb[1], rgb[2]);
return AVERROR(EINVAL);
}
} else {
avctx->pix_fmt = PIX_FMT_BGRA;
avctx->pix_fmt = AV_PIX_FMT_BGRA;
}
break;
case 24:
avctx->pix_fmt = PIX_FMT_BGR24;
avctx->pix_fmt = AV_PIX_FMT_BGR24;
break;
case 16:
if(comp == BMP_RGB)
avctx->pix_fmt = PIX_FMT_RGB555;
avctx->pix_fmt = AV_PIX_FMT_RGB555;
else if (comp == BMP_BITFIELDS) {
if (rgb[0] == 0xF800 && rgb[1] == 0x07E0 && rgb[2] == 0x001F)
avctx->pix_fmt = PIX_FMT_RGB565;
avctx->pix_fmt = AV_PIX_FMT_RGB565;
else if (rgb[0] == 0x7C00 && rgb[1] == 0x03E0 && rgb[2] == 0x001F)
avctx->pix_fmt = PIX_FMT_RGB555;
avctx->pix_fmt = AV_PIX_FMT_RGB555;
else if (rgb[0] == 0x0F00 && rgb[1] == 0x00F0 && rgb[2] == 0x000F)
avctx->pix_fmt = PIX_FMT_RGB444;
avctx->pix_fmt = AV_PIX_FMT_RGB444;
else {
av_log(avctx, AV_LOG_ERROR, "Unknown bitfields %0X %0X %0X\n", rgb[0], rgb[1], rgb[2]);
return AVERROR(EINVAL);
@@ -182,14 +182,14 @@ static int bmp_decode_frame(AVCodecContext *avctx,
break;
case 8:
if(hsize - ihsize - 14 > 0)
avctx->pix_fmt = PIX_FMT_PAL8;
avctx->pix_fmt = AV_PIX_FMT_PAL8;
else
avctx->pix_fmt = PIX_FMT_GRAY8;
avctx->pix_fmt = AV_PIX_FMT_GRAY8;
break;
case 1:
case 4:
if(hsize - ihsize - 14 > 0){
avctx->pix_fmt = PIX_FMT_PAL8;
avctx->pix_fmt = AV_PIX_FMT_PAL8;
}else{
av_log(avctx, AV_LOG_ERROR, "Unknown palette for %d-colour BMP\n", 1<<depth);
return -1;
@@ -200,7 +200,7 @@ static int bmp_decode_frame(AVCodecContext *avctx,
return -1;
}
if(avctx->pix_fmt == PIX_FMT_NONE){
if(avctx->pix_fmt == AV_PIX_FMT_NONE){
av_log(avctx, AV_LOG_ERROR, "unsupported pixel format\n");
return -1;
}
@@ -240,7 +240,7 @@ static int bmp_decode_frame(AVCodecContext *avctx,
linesize = p->linesize[0];
}
if(avctx->pix_fmt == PIX_FMT_PAL8){
if(avctx->pix_fmt == AV_PIX_FMT_PAL8){
int colors = 1 << depth;
memset(p->data[1], 0, 1024);

@@ -38,26 +38,26 @@ static av_cold int bmp_encode_init(AVCodecContext *avctx){
avctx->coded_frame = &s->picture;
switch (avctx->pix_fmt) {
case PIX_FMT_BGRA:
case AV_PIX_FMT_BGRA:
avctx->bits_per_coded_sample = 32;
break;
case PIX_FMT_BGR24:
case AV_PIX_FMT_BGR24:
avctx->bits_per_coded_sample = 24;
break;
case PIX_FMT_RGB555:
case PIX_FMT_RGB565:
case PIX_FMT_RGB444:
case AV_PIX_FMT_RGB555:
case AV_PIX_FMT_RGB565:
case AV_PIX_FMT_RGB444:
avctx->bits_per_coded_sample = 16;
break;
case PIX_FMT_RGB8:
case PIX_FMT_BGR8:
case PIX_FMT_RGB4_BYTE:
case PIX_FMT_BGR4_BYTE:
case PIX_FMT_GRAY8:
case PIX_FMT_PAL8:
case AV_PIX_FMT_RGB8:
case AV_PIX_FMT_BGR8:
case AV_PIX_FMT_RGB4_BYTE:
case AV_PIX_FMT_BGR4_BYTE:
case AV_PIX_FMT_GRAY8:
case AV_PIX_FMT_PAL8:
avctx->bits_per_coded_sample = 8;
break;
case PIX_FMT_MONOBLACK:
case AV_PIX_FMT_MONOBLACK:
avctx->bits_per_coded_sample = 1;
break;
default:
@@ -83,29 +83,29 @@ static int bmp_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
p->pict_type= AV_PICTURE_TYPE_I;
p->key_frame= 1;
switch (avctx->pix_fmt) {
case PIX_FMT_RGB444:
case AV_PIX_FMT_RGB444:
compression = BMP_BITFIELDS;
pal = rgb444_masks; // abuse pal to hold color masks
pal_entries = 3;
break;
case PIX_FMT_RGB565:
case AV_PIX_FMT_RGB565:
compression = BMP_BITFIELDS;
pal = rgb565_masks; // abuse pal to hold color masks
pal_entries = 3;
break;
case PIX_FMT_RGB8:
case PIX_FMT_BGR8:
case PIX_FMT_RGB4_BYTE:
case PIX_FMT_BGR4_BYTE:
case PIX_FMT_GRAY8:
case AV_PIX_FMT_RGB8:
case AV_PIX_FMT_BGR8:
case AV_PIX_FMT_RGB4_BYTE:
case AV_PIX_FMT_BGR4_BYTE:
case AV_PIX_FMT_GRAY8:
av_assert1(bit_count == 8);
ff_set_systematic_pal2(palette256, avctx->pix_fmt);
pal = palette256;
break;
case PIX_FMT_PAL8:
case AV_PIX_FMT_PAL8:
pal = (uint32_t *)p->data[1];
break;
case PIX_FMT_MONOBLACK:
case AV_PIX_FMT_MONOBLACK:
pal = monoblack_pal;
break;
}
@@ -172,12 +172,12 @@ AVCodec ff_bmp_encoder = {
.priv_data_size = sizeof(BMPContext),
.init = bmp_encode_init,
.encode2 = bmp_encode_frame,
.pix_fmts = (const enum PixelFormat[]){
PIX_FMT_BGRA, PIX_FMT_BGR24,
PIX_FMT_RGB565, PIX_FMT_RGB555, PIX_FMT_RGB444,
PIX_FMT_RGB8, PIX_FMT_BGR8, PIX_FMT_RGB4_BYTE, PIX_FMT_BGR4_BYTE, PIX_FMT_GRAY8, PIX_FMT_PAL8,
PIX_FMT_MONOBLACK,
PIX_FMT_NONE
.pix_fmts = (const enum AVPixelFormat[]){
AV_PIX_FMT_BGRA, AV_PIX_FMT_BGR24,
AV_PIX_FMT_RGB565, AV_PIX_FMT_RGB555, AV_PIX_FMT_RGB444,
AV_PIX_FMT_RGB8, AV_PIX_FMT_BGR8, AV_PIX_FMT_RGB4_BYTE, AV_PIX_FMT_BGR4_BYTE, AV_PIX_FMT_GRAY8, AV_PIX_FMT_PAL8,
AV_PIX_FMT_MONOBLACK,
AV_PIX_FMT_NONE
},
.long_name = NULL_IF_CONFIG_SMALL("BMP (Windows and OS/2 bitmap)"),
};

@@ -270,7 +270,7 @@ static av_cold int decode_init(AVCodecContext *avctx)
BMVDecContext * const c = avctx->priv_data;
c->avctx = avctx;
avctx->pix_fmt = PIX_FMT_PAL8;
avctx->pix_fmt = AV_PIX_FMT_PAL8;
if (avctx->width != SCREEN_WIDE || avctx->height != SCREEN_HIGH) {
av_log(avctx, AV_LOG_ERROR, "Invalid dimension %dx%d\n", avctx->width, avctx->height);

@@ -51,7 +51,7 @@ static av_cold int decode_init(AVCodecContext *avctx)
avcodec_get_frame_defaults(&c93->pictures[0]);
avcodec_get_frame_defaults(&c93->pictures[1]);
avctx->pix_fmt = PIX_FMT_PAL8;
avctx->pix_fmt = AV_PIX_FMT_PAL8;
return 0;
}

@@ -712,7 +712,7 @@ av_cold int ff_cavs_init(AVCodecContext *avctx) {
ff_cavsdsp_init(&h->cdsp, avctx);
s->avctx = avctx;
avctx->pix_fmt= PIX_FMT_YUV420P;
avctx->pix_fmt= AV_PIX_FMT_YUV420P;
h->luma_scan[0] = 0;
h->luma_scan[1] = 8;

@@ -86,7 +86,7 @@ static av_cold int cdg_decode_init(AVCodecContext *avctx)
avctx->width = CDG_FULL_WIDTH;
avctx->height = CDG_FULL_HEIGHT;
avctx->pix_fmt = PIX_FMT_PAL8;
avctx->pix_fmt = AV_PIX_FMT_PAL8;
return 0;
}

@@ -249,11 +249,11 @@ static int cdxl_decode_frame(AVCodecContext *avctx, void *data,
if (c->video_size < aligned_width * avctx->height * c->bpp / 8)
return AVERROR_INVALIDDATA;
if (!encoding && c->palette_size && c->bpp <= 8) {
avctx->pix_fmt = PIX_FMT_PAL8;
avctx->pix_fmt = AV_PIX_FMT_PAL8;
} else if (encoding == 1 && (c->bpp == 6 || c->bpp == 8)) {
if (c->palette_size != (1 << (c->bpp - 1)))
return AVERROR_INVALIDDATA;
avctx->pix_fmt = PIX_FMT_BGR24;
avctx->pix_fmt = AV_PIX_FMT_BGR24;
} else {
av_log_ask_for_sample(avctx, "unsupported encoding %d and bpp %d\n",
encoding, c->bpp);

@@ -412,10 +412,10 @@ static av_cold int cinepak_decode_init(AVCodecContext *avctx)
// check for paletted data
if (avctx->bits_per_coded_sample != 8) {
s->palette_video = 0;
avctx->pix_fmt = PIX_FMT_YUV420P;
avctx->pix_fmt = AV_PIX_FMT_YUV420P;
} else {
s->palette_video = 1;
avctx->pix_fmt = PIX_FMT_PAL8;
avctx->pix_fmt = AV_PIX_FMT_PAL8;
}
avcodec_get_frame_defaults(&s->frame);

@@ -106,7 +106,7 @@ static int decode_frame(AVCodecContext *avctx,
static av_cold int decode_init(AVCodecContext *avctx)
{
avctx->pix_fmt = PIX_FMT_YUV411P;
avctx->pix_fmt = AV_PIX_FMT_YUV411P;
return common_init(avctx);
}
@@ -203,8 +203,8 @@ AVCodec ff_cljr_encoder = {
.priv_data_size = sizeof(CLJRContext),
.init = common_init,
.encode2 = encode_frame,
.pix_fmts = (const enum PixelFormat[]) { PIX_FMT_YUV411P,
PIX_FMT_NONE },
.pix_fmts = (const enum AVPixelFormat[]) { AV_PIX_FMT_YUV411P,
AV_PIX_FMT_NONE },
.long_name = NULL_IF_CONFIG_SMALL("Cirrus Logic AccuPak"),
.priv_class = &class,
};

@@ -330,7 +330,7 @@ static int cllc_decode_frame(AVCodecContext *avctx, void *data,
switch (coding_type) {
case 1:
case 2:
avctx->pix_fmt = PIX_FMT_RGB24;
avctx->pix_fmt = AV_PIX_FMT_RGB24;
avctx->bits_per_raw_sample = 8;
ret = avctx->get_buffer(avctx, pic);
@@ -345,7 +345,7 @@ static int cllc_decode_frame(AVCodecContext *avctx, void *data,
break;
case 3:
avctx->pix_fmt = PIX_FMT_ARGB;
avctx->pix_fmt = AV_PIX_FMT_ARGB;
avctx->bits_per_raw_sample = 8;
ret = avctx->get_buffer(avctx, pic);

@@ -192,7 +192,7 @@ static int cpia_decode_frame(AVCodecContext* avctx,
static av_cold int cpia_decode_init(AVCodecContext *avctx)
{
// output pixel format
avctx->pix_fmt = PIX_FMT_YUV420P;
avctx->pix_fmt = AV_PIX_FMT_YUV420P;
/* The default timebase set by the v4l2 demuxer leads to probing which is buggy.
* Set some reasonable time_base to skip this.

@@ -402,7 +402,7 @@ static av_cold int init(AVCodecContext *avctx)
av_log(avctx, AV_LOG_VERBOSE, "CrystalHD Init for %s\n",
avctx->codec->name);
avctx->pix_fmt = PIX_FMT_YUYV422;
avctx->pix_fmt = AV_PIX_FMT_YUYV422;
/* Initialize the library */
priv = avctx->priv_data;
@@ -1099,7 +1099,7 @@ AVCodec ff_h264_crystalhd_decoder = {
.capabilities = CODEC_CAP_DR1 | CODEC_CAP_DELAY,
.flush = flush,
.long_name = NULL_IF_CONFIG_SMALL("H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10 (CrystalHD acceleration)"),
.pix_fmts = (const enum PixelFormat[]){PIX_FMT_YUYV422, PIX_FMT_NONE},
.pix_fmts = (const enum AVPixelFormat[]){AV_PIX_FMT_YUYV422, AV_PIX_FMT_NONE},
.priv_class = &h264_class,
};
#endif
@@ -1123,7 +1123,7 @@ AVCodec ff_mpeg2_crystalhd_decoder = {
.capabilities = CODEC_CAP_DR1 | CODEC_CAP_DELAY,
.flush = flush,
.long_name = NULL_IF_CONFIG_SMALL("MPEG-2 Video (CrystalHD acceleration)"),
.pix_fmts = (const enum PixelFormat[]){PIX_FMT_YUYV422, PIX_FMT_NONE},
.pix_fmts = (const enum AVPixelFormat[]){AV_PIX_FMT_YUYV422, AV_PIX_FMT_NONE},
.priv_class = &mpeg2_class,
};
#endif
@@ -1147,7 +1147,7 @@ AVCodec ff_mpeg4_crystalhd_decoder = {
.capabilities = CODEC_CAP_DR1 | CODEC_CAP_DELAY,
.flush = flush,
.long_name = NULL_IF_CONFIG_SMALL("MPEG-4 Part 2 (CrystalHD acceleration)"),
.pix_fmts = (const enum PixelFormat[]){PIX_FMT_YUYV422, PIX_FMT_NONE},
.pix_fmts = (const enum AVPixelFormat[]){AV_PIX_FMT_YUYV422, AV_PIX_FMT_NONE},
.priv_class = &mpeg4_class,
};
#endif
@@ -1171,7 +1171,7 @@ AVCodec ff_msmpeg4_crystalhd_decoder = {
.capabilities = CODEC_CAP_DR1 | CODEC_CAP_DELAY | CODEC_CAP_EXPERIMENTAL,
.flush = flush,
.long_name = NULL_IF_CONFIG_SMALL("MPEG-4 Part 2 Microsoft variant version 3 (CrystalHD acceleration)"),
.pix_fmts = (const enum PixelFormat[]){PIX_FMT_YUYV422, PIX_FMT_NONE},
.pix_fmts = (const enum AVPixelFormat[]){AV_PIX_FMT_YUYV422, AV_PIX_FMT_NONE},
.priv_class = &msmpeg4_class,
};
#endif
@@ -1195,7 +1195,7 @@ AVCodec ff_vc1_crystalhd_decoder = {
.capabilities = CODEC_CAP_DR1 | CODEC_CAP_DELAY,
.flush = flush,
.long_name = NULL_IF_CONFIG_SMALL("SMPTE VC-1 (CrystalHD acceleration)"),
.pix_fmts = (const enum PixelFormat[]){PIX_FMT_YUYV422, PIX_FMT_NONE},
.pix_fmts = (const enum AVPixelFormat[]){AV_PIX_FMT_YUYV422, AV_PIX_FMT_NONE},
.priv_class = &vc1_class,
};
#endif
@@ -1219,7 +1219,7 @@ AVCodec ff_wmv3_crystalhd_decoder = {
.capabilities = CODEC_CAP_DR1 | CODEC_CAP_DELAY,
.flush = flush,
.long_name = NULL_IF_CONFIG_SMALL("Windows Media Video 9 (CrystalHD acceleration)"),
.pix_fmts = (const enum PixelFormat[]){PIX_FMT_YUYV422, PIX_FMT_NONE},
.pix_fmts = (const enum AVPixelFormat[]){AV_PIX_FMT_YUYV422, AV_PIX_FMT_NONE},
.priv_class = &wmv3_class,
};
#endif

@@ -127,9 +127,9 @@ static av_cold int decode_init(AVCodecContext *avctx) {
CamStudioContext *c = avctx->priv_data;
int stride;
switch (avctx->bits_per_coded_sample) {
case 16: avctx->pix_fmt = PIX_FMT_RGB555LE; break;
case 24: avctx->pix_fmt = PIX_FMT_BGR24; break;
case 32: avctx->pix_fmt = PIX_FMT_BGRA; break;
case 16: avctx->pix_fmt = AV_PIX_FMT_RGB555LE; break;
case 24: avctx->pix_fmt = AV_PIX_FMT_BGR24; break;
case 32: avctx->pix_fmt = AV_PIX_FMT_BGRA; break;
default:
av_log(avctx, AV_LOG_ERROR,
"CamStudio codec error: invalid depth %i bpp\n",

@@ -93,9 +93,9 @@ static int cyuv_decode_frame(AVCodecContext *avctx,
* of 4 pixels. Thus, the total size of the buffer ought to be:
* (3 * 16) + height * (width * 3 / 4) */
if (buf_size == 48 + s->height * (s->width * 3 / 4)) {
avctx->pix_fmt = PIX_FMT_YUV411P;
avctx->pix_fmt = AV_PIX_FMT_YUV411P;
} else if(buf_size == rawsize ) {
avctx->pix_fmt = PIX_FMT_UYVY422;
avctx->pix_fmt = AV_PIX_FMT_UYVY422;
} else {
av_log(avctx, AV_LOG_ERROR, "got a buffer with %d bytes when %d were expected\n",
buf_size, 48 + s->height * (s->width * 3 / 4));

@@ -38,7 +38,7 @@ static av_cold int dfa_decode_init(AVCodecContext *avctx)
{
DfaContext *s = avctx->priv_data;
avctx->pix_fmt = PIX_FMT_PAL8;
avctx->pix_fmt = AV_PIX_FMT_PAL8;
if (!avctx->width || !avctx->height)
return AVERROR_INVALIDDATA;

@@ -106,9 +106,9 @@ static const struct {
};
/* [DIRAC_STD] Table 10.2 Supported chroma sampling formats + luma Offset */
static const enum PixelFormat dirac_pix_fmt[2][3] = {
{ PIX_FMT_YUV444P, PIX_FMT_YUV422P, PIX_FMT_YUV420P },
{ PIX_FMT_YUVJ444P, PIX_FMT_YUVJ422P, PIX_FMT_YUVJ420P },
static const enum AVPixelFormat dirac_pix_fmt[2][3] = {
{ AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV420P },
{ AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ420P },
};
/* [DIRAC_STD] 10.3 Parse Source Parameters.

@@ -133,7 +133,7 @@ static int dnxhd_decode_header(DNXHDContext *ctx, const uint8_t *buf, int buf_si
av_dlog(ctx->avctx, "width %d, height %d\n", ctx->width, ctx->height);
if (buf[0x21] & 0x40) {
ctx->avctx->pix_fmt = PIX_FMT_YUV422P10;
ctx->avctx->pix_fmt = AV_PIX_FMT_YUV422P10;
ctx->avctx->bits_per_raw_sample = 10;
if (ctx->bit_depth != 10) {
ff_dsputil_init(&ctx->dsp, ctx->avctx);
@@ -141,7 +141,7 @@ static int dnxhd_decode_header(DNXHDContext *ctx, const uint8_t *buf, int buf_si
ctx->decode_dct_block = dnxhd_decode_dct_block_10;
}
} else {
ctx->avctx->pix_fmt = PIX_FMT_YUV422P;
ctx->avctx->pix_fmt = AV_PIX_FMT_YUV422P;
ctx->avctx->bits_per_raw_sample = 8;
if (ctx->bit_depth != 8) {
ff_dsputil_init(&ctx->dsp, ctx->avctx);

@@ -253,10 +253,10 @@ static int dnxhd_encode_init(AVCodecContext *avctx)
int i, index, bit_depth;
switch (avctx->pix_fmt) {
case PIX_FMT_YUV422P:
case AV_PIX_FMT_YUV422P:
bit_depth = 8;
break;
case PIX_FMT_YUV422P10:
case AV_PIX_FMT_YUV422P10:
bit_depth = 10;
break;
default:
@@ -1021,9 +1021,9 @@ AVCodec ff_dnxhd_encoder = {
.encode2 = dnxhd_encode_picture,
.close = dnxhd_encode_end,
.capabilities = CODEC_CAP_SLICE_THREADS,
.pix_fmts = (const enum PixelFormat[]){ PIX_FMT_YUV422P,
PIX_FMT_YUV422P10,
PIX_FMT_NONE },
.pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_YUV422P,
AV_PIX_FMT_YUV422P10,
AV_PIX_FMT_NONE },
.long_name = NULL_IF_CONFIG_SMALL("VC3/DNxHD"),
.priv_class = &class,
.defaults = dnxhd_defaults,

@@ -125,25 +125,25 @@ static int decode_frame(AVCodecContext *avctx,
switch (bits_per_color) {
case 8:
if (elements == 4) {
avctx->pix_fmt = PIX_FMT_RGBA;
avctx->pix_fmt = AV_PIX_FMT_RGBA;
} else {
avctx->pix_fmt = PIX_FMT_RGB24;
avctx->pix_fmt = AV_PIX_FMT_RGB24;
}
source_packet_size = elements;
target_packet_size = elements;
planar = 0;
break;
case 10:
avctx->pix_fmt = PIX_FMT_GBRP10;
avctx->pix_fmt = AV_PIX_FMT_GBRP10;
target_packet_size = 6;
source_packet_size = 4;
planar = 1;
break;
case 12:
if (endian) {
avctx->pix_fmt = elements == 4 ? PIX_FMT_GBRP12BE : PIX_FMT_GBRP12BE;
avctx->pix_fmt = elements == 4 ? AV_PIX_FMT_GBRP12BE : AV_PIX_FMT_GBRP12BE;
} else {
avctx->pix_fmt = elements == 4 ? PIX_FMT_GBRP12LE : PIX_FMT_GBRP12LE;
avctx->pix_fmt = elements == 4 ? AV_PIX_FMT_GBRP12LE : AV_PIX_FMT_GBRP12LE;
}
target_packet_size = 6;
source_packet_size = 6;
@@ -151,9 +151,9 @@ static int decode_frame(AVCodecContext *avctx,
break;
case 16:
if (endian) {
avctx->pix_fmt = elements == 4 ? PIX_FMT_RGBA64BE : PIX_FMT_RGB48BE;
avctx->pix_fmt = elements == 4 ? AV_PIX_FMT_RGBA64BE : AV_PIX_FMT_RGB48BE;
} else {
avctx->pix_fmt = elements == 4 ? PIX_FMT_RGBA64LE : PIX_FMT_RGB48LE;
avctx->pix_fmt = elements == 4 ? AV_PIX_FMT_RGBA64LE : AV_PIX_FMT_RGB48LE;
}
target_packet_size =
source_packet_size = elements * 2;

@@ -47,31 +47,31 @@ static av_cold int encode_init(AVCodecContext *avctx)
s->planar = 0;
switch (avctx->pix_fmt) {
case PIX_FMT_RGB24:
case AV_PIX_FMT_RGB24:
break;
case PIX_FMT_RGBA:
case AV_PIX_FMT_RGBA:
s->descriptor = 51; /* RGBA */
break;
case PIX_FMT_RGB48LE:
case AV_PIX_FMT_RGB48LE:
s->big_endian = 0;
case PIX_FMT_RGB48BE:
case AV_PIX_FMT_RGB48BE:
s->bits_per_component = avctx->bits_per_raw_sample ? avctx->bits_per_raw_sample : 16;
break;
case PIX_FMT_RGBA64LE:
case AV_PIX_FMT_RGBA64LE:
s->big_endian = 0;
case PIX_FMT_RGBA64BE:
case AV_PIX_FMT_RGBA64BE:
s->descriptor = 51;
s->bits_per_component = 16;
break;
case PIX_FMT_GBRP10LE:
case AV_PIX_FMT_GBRP10LE:
s->big_endian = 0;
case PIX_FMT_GBRP10BE:
case AV_PIX_FMT_GBRP10BE:
s->bits_per_component = 10;
s->planar = 1;
break;
case PIX_FMT_GBRP12LE:
case AV_PIX_FMT_GBRP12LE:
s->big_endian = 0;
case PIX_FMT_GBRP12BE:
case AV_PIX_FMT_GBRP12BE:
s->bits_per_component = 12;
s->planar = 1;
break;
@@ -246,17 +246,17 @@ AVCodec ff_dpx_encoder = {
.priv_data_size = sizeof(DPXContext),
.init = encode_init,
.encode2 = encode_frame,
.pix_fmts = (const enum PixelFormat[]){
PIX_FMT_RGB24,
PIX_FMT_RGBA,
PIX_FMT_RGB48LE,
PIX_FMT_RGB48BE,
PIX_FMT_RGBA64LE,
PIX_FMT_RGBA64BE,
PIX_FMT_GBRP10LE,
PIX_FMT_GBRP10BE,
PIX_FMT_GBRP12LE,
PIX_FMT_GBRP12BE,
PIX_FMT_NONE},
.pix_fmts = (const enum AVPixelFormat[]){
AV_PIX_FMT_RGB24,
AV_PIX_FMT_RGBA,
AV_PIX_FMT_RGB48LE,
AV_PIX_FMT_RGB48BE,
AV_PIX_FMT_RGBA64LE,
AV_PIX_FMT_RGBA64BE,
AV_PIX_FMT_GBRP10LE,
AV_PIX_FMT_GBRP10BE,
AV_PIX_FMT_GBRP12LE,
AV_PIX_FMT_GBRP12BE,
AV_PIX_FMT_NONE},
.long_name = NULL_IF_CONFIG_SMALL("DPX image"),
};

@@ -93,7 +93,7 @@ static av_cold int cinvideo_decode_init(AVCodecContext *avctx)
unsigned int i;
cin->avctx = avctx;
avctx->pix_fmt = PIX_FMT_PAL8;
avctx->pix_fmt = AV_PIX_FMT_PAL8;
avcodec_get_frame_defaults(&cin->frame);
cin->frame.data[0] = NULL;

@@ -137,19 +137,19 @@ static inline void dv_calc_mb_coordinates(const DVprofile *d, int chan, int seq,
break;
case 720:
switch (d->pix_fmt) {
case PIX_FMT_YUV422P:
case AV_PIX_FMT_YUV422P:
x = shuf3[m] + slot/3;
y = serpent1[slot] +
((((seq + off[m]) % d->difseg_size)<<1) + chan)*3;
tbl[m] = (x<<1)|(y<<8);
break;
case PIX_FMT_YUV420P:
case AV_PIX_FMT_YUV420P:
x = shuf3[m] + slot/3;
y = serpent1[slot] +
((seq + off[m]) % d->difseg_size)*3;
tbl[m] = (x<<1)|(y<<9);
break;
case PIX_FMT_YUV411P:
case AV_PIX_FMT_YUV411P:
i = (seq + off[m]) % d->difseg_size;
k = slot + ((m==1||m==2)?3:0);
@@ -687,8 +687,8 @@ static int dv_encode_video_segment(AVCodecContext *avctx, void *arg)
dv_calculate_mb_xy(s, work_chunk, mb_index, &mb_x, &mb_y);
/* initializing luminance blocks */
if ((s->sys->pix_fmt == PIX_FMT_YUV420P) ||
(s->sys->pix_fmt == PIX_FMT_YUV411P && mb_x >= (704 / 8)) ||
if ((s->sys->pix_fmt == AV_PIX_FMT_YUV420P) ||
(s->sys->pix_fmt == AV_PIX_FMT_YUV411P && mb_x >= (704 / 8)) ||
(s->sys->height >= 720 && mb_y != 134)) {
y_stride = s->picture.linesize[0] << 3;
} else {
@@ -713,13 +713,13 @@ static int dv_encode_video_segment(AVCodecContext *avctx, void *arg)
enc_blk += 4;
/* initializing chrominance blocks */
c_offset = (((mb_y >> (s->sys->pix_fmt == PIX_FMT_YUV420P)) * s->picture.linesize[1] +
(mb_x >> ((s->sys->pix_fmt == PIX_FMT_YUV411P) ? 2 : 1))) << 3);
c_offset = (((mb_y >> (s->sys->pix_fmt == AV_PIX_FMT_YUV420P)) * s->picture.linesize[1] +
(mb_x >> ((s->sys->pix_fmt == AV_PIX_FMT_YUV411P) ? 2 : 1))) << 3);
for (j = 2; j; j--) {
uint8_t *c_ptr = s->picture.data[j] + c_offset;
linesize = s->picture.linesize[j];
y_stride = (mb_y == 134) ? 8 : (s->picture.linesize[j] << 3);
if (s->sys->pix_fmt == PIX_FMT_YUV411P && mb_x >= (704 / 8)) {
if (s->sys->pix_fmt == AV_PIX_FMT_YUV411P && mb_x >= (704 / 8)) {
uint8_t* d;
uint8_t* b = scratch;
for (i = 0; i < 8; i++) {
@@ -816,7 +816,7 @@ static inline int dv_write_pack(enum dv_pack_type pack_id, DVVideoContext *c,
* 2. It is not at all clear what STYPE is used for 4:2:0 PAL
* compression scheme (if any).
*/
int apt = (c->sys->pix_fmt == PIX_FMT_YUV420P ? 0 : 1);
int apt = (c->sys->pix_fmt == AV_PIX_FMT_YUV420P ? 0 : 1);
uint8_t aspect = 0;
if ((int)(av_q2d(c->avctx->sample_aspect_ratio) * c->avctx->width / c->avctx->height * 10) >= 17) /* 16:9 */
@@ -992,8 +992,8 @@ AVCodec ff_dvvideo_encoder = {
.init = dvvideo_init_encoder,
.encode2 = dvvideo_encode_frame,
.capabilities = CODEC_CAP_SLICE_THREADS,
.pix_fmts = (const enum PixelFormat[]) {
PIX_FMT_YUV411P, PIX_FMT_YUV422P, PIX_FMT_YUV420P, PIX_FMT_NONE
.pix_fmts = (const enum AVPixelFormat[]) {
AV_PIX_FMT_YUV411P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE
},
.long_name = NULL_IF_CONFIG_SMALL("DV (Digital Video)"),
};

@@ -91,7 +91,7 @@ static const DVprofile dv_profiles[] = {
.sar = {{8, 9}, {32, 27}},
.work_chunks = &work_chunks_dv25ntsc[0],
.idct_factor = &dv_idct_factor_sd[0],
.pix_fmt = PIX_FMT_YUV411P,
.pix_fmt = AV_PIX_FMT_YUV411P,
.bpm = 6,
.block_sizes = block_sizes_dv2550,
.audio_stride = 90,
@@ -111,7 +111,7 @@ static const DVprofile dv_profiles[] = {
.sar = {{16, 15}, {64, 45}},
.work_chunks = &work_chunks_dv25pal[0],
.idct_factor = &dv_idct_factor_sd[0],
.pix_fmt = PIX_FMT_YUV420P,
.pix_fmt = AV_PIX_FMT_YUV420P,
.bpm = 6,
.block_sizes = block_sizes_dv2550,
.audio_stride = 108,
@@ -131,7 +131,7 @@ static const DVprofile dv_profiles[] = {
.sar = {{16, 15}, {64, 45}},
.work_chunks = &work_chunks_dv25pal411[0],
.idct_factor = &dv_idct_factor_sd[0],
.pix_fmt = PIX_FMT_YUV411P,
.pix_fmt = AV_PIX_FMT_YUV411P,
.bpm = 6,
.block_sizes = block_sizes_dv2550,
.audio_stride = 108,
@@ -151,7 +151,7 @@ static const DVprofile dv_profiles[] = {
.sar = {{8, 9}, {32, 27}},
.work_chunks = &work_chunks_dv50ntsc[0],
.idct_factor = &dv_idct_factor_sd[0],
.pix_fmt = PIX_FMT_YUV422P,
.pix_fmt = AV_PIX_FMT_YUV422P,
.bpm = 6,
.block_sizes = block_sizes_dv2550,
.audio_stride = 90,
@@ -171,7 +171,7 @@ static const DVprofile dv_profiles[] = {
.sar = {{16, 15}, {64, 45}},
.work_chunks = &work_chunks_dv50pal[0],
.idct_factor = &dv_idct_factor_sd[0],
.pix_fmt = PIX_FMT_YUV422P,
.pix_fmt = AV_PIX_FMT_YUV422P,
.bpm = 6,
.block_sizes = block_sizes_dv2550,
.audio_stride = 108,
@@ -191,7 +191,7 @@ static const DVprofile dv_profiles[] = {
.sar = {{1, 1}, {3, 2}},
.work_chunks = &work_chunks_dv100ntsci[0],
.idct_factor = &dv_idct_factor_hd1080[0],
.pix_fmt = PIX_FMT_YUV422P,
.pix_fmt = AV_PIX_FMT_YUV422P,
.bpm = 8,
.block_sizes = block_sizes_dv100,
.audio_stride = 90,
@@ -211,7 +211,7 @@ static const DVprofile dv_profiles[] = {
.sar = {{1, 1}, {4, 3}},
.work_chunks = &work_chunks_dv100pali[0],
.idct_factor = &dv_idct_factor_hd1080[0],
.pix_fmt = PIX_FMT_YUV422P,
.pix_fmt = AV_PIX_FMT_YUV422P,
.bpm = 8,
.block_sizes = block_sizes_dv100,
.audio_stride = 108,
@@ -231,7 +231,7 @@ static const DVprofile dv_profiles[] = {
.sar = {{1, 1}, {4, 3}},
.work_chunks = &work_chunks_dv100ntscp[0],
.idct_factor = &dv_idct_factor_hd720[0],
.pix_fmt = PIX_FMT_YUV422P,
.pix_fmt = AV_PIX_FMT_YUV422P,
.bpm = 8,
.block_sizes = block_sizes_dv100,
.audio_stride = 90,
@@ -251,7 +251,7 @@ static const DVprofile dv_profiles[] = {
.sar = {{1, 1}, {4, 3}},
.work_chunks = &work_chunks_dv100palp[0],
.idct_factor = &dv_idct_factor_hd720[0],
.pix_fmt = PIX_FMT_YUV422P,
.pix_fmt = AV_PIX_FMT_YUV422P,
.bpm = 8,
.block_sizes = block_sizes_dv100,
.audio_stride = 90,
@@ -271,7 +271,7 @@ static const DVprofile dv_profiles[] = {
.sar = {{16, 15}, {64, 45}},
.work_chunks = &work_chunks_dv25pal[0],
.idct_factor = &dv_idct_factor_sd[0],
.pix_fmt = PIX_FMT_YUV420P,
.pix_fmt = AV_PIX_FMT_YUV420P,
.bpm = 6,
.block_sizes = block_sizes_dv2550,
.audio_stride = 108,

@@ -53,7 +53,7 @@ typedef struct DVprofile {
AVRational sar[2]; /* sample aspect ratios for 4:3 and 16:9 */
DVwork_chunk *work_chunks; /* each thread gets its own chunk of frame to work on */
uint32_t *idct_factor; /* set of iDCT factor tables */
enum PixelFormat pix_fmt; /* picture pixel format */
enum AVPixelFormat pix_fmt; /* picture pixel format */
int bpm; /* blocks per macroblock */
const uint8_t *block_sizes; /* AC block sizes, in bits */
int audio_stride; /* size of audio_shuffle table */

@@ -254,8 +254,8 @@ static int dv_decode_video_segment(AVCodecContext *avctx, void *arg)
dv_calculate_mb_xy(s, work_chunk, mb_index, &mb_x, &mb_y);
/* idct_put'ting luminance */
if ((s->sys->pix_fmt == PIX_FMT_YUV420P) ||
(s->sys->pix_fmt == PIX_FMT_YUV411P && mb_x >= (704 / 8)) ||
if ((s->sys->pix_fmt == AV_PIX_FMT_YUV420P) ||
(s->sys->pix_fmt == AV_PIX_FMT_YUV411P && mb_x >= (704 / 8)) ||
(s->sys->height >= 720 && mb_y != 134)) {
y_stride = (s->picture.linesize[0] << ((!is_field_mode[mb_index]) * log2_blocksize));
} else {
@@ -275,11 +275,11 @@ static int dv_decode_video_segment(AVCodecContext *avctx, void *arg)
block += 4*64;
/* idct_put'ting chrominance */
c_offset = (((mb_y >> (s->sys->pix_fmt == PIX_FMT_YUV420P)) * s->picture.linesize[1] +
(mb_x >> ((s->sys->pix_fmt == PIX_FMT_YUV411P) ? 2 : 1))) << log2_blocksize);
c_offset = (((mb_y >> (s->sys->pix_fmt == AV_PIX_FMT_YUV420P)) * s->picture.linesize[1] +
(mb_x >> ((s->sys->pix_fmt == AV_PIX_FMT_YUV411P) ? 2 : 1))) << log2_blocksize);
for (j = 2; j; j--) {
uint8_t *c_ptr = s->picture.data[j] + c_offset;
if (s->sys->pix_fmt == PIX_FMT_YUV411P && mb_x >= (704 / 8)) {
if (s->sys->pix_fmt == AV_PIX_FMT_YUV411P && mb_x >= (704 / 8)) {
uint64_t aligned_pixels[64/8];
uint8_t *pixels = (uint8_t*)aligned_pixels;
uint8_t *c_ptr1, *ptr1;

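Both DV segment loops above build the chrominance offset from comparisons used directly as integers: `(s->sys->pix_fmt == AV_PIX_FMT_YUV420P)` evaluates to 0 or 1, so the macroblock row is halved only for 4:2:0 (vertically subsampled chroma), while the 4:1:1 test shifts the column by 2 (four luma columns per chroma sample) instead of 1. A standalone sketch of the same arithmetic, with names of our own choosing:

#include <libavutil/pixfmt.h>

/* Sketch: chroma byte offset of macroblock (mb_x, mb_y) for an 8x8
 * block (the << 3 in the encoder), given the chroma plane stride. */
static int dv_chroma_offset(enum AVPixelFormat pix_fmt,
                            int mb_x, int mb_y, int chroma_linesize)
{
    int row = mb_y >> (pix_fmt == AV_PIX_FMT_YUV420P); /* halve for 4:2:0 */
    int col = mb_x >> (pix_fmt == AV_PIX_FMT_YUV411P ? 2 : 1);
    return (row * chroma_linesize + col) << 3;
}
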
@@ -292,7 +292,7 @@ static av_cold int decode_init(AVCodecContext *avctx)
{
DxaDecContext * const c = avctx->priv_data;
avctx->pix_fmt = PIX_FMT_PAL8;
avctx->pix_fmt = AV_PIX_FMT_PAL8;
avcodec_get_frame_defaults(&c->pic);
avcodec_get_frame_defaults(&c->prev);

@@ -26,7 +26,7 @@
static av_cold int decode_init(AVCodecContext *avctx)
{
avctx->pix_fmt = PIX_FMT_YUV420P;
avctx->pix_fmt = AV_PIX_FMT_YUV420P;
avctx->coded_frame = avcodec_alloc_frame();
if (!avctx->coded_frame)
return AVERROR(ENOMEM);

@@ -443,7 +443,7 @@ AVHWAccel ff_h264_dxva2_hwaccel = {
.name = "h264_dxva2",
.type = AVMEDIA_TYPE_VIDEO,
.id = AV_CODEC_ID_H264,
.pix_fmt = PIX_FMT_DXVA2_VLD,
.pix_fmt = AV_PIX_FMT_DXVA2_VLD,
.start_frame = start_frame,
.decode_slice = decode_slice,
.end_frame = end_frame,

@@ -264,7 +264,7 @@ AVHWAccel ff_mpeg2_dxva2_hwaccel = {
.name = "mpeg2_dxva2",
.type = AVMEDIA_TYPE_VIDEO,
.id = AV_CODEC_ID_MPEG2VIDEO,
.pix_fmt = PIX_FMT_DXVA2_VLD,
.pix_fmt = AV_PIX_FMT_DXVA2_VLD,
.start_frame = start_frame,
.decode_slice = decode_slice,
.end_frame = end_frame,

@@ -269,7 +269,7 @@ AVHWAccel ff_wmv3_dxva2_hwaccel = {
.name = "wmv3_dxva2",
.type = AVMEDIA_TYPE_VIDEO,
.id = AV_CODEC_ID_WMV3,
.pix_fmt = PIX_FMT_DXVA2_VLD,
.pix_fmt = AV_PIX_FMT_DXVA2_VLD,
.start_frame = start_frame,
.decode_slice = decode_slice,
.end_frame = end_frame,
@@ -281,7 +281,7 @@ AVHWAccel ff_vc1_dxva2_hwaccel = {
.name = "vc1_dxva2",
.type = AVMEDIA_TYPE_VIDEO,
.id = AV_CODEC_ID_VC1,
.pix_fmt = PIX_FMT_DXVA2_VLD,
.pix_fmt = AV_PIX_FMT_DXVA2_VLD,
.start_frame = start_frame,
.decode_slice = decode_slice,
.end_frame = end_frame,

@@ -49,7 +49,7 @@ static av_cold int cmv_decode_init(AVCodecContext *avctx){
avcodec_get_frame_defaults(&s->last2_frame);
s->avctx = avctx;
avctx->pix_fmt = PIX_FMT_PAL8;
avctx->pix_fmt = AV_PIX_FMT_PAL8;
return 0;
}

@@ -61,7 +61,7 @@ static av_cold int decode_init(AVCodecContext *avctx)
{
MadContext *s = avctx->priv_data;
s->avctx = avctx;
avctx->pix_fmt = PIX_FMT_YUV420P;
avctx->pix_fmt = AV_PIX_FMT_YUV420P;
ff_dsputil_init(&s->dsp, avctx);
ff_init_scantable_permutation(s->dsp.idct_permutation, FF_NO_IDCT_PERM);
ff_init_scantable(s->dsp.idct_permutation, &s->scantable, ff_zigzag_direct);

@@ -53,7 +53,7 @@ static av_cold int tgq_decode_init(AVCodecContext *avctx){
ff_init_scantable_permutation(idct_permutation, FF_NO_IDCT_PERM);
ff_init_scantable(idct_permutation, &s->scantable, ff_zigzag_direct);
avctx->time_base = (AVRational){1, 15};
avctx->pix_fmt = PIX_FMT_YUV420P;
avctx->pix_fmt = AV_PIX_FMT_YUV420P;
return 0;
}

@@ -54,7 +54,7 @@ static av_cold int tgv_decode_init(AVCodecContext *avctx){
TgvContext *s = avctx->priv_data;
s->avctx = avctx;
avctx->time_base = (AVRational){1, 15};
avctx->pix_fmt = PIX_FMT_PAL8;
avctx->pix_fmt = AV_PIX_FMT_PAL8;
avcodec_get_frame_defaults(&s->frame);
avcodec_get_frame_defaults(&s->last_frame);
return 0;

@@ -52,7 +52,7 @@ static av_cold int tqi_decode_init(AVCodecContext *avctx)
ff_init_scantable(s->dsp.idct_permutation, &s->intra_scantable, ff_zigzag_direct);
s->qscale = 1;
avctx->time_base = (AVRational){1, 15};
avctx->pix_fmt = PIX_FMT_YUV420P;
avctx->pix_fmt = AV_PIX_FMT_YUV420P;
ff_mpeg12_init_vlcs();
return 0;
}

@@ -62,7 +62,7 @@ static av_cold int escape124_decode_init(AVCodecContext *avctx)
Escape124Context *s = avctx->priv_data;
avcodec_get_frame_defaults(&s->frame);
avctx->pix_fmt = PIX_FMT_RGB555;
avctx->pix_fmt = AV_PIX_FMT_RGB555;
s->num_superblocks = ((unsigned)avctx->width / 8) *
((unsigned)avctx->height / 8);

@@ -37,7 +37,7 @@ typedef struct Escape130Context {
static av_cold int escape130_decode_init(AVCodecContext *avctx)
{
Escape130Context *s = avctx->priv_data;
avctx->pix_fmt = PIX_FMT_YUV420P;
avctx->pix_fmt = AV_PIX_FMT_YUV420P;
if((avctx->width&1) || (avctx->height&1)){
av_log(avctx, AV_LOG_ERROR, "Dimensions are not a multiple of the block size\n");

@@ -452,9 +452,9 @@ static int decode_frame(AVCodecContext *avctx,
case 2: // 32-bit
case 1: // 16-bit
if (s->channel_offsets[3] >= 0)
avctx->pix_fmt = PIX_FMT_RGBA64;
avctx->pix_fmt = AV_PIX_FMT_RGBA64;
else
avctx->pix_fmt = PIX_FMT_RGB48;
avctx->pix_fmt = AV_PIX_FMT_RGB48;
break;
// 8-bit
case 0:

@@ -943,21 +943,21 @@ static av_cold int encode_init(AVCodecContext *avctx)
s->plane_count=3;
switch(avctx->pix_fmt){
case PIX_FMT_YUV444P9:
case PIX_FMT_YUV422P9:
case PIX_FMT_YUV420P9:
case AV_PIX_FMT_YUV444P9:
case AV_PIX_FMT_YUV422P9:
case AV_PIX_FMT_YUV420P9:
if (!avctx->bits_per_raw_sample)
s->bits_per_raw_sample = 9;
case PIX_FMT_YUV444P10:
case PIX_FMT_YUV420P10:
case PIX_FMT_YUV422P10:
case AV_PIX_FMT_YUV444P10:
case AV_PIX_FMT_YUV420P10:
case AV_PIX_FMT_YUV422P10:
s->packed_at_lsb = 1;
if (!avctx->bits_per_raw_sample && !s->bits_per_raw_sample)
s->bits_per_raw_sample = 10;
case PIX_FMT_GRAY16:
case PIX_FMT_YUV444P16:
case PIX_FMT_YUV422P16:
case PIX_FMT_YUV420P16:
case AV_PIX_FMT_GRAY16:
case AV_PIX_FMT_YUV444P16:
case AV_PIX_FMT_YUV422P16:
case AV_PIX_FMT_YUV420P16:
if (!avctx->bits_per_raw_sample && !s->bits_per_raw_sample) {
s->bits_per_raw_sample = 16;
} else if (!s->bits_per_raw_sample){
@@ -976,40 +976,40 @@ static av_cold int encode_init(AVCodecContext *avctx)
return AVERROR_INVALIDDATA;
}
s->version= FFMAX(s->version, 1);
case PIX_FMT_GRAY8:
case PIX_FMT_YUV444P:
case PIX_FMT_YUV440P:
case PIX_FMT_YUV422P:
case PIX_FMT_YUV420P:
case PIX_FMT_YUV411P:
case PIX_FMT_YUV410P:
case AV_PIX_FMT_GRAY8:
case AV_PIX_FMT_YUV444P:
case AV_PIX_FMT_YUV440P:
case AV_PIX_FMT_YUV422P:
case AV_PIX_FMT_YUV420P:
case AV_PIX_FMT_YUV411P:
case AV_PIX_FMT_YUV410P:
s->chroma_planes= av_pix_fmt_descriptors[avctx->pix_fmt].nb_components < 3 ? 0 : 1;
s->colorspace= 0;
break;
case PIX_FMT_YUVA444P:
case PIX_FMT_YUVA422P:
case PIX_FMT_YUVA420P:
case AV_PIX_FMT_YUVA444P:
case AV_PIX_FMT_YUVA422P:
case AV_PIX_FMT_YUVA420P:
s->chroma_planes= 1;
s->colorspace= 0;
s->transparency= 1;
break;
case PIX_FMT_RGB32:
case AV_PIX_FMT_RGB32:
s->colorspace= 1;
s->transparency= 1;
break;
case PIX_FMT_0RGB32:
case AV_PIX_FMT_0RGB32:
s->colorspace= 1;
break;
case PIX_FMT_GBRP9:
case AV_PIX_FMT_GBRP9:
if (!avctx->bits_per_raw_sample)
s->bits_per_raw_sample = 9;
case PIX_FMT_GBRP10:
case AV_PIX_FMT_GBRP10:
if (!avctx->bits_per_raw_sample && !s->bits_per_raw_sample)
s->bits_per_raw_sample = 10;
case PIX_FMT_GBRP12:
case AV_PIX_FMT_GBRP12:
if (!avctx->bits_per_raw_sample && !s->bits_per_raw_sample)
s->bits_per_raw_sample = 12;
case PIX_FMT_GBRP14:
case AV_PIX_FMT_GBRP14:
if (!avctx->bits_per_raw_sample && !s->bits_per_raw_sample)
s->bits_per_raw_sample = 14;
else if (!s->bits_per_raw_sample)
@@ -1869,26 +1869,26 @@ static int read_header(FFV1Context *f){
if(f->colorspace==0){
if(!f->transparency && !f->chroma_planes){
if (f->avctx->bits_per_raw_sample<=8)
f->avctx->pix_fmt= PIX_FMT_GRAY8;
f->avctx->pix_fmt= AV_PIX_FMT_GRAY8;
else
f->avctx->pix_fmt= PIX_FMT_GRAY16;
f->avctx->pix_fmt= AV_PIX_FMT_GRAY16;
}else if(f->avctx->bits_per_raw_sample<=8 && !f->transparency){
switch(16*f->chroma_h_shift + f->chroma_v_shift){
case 0x00: f->avctx->pix_fmt= PIX_FMT_YUV444P; break;
case 0x01: f->avctx->pix_fmt= PIX_FMT_YUV440P; break;
case 0x10: f->avctx->pix_fmt= PIX_FMT_YUV422P; break;
case 0x11: f->avctx->pix_fmt= PIX_FMT_YUV420P; break;
case 0x20: f->avctx->pix_fmt= PIX_FMT_YUV411P; break;
case 0x22: f->avctx->pix_fmt= PIX_FMT_YUV410P; break;
case 0x00: f->avctx->pix_fmt= AV_PIX_FMT_YUV444P; break;
case 0x01: f->avctx->pix_fmt= AV_PIX_FMT_YUV440P; break;
case 0x10: f->avctx->pix_fmt= AV_PIX_FMT_YUV422P; break;
case 0x11: f->avctx->pix_fmt= AV_PIX_FMT_YUV420P; break;
case 0x20: f->avctx->pix_fmt= AV_PIX_FMT_YUV411P; break;
case 0x22: f->avctx->pix_fmt= AV_PIX_FMT_YUV410P; break;
default:
av_log(f->avctx, AV_LOG_ERROR, "format not supported\n");
return -1;
}
}else if(f->avctx->bits_per_raw_sample<=8 && f->transparency){
switch(16*f->chroma_h_shift + f->chroma_v_shift){
case 0x00: f->avctx->pix_fmt= PIX_FMT_YUVA444P; break;
case 0x10: f->avctx->pix_fmt= PIX_FMT_YUVA422P; break;
case 0x11: f->avctx->pix_fmt= PIX_FMT_YUVA420P; break;
case 0x00: f->avctx->pix_fmt= AV_PIX_FMT_YUVA444P; break;
case 0x10: f->avctx->pix_fmt= AV_PIX_FMT_YUVA422P; break;
case 0x11: f->avctx->pix_fmt= AV_PIX_FMT_YUVA420P; break;
default:
av_log(f->avctx, AV_LOG_ERROR, "format not supported\n");
return -1;
@@ -1896,9 +1896,9 @@ static int read_header(FFV1Context *f){
}else if(f->avctx->bits_per_raw_sample==9) {
f->packed_at_lsb=1;
switch(16*f->chroma_h_shift + f->chroma_v_shift){
case 0x00: f->avctx->pix_fmt= PIX_FMT_YUV444P9; break;
case 0x10: f->avctx->pix_fmt= PIX_FMT_YUV422P9; break;
case 0x11: f->avctx->pix_fmt= PIX_FMT_YUV420P9; break;
case 0x00: f->avctx->pix_fmt= AV_PIX_FMT_YUV444P9; break;
case 0x10: f->avctx->pix_fmt= AV_PIX_FMT_YUV422P9; break;
case 0x11: f->avctx->pix_fmt= AV_PIX_FMT_YUV420P9; break;
default:
av_log(f->avctx, AV_LOG_ERROR, "format not supported\n");
return -1;
@@ -1906,18 +1906,18 @@ static int read_header(FFV1Context *f){
}else if(f->avctx->bits_per_raw_sample==10) {
f->packed_at_lsb=1;
switch(16*f->chroma_h_shift + f->chroma_v_shift){
case 0x00: f->avctx->pix_fmt= PIX_FMT_YUV444P10; break;
case 0x10: f->avctx->pix_fmt= PIX_FMT_YUV422P10; break;
case 0x11: f->avctx->pix_fmt= PIX_FMT_YUV420P10; break;
case 0x00: f->avctx->pix_fmt= AV_PIX_FMT_YUV444P10; break;
case 0x10: f->avctx->pix_fmt= AV_PIX_FMT_YUV422P10; break;
case 0x11: f->avctx->pix_fmt= AV_PIX_FMT_YUV420P10; break;
default:
av_log(f->avctx, AV_LOG_ERROR, "format not supported\n");
return -1;
}
}else {
switch(16*f->chroma_h_shift + f->chroma_v_shift){
case 0x00: f->avctx->pix_fmt= PIX_FMT_YUV444P16; break;
case 0x10: f->avctx->pix_fmt= PIX_FMT_YUV422P16; break;
case 0x11: f->avctx->pix_fmt= PIX_FMT_YUV420P16; break;
case 0x00: f->avctx->pix_fmt= AV_PIX_FMT_YUV444P16; break;
case 0x10: f->avctx->pix_fmt= AV_PIX_FMT_YUV422P16; break;
case 0x11: f->avctx->pix_fmt= AV_PIX_FMT_YUV420P16; break;
default:
av_log(f->avctx, AV_LOG_ERROR, "format not supported\n");
return -1;
@@ -1929,16 +1929,16 @@ static int read_header(FFV1Context *f){
return -1;
}
if(f->avctx->bits_per_raw_sample==9)
f->avctx->pix_fmt= PIX_FMT_GBRP9;
f->avctx->pix_fmt= AV_PIX_FMT_GBRP9;
else if(f->avctx->bits_per_raw_sample==10)
f->avctx->pix_fmt= PIX_FMT_GBRP10;
f->avctx->pix_fmt= AV_PIX_FMT_GBRP10;
else if(f->avctx->bits_per_raw_sample==12)
f->avctx->pix_fmt= PIX_FMT_GBRP12;
f->avctx->pix_fmt= AV_PIX_FMT_GBRP12;
else if(f->avctx->bits_per_raw_sample==14)
f->avctx->pix_fmt= PIX_FMT_GBRP14;
f->avctx->pix_fmt= AV_PIX_FMT_GBRP14;
else
if(f->transparency) f->avctx->pix_fmt= PIX_FMT_RGB32;
else f->avctx->pix_fmt= PIX_FMT_0RGB32;
if(f->transparency) f->avctx->pix_fmt= AV_PIX_FMT_RGB32;
else f->avctx->pix_fmt= AV_PIX_FMT_0RGB32;
}else{
av_log(f->avctx, AV_LOG_ERROR, "colorspace not supported\n");
return -1;
@@ -2192,15 +2192,15 @@ AVCodec ff_ffv1_encoder = {
.close = common_end,
.capabilities = CODEC_CAP_SLICE_THREADS,
.defaults = ffv1_defaults,
.pix_fmts = (const enum PixelFormat[]){
PIX_FMT_YUV420P, PIX_FMT_YUVA420P, PIX_FMT_YUVA422P, PIX_FMT_YUV444P,
PIX_FMT_YUVA444P, PIX_FMT_YUV440P, PIX_FMT_YUV422P, PIX_FMT_YUV411P,
PIX_FMT_YUV410P, PIX_FMT_0RGB32, PIX_FMT_RGB32, PIX_FMT_YUV420P16,
PIX_FMT_YUV422P16, PIX_FMT_YUV444P16, PIX_FMT_YUV444P9, PIX_FMT_YUV422P9,
PIX_FMT_YUV420P9, PIX_FMT_YUV420P10, PIX_FMT_YUV422P10, PIX_FMT_YUV444P10,
PIX_FMT_GRAY16, PIX_FMT_GRAY8, PIX_FMT_GBRP9, PIX_FMT_GBRP10,
PIX_FMT_GBRP12, PIX_FMT_GBRP14,
PIX_FMT_NONE
.pix_fmts = (const enum AVPixelFormat[]){
AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUVA420P, AV_PIX_FMT_YUVA422P, AV_PIX_FMT_YUV444P,
AV_PIX_FMT_YUVA444P, AV_PIX_FMT_YUV440P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV411P,
AV_PIX_FMT_YUV410P, AV_PIX_FMT_0RGB32, AV_PIX_FMT_RGB32, AV_PIX_FMT_YUV420P16,
AV_PIX_FMT_YUV422P16, AV_PIX_FMT_YUV444P16, AV_PIX_FMT_YUV444P9, AV_PIX_FMT_YUV422P9,
AV_PIX_FMT_YUV420P9, AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV444P10,
AV_PIX_FMT_GRAY16, AV_PIX_FMT_GRAY8, AV_PIX_FMT_GBRP9, AV_PIX_FMT_GBRP10,
AV_PIX_FMT_GBRP12, AV_PIX_FMT_GBRP14,
AV_PIX_FMT_NONE
},
.long_name = NULL_IF_CONFIG_SMALL("FFmpeg video codec #1"),
.priv_class = &class,

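read_header() above keys its format switch on `16 * f->chroma_h_shift + f->chroma_v_shift`, so 0x11 selects 4:2:0 (both dimensions halved), 0x10 selects 4:2:2, 0x20 selects 4:1:1, and so on. The same key can be recovered for any format from the descriptor table this commit also touches; a small sketch under that assumption:

#include <libavutil/pixdesc.h>

/* Sketch: rebuild FFV1's subsampling key from the format descriptor,
 * e.g. 0x11 for AV_PIX_FMT_YUV420P and 0x10 for AV_PIX_FMT_YUV422P. */
static int ffv1_chroma_key(enum AVPixelFormat pix_fmt)
{
    const AVPixFmtDescriptor *d = &av_pix_fmt_descriptors[pix_fmt];
    return 16 * d->log2_chroma_w + d->log2_chroma_h;
}
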
@@ -114,7 +114,7 @@ static av_cold int flashsv_decode_init(AVCodecContext *avctx)
av_log(avctx, AV_LOG_ERROR, "Inflate init error: %d\n", zret);
return 1;
}
avctx->pix_fmt = PIX_FMT_BGR24;
avctx->pix_fmt = AV_PIX_FMT_BGR24;
avcodec_get_frame_defaults(&s->frame);
s->frame.data[0] = NULL;
@@ -462,7 +462,7 @@ AVCodec ff_flashsv_decoder = {
.close = flashsv_decode_end,
.decode = flashsv_decode_frame,
.capabilities = CODEC_CAP_DR1,
.pix_fmts = (const enum PixelFormat[]){ PIX_FMT_BGR24, PIX_FMT_NONE },
.pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_BGR24, AV_PIX_FMT_NONE },
.long_name = NULL_IF_CONFIG_SMALL("Flash Screen Video v1"),
};
#endif /* CONFIG_FLASHSV_DECODER */
@@ -525,7 +525,7 @@ AVCodec ff_flashsv2_decoder = {
.close = flashsv2_decode_end,
.decode = flashsv_decode_frame,
.capabilities = CODEC_CAP_DR1,
.pix_fmts = (const enum PixelFormat[]){ PIX_FMT_BGR24, PIX_FMT_NONE },
.pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_BGR24, AV_PIX_FMT_NONE },
.long_name = NULL_IF_CONFIG_SMALL("Flash Screen Video v2"),
};
#endif /* CONFIG_FLASHSV2_DECODER */

@@ -926,6 +926,6 @@ AVCodec ff_flashsv2_encoder = {
.init = flashsv2_encode_init,
.encode2 = flashsv2_encode_frame,
.close = flashsv2_encode_end,
.pix_fmts = (const enum PixelFormat[]){ PIX_FMT_BGR24, PIX_FMT_NONE },
.pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_BGR24, AV_PIX_FMT_NONE },
.long_name = NULL_IF_CONFIG_SMALL("Flash Screen Video Version 2"),
};

@@ -283,6 +283,6 @@ AVCodec ff_flashsv_encoder = {
.init = flashsv_encode_init,
.encode2 = flashsv_encode_frame,
.close = flashsv_encode_end,
.pix_fmts = (const enum PixelFormat[]){ PIX_FMT_BGR24, PIX_FMT_NONE },
.pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_BGR24, AV_PIX_FMT_NONE },
.long_name = NULL_IF_CONFIG_SMALL("Flash Screen Video"),
};

@@ -129,10 +129,10 @@ static av_cold int flic_decode_init(AVCodecContext *avctx)
}
switch (depth) {
case 8 : avctx->pix_fmt = PIX_FMT_PAL8; break;
case 15 : avctx->pix_fmt = PIX_FMT_RGB555; break;
case 16 : avctx->pix_fmt = PIX_FMT_RGB565; break;
case 24 : avctx->pix_fmt = PIX_FMT_BGR24; /* Supposedly BGR, but havent any files to test with */
case 8 : avctx->pix_fmt = AV_PIX_FMT_PAL8; break;
case 15 : avctx->pix_fmt = AV_PIX_FMT_RGB555; break;
case 16 : avctx->pix_fmt = AV_PIX_FMT_RGB565; break;
case 24 : avctx->pix_fmt = AV_PIX_FMT_BGR24; /* Supposedly BGR, but havent any files to test with */
av_log(avctx, AV_LOG_ERROR, "24Bpp FLC/FLX is unsupported due to no test files.\n");
return -1;
default :
@@ -768,16 +768,16 @@ static int flic_decode_frame(AVCodecContext *avctx,
{
const uint8_t *buf = avpkt->data;
int buf_size = avpkt->size;
if (avctx->pix_fmt == PIX_FMT_PAL8) {
if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
return flic_decode_frame_8BPP(avctx, data, data_size,
buf, buf_size);
}
else if ((avctx->pix_fmt == PIX_FMT_RGB555) ||
(avctx->pix_fmt == PIX_FMT_RGB565)) {
else if ((avctx->pix_fmt == AV_PIX_FMT_RGB555) ||
(avctx->pix_fmt == AV_PIX_FMT_RGB565)) {
return flic_decode_frame_15_16BPP(avctx, data, data_size,
buf, buf_size);
}
else if (avctx->pix_fmt == PIX_FMT_BGR24) {
else if (avctx->pix_fmt == AV_PIX_FMT_BGR24) {
return flic_decode_frame_24BPP(avctx, data, data_size,
buf, buf_size);
}

@@ -94,7 +94,7 @@ AVCodec ff_flv_encoder = {
.init = ff_MPV_encode_init,
.encode2 = ff_MPV_encode_picture,
.close = ff_MPV_encode_end,
.pix_fmts = (const enum PixelFormat[]){ PIX_FMT_YUV420P, PIX_FMT_NONE },
.pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE },
.long_name = NULL_IF_CONFIG_SMALL("FLV / Sorenson Spark / Sorenson H.263 (Flash Video)"),
.priv_class = &flv_class,
};

@@ -142,7 +142,7 @@ static int decode_frame(AVCodecContext *avctx,
int i, j, is_chroma;
const int planes = 3;
uint8_t *out;
enum PixelFormat pix_fmt;
enum AVPixelFormat pix_fmt;
header = AV_RL32(buf);
version = header & 0xff;
@@ -204,7 +204,7 @@ static int decode_frame(AVCodecContext *avctx,
f->reference = 0;
f->buffer_hints = FF_BUFFER_HINTS_VALID;
pix_fmt = version & 1 ? PIX_FMT_BGR24 : PIX_FMT_YUVJ420P;
pix_fmt = version & 1 ? AV_PIX_FMT_BGR24 : AV_PIX_FMT_YUVJ420P;
if (avctx->pix_fmt != pix_fmt && f->data[0]) {
avctx->release_buffer(avctx, f);
}

@@ -29,7 +29,7 @@ static av_cold int decode_init(AVCodecContext *avctx)
av_log(avctx, AV_LOG_ERROR, "frwu needs even width\n");
return AVERROR(EINVAL);
}
avctx->pix_fmt = PIX_FMT_UYVY422;
avctx->pix_fmt = AV_PIX_FMT_UYVY422;
avctx->coded_frame = avcodec_alloc_frame();
if (!avctx->coded_frame)

@@ -204,9 +204,9 @@ AVCodec ff_gif_encoder = {
.init = gif_encode_init,
.encode2 = gif_encode_frame,
.close = gif_encode_close,
.pix_fmts = (const enum PixelFormat[]){
PIX_FMT_RGB8, PIX_FMT_BGR8, PIX_FMT_RGB4_BYTE, PIX_FMT_BGR4_BYTE,
PIX_FMT_GRAY8, PIX_FMT_PAL8, PIX_FMT_NONE
.pix_fmts = (const enum AVPixelFormat[]){
AV_PIX_FMT_RGB8, AV_PIX_FMT_BGR8, AV_PIX_FMT_RGB4_BYTE, AV_PIX_FMT_BGR4_BYTE,
AV_PIX_FMT_GRAY8, AV_PIX_FMT_PAL8, AV_PIX_FMT_NONE
},
.long_name = NULL_IF_CONFIG_SMALL("GIF (Graphics Interchange Format)"),
};

@@ -294,7 +294,7 @@ static int gif_decode_frame(AVCodecContext *avctx, void *data, int *data_size, A
if ((ret = gif_read_header1(s)) < 0)
return ret;
avctx->pix_fmt = PIX_FMT_PAL8;
avctx->pix_fmt = AV_PIX_FMT_PAL8;
if (av_image_check_size(s->screen_width, s->screen_height, 0, avctx))
return -1;
avcodec_set_dimensions(avctx, s->screen_width, s->screen_height);

@@ -86,7 +86,7 @@ static av_cold int h261_decode_init(AVCodecContext *avctx){
s->out_format = FMT_H261;
s->low_delay= 1;
avctx->pix_fmt= PIX_FMT_YUV420P;
avctx->pix_fmt= AV_PIX_FMT_YUV420P;
s->codec_id= avctx->codec->id;

@@ -332,7 +332,7 @@ AVCodec ff_h261_encoder = {
.init = ff_MPV_encode_init,
.encode2 = ff_MPV_encode_picture,
.close = ff_MPV_encode_end,
.pix_fmts = (const enum PixelFormat[]){ PIX_FMT_YUV420P, PIX_FMT_NONE },
.pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE },
.long_name = NULL_IF_CONFIG_SMALL("H.261"),
.priv_class = &h261_class,
};

@@ -62,7 +62,7 @@ av_cold int ff_h263_decode_init(AVCodecContext *avctx)
s->decode_mb= ff_h263_decode_mb;
s->low_delay= 1;
if (avctx->codec->id == AV_CODEC_ID_MSS2)
avctx->pix_fmt = PIX_FMT_YUV420P;
avctx->pix_fmt = AV_PIX_FMT_YUV420P;
else
avctx->pix_fmt = avctx->get_format(avctx, avctx->codec->pix_fmts);
s->unrestricted_mv= 1;

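When the codec is not MSS2, ff_h263_decode_init() above defers format selection to the `get_format` callback, passing the codec's (now `enum AVPixelFormat`) candidate list. A hedged sketch of an application-side callback that skips hardware-accelerated entries, roughly what the library default does; the function name is ours:

#include <libavcodec/avcodec.h>
#include <libavutil/pixdesc.h>

/* Sketch: pick the first non-hwaccel entry from the offered list. */
static enum AVPixelFormat pick_sw_format(AVCodecContext *avctx,
                                         const enum AVPixelFormat *fmt)
{
    const enum AVPixelFormat *p;
    for (p = fmt; *p != AV_PIX_FMT_NONE; p++)
        if (!(av_pix_fmt_descriptors[*p].flags & PIX_FMT_HWACCEL))
            return *p;
    return fmt[0]; /* only hwaccel formats were offered */
}
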
@@ -66,12 +66,12 @@ static const uint8_t div6[QP_MAX_NUM + 1] = {
14,14,14,14,
};
static const enum PixelFormat hwaccel_pixfmt_list_h264_jpeg_420[] = {
PIX_FMT_DXVA2_VLD,
PIX_FMT_VAAPI_VLD,
PIX_FMT_VDA_VLD,
PIX_FMT_YUVJ420P,
PIX_FMT_NONE
static const enum AVPixelFormat hwaccel_pixfmt_list_h264_jpeg_420[] = {
AV_PIX_FMT_DXVA2_VLD,
AV_PIX_FMT_VAAPI_VLD,
AV_PIX_FMT_VDA_VLD,
AV_PIX_FMT_YUVJ420P,
AV_PIX_FMT_NONE
};
int avpriv_h264_has_num_reorder_frames(AVCodecContext *avctx)
@@ -2544,60 +2544,60 @@ static int decode_slice_header(H264Context *h, H264Context *h0)
case 9:
if (CHROMA444) {
if (s->avctx->colorspace == AVCOL_SPC_RGB) {
s->avctx->pix_fmt = PIX_FMT_GBRP9;
s->avctx->pix_fmt = AV_PIX_FMT_GBRP9;
} else
s->avctx->pix_fmt = PIX_FMT_YUV444P9;
s->avctx->pix_fmt = AV_PIX_FMT_YUV444P9;
} else if (CHROMA422)
s->avctx->pix_fmt = PIX_FMT_YUV422P9;
s->avctx->pix_fmt = AV_PIX_FMT_YUV422P9;
else
s->avctx->pix_fmt = PIX_FMT_YUV420P9;
s->avctx->pix_fmt = AV_PIX_FMT_YUV420P9;
break;
case 10:
if (CHROMA444) {
if (s->avctx->colorspace == AVCOL_SPC_RGB) {
s->avctx->pix_fmt = PIX_FMT_GBRP10;
s->avctx->pix_fmt = AV_PIX_FMT_GBRP10;
} else
s->avctx->pix_fmt = PIX_FMT_YUV444P10;
s->avctx->pix_fmt = AV_PIX_FMT_YUV444P10;
} else if (CHROMA422)
s->avctx->pix_fmt = PIX_FMT_YUV422P10;
s->avctx->pix_fmt = AV_PIX_FMT_YUV422P10;
else
s->avctx->pix_fmt = PIX_FMT_YUV420P10;
s->avctx->pix_fmt = AV_PIX_FMT_YUV420P10;
break;
case 12:
if (CHROMA444) {
if (s->avctx->colorspace == AVCOL_SPC_RGB) {
s->avctx->pix_fmt = PIX_FMT_GBRP12;
s->avctx->pix_fmt = AV_PIX_FMT_GBRP12;
} else
s->avctx->pix_fmt = PIX_FMT_YUV444P12;
s->avctx->pix_fmt = AV_PIX_FMT_YUV444P12;
} else if (CHROMA422)
s->avctx->pix_fmt = PIX_FMT_YUV422P12;
s->avctx->pix_fmt = AV_PIX_FMT_YUV422P12;
else
s->avctx->pix_fmt = PIX_FMT_YUV420P12;
s->avctx->pix_fmt = AV_PIX_FMT_YUV420P12;
break;
case 14:
if (CHROMA444) {
if (s->avctx->colorspace == AVCOL_SPC_RGB) {
s->avctx->pix_fmt = PIX_FMT_GBRP14;
s->avctx->pix_fmt = AV_PIX_FMT_GBRP14;
} else
s->avctx->pix_fmt = PIX_FMT_YUV444P14;
s->avctx->pix_fmt = AV_PIX_FMT_YUV444P14;
} else if (CHROMA422)
s->avctx->pix_fmt = PIX_FMT_YUV422P14;
s->avctx->pix_fmt = AV_PIX_FMT_YUV422P14;
else
s->avctx->pix_fmt = PIX_FMT_YUV420P14;
s->avctx->pix_fmt = AV_PIX_FMT_YUV420P14;
break;
case 8:
if (CHROMA444) {
s->avctx->pix_fmt = s->avctx->color_range == AVCOL_RANGE_JPEG ? PIX_FMT_YUVJ444P
: PIX_FMT_YUV444P;
s->avctx->pix_fmt = s->avctx->color_range == AVCOL_RANGE_JPEG ? AV_PIX_FMT_YUVJ444P
: AV_PIX_FMT_YUV444P;
if (s->avctx->colorspace == AVCOL_SPC_RGB) {
s->avctx->pix_fmt = PIX_FMT_GBR24P;
s->avctx->pix_fmt = AV_PIX_FMT_GBR24P;
av_log(h->s.avctx, AV_LOG_DEBUG, "Detected GBR colorspace.\n");
} else if (s->avctx->colorspace == AVCOL_SPC_YCGCO) {
av_log(h->s.avctx, AV_LOG_WARNING, "Detected unsupported YCgCo colorspace.\n");
}
} else if (CHROMA422) {
s->avctx->pix_fmt = s->avctx->color_range == AVCOL_RANGE_JPEG ? PIX_FMT_YUVJ422P
: PIX_FMT_YUV422P;
s->avctx->pix_fmt = s->avctx->color_range == AVCOL_RANGE_JPEG ? AV_PIX_FMT_YUVJ422P
: AV_PIX_FMT_YUV422P;
} else {
s->avctx->pix_fmt = s->avctx->get_format(s->avctx,
s->avctx->codec->pix_fmts ?
@@ -4248,8 +4248,8 @@ AVCodec ff_h264_vdpau_decoder = {
.capabilities = CODEC_CAP_DR1 | CODEC_CAP_DELAY | CODEC_CAP_HWACCEL_VDPAU,
.flush = flush_dpb,
.long_name = NULL_IF_CONFIG_SMALL("H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10 (VDPAU acceleration)"),
.pix_fmts = (const enum PixelFormat[]) { PIX_FMT_VDPAU_H264,
PIX_FMT_NONE},
.pix_fmts = (const enum AVPixelFormat[]) { AV_PIX_FMT_VDPAU_H264,
AV_PIX_FMT_NONE},
.profiles = NULL_IF_CONFIG_SMALL(profiles),
.priv_class = &h264_vdpau_class,
};

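decode_slice_header() above repeats the same chroma branching at every bit depth before settling on one of the renamed constants. Read as a table, the mapping it implements looks like this (a sketch for orientation, not how h264.c is actually structured):

#include <libavutil/pixfmt.h>

/* Sketch: format chosen per {4:2:0, 4:2:2, 4:4:4} at each bit depth. */
static const enum AVPixelFormat h264_yuv_fmts[][3] = {
    /*  8 bit */ { AV_PIX_FMT_YUV420P,   AV_PIX_FMT_YUV422P,   AV_PIX_FMT_YUV444P   },
    /*  9 bit */ { AV_PIX_FMT_YUV420P9,  AV_PIX_FMT_YUV422P9,  AV_PIX_FMT_YUV444P9  },
    /* 10 bit */ { AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV444P10 },
    /* 12 bit */ { AV_PIX_FMT_YUV420P12, AV_PIX_FMT_YUV422P12, AV_PIX_FMT_YUV444P12 },
    /* 14 bit */ { AV_PIX_FMT_YUV420P14, AV_PIX_FMT_YUV422P14, AV_PIX_FMT_YUV444P14 },
};

When the stream signals an RGB colorspace, the hunk replaces the 4:4:4 entry with the matching AV_PIX_FMT_GBRP* format instead.
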
@@ -489,28 +489,28 @@ static av_cold int decode_init(AVCodecContext *avctx)
switch (s->bitstream_bpp) {
case 12:
avctx->pix_fmt = PIX_FMT_YUV420P;
avctx->pix_fmt = AV_PIX_FMT_YUV420P;
break;
case 16:
if (s->yuy2) {
avctx->pix_fmt = PIX_FMT_YUYV422;
avctx->pix_fmt = AV_PIX_FMT_YUYV422;
} else {
avctx->pix_fmt = PIX_FMT_YUV422P;
avctx->pix_fmt = AV_PIX_FMT_YUV422P;
}
break;
case 24:
case 32:
if (s->bgr32) {
avctx->pix_fmt = PIX_FMT_RGB32;
avctx->pix_fmt = AV_PIX_FMT_RGB32;
} else {
avctx->pix_fmt = PIX_FMT_BGR24;
avctx->pix_fmt = AV_PIX_FMT_BGR24;
}
break;
default:
return AVERROR_INVALIDDATA;
}
if ((avctx->pix_fmt == PIX_FMT_YUV422P || avctx->pix_fmt == PIX_FMT_YUV420P) && avctx->width & 1) {
if ((avctx->pix_fmt == AV_PIX_FMT_YUV422P || avctx->pix_fmt == AV_PIX_FMT_YUV420P) && avctx->width & 1) {
av_log(avctx, AV_LOG_ERROR, "width must be even for this colorspace\n");
return AVERROR_INVALIDDATA;
}
@@ -583,18 +583,18 @@ static av_cold int encode_init(AVCodecContext *avctx)
avctx->coded_frame = &s->picture;
switch (avctx->pix_fmt) {
case PIX_FMT_YUV420P:
case PIX_FMT_YUV422P:
case AV_PIX_FMT_YUV420P:
case AV_PIX_FMT_YUV422P:
if (s->width & 1) {
av_log(avctx, AV_LOG_ERROR, "width must be even for this colorspace\n");
return AVERROR(EINVAL);
}
s->bitstream_bpp = avctx->pix_fmt == PIX_FMT_YUV420P ? 12 : 16;
s->bitstream_bpp = avctx->pix_fmt == AV_PIX_FMT_YUV420P ? 12 : 16;
break;
case PIX_FMT_RGB32:
case AV_PIX_FMT_RGB32:
s->bitstream_bpp = 32;
break;
case PIX_FMT_RGB24:
case AV_PIX_FMT_RGB24:
s->bitstream_bpp = 24;
break;
default:
@@ -616,7 +616,7 @@ static av_cold int encode_init(AVCodecContext *avctx)
}else s->context= 0;
if (avctx->codec->id == AV_CODEC_ID_HUFFYUV) {
if (avctx->pix_fmt == PIX_FMT_YUV420P) {
if (avctx->pix_fmt == AV_PIX_FMT_YUV420P) {
av_log(avctx, AV_LOG_ERROR,
"Error: YV12 is not supported by huffyuv; use "
"vcodec=ffvhuff or format=422p\n");
@@ -1299,8 +1299,8 @@ static int encode_frame(AVCodecContext *avctx, AVPacket *pkt,
init_put_bits(&s->pb, pkt->data + size, pkt->size - size);
if (avctx->pix_fmt == PIX_FMT_YUV422P ||
avctx->pix_fmt == PIX_FMT_YUV420P) {
if (avctx->pix_fmt == AV_PIX_FMT_YUV422P ||
avctx->pix_fmt == AV_PIX_FMT_YUV420P) {
int lefty, leftu, leftv, y, cy;
put_bits(&s->pb, 8, leftv = p->data[2][0]);
@@ -1404,7 +1404,7 @@ static int encode_frame(AVCodecContext *avctx, AVPacket *pkt,
encode_422_bitstream(s, 0, width);
}
}
} else if(avctx->pix_fmt == PIX_FMT_RGB32) {
} else if(avctx->pix_fmt == AV_PIX_FMT_RGB32) {
uint8_t *data = p->data[0] + (height - 1) * p->linesize[0];
const int stride = -p->linesize[0];
const int fake_stride = -fake_ystride;
@@ -1429,7 +1429,7 @@ static int encode_frame(AVCodecContext *avctx, AVPacket *pkt,
}
encode_bgra_bitstream(s, width, 4);
}
}else if(avctx->pix_fmt == PIX_FMT_RGB24){
}else if(avctx->pix_fmt == AV_PIX_FMT_RGB24){
uint8_t *data = p->data[0] + (height-1)*p->linesize[0];
const int stride = -p->linesize[0];
const int fake_stride = -fake_ystride;
@@ -1547,8 +1547,8 @@ AVCodec ff_huffyuv_encoder = {
.init = encode_init,
.encode2 = encode_frame,
.close = encode_end,
.pix_fmts = (const enum PixelFormat[]){
PIX_FMT_YUV422P, PIX_FMT_RGB24, PIX_FMT_RGB32, PIX_FMT_NONE
.pix_fmts = (const enum AVPixelFormat[]){
AV_PIX_FMT_YUV422P, AV_PIX_FMT_RGB24, AV_PIX_FMT_RGB32, AV_PIX_FMT_NONE
},
.long_name = NULL_IF_CONFIG_SMALL("Huffyuv / HuffYUV"),
};
@@ -1563,8 +1563,8 @@ AVCodec ff_ffvhuff_encoder = {
.init = encode_init,
.encode2 = encode_frame,
.close = encode_end,
.pix_fmts = (const enum PixelFormat[]){
PIX_FMT_YUV420P, PIX_FMT_YUV422P, PIX_FMT_RGB24, PIX_FMT_RGB32, PIX_FMT_NONE
.pix_fmts = (const enum AVPixelFormat[]){
AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_RGB24, AV_PIX_FMT_RGB32, AV_PIX_FMT_NONE
},
.long_name = NULL_IF_CONFIG_SMALL("Huffyuv FFmpeg variant"),
};

@@ -151,7 +151,7 @@ static av_cold int idcin_decode_init(AVCodecContext *avctx)
unsigned char *histograms;
s->avctx = avctx;
avctx->pix_fmt = PIX_FMT_PAL8;
avctx->pix_fmt = AV_PIX_FMT_PAL8;
/* make sure the Huffman tables make it */
if (s->avctx->extradata_size != HUFFMAN_TABLE_SIZE) {

@@ -232,7 +232,7 @@ static int extract_header(AVCodecContext *const avctx,
s->masking = bytestream_get_byte(&buf);
if (s->masking == MASK_HAS_MASK) {
if (s->bpp >= 8) {
avctx->pix_fmt = PIX_FMT_RGB32;
avctx->pix_fmt = AV_PIX_FMT_RGB32;
av_freep(&s->mask_buf);
av_freep(&s->mask_palbuf);
s->mask_buf = av_malloc((s->planesize * 32) + FF_INPUT_BUFFER_PADDING_SIZE);
@@ -328,10 +328,10 @@ static av_cold int decode_init(AVCodecContext *avctx)
else
palette_size = 0;
avctx->pix_fmt = (avctx->bits_per_coded_sample < 8) ||
(avctx->extradata_size >= 2 && palette_size) ? PIX_FMT_PAL8 : PIX_FMT_GRAY8;
(avctx->extradata_size >= 2 && palette_size) ? AV_PIX_FMT_PAL8 : AV_PIX_FMT_GRAY8;
} else if (avctx->bits_per_coded_sample <= 32) {
if (avctx->codec_tag != MKTAG('D','E','E','P'))
avctx->pix_fmt = PIX_FMT_BGR32;
avctx->pix_fmt = AV_PIX_FMT_BGR32;
} else {
return AVERROR_INVALIDDATA;
}
@@ -488,14 +488,14 @@ static int decode_frame_ilbm(AVCodecContext *avctx,
} else if ((res = avctx->get_buffer(avctx, &s->frame)) < 0) {
av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
return res;
} else if (avctx->bits_per_coded_sample <= 8 && avctx->pix_fmt == PIX_FMT_PAL8) {
} else if (avctx->bits_per_coded_sample <= 8 && avctx->pix_fmt == AV_PIX_FMT_PAL8) {
if ((res = ff_cmap_read_palette(avctx, (uint32_t*)s->frame.data[1])) < 0)
return res;
}
s->init = 1;
if (avctx->codec_tag == MKTAG('A','C','B','M')) {
if (avctx->pix_fmt == PIX_FMT_PAL8 || avctx->pix_fmt == PIX_FMT_GRAY8) {
if (avctx->pix_fmt == AV_PIX_FMT_PAL8 || avctx->pix_fmt == AV_PIX_FMT_GRAY8) {
memset(s->frame.data[0], 0, avctx->height * s->frame.linesize[0]);
for (plane = 0; plane < s->bpp; plane++) {
for(y = 0; y < avctx->height && buf < buf_end; y++ ) {
@@ -504,7 +504,7 @@ static int decode_frame_ilbm(AVCodecContext *avctx,
buf += s->planesize;
}
}
} else if (s->ham) { // HAM to PIX_FMT_BGR32
} else if (s->ham) { // HAM to AV_PIX_FMT_BGR32
memset(s->frame.data[0], 0, avctx->height * s->frame.linesize[0]);
for(y = 0; y < avctx->height; y++) {
uint8_t *row = &s->frame.data[0][y * s->frame.linesize[0]];
@@ -525,13 +525,13 @@ static int decode_frame_ilbm(AVCodecContext *avctx,
uint8_t *row = &s->frame.data[0][y * s->frame.linesize[0]];
memcpy(row, buf, FFMIN(raw_width, buf_end - buf));
buf += raw_width;
if (avctx->pix_fmt == PIX_FMT_BGR32) {
if (avctx->pix_fmt == AV_PIX_FMT_BGR32) {
for(x = 0; x < avctx->width; x++)
row[4 * x + 3] = row[4 * x + 3] & 0xF0 | (row[4 * x + 3] >> 4);
}
}
} else if (avctx->codec_tag == MKTAG('I','L','B','M')) { // interleaved
if (avctx->pix_fmt == PIX_FMT_PAL8 || avctx->pix_fmt == PIX_FMT_GRAY8) {
if (avctx->pix_fmt == AV_PIX_FMT_PAL8 || avctx->pix_fmt == AV_PIX_FMT_GRAY8) {
for(y = 0; y < avctx->height; y++ ) {
uint8_t *row = &s->frame.data[0][ y*s->frame.linesize[0] ];
memset(row, 0, avctx->width);
@@ -540,7 +540,7 @@ static int decode_frame_ilbm(AVCodecContext *avctx,
buf += s->planesize;
}
}
} else if (s->ham) { // HAM to PIX_FMT_BGR32
} else if (s->ham) { // HAM to AV_PIX_FMT_BGR32
for (y = 0; y < avctx->height; y++) {
uint8_t *row = &s->frame.data[0][ y*s->frame.linesize[0] ];
memset(s->ham_buf, 0, s->planesize * 8);
@@ -550,7 +550,7 @@ static int decode_frame_ilbm(AVCodecContext *avctx,
}
decode_ham_plane32((uint32_t *) row, s->ham_buf, s->ham_palbuf, s->planesize);
}
} else { // PIX_FMT_BGR32
} else { // AV_PIX_FMT_BGR32
for(y = 0; y < avctx->height; y++ ) {
uint8_t *row = &s->frame.data[0][y*s->frame.linesize[0]];
memset(row, 0, avctx->width << 2);
@@ -561,13 +561,13 @@ static int decode_frame_ilbm(AVCodecContext *avctx,
}
}
} else if (avctx->codec_tag == MKTAG('P','B','M',' ')) { // IFF-PBM
if (avctx->pix_fmt == PIX_FMT_PAL8 || avctx->pix_fmt == PIX_FMT_GRAY8) {
if (avctx->pix_fmt == AV_PIX_FMT_PAL8 || avctx->pix_fmt == AV_PIX_FMT_GRAY8) {
for(y = 0; y < avctx->height; y++ ) {
uint8_t *row = &s->frame.data[0][y * s->frame.linesize[0]];
memcpy(row, buf, FFMIN(avctx->width, buf_end - buf));
buf += avctx->width + (avctx->width % 2); // padding if odd
}
} else if (s->ham) { // IFF-PBM: HAM to PIX_FMT_BGR32
} else if (s->ham) { // IFF-PBM: HAM to AV_PIX_FMT_BGR32
for (y = 0; y < avctx->height; y++) {
uint8_t *row = &s->frame.data[0][ y*s->frame.linesize[0] ];
memcpy(s->ham_buf, buf, FFMIN(avctx->width, buf_end - buf));
@@ -605,17 +605,17 @@ static int decode_frame_byterun1(AVCodecContext *avctx,
} else if ((res = avctx->get_buffer(avctx, &s->frame)) < 0) {
av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
return res;
} else if (avctx->pix_fmt == PIX_FMT_PAL8) {
} else if (avctx->pix_fmt == AV_PIX_FMT_PAL8) {
if ((res = ff_cmap_read_palette(avctx, (uint32_t*)s->frame.data[1])) < 0)
return res;
} else if (avctx->pix_fmt == PIX_FMT_RGB32 && avctx->bits_per_coded_sample <= 8) {
} else if (avctx->pix_fmt == AV_PIX_FMT_RGB32 && avctx->bits_per_coded_sample <= 8) {
if ((res = ff_cmap_read_palette(avctx, s->mask_palbuf)) < 0)
return res;
}
s->init = 1;
if (avctx->codec_tag == MKTAG('I','L','B','M')) { //interleaved
if (avctx->pix_fmt == PIX_FMT_PAL8 || avctx->pix_fmt == PIX_FMT_GRAY8) {
if (avctx->pix_fmt == AV_PIX_FMT_PAL8 || avctx->pix_fmt == AV_PIX_FMT_GRAY8) {
for(y = 0; y < avctx->height ; y++ ) {
uint8_t *row = &s->frame.data[0][ y*s->frame.linesize[0] ];
memset(row, 0, avctx->width);
@@ -624,7 +624,7 @@ static int decode_frame_byterun1(AVCodecContext *avctx,
decodeplane8(row, s->planebuf, s->planesize, plane);
}
}
} else if (avctx->bits_per_coded_sample <= 8) { //8-bit (+ mask) to PIX_FMT_BGR32
} else if (avctx->bits_per_coded_sample <= 8) { //8-bit (+ mask) to AV_PIX_FMT_BGR32
for (y = 0; y < avctx->height ; y++ ) {
uint8_t *row = &s->frame.data[0][y*s->frame.linesize[0]];
memset(s->mask_buf, 0, avctx->width * sizeof(uint32_t));
@@ -634,7 +634,7 @@ static int decode_frame_byterun1(AVCodecContext *avctx,
}
lookup_pal_indicies((uint32_t *) row, s->mask_buf, s->mask_palbuf, avctx->width);
}
} else if (s->ham) { // HAM to PIX_FMT_BGR32
} else if (s->ham) { // HAM to AV_PIX_FMT_BGR32
for (y = 0; y < avctx->height ; y++) {
uint8_t *row = &s->frame.data[0][y*s->frame.linesize[0]];
memset(s->ham_buf, 0, s->planesize * 8);
@@ -644,7 +644,7 @@ static int decode_frame_byterun1(AVCodecContext *avctx,
}
decode_ham_plane32((uint32_t *) row, s->ham_buf, s->ham_palbuf, s->planesize);
}
} else { //PIX_FMT_BGR32
} else { //AV_PIX_FMT_BGR32
for(y = 0; y < avctx->height ; y++ ) {
uint8_t *row = &s->frame.data[0][y*s->frame.linesize[0]];
memset(row, 0, avctx->width << 2);
@@ -655,12 +655,12 @@ static int decode_frame_byterun1(AVCodecContext *avctx,
}
}
} else if (avctx->codec_tag == MKTAG('P','B','M',' ')) { // IFF-PBM
if (avctx->pix_fmt == PIX_FMT_PAL8 || avctx->pix_fmt == PIX_FMT_GRAY8) {
if (avctx->pix_fmt == AV_PIX_FMT_PAL8 || avctx->pix_fmt == AV_PIX_FMT_GRAY8) {
for(y = 0; y < avctx->height ; y++ ) {
uint8_t *row = &s->frame.data[0][y*s->frame.linesize[0]];
buf += decode_byterun(row, avctx->width, buf, buf_end);
}
} else if (s->ham) { // IFF-PBM: HAM to PIX_FMT_BGR32
} else if (s->ham) { // IFF-PBM: HAM to AV_PIX_FMT_BGR32
for (y = 0; y < avctx->height ; y++) {
uint8_t *row = &s->frame.data[0][y*s->frame.linesize[0]];
buf += decode_byterun(s->ham_buf, avctx->width, buf, buf_end);

@@ -64,349 +64,349 @@ typedef struct PixFmtInfo {
} PixFmtInfo;
/* this table gives more information about formats */
static const PixFmtInfo pix_fmt_info[PIX_FMT_NB] = {
static const PixFmtInfo pix_fmt_info[AV_PIX_FMT_NB] = {
/* YUV formats */
[PIX_FMT_YUV420P] = {
[AV_PIX_FMT_YUV420P] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV422P] = {
[AV_PIX_FMT_YUV422P] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV444P] = {
[AV_PIX_FMT_YUV444P] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUYV422] = {
[AV_PIX_FMT_YUYV422] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_UYVY422] = {
[AV_PIX_FMT_UYVY422] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV410P] = {
[AV_PIX_FMT_YUV410P] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV411P] = {
[AV_PIX_FMT_YUV411P] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV440P] = {
[AV_PIX_FMT_YUV440P] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV420P9LE] = {
[AV_PIX_FMT_YUV420P9LE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV422P9LE] = {
[AV_PIX_FMT_YUV422P9LE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV444P9LE] = {
[AV_PIX_FMT_YUV444P9LE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV420P9BE] = {
[AV_PIX_FMT_YUV420P9BE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV422P9BE] = {
[AV_PIX_FMT_YUV422P9BE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV444P9BE] = {
[AV_PIX_FMT_YUV444P9BE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV420P10LE] = {
[AV_PIX_FMT_YUV420P10LE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV422P10LE] = {
[AV_PIX_FMT_YUV422P10LE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV444P10LE] = {
[AV_PIX_FMT_YUV444P10LE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV420P10BE] = {
[AV_PIX_FMT_YUV420P10BE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV422P10BE] = {
[AV_PIX_FMT_YUV422P10BE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV444P10BE] = {
[AV_PIX_FMT_YUV444P10BE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV420P12LE] = {
[AV_PIX_FMT_YUV420P12LE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV422P12LE] = {
[AV_PIX_FMT_YUV422P12LE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV444P12LE] = {
[AV_PIX_FMT_YUV444P12LE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV420P12BE] = {
[AV_PIX_FMT_YUV420P12BE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV422P12BE] = {
[AV_PIX_FMT_YUV422P12BE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV444P12BE] = {
[AV_PIX_FMT_YUV444P12BE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV420P14LE] = {
[AV_PIX_FMT_YUV420P14LE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV422P14LE] = {
[AV_PIX_FMT_YUV422P14LE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV444P14LE] = {
[AV_PIX_FMT_YUV444P14LE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV420P14BE] = {
[AV_PIX_FMT_YUV420P14BE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV422P14BE] = {
[AV_PIX_FMT_YUV422P14BE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV444P14BE] = {
[AV_PIX_FMT_YUV444P14BE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV420P16LE] = {
[AV_PIX_FMT_YUV420P16LE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV422P16LE] = {
[AV_PIX_FMT_YUV422P16LE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV444P16LE] = {
[AV_PIX_FMT_YUV444P16LE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV420P16BE] = {
[AV_PIX_FMT_YUV420P16BE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV422P16BE] = {
[AV_PIX_FMT_YUV422P16BE] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUV444P16BE] = {
[AV_PIX_FMT_YUV444P16BE] = {
.color_type = FF_COLOR_YUV,
},
/* YUV formats with alpha plane */
[PIX_FMT_YUVA420P] = {
[AV_PIX_FMT_YUVA420P] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUVA422P] = {
[AV_PIX_FMT_YUVA422P] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_YUVA444P] = {
[AV_PIX_FMT_YUVA444P] = {
.color_type = FF_COLOR_YUV,
},
/* JPEG YUV */
[PIX_FMT_YUVJ420P] = {
[AV_PIX_FMT_YUVJ420P] = {
.color_type = FF_COLOR_YUV_JPEG,
},
[PIX_FMT_YUVJ422P] = {
[AV_PIX_FMT_YUVJ422P] = {
.color_type = FF_COLOR_YUV_JPEG,
},
[PIX_FMT_YUVJ444P] = {
[AV_PIX_FMT_YUVJ444P] = {
.color_type = FF_COLOR_YUV_JPEG,
},
[PIX_FMT_YUVJ440P] = {
[AV_PIX_FMT_YUVJ440P] = {
.color_type = FF_COLOR_YUV_JPEG,
},
/* RGB formats */
[PIX_FMT_RGB24] = {
[AV_PIX_FMT_RGB24] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_BGR24] = {
[AV_PIX_FMT_BGR24] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_ARGB] = {
[AV_PIX_FMT_ARGB] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_RGB48BE] = {
[AV_PIX_FMT_RGB48BE] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_RGB48LE] = {
[AV_PIX_FMT_RGB48LE] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_RGBA64BE] = {
[AV_PIX_FMT_RGBA64BE] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_RGBA64LE] = {
[AV_PIX_FMT_RGBA64LE] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_RGB565BE] = {
[AV_PIX_FMT_RGB565BE] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_RGB565LE] = {
[AV_PIX_FMT_RGB565LE] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_RGB555BE] = {
[AV_PIX_FMT_RGB555BE] = {
.color_type = FF_COLOR_RGB,
.padded_size = 16,
},
[PIX_FMT_RGB555LE] = {
[AV_PIX_FMT_RGB555LE] = {
.color_type = FF_COLOR_RGB,
.padded_size = 16,
},
[PIX_FMT_RGB444BE] = {
[AV_PIX_FMT_RGB444BE] = {
.color_type = FF_COLOR_RGB,
.padded_size = 16,
},
[PIX_FMT_RGB444LE] = {
[AV_PIX_FMT_RGB444LE] = {
.color_type = FF_COLOR_RGB,
.padded_size = 16,
},
/* gray / mono formats */
[PIX_FMT_GRAY16BE] = {
[AV_PIX_FMT_GRAY16BE] = {
.color_type = FF_COLOR_GRAY,
},
[PIX_FMT_GRAY16LE] = {
[AV_PIX_FMT_GRAY16LE] = {
.color_type = FF_COLOR_GRAY,
},
[PIX_FMT_GRAY8] = {
[AV_PIX_FMT_GRAY8] = {
.color_type = FF_COLOR_GRAY,
},
[PIX_FMT_GRAY8A] = {
[AV_PIX_FMT_GRAY8A] = {
.color_type = FF_COLOR_GRAY,
},
[PIX_FMT_MONOWHITE] = {
[AV_PIX_FMT_MONOWHITE] = {
.color_type = FF_COLOR_GRAY,
},
[PIX_FMT_MONOBLACK] = {
[AV_PIX_FMT_MONOBLACK] = {
.color_type = FF_COLOR_GRAY,
},
/* paletted formats */
[PIX_FMT_PAL8] = {
[AV_PIX_FMT_PAL8] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_UYYVYY411] = {
[AV_PIX_FMT_UYYVYY411] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_ABGR] = {
[AV_PIX_FMT_ABGR] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_BGR48BE] = {
[AV_PIX_FMT_BGR48BE] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_BGR48LE] = {
[AV_PIX_FMT_BGR48LE] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_BGRA64BE] = {
[AV_PIX_FMT_BGRA64BE] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_BGRA64LE] = {
[AV_PIX_FMT_BGRA64LE] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_BGR565BE] = {
[AV_PIX_FMT_BGR565BE] = {
.color_type = FF_COLOR_RGB,
.padded_size = 16,
},
[PIX_FMT_BGR565LE] = {
[AV_PIX_FMT_BGR565LE] = {
.color_type = FF_COLOR_RGB,
.padded_size = 16,
},
[PIX_FMT_BGR555BE] = {
[AV_PIX_FMT_BGR555BE] = {
.color_type = FF_COLOR_RGB,
.padded_size = 16,
},
[PIX_FMT_BGR555LE] = {
[AV_PIX_FMT_BGR555LE] = {
.color_type = FF_COLOR_RGB,
.padded_size = 16,
},
[PIX_FMT_BGR444BE] = {
[AV_PIX_FMT_BGR444BE] = {
.color_type = FF_COLOR_RGB,
.padded_size = 16,
},
[PIX_FMT_BGR444LE] = {
[AV_PIX_FMT_BGR444LE] = {
.color_type = FF_COLOR_RGB,
.padded_size = 16,
},
[PIX_FMT_RGB8] = {
[AV_PIX_FMT_RGB8] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_RGB4] = {
[AV_PIX_FMT_RGB4] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_RGB4_BYTE] = {
[AV_PIX_FMT_RGB4_BYTE] = {
.color_type = FF_COLOR_RGB,
.padded_size = 8,
},
[PIX_FMT_BGR8] = {
[AV_PIX_FMT_BGR8] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_BGR4] = {
[AV_PIX_FMT_BGR4] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_BGR4_BYTE] = {
[AV_PIX_FMT_BGR4_BYTE] = {
.color_type = FF_COLOR_RGB,
.padded_size = 8,
},
[PIX_FMT_NV12] = {
[AV_PIX_FMT_NV12] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_NV21] = {
[AV_PIX_FMT_NV21] = {
.color_type = FF_COLOR_YUV,
},
[PIX_FMT_BGRA] = {
[AV_PIX_FMT_BGRA] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_RGBA] = {
[AV_PIX_FMT_RGBA] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_GBRP] = {
[AV_PIX_FMT_GBRP] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_GBRP9BE] = {
[AV_PIX_FMT_GBRP9BE] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_GBRP9LE] = {
[AV_PIX_FMT_GBRP9LE] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_GBRP10BE] = {
[AV_PIX_FMT_GBRP10BE] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_GBRP10LE] = {
[AV_PIX_FMT_GBRP10LE] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_GBRP12BE] = {
[AV_PIX_FMT_GBRP12BE] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_GBRP12LE] = {
[AV_PIX_FMT_GBRP12LE] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_GBRP14BE] = {
[AV_PIX_FMT_GBRP14BE] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_GBRP14LE] = {
[AV_PIX_FMT_GBRP14LE] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_GBRP16BE] = {
[AV_PIX_FMT_GBRP16BE] = {
.color_type = FF_COLOR_RGB,
},
[PIX_FMT_GBRP16LE] = {
[AV_PIX_FMT_GBRP16LE] = {
.color_type = FF_COLOR_RGB,
},
};
void avcodec_get_chroma_sub_sample(enum PixelFormat pix_fmt, int *h_shift, int *v_shift)
void avcodec_get_chroma_sub_sample(enum AVPixelFormat pix_fmt, int *h_shift, int *v_shift)
{
*h_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_w;
*v_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
}
int ff_is_hwaccel_pix_fmt(enum PixelFormat pix_fmt)
int ff_is_hwaccel_pix_fmt(enum AVPixelFormat pix_fmt)
{
return av_pix_fmt_descriptors[pix_fmt].flags & PIX_FMT_HWACCEL;
}
int avpicture_fill(AVPicture *picture, uint8_t *ptr,
enum PixelFormat pix_fmt, int width, int height)
enum AVPixelFormat pix_fmt, int width, int height)
{
return av_image_fill_arrays(picture->data, picture->linesize,
ptr, pix_fmt, width, height, 1);
}
int avpicture_layout(const AVPicture* src, enum PixelFormat pix_fmt, int width, int height,
int avpicture_layout(const AVPicture* src, enum AVPixelFormat pix_fmt, int width, int height,
unsigned char *dest, int dest_size)
{
return av_image_copy_to_buffer(dest, dest_size,
@@ -414,12 +414,12 @@ int avpicture_layout(const AVPicture* src, enum PixelFormat pix_fmt, int width,
pix_fmt, width, height, 1);
}
int avpicture_get_size(enum PixelFormat pix_fmt, int width, int height)
int avpicture_get_size(enum AVPixelFormat pix_fmt, int width, int height)
{
return av_image_get_buffer_size(pix_fmt, width, height, 1);
}
static int get_pix_fmt_depth(int *min, int *max, enum PixelFormat pix_fmt)
static int get_pix_fmt_depth(int *min, int *max, enum AVPixelFormat pix_fmt)
{
const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];
int i;
@@ -437,7 +437,7 @@ static int get_pix_fmt_depth(int *min, int *max, enum PixelFormat pix_fmt)
return 0;
}
int avcodec_get_pix_fmt_loss(enum PixelFormat dst_pix_fmt, enum PixelFormat src_pix_fmt,
int avcodec_get_pix_fmt_loss(enum AVPixelFormat dst_pix_fmt, enum AVPixelFormat src_pix_fmt,
int has_alpha)
{
const PixFmtInfo *pf, *ps;
@@ -446,7 +446,7 @@ int avcodec_get_pix_fmt_loss(enum PixelFormat dst_pix_fmt, enum PixelFormat src_
int src_min_depth, src_max_depth, dst_min_depth, dst_max_depth;
int ret, loss;
if (dst_pix_fmt >= PIX_FMT_NB || dst_pix_fmt <= PIX_FMT_NONE)
if (dst_pix_fmt >= AV_PIX_FMT_NB || dst_pix_fmt <= AV_PIX_FMT_NONE)
return ~0;
src_desc = &av_pix_fmt_descriptors[src_pix_fmt];
@@ -499,14 +499,14 @@ int avcodec_get_pix_fmt_loss(enum PixelFormat dst_pix_fmt, enum PixelFormat src_
loss |= FF_LOSS_CHROMA;
if (!pixdesc_has_alpha(dst_desc) && (pixdesc_has_alpha(src_desc) && has_alpha))
loss |= FF_LOSS_ALPHA;
if (dst_pix_fmt == PIX_FMT_PAL8 &&
(src_pix_fmt != PIX_FMT_PAL8 && (ps->color_type != FF_COLOR_GRAY || (pixdesc_has_alpha(src_desc) && has_alpha))))
if (dst_pix_fmt == AV_PIX_FMT_PAL8 &&
(src_pix_fmt != AV_PIX_FMT_PAL8 && (ps->color_type != FF_COLOR_GRAY || (pixdesc_has_alpha(src_desc) && has_alpha))))
loss |= FF_LOSS_COLORQUANT;
return loss;
}
static int avg_bits_per_pixel(enum PixelFormat pix_fmt)
static int avg_bits_per_pixel(enum AVPixelFormat pix_fmt)
{
const PixFmtInfo *info = &pix_fmt_info[pix_fmt];
const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[pix_fmt];
@@ -516,17 +516,17 @@ static int avg_bits_per_pixel(enum PixelFormat pix_fmt)
}
#if FF_API_FIND_BEST_PIX_FMT
enum PixelFormat avcodec_find_best_pix_fmt(int64_t pix_fmt_mask, enum PixelFormat src_pix_fmt,
enum AVPixelFormat avcodec_find_best_pix_fmt(int64_t pix_fmt_mask, enum AVPixelFormat src_pix_fmt,
int has_alpha, int *loss_ptr)
{
enum PixelFormat dst_pix_fmt;
enum AVPixelFormat dst_pix_fmt;
int i;
if (loss_ptr) /* all losses count (for backward compatibility) */
*loss_ptr = 0;
dst_pix_fmt = PIX_FMT_NONE; /* so first iteration doesn't have to be treated special */
for(i = 0; i< FFMIN(PIX_FMT_NB, 64); i++){
dst_pix_fmt = AV_PIX_FMT_NONE; /* so first iteration doesn't have to be treated special */
for(i = 0; i< FFMIN(AV_PIX_FMT_NB, 64); i++){
if (pix_fmt_mask & (1ULL << i))
dst_pix_fmt = avcodec_find_best_pix_fmt_of_2(dst_pix_fmt, i, src_pix_fmt, has_alpha, loss_ptr);
}
@@ -534,10 +534,10 @@ enum PixelFormat avcodec_find_best_pix_fmt(int64_t pix_fmt_mask, enum PixelForma
}
#endif /* FF_API_FIND_BEST_PIX_FMT */
enum PixelFormat avcodec_find_best_pix_fmt_of_2(enum PixelFormat dst_pix_fmt1, enum PixelFormat dst_pix_fmt2,
enum PixelFormat src_pix_fmt, int has_alpha, int *loss_ptr)
enum AVPixelFormat avcodec_find_best_pix_fmt_of_2(enum AVPixelFormat dst_pix_fmt1, enum AVPixelFormat dst_pix_fmt2,
enum AVPixelFormat src_pix_fmt, int has_alpha, int *loss_ptr)
{
enum PixelFormat dst_pix_fmt;
enum AVPixelFormat dst_pix_fmt;
int loss1, loss2, loss_order1, loss_order2, i, loss_mask;
static const int loss_mask_order[] = {
~0, /* no loss first */
@@ -555,12 +555,12 @@ enum PixelFormat avcodec_find_best_pix_fmt_of_2(enum PixelFormat dst_pix_fmt1, e
};
loss_mask= loss_ptr?~*loss_ptr:~0; /* use loss mask if provided */
dst_pix_fmt = PIX_FMT_NONE;
dst_pix_fmt = AV_PIX_FMT_NONE;
loss1 = avcodec_get_pix_fmt_loss(dst_pix_fmt1, src_pix_fmt, has_alpha) & loss_mask;
loss2 = avcodec_get_pix_fmt_loss(dst_pix_fmt2, src_pix_fmt, has_alpha) & loss_mask;
/* try with successive loss */
for(i = 0;loss_mask_order[i] != 0 && dst_pix_fmt == PIX_FMT_NONE;i++) {
for(i = 0;loss_mask_order[i] != 0 && dst_pix_fmt == AV_PIX_FMT_NONE;i++) {
loss_order1 = loss1 & loss_mask_order[i];
loss_order2 = loss2 & loss_mask_order[i];
@@ -577,34 +577,34 @@ enum PixelFormat avcodec_find_best_pix_fmt_of_2(enum PixelFormat dst_pix_fmt1, e
}
#if AV_HAVE_INCOMPATIBLE_FORK_ABI
enum PixelFormat avcodec_find_best_pix_fmt2(enum PixelFormat *pix_fmt_list,
enum PixelFormat src_pix_fmt,
enum AVPixelFormat avcodec_find_best_pix_fmt2(enum AVPixelFormat *pix_fmt_list,
enum AVPixelFormat src_pix_fmt,
int has_alpha, int *loss_ptr){
return avcodec_find_best_pix_fmt_of_list(pix_fmt_list, src_pix_fmt, has_alpha, loss_ptr);
}
#else
enum PixelFormat avcodec_find_best_pix_fmt2(enum PixelFormat dst_pix_fmt1, enum PixelFormat dst_pix_fmt2,
enum PixelFormat src_pix_fmt, int has_alpha, int *loss_ptr)
enum AVPixelFormat avcodec_find_best_pix_fmt2(enum AVPixelFormat dst_pix_fmt1, enum AVPixelFormat dst_pix_fmt2,
enum AVPixelFormat src_pix_fmt, int has_alpha, int *loss_ptr)
{
return avcodec_find_best_pix_fmt_of_2(dst_pix_fmt1, dst_pix_fmt2, src_pix_fmt, has_alpha, loss_ptr);
}
#endif
enum PixelFormat avcodec_find_best_pix_fmt_of_list(enum PixelFormat *pix_fmt_list,
enum PixelFormat src_pix_fmt,
enum AVPixelFormat avcodec_find_best_pix_fmt_of_list(enum AVPixelFormat *pix_fmt_list,
enum AVPixelFormat src_pix_fmt,
int has_alpha, int *loss_ptr){
int i;
enum PixelFormat best = PIX_FMT_NONE;
enum AVPixelFormat best = AV_PIX_FMT_NONE;
for(i=0; pix_fmt_list[i] != PIX_FMT_NONE; i++)
for(i=0; pix_fmt_list[i] != AV_PIX_FMT_NONE; i++)
best = avcodec_find_best_pix_fmt_of_2(best, pix_fmt_list[i], src_pix_fmt, has_alpha, loss_ptr);
return best;
}
void av_picture_copy(AVPicture *dst, const AVPicture *src,
enum PixelFormat pix_fmt, int width, int height)
enum AVPixelFormat pix_fmt, int width, int height)
{
av_image_copy(dst->data, dst->linesize, (const uint8_t **)src->data,
src->linesize, pix_fmt, width, height);
@@ -698,7 +698,7 @@ void ff_shrink88(uint8_t *dst, int dst_wrap,
int avpicture_alloc(AVPicture *picture,
enum PixelFormat pix_fmt, int width, int height)
enum AVPixelFormat pix_fmt, int width, int height)
{
int ret;
@@ -716,7 +716,7 @@ void avpicture_free(AVPicture *picture)
}
/* return true if yuv planar */
static inline int is_yuv_planar(enum PixelFormat fmt)
static inline int is_yuv_planar(enum AVPixelFormat fmt)
{
const PixFmtInfo *info = &pix_fmt_info[fmt];
const AVPixFmtDescriptor *desc = &av_pix_fmt_descriptors[fmt];
@@ -739,12 +739,12 @@ static inline int is_yuv_planar(enum PixelFormat fmt)
}
int av_picture_crop(AVPicture *dst, const AVPicture *src,
enum PixelFormat pix_fmt, int top_band, int left_band)
enum AVPixelFormat pix_fmt, int top_band, int left_band)
{
int y_shift;
int x_shift;
if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB)
if (pix_fmt < 0 || pix_fmt >= AV_PIX_FMT_NB)
return -1;
y_shift = av_pix_fmt_descriptors[pix_fmt].log2_chroma_h;
@@ -769,7 +769,7 @@ int av_picture_crop(AVPicture *dst, const AVPicture *src,
}
int av_picture_pad(AVPicture *dst, const AVPicture *src, int height, int width,
enum PixelFormat pix_fmt, int padtop, int padbottom, int padleft, int padright,
enum AVPixelFormat pix_fmt, int padtop, int padbottom, int padleft, int padright,
int *color)
{
uint8_t *optr;
@@ -778,7 +778,7 @@ int av_picture_pad(AVPicture *dst, const AVPicture *src, int height, int width,
int yheight;
int i, y;
if (pix_fmt < 0 || pix_fmt >= PIX_FMT_NB ||
if (pix_fmt < 0 || pix_fmt >= AV_PIX_FMT_NB ||
!is_yuv_planar(pix_fmt)) return -1;
for (i = 0; i < 3; i++) {
@@ -937,17 +937,17 @@ static void deinterlace_bottom_field_inplace(uint8_t *src1, int src_wrap,
}
int avpicture_deinterlace(AVPicture *dst, const AVPicture *src,
enum PixelFormat pix_fmt, int width, int height)
enum AVPixelFormat pix_fmt, int width, int height)
{
int i;
if (pix_fmt != PIX_FMT_YUV420P &&
pix_fmt != PIX_FMT_YUVJ420P &&
pix_fmt != PIX_FMT_YUV422P &&
pix_fmt != PIX_FMT_YUVJ422P &&
pix_fmt != PIX_FMT_YUV444P &&
pix_fmt != PIX_FMT_YUV411P &&
pix_fmt != PIX_FMT_GRAY8)
if (pix_fmt != AV_PIX_FMT_YUV420P &&
pix_fmt != AV_PIX_FMT_YUVJ420P &&
pix_fmt != AV_PIX_FMT_YUV422P &&
pix_fmt != AV_PIX_FMT_YUVJ422P &&
pix_fmt != AV_PIX_FMT_YUV444P &&
pix_fmt != AV_PIX_FMT_YUV411P &&
pix_fmt != AV_PIX_FMT_GRAY8)
return -1;
if ((width & 3) != 0 || (height & 3) != 0)
return -1;
@@ -955,22 +955,22 @@ int avpicture_deinterlace(AVPicture *dst, const AVPicture *src,
for(i=0;i<3;i++) {
if (i == 1) {
switch(pix_fmt) {
case PIX_FMT_YUVJ420P:
case PIX_FMT_YUV420P:
case AV_PIX_FMT_YUVJ420P:
case AV_PIX_FMT_YUV420P:
width >>= 1;
height >>= 1;
break;
case PIX_FMT_YUV422P:
case PIX_FMT_YUVJ422P:
case AV_PIX_FMT_YUV422P:
case AV_PIX_FMT_YUVJ422P:
width >>= 1;
break;
case PIX_FMT_YUV411P:
case AV_PIX_FMT_YUV411P:
width >>= 2;
break;
default:
break;
}
if (pix_fmt == PIX_FMT_GRAY8) {
if (pix_fmt == AV_PIX_FMT_GRAY8) {
break;
}
}

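The imgconvert.c hunks above only rename the types; the selection logic is untouched. For reference, choosing an encoder destination format from a sentinel-terminated list still goes through avcodec_find_best_pix_fmt_of_list(), sketched here for a hypothetical caller (the wrapper name and logging are ours):

#include <libavcodec/avcodec.h>
#include <libavutil/log.h>

/* Sketch: least-lossy destination format for a given source format. */
static enum AVPixelFormat choose_dst_format(const AVCodec *enc,
                                            enum AVPixelFormat src_fmt)
{
    /* in: loss kinds to ignore (0 = consider all); out: loss incurred */
    int loss = 0;
    enum AVPixelFormat dst = avcodec_find_best_pix_fmt_of_list(
        (enum AVPixelFormat *)enc->pix_fmts, src_fmt, 0 /* has_alpha */, &loss);
    if (loss & FF_LOSS_COLORQUANT)
        av_log(NULL, AV_LOG_WARNING, "conversion quantizes to a palette\n");
    return dst;
}
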
@@ -201,7 +201,7 @@ static av_cold int ir2_decode_init(AVCodecContext *avctx){
avcodec_get_frame_defaults(&ic->picture);
ic->avctx = avctx;
avctx->pix_fmt= PIX_FMT_YUV410P;
avctx->pix_fmt= AV_PIX_FMT_YUV410P;
ir2_vlc.table = vlc_tables;
ir2_vlc.table_allocated = 1 << CODE_VLC_BITS;

@@ -1039,7 +1039,7 @@ static av_cold int decode_init(AVCodecContext *avctx)
Indeo3DecodeContext *ctx = avctx->priv_data;
ctx->avctx = avctx;
avctx->pix_fmt = PIX_FMT_YUV410P;
avctx->pix_fmt = AV_PIX_FMT_YUV410P;
avcodec_get_frame_defaults(&ctx->frame);
build_requant_tab();

@@ -624,7 +624,7 @@ static av_cold int decode_init(AVCodecContext *avctx)
ctx->pic_conf.pic_width = 0;
ctx->pic_conf.pic_height = 0;
avctx->pix_fmt = PIX_FMT_YUV410P;
avctx->pix_fmt = AV_PIX_FMT_YUV410P;
ctx->decode_pic_hdr = decode_pic_hdr;
ctx->decode_band_hdr = decode_band_hdr;

@@ -660,7 +660,7 @@ static av_cold int decode_init(AVCodecContext *avctx)
ctx->switch_buffers = switch_buffers;
ctx->is_nonnull_frame = is_nonnull_frame;
avctx->pix_fmt = PIX_FMT_YUV410P;
avctx->pix_fmt = AV_PIX_FMT_YUV410P;
return 0;
}

@@ -36,7 +36,7 @@ typedef struct InternalBuffer {
int linesize[AV_NUM_DATA_POINTERS];
int width;
int height;
enum PixelFormat pix_fmt;
enum AVPixelFormat pix_fmt;
uint8_t **extended_data;
int audio_data_size;
int nb_channels;
@@ -99,7 +99,7 @@ struct AVCodecDefault {
/**
* Determine whether pix_fmt is a hardware accelerated format.
*/
int ff_is_hwaccel_pix_fmt(enum PixelFormat pix_fmt);
int ff_is_hwaccel_pix_fmt(enum AVPixelFormat pix_fmt);
/**
* Return the hardware accelerated codec for codec codec_id and
@@ -109,7 +109,7 @@ int ff_is_hwaccel_pix_fmt(enum PixelFormat pix_fmt);
* @param pix_fmt the pixel format to match
* @return the hardware accelerated codec, or NULL if none was found.
*/
AVHWAccel *ff_find_hwaccel(enum AVCodecID codec_id, enum PixelFormat pix_fmt);
AVHWAccel *ff_find_hwaccel(enum AVCodecID codec_id, enum AVPixelFormat pix_fmt);
/**
* Return the index into tab at which {a,b} match elements {[0],[1]} of tab.

@@ -941,7 +941,7 @@ static av_cold int ipvideo_decode_init(AVCodecContext *avctx)
s->avctx = avctx;
s->is_16bpp = avctx->bits_per_coded_sample == 16;
avctx->pix_fmt = s->is_16bpp ? PIX_FMT_RGB555 : PIX_FMT_PAL8;
avctx->pix_fmt = s->is_16bpp ? AV_PIX_FMT_RGB555 : AV_PIX_FMT_PAL8;
ff_dsputil_init(&s->dsp, avctx);

@@ -260,20 +260,20 @@ static int get_siz(J2kDecoderContext *s)
 
     switch(s->ncomponents){
     case 1:
         if (s->precision > 8) {
-            s->avctx->pix_fmt = PIX_FMT_GRAY16;
+            s->avctx->pix_fmt = AV_PIX_FMT_GRAY16;
         } else {
-            s->avctx->pix_fmt = PIX_FMT_GRAY8;
+            s->avctx->pix_fmt = AV_PIX_FMT_GRAY8;
         }
         break;
     case 3:
         if (s->precision > 8) {
-            s->avctx->pix_fmt = PIX_FMT_RGB48;
+            s->avctx->pix_fmt = AV_PIX_FMT_RGB48;
         } else {
-            s->avctx->pix_fmt = PIX_FMT_RGB24;
+            s->avctx->pix_fmt = AV_PIX_FMT_RGB24;
         }
         break;
     case 4:
-        s->avctx->pix_fmt = PIX_FMT_RGBA;
+        s->avctx->pix_fmt = AV_PIX_FMT_RGBA;
         break;
     }
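
For quick reference, the mapping this switch implements after the rename:

    /* get_siz() component count / precision -> pix_fmt:
     *   1 component,  >8 bit -> AV_PIX_FMT_GRAY16,  else -> AV_PIX_FMT_GRAY8
     *   3 components, >8 bit -> AV_PIX_FMT_RGB48,   else -> AV_PIX_FMT_RGB24
     *   4 components         -> AV_PIX_FMT_RGBA */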

@@ -286,7 +286,7 @@ static int put_cod(J2kEncoderContext *s)
     // SGcod
     bytestream_put_byte(&s->buf, 0); // progression level
     bytestream_put_be16(&s->buf, 1); // num of layers
-    if(s->avctx->pix_fmt == PIX_FMT_YUV444P){
+    if(s->avctx->pix_fmt == AV_PIX_FMT_YUV444P){
         bytestream_put_byte(&s->buf, 2); // ICT
     }else{
         bytestream_put_byte(&s->buf, 0); // unspecified
@@ -1010,9 +1010,9 @@ static av_cold int j2kenc_init(AVCodecContext *avctx)
     for (i = 0; i < 3; i++)
         s->cbps[i] = 8;
 
-    if (avctx->pix_fmt == PIX_FMT_RGB24){
+    if (avctx->pix_fmt == AV_PIX_FMT_RGB24){
         s->ncomponents = 3;
-    } else if (avctx->pix_fmt == PIX_FMT_GRAY8){
+    } else if (avctx->pix_fmt == AV_PIX_FMT_GRAY8){
         s->ncomponents = 1;
     } else{ // planar YUV
         s->planar = 1;
@@ -1052,11 +1052,11 @@ AVCodec ff_jpeg2000_encoder = {
     .close          = j2kenc_destroy,
     .capabilities   = CODEC_CAP_EXPERIMENTAL,
     .long_name      = NULL_IF_CONFIG_SMALL("JPEG 2000"),
-    .pix_fmts       = (const enum PixelFormat[]) {
-        PIX_FMT_RGB24, PIX_FMT_YUV444P, PIX_FMT_GRAY8,
-        /* PIX_FMT_YUV420P,
-           PIX_FMT_YUV422P, PIX_FMT_YUV444P,
-           PIX_FMT_YUV410P, PIX_FMT_YUV411P,*/
-        PIX_FMT_NONE
+    .pix_fmts       = (const enum AVPixelFormat[]) {
+        AV_PIX_FMT_RGB24, AV_PIX_FMT_YUV444P, AV_PIX_FMT_GRAY8,
+        /* AV_PIX_FMT_YUV420P,
+           AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV444P,
+           AV_PIX_FMT_YUV410P, AV_PIX_FMT_YUV411P,*/
+        AV_PIX_FMT_NONE
     }
 };

@@ -244,7 +244,7 @@ static int encode_picture_ls(AVCodecContext *avctx, AVPacket *pkt,
     p->pict_type= AV_PICTURE_TYPE_I;
     p->key_frame= 1;
 
-    if(avctx->pix_fmt == PIX_FMT_GRAY8 || avctx->pix_fmt == PIX_FMT_GRAY16)
+    if(avctx->pix_fmt == AV_PIX_FMT_GRAY8 || avctx->pix_fmt == AV_PIX_FMT_GRAY16)
         comps = 1;
     else
         comps = 3;
@@ -262,7 +262,7 @@ static int encode_picture_ls(AVCodecContext *avctx, AVPacket *pkt,
     put_marker(&pb, SOI);
     put_marker(&pb, SOF48);
     put_bits(&pb, 16, 8 + comps * 3); // header size depends on components
-    put_bits(&pb, 8, (avctx->pix_fmt == PIX_FMT_GRAY16) ? 16 : 8); // bpp
+    put_bits(&pb, 8, (avctx->pix_fmt == AV_PIX_FMT_GRAY16) ? 16 : 8); // bpp
     put_bits(&pb, 16, avctx->height);
     put_bits(&pb, 16, avctx->width);
     put_bits(&pb, 8, comps); // components
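
A worked example of the `8 + comps * 3` length field written above, following the standard SOF segment layout:

    /* fixed part = Lf(2) + P(1) + Y(2) + X(2) + Nf(1) = 8 bytes,
     * plus 3 bytes per component, so:
     *   grayscale (comps == 1): 8 + 3 = 11
     *   RGB/BGR   (comps == 3): 8 + 9 = 17 */
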
@@ -286,7 +286,7 @@ static int encode_picture_ls(AVCodecContext *avctx, AVPacket *pkt,
     state = av_mallocz(sizeof(JLSState));
     /* initialize JPEG-LS state from JPEG parameters */
     state->near = near;
-    state->bpp = (avctx->pix_fmt == PIX_FMT_GRAY16) ? 16 : 8;
+    state->bpp = (avctx->pix_fmt == AV_PIX_FMT_GRAY16) ? 16 : 8;
 
     ff_jpegls_reset_coding_parameters(state, 0);
     ff_jpegls_init_state(state);
@@ -297,7 +297,7 @@ static int encode_picture_ls(AVCodecContext *avctx, AVPacket *pkt,
         return AVERROR(ENOMEM);
     last = zero;
     cur = p->data[0];
-    if(avctx->pix_fmt == PIX_FMT_GRAY8){
+    if(avctx->pix_fmt == AV_PIX_FMT_GRAY8){
         int t = 0;
 
         for(i = 0; i < avctx->height; i++) {
@@ -306,7 +306,7 @@ static int encode_picture_ls(AVCodecContext *avctx, AVPacket *pkt,
             last = cur;
             cur += p->linesize[0];
         }
-    }else if(avctx->pix_fmt == PIX_FMT_GRAY16){
+    }else if(avctx->pix_fmt == AV_PIX_FMT_GRAY16){
         int t = 0;
 
         for(i = 0; i < avctx->height; i++) {
@@ -315,7 +315,7 @@ static int encode_picture_ls(AVCodecContext *avctx, AVPacket *pkt,
             last = cur;
             cur += p->linesize[0];
         }
-    }else if(avctx->pix_fmt == PIX_FMT_RGB24){
+    }else if(avctx->pix_fmt == AV_PIX_FMT_RGB24){
         int j, width;
         int Rc[3] = {0, 0, 0};
@@ -328,7 +328,7 @@ static int encode_picture_ls(AVCodecContext *avctx, AVPacket *pkt,
             last = cur;
             cur += s->picture.linesize[0];
         }
-    }else if(avctx->pix_fmt == PIX_FMT_BGR24){
+    }else if(avctx->pix_fmt == AV_PIX_FMT_BGR24){
         int j, width;
         int Rc[3] = {0, 0, 0};
@@ -385,7 +385,7 @@ static av_cold int encode_init_ls(AVCodecContext *ctx) {
     c->avctx = ctx;
     ctx->coded_frame = &c->picture;
 
-    if(ctx->pix_fmt != PIX_FMT_GRAY8 && ctx->pix_fmt != PIX_FMT_GRAY16 && ctx->pix_fmt != PIX_FMT_RGB24 && ctx->pix_fmt != PIX_FMT_BGR24){
+    if(ctx->pix_fmt != AV_PIX_FMT_GRAY8 && ctx->pix_fmt != AV_PIX_FMT_GRAY16 && ctx->pix_fmt != AV_PIX_FMT_RGB24 && ctx->pix_fmt != AV_PIX_FMT_BGR24){
         av_log(ctx, AV_LOG_ERROR, "Only grayscale and RGB24/BGR24 images are supported\n");
         return -1;
     }
@@ -399,9 +399,9 @@ AVCodec ff_jpegls_encoder = {
     .priv_data_size = sizeof(JpeglsContext),
     .init           = encode_init_ls,
     .encode2        = encode_picture_ls,
-    .pix_fmts       = (const enum PixelFormat[]){
-        PIX_FMT_BGR24, PIX_FMT_RGB24, PIX_FMT_GRAY8, PIX_FMT_GRAY16,
-        PIX_FMT_NONE
+    .pix_fmts       = (const enum AVPixelFormat[]){
+        AV_PIX_FMT_BGR24, AV_PIX_FMT_RGB24, AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY16,
+        AV_PIX_FMT_NONE
     },
     .long_name      = NULL_IF_CONFIG_SMALL("JPEG-LS"),
 };

@@ -40,7 +40,7 @@ typedef struct JvContext {
 static av_cold int decode_init(AVCodecContext *avctx)
 {
     JvContext *s = avctx->priv_data;
 
-    avctx->pix_fmt = PIX_FMT_PAL8;
+    avctx->pix_fmt = AV_PIX_FMT_PAL8;
     ff_dsputil_init(&s->dsp, avctx);
     return 0;
 }

@@ -169,7 +169,7 @@ static av_cold int decode_init(AVCodecContext *avctx)
     KgvContext * const c = avctx->priv_data;
 
     c->avctx = avctx;
-    avctx->pix_fmt = PIX_FMT_RGB555;
+    avctx->pix_fmt = AV_PIX_FMT_RGB555;
     avctx->flags |= CODEC_FLAG_EMU_EDGE;
 
     return 0;

@@ -415,7 +415,7 @@ static av_cold int decode_init(AVCodecContext * avctx)
     }
 
     avcodec_get_frame_defaults(&c->pic);
-    avctx->pix_fmt = PIX_FMT_PAL8;
+    avctx->pix_fmt = AV_PIX_FMT_PAL8;
 
     return 0;
 }

@@ -259,7 +259,7 @@ static void lag_pred_line(LagarithContext *l, uint8_t *buf,
         if (line == 1) {
             /* Second line, left predict first pixel, the rest of the line is median predicted
              * NOTE: In the case of RGB this pixel is top predicted */
-            TL = l->avctx->pix_fmt == PIX_FMT_YUV420P ? buf[-stride] : L;
+            TL = l->avctx->pix_fmt == AV_PIX_FMT_YUV420P ? buf[-stride] : L;
         } else {
             /* Top left is 2 rows back, last pixel */
             TL = buf[width - (2 * stride) - 1];
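
The NOTE above refers to median prediction; a self-contained sketch of the standard median-of-three form it describes (written out for illustration, not taken from this file):

    /* Median predictor: median of left (L), top (T) and the gradient
     * estimate L + T - TL, which equals clamping the gradient estimate
     * into [min(L, T), max(L, T)]. */
    static inline int median_predict(int L, int T, int TL)
    {
        int g  = L + T - TL;
        int mn = L < T ? L : T;
        int mx = L < T ? T : L;
        if (g < mn) return mn;
        if (g > mx) return mx;
        return g;
    }
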
@@ -482,7 +482,7 @@ static int lag_decode_arith_plane(LagarithContext *l, uint8_t *dst,
             return -1;
         }
 
-        if (l->avctx->pix_fmt != PIX_FMT_YUV422P) {
+        if (l->avctx->pix_fmt != AV_PIX_FMT_YUV422P) {
             for (i = 0; i < height; i++) {
                 lag_pred_line(l, dst, width, stride, i);
                 dst += stride;
@@ -534,7 +534,7 @@ static int lag_decode_frame(AVCodecContext *avctx,
     switch (frametype) {
     case FRAME_SOLID_RGBA:
-        avctx->pix_fmt = PIX_FMT_RGB32;
+        avctx->pix_fmt = AV_PIX_FMT_RGB32;
 
         if (ff_thread_get_buffer(avctx, p) < 0) {
             av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
@@ -549,14 +549,14 @@ static int lag_decode_frame(AVCodecContext *avctx,
         }
         break;
     case FRAME_ARITH_RGBA:
-        avctx->pix_fmt = PIX_FMT_RGB32;
+        avctx->pix_fmt = AV_PIX_FMT_RGB32;
         planes = 4;
         offset_ry += 4;
         offs[3] = AV_RL32(buf + 9);
     case FRAME_ARITH_RGB24:
     case FRAME_U_RGB24:
         if (frametype == FRAME_ARITH_RGB24 || frametype == FRAME_U_RGB24)
-            avctx->pix_fmt = PIX_FMT_RGB24;
+            avctx->pix_fmt = AV_PIX_FMT_RGB24;
 
         if (ff_thread_get_buffer(avctx, p) < 0) {
             av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
@@ -615,7 +615,7 @@ static int lag_decode_frame(AVCodecContext *avctx,
         }
         break;
     case FRAME_ARITH_YUY2:
-        avctx->pix_fmt = PIX_FMT_YUV422P;
+        avctx->pix_fmt = AV_PIX_FMT_YUV422P;
 
         if (ff_thread_get_buffer(avctx, p) < 0) {
             av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
@@ -641,7 +641,7 @@ static int lag_decode_frame(AVCodecContext *avctx,
                                buf + offset_bv, buf_size - offset_bv);
         break;
     case FRAME_ARITH_YV12:
-        avctx->pix_fmt = PIX_FMT_YUV420P;
+        avctx->pix_fmt = AV_PIX_FMT_YUV420P;
 
         if (ff_thread_get_buffer(avctx, p) < 0) {
             av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
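
For quick reference, the frame-type dispatch in this function maps:

    /* FRAME_SOLID_RGBA, FRAME_ARITH_RGBA     -> AV_PIX_FMT_RGB32
     * FRAME_ARITH_RGB24, FRAME_U_RGB24       -> AV_PIX_FMT_RGB24
     * FRAME_ARITH_YUY2                       -> AV_PIX_FMT_YUV422P
     * FRAME_ARITH_YV12                       -> AV_PIX_FMT_YUV420P */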

@@ -502,37 +502,37 @@ static av_cold int decode_init(AVCodecContext *avctx)
     case IMGTYPE_YUV111:
         c->decomp_size = basesize * 3;
         max_decomp_size = max_basesize * 3;
-        avctx->pix_fmt = PIX_FMT_YUV444P;
+        avctx->pix_fmt = AV_PIX_FMT_YUV444P;
         av_log(avctx, AV_LOG_DEBUG, "Image type is YUV 1:1:1.\n");
         break;
     case IMGTYPE_YUV422:
         c->decomp_size = basesize * 2;
         max_decomp_size = max_basesize * 2;
-        avctx->pix_fmt = PIX_FMT_YUV422P;
+        avctx->pix_fmt = AV_PIX_FMT_YUV422P;
         av_log(avctx, AV_LOG_DEBUG, "Image type is YUV 4:2:2.\n");
         break;
     case IMGTYPE_RGB24:
         c->decomp_size = basesize * 3;
         max_decomp_size = max_basesize * 3;
-        avctx->pix_fmt = PIX_FMT_BGR24;
+        avctx->pix_fmt = AV_PIX_FMT_BGR24;
         av_log(avctx, AV_LOG_DEBUG, "Image type is RGB 24.\n");
         break;
     case IMGTYPE_YUV411:
         c->decomp_size = basesize / 2 * 3;
         max_decomp_size = max_basesize / 2 * 3;
-        avctx->pix_fmt = PIX_FMT_YUV411P;
+        avctx->pix_fmt = AV_PIX_FMT_YUV411P;
         av_log(avctx, AV_LOG_DEBUG, "Image type is YUV 4:1:1.\n");
         break;
     case IMGTYPE_YUV211:
         c->decomp_size = basesize * 2;
         max_decomp_size = max_basesize * 2;
-        avctx->pix_fmt = PIX_FMT_YUV422P;
+        avctx->pix_fmt = AV_PIX_FMT_YUV422P;
         av_log(avctx, AV_LOG_DEBUG, "Image type is YUV 2:1:1.\n");
         break;
     case IMGTYPE_YUV420:
         c->decomp_size = basesize / 2 * 3;
         max_decomp_size = max_basesize / 2 * 3;
-        avctx->pix_fmt = PIX_FMT_YUV420P;
+        avctx->pix_fmt = AV_PIX_FMT_YUV420P;
         av_log(avctx, AV_LOG_DEBUG, "Image type is YUV 4:2:0.\n");
         break;
     default:
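
The differing size multipliers follow from bytes per pixel; a worked example, assuming basesize is the pixel count of one full-resolution plane:

    /* 4:4:4 (YUV111)   : 1 + 1   + 1   = 3    -> basesize * 3
     * 4:2:2 and 2:1:1  : 1 + 1/2 + 1/2 = 2    -> basesize * 2
     * 4:2:0 and 4:1:1  : 1 + 1/4 + 1/4 = 3/2  -> basesize / 2 * 3 */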

@@ -88,7 +88,7 @@ static int encode_frame(AVCodecContext *avctx, AVPacket *pkt,
     p->pict_type= AV_PICTURE_TYPE_I;
     p->key_frame= 1;
 
-    if(avctx->pix_fmt != PIX_FMT_BGR24){
+    if(avctx->pix_fmt != AV_PIX_FMT_BGR24){
         av_log(avctx, AV_LOG_ERROR, "Format not supported!\n");
         return -1;
     }
@@ -191,6 +191,6 @@ AVCodec ff_zlib_encoder = {
     .init           = encode_init,
     .encode2        = encode_frame,
     .close          = encode_end,
-    .pix_fmts       = (const enum PixelFormat[]) { PIX_FMT_BGR24, PIX_FMT_NONE },
+    .pix_fmts       = (const enum AVPixelFormat[]) { AV_PIX_FMT_BGR24, AV_PIX_FMT_NONE },
     .long_name      = NULL_IF_CONFIG_SMALL("LCL (LossLess Codec Library) ZLIB"),
 };

@@ -40,21 +40,21 @@
 
 // pix_fmts with lower bpp have to be listed before
 // similar pix_fmts with higher bpp.
-#define RGB_PIXEL_FORMATS  PIX_FMT_RGB24,PIX_FMT_RGBA,PIX_FMT_RGB48,PIX_FMT_RGBA64
-#define GRAY_PIXEL_FORMATS PIX_FMT_GRAY8,PIX_FMT_GRAY8A,PIX_FMT_GRAY16
-#define YUV_PIXEL_FORMATS  PIX_FMT_YUV410P,PIX_FMT_YUV411P,PIX_FMT_YUVA420P, \
-                           PIX_FMT_YUV420P,PIX_FMT_YUV422P,PIX_FMT_YUVA422P, \
-                           PIX_FMT_YUV440P,PIX_FMT_YUV444P,PIX_FMT_YUVA444P, \
-                           PIX_FMT_YUV420P9,PIX_FMT_YUV422P9,PIX_FMT_YUV444P9, \
-                           PIX_FMT_YUV420P10,PIX_FMT_YUV422P10,PIX_FMT_YUV444P10, \
-                           PIX_FMT_YUV420P12,PIX_FMT_YUV422P12,PIX_FMT_YUV444P12, \
-                           PIX_FMT_YUV420P14,PIX_FMT_YUV422P14,PIX_FMT_YUV444P14, \
-                           PIX_FMT_YUV420P16,PIX_FMT_YUV422P16,PIX_FMT_YUV444P16
-static const enum PixelFormat libopenjpeg_rgb_pix_fmts[]  = {RGB_PIXEL_FORMATS};
-static const enum PixelFormat libopenjpeg_gray_pix_fmts[] = {GRAY_PIXEL_FORMATS};
-static const enum PixelFormat libopenjpeg_yuv_pix_fmts[]  = {YUV_PIXEL_FORMATS};
-static const enum PixelFormat libopenjpeg_all_pix_fmts[]  = {RGB_PIXEL_FORMATS,GRAY_PIXEL_FORMATS,YUV_PIXEL_FORMATS};
+#define RGB_PIXEL_FORMATS  AV_PIX_FMT_RGB24,AV_PIX_FMT_RGBA,AV_PIX_FMT_RGB48,AV_PIX_FMT_RGBA64
+#define GRAY_PIXEL_FORMATS AV_PIX_FMT_GRAY8,AV_PIX_FMT_GRAY8A,AV_PIX_FMT_GRAY16
+#define YUV_PIXEL_FORMATS  AV_PIX_FMT_YUV410P,AV_PIX_FMT_YUV411P,AV_PIX_FMT_YUVA420P, \
+                           AV_PIX_FMT_YUV420P,AV_PIX_FMT_YUV422P,AV_PIX_FMT_YUVA422P, \
+                           AV_PIX_FMT_YUV440P,AV_PIX_FMT_YUV444P,AV_PIX_FMT_YUVA444P, \
+                           AV_PIX_FMT_YUV420P9,AV_PIX_FMT_YUV422P9,AV_PIX_FMT_YUV444P9, \
+                           AV_PIX_FMT_YUV420P10,AV_PIX_FMT_YUV422P10,AV_PIX_FMT_YUV444P10, \
+                           AV_PIX_FMT_YUV420P12,AV_PIX_FMT_YUV422P12,AV_PIX_FMT_YUV444P12, \
+                           AV_PIX_FMT_YUV420P14,AV_PIX_FMT_YUV422P14,AV_PIX_FMT_YUV444P14, \
+                           AV_PIX_FMT_YUV420P16,AV_PIX_FMT_YUV422P16,AV_PIX_FMT_YUV444P16
+static const enum AVPixelFormat libopenjpeg_rgb_pix_fmts[]  = {RGB_PIXEL_FORMATS};
+static const enum AVPixelFormat libopenjpeg_gray_pix_fmts[] = {GRAY_PIXEL_FORMATS};
+static const enum AVPixelFormat libopenjpeg_yuv_pix_fmts[]  = {YUV_PIXEL_FORMATS};
+static const enum AVPixelFormat libopenjpeg_all_pix_fmts[]  = {RGB_PIXEL_FORMATS,GRAY_PIXEL_FORMATS,YUV_PIXEL_FORMATS};
 
 typedef struct {
     AVClass *class;
@@ -63,7 +63,7 @@ typedef struct {
     int lowqual;
 } LibOpenJPEGContext;
 
-static inline int libopenjpeg_matches_pix_fmt(const opj_image_t *image, enum PixelFormat pix_fmt)
+static inline int libopenjpeg_matches_pix_fmt(const opj_image_t *image, enum AVPixelFormat pix_fmt)
 {
     AVPixFmtDescriptor descriptor = av_pix_fmt_descriptors[pix_fmt];
     int match = 1;
@@ -92,9 +92,9 @@ static inline int libopenjpeg_matches_pix_fmt(const opj_image_t *image, enum Pix
     return match;
 }
 
-static inline enum PixelFormat libopenjpeg_guess_pix_fmt(const opj_image_t *image) {
+static inline enum AVPixelFormat libopenjpeg_guess_pix_fmt(const opj_image_t *image) {
     int index;
-    const enum PixelFormat *possible_fmts = NULL;
+    const enum AVPixelFormat *possible_fmts = NULL;
     int possible_fmts_nb = 0;
 
     switch (image->color_space) {
@@ -122,13 +122,13 @@ static inline enum PixelFormat libopenjpeg_guess_pix_fmt(const opj_image_t *imag
         }
     }
 
-    return PIX_FMT_NONE;
+    return AV_PIX_FMT_NONE;
 }
 
-static inline int libopenjpeg_ispacked(enum PixelFormat pix_fmt) {
+static inline int libopenjpeg_ispacked(enum AVPixelFormat pix_fmt) {
     int i, component_plane;
 
-    if (pix_fmt == PIX_FMT_GRAY16)
+    if (pix_fmt == AV_PIX_FMT_GRAY16)
         return 0;
 
     component_plane = av_pix_fmt_descriptors[pix_fmt].comp[0].plane;
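
The lower-bpp-first ordering demanded by the comment at the top of this file matters because libopenjpeg_guess_pix_fmt() returns the first format whose layout matches the decoded image. A minimal sketch of that first-match loop, reconstructed for illustration since its body is elided from the hunk above:

    for (index = 0; index < possible_fmts_nb; ++index)
        if (libopenjpeg_matches_pix_fmt(image, possible_fmts[index]))
            return possible_fmts[index]; /* lowest-bpp match wins */
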
@@ -294,14 +294,14 @@ static int libopenjpeg_decode_frame(AVCodecContext *avctx,
     avcodec_set_dimensions(avctx, width, height);
 
-    if (avctx->pix_fmt != PIX_FMT_NONE)
+    if (avctx->pix_fmt != AV_PIX_FMT_NONE)
         if (!libopenjpeg_matches_pix_fmt(image, avctx->pix_fmt))
-            avctx->pix_fmt = PIX_FMT_NONE;
+            avctx->pix_fmt = AV_PIX_FMT_NONE;
 
-    if (avctx->pix_fmt == PIX_FMT_NONE)
+    if (avctx->pix_fmt == AV_PIX_FMT_NONE)
         avctx->pix_fmt = libopenjpeg_guess_pix_fmt(image);
 
-    if (avctx->pix_fmt == PIX_FMT_NONE) {
+    if (avctx->pix_fmt == AV_PIX_FMT_NONE) {
         av_log(avctx, AV_LOG_ERROR, "Unable to determine pixel format\n");
         goto done;
     }
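
In summary, the negotiation order implemented above:

    /* 1. if the caller preset avctx->pix_fmt, keep it only when it matches
     *    the decoded image layout;
     * 2. otherwise guess the first (lowest-bpp) matching format;
     * 3. if nothing matches, log an error and bail out. */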
