mmvideo: use the AVFrame API properly.

pull/46/head
Anton Khirnov 11 years ago
parent 2e09096da9
commit 5c96f02901
libavcodec/mmvideo.c | 26 ++++++++++++++------------
1 file changed, 14 insertions(+), 12 deletions(-)

--- a/libavcodec/mmvideo.c
+++ b/libavcodec/mmvideo.c
@@ -48,7 +48,7 @@
 
 typedef struct MmContext {
     AVCodecContext *avctx;
-    AVFrame frame;
+    AVFrame *frame;
     int palette[AVPALETTE_COUNT];
     GetByteContext gb;
 } MmContext;
@@ -61,7 +61,9 @@ static av_cold int mm_decode_init(AVCodecContext *avctx)
 
     avctx->pix_fmt = AV_PIX_FMT_PAL8;
 
-    avcodec_get_frame_defaults(&s->frame);
+    s->frame = av_frame_alloc();
+    if (!s->frame)
+        return AVERROR(ENOMEM);
 
     return 0;
 }
@@ -105,9 +107,9 @@ static int mm_decode_intra(MmContext * s, int half_horiz, int half_vert)
             run_length *=2;
 
         if (color) {
-            memset(s->frame.data[0] + y*s->frame.linesize[0] + x, color, run_length);
+            memset(s->frame->data[0] + y*s->frame->linesize[0] + x, color, run_length);
             if (half_vert)
-                memset(s->frame.data[0] + (y+1)*s->frame.linesize[0] + x, color, run_length);
+                memset(s->frame->data[0] + (y+1)*s->frame->linesize[0] + x, color, run_length);
         }
         x+= run_length;
 
@@ -154,13 +156,13 @@ static int mm_decode_inter(MmContext * s, int half_horiz, int half_vert)
                 int replace = (replace_array >> (7-j)) & 1;
                 if (replace) {
                     int color = bytestream2_get_byte(&data_ptr);
-                    s->frame.data[0][y*s->frame.linesize[0] + x] = color;
+                    s->frame->data[0][y*s->frame->linesize[0] + x] = color;
                     if (half_horiz)
-                        s->frame.data[0][y*s->frame.linesize[0] + x + 1] = color;
+                        s->frame->data[0][y*s->frame->linesize[0] + x + 1] = color;
                     if (half_vert) {
-                        s->frame.data[0][(y+1)*s->frame.linesize[0] + x] = color;
+                        s->frame->data[0][(y+1)*s->frame->linesize[0] + x] = color;
                         if (half_horiz)
-                            s->frame.data[0][(y+1)*s->frame.linesize[0] + x + 1] = color;
+                            s->frame->data[0][(y+1)*s->frame->linesize[0] + x + 1] = color;
                     }
                 }
                 x += 1 + half_horiz;
@@ -189,7 +191,7 @@ static int mm_decode_frame(AVCodecContext *avctx,
     buf_size -= MM_PREAMBLE_SIZE;
     bytestream2_init(&s->gb, buf, buf_size);
 
-    if ((res = ff_reget_buffer(avctx, &s->frame)) < 0) {
+    if ((res = ff_reget_buffer(avctx, s->frame)) < 0) {
         av_log(avctx, AV_LOG_ERROR, "reget_buffer() failed\n");
         return res;
     }
@@ -209,9 +211,9 @@ static int mm_decode_frame(AVCodecContext *avctx,
     if (res < 0)
         return res;
 
-    memcpy(s->frame.data[1], s->palette, AVPALETTE_SIZE);
+    memcpy(s->frame->data[1], s->palette, AVPALETTE_SIZE);
 
-    if ((res = av_frame_ref(data, &s->frame)) < 0)
+    if ((res = av_frame_ref(data, s->frame)) < 0)
         return res;
 
     *got_frame = 1;
@@ -223,7 +225,7 @@ static av_cold int mm_decode_end(AVCodecContext *avctx)
 {
     MmContext *s = avctx->priv_data;
 
-    av_frame_unref(&s->frame);
+    av_frame_free(&s->frame);
 
     return 0;
 }
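
For readers less familiar with the API being adopted here, the sketch below shows the reference-counted AVFrame lifecycle this commit moves the decoder to: allocate a long-lived frame with av_frame_alloc(), hand it to callers via av_frame_ref() instead of copying, and release it with av_frame_free(). It is a standalone illustration against the public libavutil API, not code from the commit; inside libavcodec the decoder re-acquires the frame's data buffer with ff_reget_buffer(), for which the public av_frame_get_buffer() stands in here, and the file name and 320x200 PAL8 dimensions are made up for the example.

/*
 * Standalone sketch (assumption: built against the public libavutil API,
 * not code from this commit). Demonstrates the refcounted AVFrame pattern:
 * av_frame_alloc() / av_frame_ref() / av_frame_free().
 * Build, roughly: gcc avframe_sketch.c -lavutil
 */
#include <stdio.h>
#include <libavutil/frame.h>

int main(void)
{
    int ret = 0;
    AVFrame *persistent = av_frame_alloc(); /* plays the role of MmContext.frame */
    AVFrame *output     = av_frame_alloc(); /* plays the role of the caller's output frame */

    if (!persistent || !output) {
        ret = AVERROR(ENOMEM);
        goto end;
    }

    /* Describe the picture and allocate its data buffers. A decoder would
     * use ff_reget_buffer() here; av_frame_get_buffer() is the public
     * equivalent for a standalone program. Dimensions are illustrative. */
    persistent->format = AV_PIX_FMT_PAL8;
    persistent->width  = 320;
    persistent->height = 200;
    if ((ret = av_frame_get_buffer(persistent, 32)) < 0)
        goto end;

    /* Hand the picture out by reference: output now shares persistent's
     * refcounted buffers, no pixel data is copied. */
    if ((ret = av_frame_ref(output, persistent)) < 0)
        goto end;

    printf("output: %dx%d, linesize[0] = %d\n",
           output->width, output->height, output->linesize[0]);

end:
    av_frame_free(&output);     /* drops output's reference */
    av_frame_free(&persistent); /* frees the frame and the underlying buffers */
    return ret < 0;
}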
