|
|
|
@@ -935,6 +935,10 @@ static av_cold int encode_init(AVCodecContext *avctx)
     case PIX_FMT_YUV410P:
         s->colorspace= 0;
         break;
+    case PIX_FMT_YUVA420P:
+        s->colorspace= 0;
+        s->transparency= 1;
+        break;
     case PIX_FMT_RGB32:
         s->colorspace= 1;
         s->transparency= 1;
|
|
|
@@ -1097,6 +1101,8 @@ static int encode_slice(AVCodecContext *c, void *arg){
 
         encode_plane(fs, p->data[1] + ps*cx+cy*p->linesize[1], chroma_width, chroma_height, p->linesize[1], 1);
         encode_plane(fs, p->data[2] + ps*cx+cy*p->linesize[2], chroma_width, chroma_height, p->linesize[2], 1);
+        if (fs->transparency)
+            encode_plane(fs, p->data[3] + ps*x + y*p->linesize[3], width, height, p->linesize[3], 2);
     }else{
         encode_rgb_frame(fs, (uint32_t*)(p->data[0]) + ps*x + y*(p->linesize[0]/4), width, height, p->linesize[0]/4);
     }
|
|
|
@@ -1798,7 +1804,7 @@ AVCodec ff_ffv1_encoder = {
     .encode         = encode_frame,
     .close          = common_end,
     .capabilities   = CODEC_CAP_SLICE_THREADS,
-    .pix_fmts= (const enum PixelFormat[]){PIX_FMT_YUV420P, PIX_FMT_YUV444P, PIX_FMT_YUV422P, PIX_FMT_YUV411P, PIX_FMT_YUV410P, PIX_FMT_0RGB32, PIX_FMT_RGB32, PIX_FMT_YUV420P16, PIX_FMT_YUV422P16, PIX_FMT_YUV444P16, PIX_FMT_YUV420P9, PIX_FMT_YUV420P10, PIX_FMT_YUV422P10, PIX_FMT_NONE},
+    .pix_fmts= (const enum PixelFormat[]){PIX_FMT_YUV420P, PIX_FMT_YUVA420P, PIX_FMT_YUV444P, PIX_FMT_YUV422P, PIX_FMT_YUV411P, PIX_FMT_YUV410P, PIX_FMT_0RGB32, PIX_FMT_RGB32, PIX_FMT_YUV420P16, PIX_FMT_YUV422P16, PIX_FMT_YUV444P16, PIX_FMT_YUV420P9, PIX_FMT_YUV420P10, PIX_FMT_YUV422P10, PIX_FMT_NONE},
     .long_name= NULL_IF_CONFIG_SMALL("FFmpeg video codec #1"),
 };
 #endif
|
|
|
|