ffv1enc: split off encoder initialization into a separate function

pull/391/head
Lynne 4 months ago
parent 91a4f1539f
commit a13ef376da
  libavcodec/ffv1enc.c | 401
  libavcodec/ffv1enc.h |  30

libavcodec/ffv1enc.c
@@ -39,6 +39,7 @@
#include "put_golomb.h"
#include "rangecoder.h"
#include "ffv1.h"
#include "ffv1enc.h"
static const int8_t quant5_10bit[256] = {
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1,
@@ -512,16 +513,42 @@ static int sort_stt(FFV1Context *s, uint8_t stt[256])
return print;
}
static av_cold int encode_init(AVCodecContext *avctx)
static int encode_determine_slices(AVCodecContext *avctx)
{
FFV1Context *s = avctx->priv_data;
const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(avctx->pix_fmt);
int i, j, k, m, ret;
if ((ret = ff_ffv1_common_init(avctx)) < 0)
return ret;
int plane_count = 1 + 2*s->chroma_planes + s->transparency;
int max_h_slices = AV_CEIL_RSHIFT(avctx->width , s->chroma_h_shift);
int max_v_slices = AV_CEIL_RSHIFT(avctx->height, s->chroma_v_shift);
s->num_v_slices = (avctx->width > 352 || avctx->height > 288 || !avctx->slices) ? 2 : 1;
s->num_v_slices = FFMIN(s->num_v_slices, max_v_slices);
for (; s->num_v_slices < 32; s->num_v_slices++) {
for (s->num_h_slices = s->num_v_slices; s->num_h_slices < 2*s->num_v_slices; s->num_h_slices++) {
int maxw = (avctx->width + s->num_h_slices - 1) / s->num_h_slices;
int maxh = (avctx->height + s->num_v_slices - 1) / s->num_v_slices;
if (s->num_h_slices > max_h_slices || s->num_v_slices > max_v_slices)
continue;
if (maxw * maxh * (int64_t)(s->bits_per_raw_sample+1) * plane_count > 8<<24)
continue;
if (s->version < 4)
if ( ff_need_new_slices(avctx->width , s->num_h_slices, s->chroma_h_shift)
||ff_need_new_slices(avctx->height, s->num_v_slices, s->chroma_v_shift))
continue;
if (avctx->slices == s->num_h_slices * s->num_v_slices && avctx->slices <= MAX_SLICES || !avctx->slices)
return 0;
}
}
av_log(avctx, AV_LOG_ERROR,
"Unsupported number %d of slices requested, please specify a "
"supported number with -slices (ex:4,6,9,12,16, ...)\n",
avctx->slices);
return AVERROR(ENOSYS);
}
s->version = 0;
av_cold int ff_ffv1_encode_init(AVCodecContext *avctx)
{
FFV1Context *s = avctx->priv_data;
int i, j, k, m, ret;
if ((avctx->flags & (AV_CODEC_FLAG_PASS1 | AV_CODEC_FLAG_PASS2)) ||
avctx->slices > 1)
@@ -568,153 +595,6 @@ static av_cold int encode_init(AVCodecContext *avctx)
return AVERROR_INVALIDDATA;
}
if (s->ac == 1) // Compatibility with common command line usage
s->ac = AC_RANGE_CUSTOM_TAB;
else if (s->ac == AC_RANGE_DEFAULT_TAB_FORCE)
s->ac = AC_RANGE_DEFAULT_TAB;
s->plane_count = 3;
switch(avctx->pix_fmt) {
case AV_PIX_FMT_GRAY9:
case AV_PIX_FMT_YUV444P9:
case AV_PIX_FMT_YUV422P9:
case AV_PIX_FMT_YUV420P9:
case AV_PIX_FMT_YUVA444P9:
case AV_PIX_FMT_YUVA422P9:
case AV_PIX_FMT_YUVA420P9:
if (!avctx->bits_per_raw_sample)
s->bits_per_raw_sample = 9;
case AV_PIX_FMT_GRAY10:
case AV_PIX_FMT_YUV444P10:
case AV_PIX_FMT_YUV440P10:
case AV_PIX_FMT_YUV420P10:
case AV_PIX_FMT_YUV422P10:
case AV_PIX_FMT_YUVA444P10:
case AV_PIX_FMT_YUVA422P10:
case AV_PIX_FMT_YUVA420P10:
if (!avctx->bits_per_raw_sample && !s->bits_per_raw_sample)
s->bits_per_raw_sample = 10;
case AV_PIX_FMT_GRAY12:
case AV_PIX_FMT_YUV444P12:
case AV_PIX_FMT_YUV440P12:
case AV_PIX_FMT_YUV420P12:
case AV_PIX_FMT_YUV422P12:
case AV_PIX_FMT_YUVA444P12:
case AV_PIX_FMT_YUVA422P12:
if (!avctx->bits_per_raw_sample && !s->bits_per_raw_sample)
s->bits_per_raw_sample = 12;
case AV_PIX_FMT_GRAY14:
case AV_PIX_FMT_YUV444P14:
case AV_PIX_FMT_YUV420P14:
case AV_PIX_FMT_YUV422P14:
if (!avctx->bits_per_raw_sample && !s->bits_per_raw_sample)
s->bits_per_raw_sample = 14;
s->packed_at_lsb = 1;
case AV_PIX_FMT_GRAY16:
case AV_PIX_FMT_YUV444P16:
case AV_PIX_FMT_YUV422P16:
case AV_PIX_FMT_YUV420P16:
case AV_PIX_FMT_YUVA444P16:
case AV_PIX_FMT_YUVA422P16:
case AV_PIX_FMT_YUVA420P16:
if (!avctx->bits_per_raw_sample && !s->bits_per_raw_sample) {
s->bits_per_raw_sample = 16;
} else if (!s->bits_per_raw_sample) {
s->bits_per_raw_sample = avctx->bits_per_raw_sample;
}
if (s->bits_per_raw_sample <= 8) {
av_log(avctx, AV_LOG_ERROR, "bits_per_raw_sample invalid\n");
return AVERROR_INVALIDDATA;
}
s->version = FFMAX(s->version, 1);
case AV_PIX_FMT_GRAY8:
case AV_PIX_FMT_YA8:
case AV_PIX_FMT_YUV444P:
case AV_PIX_FMT_YUV440P:
case AV_PIX_FMT_YUV422P:
case AV_PIX_FMT_YUV420P:
case AV_PIX_FMT_YUV411P:
case AV_PIX_FMT_YUV410P:
case AV_PIX_FMT_YUVA444P:
case AV_PIX_FMT_YUVA422P:
case AV_PIX_FMT_YUVA420P:
s->chroma_planes = desc->nb_components < 3 ? 0 : 1;
s->colorspace = 0;
s->transparency = !!(desc->flags & AV_PIX_FMT_FLAG_ALPHA);
if (!avctx->bits_per_raw_sample && !s->bits_per_raw_sample)
s->bits_per_raw_sample = 8;
else if (!s->bits_per_raw_sample)
s->bits_per_raw_sample = 8;
break;
case AV_PIX_FMT_RGB32:
s->colorspace = 1;
s->transparency = 1;
s->chroma_planes = 1;
s->bits_per_raw_sample = 8;
break;
case AV_PIX_FMT_RGBA64:
s->colorspace = 1;
s->transparency = 1;
s->chroma_planes = 1;
s->bits_per_raw_sample = 16;
s->use32bit = 1;
s->version = FFMAX(s->version, 1);
break;
case AV_PIX_FMT_RGB48:
s->colorspace = 1;
s->chroma_planes = 1;
s->bits_per_raw_sample = 16;
s->use32bit = 1;
s->version = FFMAX(s->version, 1);
break;
case AV_PIX_FMT_0RGB32:
s->colorspace = 1;
s->chroma_planes = 1;
s->bits_per_raw_sample = 8;
break;
case AV_PIX_FMT_GBRP9:
if (!avctx->bits_per_raw_sample)
s->bits_per_raw_sample = 9;
case AV_PIX_FMT_GBRP10:
case AV_PIX_FMT_GBRAP10:
if (!avctx->bits_per_raw_sample && !s->bits_per_raw_sample)
s->bits_per_raw_sample = 10;
case AV_PIX_FMT_GBRP12:
case AV_PIX_FMT_GBRAP12:
if (!avctx->bits_per_raw_sample && !s->bits_per_raw_sample)
s->bits_per_raw_sample = 12;
case AV_PIX_FMT_GBRP14:
case AV_PIX_FMT_GBRAP14:
if (!avctx->bits_per_raw_sample && !s->bits_per_raw_sample)
s->bits_per_raw_sample = 14;
case AV_PIX_FMT_GBRP16:
case AV_PIX_FMT_GBRAP16:
if (!avctx->bits_per_raw_sample && !s->bits_per_raw_sample)
s->bits_per_raw_sample = 16;
else if (!s->bits_per_raw_sample)
s->bits_per_raw_sample = avctx->bits_per_raw_sample;
s->transparency = !!(desc->flags & AV_PIX_FMT_FLAG_ALPHA);
s->colorspace = 1;
s->chroma_planes = 1;
if (s->bits_per_raw_sample >= 16) {
s->use32bit = 1;
}
s->version = FFMAX(s->version, 1);
break;
default:
av_log(avctx, AV_LOG_ERROR, "format not supported\n");
return AVERROR(ENOSYS);
}
av_assert0(s->bits_per_raw_sample >= 8);
if (s->bits_per_raw_sample > (s->version > 3 ? 16 : 8)) {
if (s->ac == AC_GOLOMB_RICE) {
av_log(avctx, AV_LOG_INFO,
"high bits_per_raw_sample, forcing range coder\n");
s->ac = AC_RANGE_CUSTOM_TAB;
}
}
if (s->ac == AC_RANGE_CUSTOM_TAB) {
for (i = 1; i < 256; i++)
s->state_transition[i] = ver2_state[i];
@@ -725,7 +605,7 @@ static av_cold int encode_init(AVCodecContext *avctx)
s->state_transition[i] = c.one_state[i];
}
for (i = 0; i < MAX_QUANT_TABLE_SIZE; i++) {
for (i = 0; i < 256; i++) {
s->quant_table_count = 2;
if ((s->qtable == -1 && s->bits_per_raw_sample <= 8) || s->qtable == 1) {
s->quant_tables[0][0][i]= quant11[i];
@@ -760,10 +640,6 @@ static av_cold int encode_init(AVCodecContext *avctx)
if (!s->chroma_planes && s->version > 3)
s->plane_count--;
ret = av_pix_fmt_get_chroma_sub_sample (avctx->pix_fmt, &s->chroma_h_shift, &s->chroma_v_shift);
if (ret)
return ret;
s->picture_number = 0;
if (avctx->flags & (AV_CODEC_FLAG_PASS1 | AV_CODEC_FLAG_PASS2)) {
@@ -856,36 +732,14 @@ static av_cold int encode_init(AVCodecContext *avctx)
av_freep(&best_state);
}
if (s->version > 1) {
int plane_count = 1 + 2*s->chroma_planes + s->transparency;
int max_h_slices = AV_CEIL_RSHIFT(avctx->width , s->chroma_h_shift);
int max_v_slices = AV_CEIL_RSHIFT(avctx->height, s->chroma_v_shift);
s->num_v_slices = (avctx->width > 352 || avctx->height > 288 || !avctx->slices) ? 2 : 1;
s->num_v_slices = FFMIN(s->num_v_slices, max_v_slices);
if (s->version <= 1) {
/* Disable slices when the version doesn't support them */
s->num_h_slices = 1;
s->num_v_slices = 1;
} else {
if ((ret = encode_determine_slices(avctx)) < 0)
return ret;
for (; s->num_v_slices < 32; s->num_v_slices++) {
for (s->num_h_slices = s->num_v_slices; s->num_h_slices < 2*s->num_v_slices; s->num_h_slices++) {
int maxw = (avctx->width + s->num_h_slices - 1) / s->num_h_slices;
int maxh = (avctx->height + s->num_v_slices - 1) / s->num_v_slices;
if (s->num_h_slices > max_h_slices || s->num_v_slices > max_v_slices)
continue;
if (maxw * maxh * (int64_t)(s->bits_per_raw_sample+1) * plane_count > 8<<24)
continue;
if (s->version < 4)
if ( ff_need_new_slices(avctx->width , s->num_h_slices, s->chroma_h_shift)
||ff_need_new_slices(avctx->height, s->num_v_slices, s->chroma_v_shift))
continue;
if (avctx->slices == s->num_h_slices * s->num_v_slices && avctx->slices <= MAX_SLICES || !avctx->slices)
goto slices_ok;
}
}
av_log(avctx, AV_LOG_ERROR,
"Unsupported number %d of slices requested, please specify a "
"supported number with -slices (ex:4,6,9,12,16, ...)\n",
avctx->slices);
return AVERROR(ENOSYS);
slices_ok:
if ((ret = write_extradata(s)) < 0)
return ret;
}
@@ -908,13 +762,178 @@ slices_ok:
if ((ret = ff_ffv1_init_slices_state(s)) < 0)
return ret;
return 0;
}
static int encode_init_internal(AVCodecContext *avctx)
{
int ret;
FFV1Context *s = avctx->priv_data;
const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(avctx->pix_fmt);
if ((ret = ff_ffv1_common_init(avctx)) < 0)
return ret;
if (s->ac == 1) // Compatibility with common command line usage
s->ac = AC_RANGE_CUSTOM_TAB;
else if (s->ac == AC_RANGE_DEFAULT_TAB_FORCE)
s->ac = AC_RANGE_DEFAULT_TAB;
s->plane_count = 3;
switch(avctx->pix_fmt) {
case AV_PIX_FMT_GRAY9:
case AV_PIX_FMT_YUV444P9:
case AV_PIX_FMT_YUV422P9:
case AV_PIX_FMT_YUV420P9:
case AV_PIX_FMT_YUVA444P9:
case AV_PIX_FMT_YUVA422P9:
case AV_PIX_FMT_YUVA420P9:
if (!avctx->bits_per_raw_sample)
s->bits_per_raw_sample = 9;
case AV_PIX_FMT_GRAY10:
case AV_PIX_FMT_YUV444P10:
case AV_PIX_FMT_YUV440P10:
case AV_PIX_FMT_YUV420P10:
case AV_PIX_FMT_YUV422P10:
case AV_PIX_FMT_YUVA444P10:
case AV_PIX_FMT_YUVA422P10:
case AV_PIX_FMT_YUVA420P10:
if (!avctx->bits_per_raw_sample && !s->bits_per_raw_sample)
s->bits_per_raw_sample = 10;
case AV_PIX_FMT_GRAY12:
case AV_PIX_FMT_YUV444P12:
case AV_PIX_FMT_YUV440P12:
case AV_PIX_FMT_YUV420P12:
case AV_PIX_FMT_YUV422P12:
case AV_PIX_FMT_YUVA444P12:
case AV_PIX_FMT_YUVA422P12:
if (!avctx->bits_per_raw_sample && !s->bits_per_raw_sample)
s->bits_per_raw_sample = 12;
case AV_PIX_FMT_GRAY14:
case AV_PIX_FMT_YUV444P14:
case AV_PIX_FMT_YUV420P14:
case AV_PIX_FMT_YUV422P14:
if (!avctx->bits_per_raw_sample && !s->bits_per_raw_sample)
s->bits_per_raw_sample = 14;
s->packed_at_lsb = 1;
case AV_PIX_FMT_GRAY16:
case AV_PIX_FMT_YUV444P16:
case AV_PIX_FMT_YUV422P16:
case AV_PIX_FMT_YUV420P16:
case AV_PIX_FMT_YUVA444P16:
case AV_PIX_FMT_YUVA422P16:
case AV_PIX_FMT_YUVA420P16:
if (!avctx->bits_per_raw_sample && !s->bits_per_raw_sample) {
s->bits_per_raw_sample = 16;
} else if (!s->bits_per_raw_sample) {
s->bits_per_raw_sample = avctx->bits_per_raw_sample;
}
if (s->bits_per_raw_sample <= 8) {
av_log(avctx, AV_LOG_ERROR, "bits_per_raw_sample invalid\n");
return AVERROR_INVALIDDATA;
}
s->version = FFMAX(s->version, 1);
case AV_PIX_FMT_GRAY8:
case AV_PIX_FMT_YA8:
case AV_PIX_FMT_YUV444P:
case AV_PIX_FMT_YUV440P:
case AV_PIX_FMT_YUV422P:
case AV_PIX_FMT_YUV420P:
case AV_PIX_FMT_YUV411P:
case AV_PIX_FMT_YUV410P:
case AV_PIX_FMT_YUVA444P:
case AV_PIX_FMT_YUVA422P:
case AV_PIX_FMT_YUVA420P:
s->chroma_planes = desc->nb_components < 3 ? 0 : 1;
s->colorspace = 0;
s->transparency = !!(desc->flags & AV_PIX_FMT_FLAG_ALPHA);
if (!avctx->bits_per_raw_sample && !s->bits_per_raw_sample)
s->bits_per_raw_sample = 8;
else if (!s->bits_per_raw_sample)
s->bits_per_raw_sample = 8;
break;
case AV_PIX_FMT_RGB32:
s->colorspace = 1;
s->transparency = 1;
s->chroma_planes = 1;
s->bits_per_raw_sample = 8;
break;
case AV_PIX_FMT_RGBA64:
s->colorspace = 1;
s->transparency = 1;
s->chroma_planes = 1;
s->bits_per_raw_sample = 16;
s->use32bit = 1;
s->version = FFMAX(s->version, 1);
break;
case AV_PIX_FMT_RGB48:
s->colorspace = 1;
s->chroma_planes = 1;
s->bits_per_raw_sample = 16;
s->use32bit = 1;
s->version = FFMAX(s->version, 1);
break;
case AV_PIX_FMT_0RGB32:
s->colorspace = 1;
s->chroma_planes = 1;
s->bits_per_raw_sample = 8;
break;
case AV_PIX_FMT_GBRP9:
if (!avctx->bits_per_raw_sample)
s->bits_per_raw_sample = 9;
case AV_PIX_FMT_GBRP10:
case AV_PIX_FMT_GBRAP10:
if (!avctx->bits_per_raw_sample && !s->bits_per_raw_sample)
s->bits_per_raw_sample = 10;
case AV_PIX_FMT_GBRP12:
case AV_PIX_FMT_GBRAP12:
if (!avctx->bits_per_raw_sample && !s->bits_per_raw_sample)
s->bits_per_raw_sample = 12;
case AV_PIX_FMT_GBRP14:
case AV_PIX_FMT_GBRAP14:
if (!avctx->bits_per_raw_sample && !s->bits_per_raw_sample)
s->bits_per_raw_sample = 14;
case AV_PIX_FMT_GBRP16:
case AV_PIX_FMT_GBRAP16:
if (!avctx->bits_per_raw_sample && !s->bits_per_raw_sample)
s->bits_per_raw_sample = 16;
else if (!s->bits_per_raw_sample)
s->bits_per_raw_sample = avctx->bits_per_raw_sample;
s->transparency = !!(desc->flags & AV_PIX_FMT_FLAG_ALPHA);
s->colorspace = 1;
s->chroma_planes = 1;
if (s->bits_per_raw_sample >= 16) {
s->use32bit = 1;
}
s->version = FFMAX(s->version, 1);
break;
default:
av_log(avctx, AV_LOG_ERROR, "format not supported\n");
return AVERROR(ENOSYS);
}
av_assert0(s->bits_per_raw_sample >= 8);
if (s->bits_per_raw_sample > (s->version > 3 ? 16 : 8)) {
if (s->ac == AC_GOLOMB_RICE) {
av_log(avctx, AV_LOG_INFO,
"high bits_per_raw_sample, forcing range coder\n");
s->ac = AC_RANGE_CUSTOM_TAB;
}
}
s->version = 0;
ret = ff_ffv1_encode_init(avctx);
if (ret < 0)
return ret;
#define STATS_OUT_SIZE 1024 * 1024 * 6
if (avctx->flags & AV_CODEC_FLAG_PASS1) {
avctx->stats_out = av_mallocz(STATS_OUT_SIZE);
if (!avctx->stats_out)
return AVERROR(ENOMEM);
for (i = 0; i < s->quant_table_count; i++)
for (j = 0; j < s->max_slice_count; j++) {
for (int i = 0; i < s->quant_table_count; i++)
for (int j = 0; j < s->max_slice_count; j++) {
FFV1SliceContext *sc = &s->slices[j];
av_assert0(!sc->rc_stat2[i]);
sc->rc_stat2[i] = av_mallocz(s->context_count[i] *
@@ -1321,7 +1340,7 @@ const FFCodec ff_ffv1_encoder = {
AV_CODEC_CAP_SLICE_THREADS |
AV_CODEC_CAP_ENCODER_REORDERED_OPAQUE,
.priv_data_size = sizeof(FFV1Context),
.init = encode_init,
.init = encode_init_internal,
FF_CODEC_ENCODE_CB(encode_frame),
.close = ff_ffv1_close,
.p.pix_fmts = (const enum AVPixelFormat[]) {

libavcodec/ffv1enc.h
@@ -0,0 +1,30 @@
/*
 * FFV1 encoder
 *
 * Copyright (c) 2003-2013 Michael Niedermayer <michaelni@gmx.at>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#ifndef AVCODEC_FFV1ENC_H
#define AVCODEC_FFV1ENC_H

#include "avcodec.h"

av_cold int ff_ffv1_encode_init(AVCodecContext *avctx);

#endif /* AVCODEC_FFV1ENC_H */
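
The point of the split is that ff_ffv1_encode_init() is now reachable through the new libavcodec/ffv1enc.h, so an encoder implemented outside ffv1enc.c can run the shared FFV1 setup from its own init callback while keeping backend-specific work separate, much as encode_init_internal() does above. A minimal sketch of such a caller follows; the function name sketch_encoder_init and its surrounding comments are illustrative assumptions, not part of this commit — only ff_ffv1_encode_init(), ff_ffv1_common_init() and the FFV1Context fields come from the source.

#include "libavutil/attributes.h"
#include "avcodec.h"
#include "ffv1.h"
#include "ffv1enc.h"

/* Hypothetical init callback for another FFV1-based encoder living in
 * libavcodec/; illustrative only. */
static av_cold int sketch_encoder_init(AVCodecContext *avctx)
{
    FFV1Context *s = avctx->priv_data;
    int ret;

    if ((ret = ff_ffv1_common_init(avctx)) < 0)
        return ret;

    /* Caller-specific setup (pixel format handling, bits_per_raw_sample,
     * range coder selection, ...) would go here, mirroring what
     * encode_init_internal() does before delegating. */
    s->version = 0;

    /* Shared FFV1 parameter, quant table, slice and state setup. */
    ret = ff_ffv1_encode_init(avctx);
    if (ret < 0)
        return ret;

    /* Backend-specific allocations would follow the shared init. */
    return 0;
}

With this layout the software encoder keeps its pix_fmt switch and two-pass/stats handling in encode_init_internal(), while the slice-count determination, extradata writing and slice state setup that any FFV1 encoder needs stay in ff_ffv1_encode_init().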