vaapi_encode_mjpeg: Use CBS to store parameters and write headers

Also adds greyscale, 4:2:2, 4:4:4 and RGB support.
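
Example usage (illustrative only, not part of the commit; the render node
path, file names and the exact set of uploadable pixel formats are
assumptions and depend on the driver):

    ffmpeg -init_hw_device vaapi=va:/dev/dri/renderD128 -filter_hw_device va \
           -i input.mp4 -vf 'format=nv12,hwupload' \
           -c:v mjpeg_vaapi -global_quality 80 -jfif 1 out.mkv

Substituting e.g. format=yuyv422, yuv444p or rgb0 for nv12 selects the
4:2:2, 4:4:4 or RGB modes added here, subject to driver surface support.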

Author: Mark Thompson
Commit: ff0eb2bce3 (parent 525de2000b, branch pull/299/head)

 configure                       |   2
 doc/encoders.texi               |  17
 libavcodec/vaapi_encode_mjpeg.c | 529

--- a/configure
+++ b/configure
@@ -2974,7 +2974,7 @@ mjpeg_cuvid_decoder_deps="cuvid"
 mjpeg_qsv_encoder_deps="libmfx"
 mjpeg_qsv_encoder_select="qsvenc"
 mjpeg_vaapi_encoder_deps="VAEncPictureParameterBufferJPEG"
-mjpeg_vaapi_encoder_select="vaapi_encode jpegtables"
+mjpeg_vaapi_encoder_select="cbs_jpeg jpegtables vaapi_encode"
 mpeg1_cuvid_decoder_deps="cuvid"
 mpeg1_v4l2m2m_decoder_deps="v4l2_m2m mpeg1_v4l2_m2m"
 mpeg2_crystalhd_decoder_select="crystalhd"

--- a/doc/encoders.texi
+++ b/doc/encoders.texi
@@ -2662,8 +2662,21 @@ Include access unit delimiters in the stream (not included by default).
 @end table
 
 @item mjpeg_vaapi
-Always encodes using the standard quantisation and huffman tables -
-@option{global_quality} scales the standard quantisation table (range 1-100).
+Only baseline DCT encoding is supported.  The encoder always uses the standard
+quantisation and huffman tables - @option{global_quality} scales the standard
+quantisation table (range 1-100).
+
+For YUV, 4:2:0, 4:2:2 and 4:4:4 subsampling modes are supported.  RGB is also
+supported, and will create an RGB JPEG.
+
+@table @option
+@item jfif
+Include JFIF header in each frame (not included by default).
+@item huffman
+Include standard huffman tables (on by default).  Turning this off will save
+a few hundred bytes in each output frame, but may lose compatibility with some
+JPEG decoders which don't fully handle MJPEG.
+@end table
 
 @item mpeg2_vaapi
 @option{profile} and @option{level} set the value of @emph{profile_and_level_indication}.

--- a/libavcodec/vaapi_encode_mjpeg.c
+++ b/libavcodec/vaapi_encode_mjpeg.c
@@ -23,9 +23,12 @@
 #include "libavutil/common.h"
 #include "libavutil/internal.h"
 #include "libavutil/opt.h"
-#include "libavutil/pixfmt.h"
+#include "libavutil/pixdesc.h"
 
 #include "avcodec.h"
+#include "bytestream.h"
+#include "cbs.h"
+#include "cbs_jpeg.h"
 #include "internal.h"
 #include "jpegtables.h"
 #include "mjpeg.h"
@@ -58,253 +61,346 @@ static const unsigned char vaapi_encode_mjpeg_quant_chrominance[64] = {
 typedef struct VAAPIEncodeMJPEGContext {
     VAAPIEncodeContext common;
 
+    // User options.
+    int jfif;
+    int huffman;
+
+    // Derived settings.
     int quality;
-    int component_subsample_h[3];
-    int component_subsample_v[3];
 
-    VAQMatrixBufferJPEG quant_tables;
-    VAHuffmanTableBufferJPEGBaseline huffman_tables;
+    uint8_t jfif_data[14];
+
+    // Writer structures.
+    JPEGRawFrameHeader frame_header;
+    JPEGRawScan        scan;
+    JPEGRawApplicationData jfif_header;
+    JPEGRawQuantisationTableSpecification quant_tables;
+    JPEGRawHuffmanTableSpecification      huffman_tables;
+
+    CodedBitstreamContext *cbc;
+    CodedBitstreamFragment current_fragment;
 } VAAPIEncodeMJPEGContext;
 
-static av_cold void vaapi_encode_mjpeg_copy_huffman(unsigned char *dst_lengths,
-                                                    unsigned char *dst_values,
-                                                    const unsigned char *src_lengths,
-                                                    const unsigned char *src_values)
-{
-    int i, mt;
-
-    ++src_lengths;
-
-    mt = 0;
-    for (i = 0; i < 16; i++)
-        mt += (dst_lengths[i] = src_lengths[i]);
-
-    for (i = 0; i < mt; i++)
-        dst_values[i] = src_values[i];
-}
-
-static av_cold void vaapi_encode_mjpeg_init_tables(AVCodecContext *avctx)
-{
-    VAAPIEncodeMJPEGContext *priv = avctx->priv_data;
-    VAQMatrixBufferJPEG *quant = &priv->quant_tables;
-    VAHuffmanTableBufferJPEGBaseline *huff = &priv->huffman_tables;
-    int i;
-
-    quant->load_lum_quantiser_matrix = 1;
-    quant->load_chroma_quantiser_matrix = 1;
-    for (i = 0; i < 64; i++) {
-        quant->lum_quantiser_matrix[i] =
-            vaapi_encode_mjpeg_quant_luminance[i];
-        quant->chroma_quantiser_matrix[i] =
-            vaapi_encode_mjpeg_quant_chrominance[i];
-    }
-
-    huff->load_huffman_table[0] = 1;
-    vaapi_encode_mjpeg_copy_huffman(huff->huffman_table[0].num_dc_codes,
-                                    huff->huffman_table[0].dc_values,
-                                    avpriv_mjpeg_bits_dc_luminance,
-                                    avpriv_mjpeg_val_dc);
-    vaapi_encode_mjpeg_copy_huffman(huff->huffman_table[0].num_ac_codes,
-                                    huff->huffman_table[0].ac_values,
-                                    avpriv_mjpeg_bits_ac_luminance,
-                                    avpriv_mjpeg_val_ac_luminance);
-    memset(huff->huffman_table[0].pad, 0, sizeof(huff->huffman_table[0].pad));
-
-    huff->load_huffman_table[1] = 1;
-    vaapi_encode_mjpeg_copy_huffman(huff->huffman_table[1].num_dc_codes,
-                                    huff->huffman_table[1].dc_values,
-                                    avpriv_mjpeg_bits_dc_chrominance,
-                                    avpriv_mjpeg_val_dc);
-    vaapi_encode_mjpeg_copy_huffman(huff->huffman_table[1].num_ac_codes,
-                                    huff->huffman_table[1].ac_values,
-                                    avpriv_mjpeg_bits_ac_chrominance,
-                                    avpriv_mjpeg_val_ac_chrominance);
-    memset(huff->huffman_table[1].pad, 0, sizeof(huff->huffman_table[1].pad));
-}
-
-static void vaapi_encode_mjpeg_write_marker(PutBitContext *pbc, int marker)
-{
-    put_bits(pbc, 8, 0xff);
-    put_bits(pbc, 8, marker);
-}
-
 static int vaapi_encode_mjpeg_write_image_header(AVCodecContext *avctx,
                                                  VAAPIEncodePicture *pic,
                                                  VAAPIEncodeSlice *slice,
                                                  char *data, size_t *data_len)
 {
     VAAPIEncodeMJPEGContext *priv = avctx->priv_data;
-    VAEncPictureParameterBufferJPEG *vpic = pic->codec_picture_params;
-    VAEncSliceParameterBufferJPEG *vslice = slice->codec_slice_params;
-    PutBitContext pbc;
-    int t, i, quant_scale;
-
-    init_put_bits(&pbc, data, *data_len);
-
-    vaapi_encode_mjpeg_write_marker(&pbc, SOI);
-
-    // Quantisation table coefficients are scaled for quality by the driver,
-    // so we also need to do it ourselves here so that headers match.
-    if (priv->quality < 50)
-        quant_scale = 5000 / priv->quality;
-    else
-        quant_scale = 200 - 2 * priv->quality;
-
-    for (t = 0; t < 2; t++) {
-        int q;
-
-        vaapi_encode_mjpeg_write_marker(&pbc, DQT);
-
-        put_bits(&pbc, 16, 3 + 64); // Lq
-        put_bits(&pbc, 4, 0); // Pq
-        put_bits(&pbc, 4, t); // Tq
-
-        for (i = 0; i < 64; i++) {
-            q = i[t ? priv->quant_tables.chroma_quantiser_matrix
-                    : priv->quant_tables.lum_quantiser_matrix];
-            q = (q * quant_scale) / 100;
-            if (q < 1)   q = 1;
-            if (q > 255) q = 255;
-            put_bits(&pbc, 8, q);
-        }
-    }
-
-    vaapi_encode_mjpeg_write_marker(&pbc, SOF0);
-
-    put_bits(&pbc, 16, 8 + 3 * vpic->num_components); // Lf
-    put_bits(&pbc, 8,  vpic->sample_bit_depth); // P
-    put_bits(&pbc, 16, vpic->picture_height); // Y
-    put_bits(&pbc, 16, vpic->picture_width); // X
-    put_bits(&pbc, 8,  vpic->num_components); // Nf
-
-    for (i = 0; i < vpic->num_components; i++) {
-        put_bits(&pbc, 8, vpic->component_id[i]); // Ci
-        put_bits(&pbc, 4, priv->component_subsample_h[i]); // Hi
-        put_bits(&pbc, 4, priv->component_subsample_v[i]); // Vi
-        put_bits(&pbc, 8, vpic->quantiser_table_selector[i]); // Tqi
-    }
-
-    for (t = 0; t < 4; t++) {
-        int mt;
-        unsigned char *lengths, *values;
-
-        vaapi_encode_mjpeg_write_marker(&pbc, DHT);
-
-        if ((t & 1) == 0) {
-            lengths = priv->huffman_tables.huffman_table[t / 2].num_dc_codes;
-            values  = priv->huffman_tables.huffman_table[t / 2].dc_values;
-        } else {
-            lengths = priv->huffman_tables.huffman_table[t / 2].num_ac_codes;
-            values  = priv->huffman_tables.huffman_table[t / 2].ac_values;
-        }
-
-        mt = 0;
-        for (i = 0; i < 16; i++)
-            mt += lengths[i];
-
-        put_bits(&pbc, 16, 2 + 17 + mt); // Lh
-        put_bits(&pbc, 4, t & 1); // Tc
-        put_bits(&pbc, 4, t / 2); // Th
-
-        for (i = 0; i < 16; i++)
-            put_bits(&pbc, 8, lengths[i]);
-        for (i = 0; i < mt; i++)
-            put_bits(&pbc, 8, values[i]);
-    }
-
-    vaapi_encode_mjpeg_write_marker(&pbc, SOS);
-
-    av_assert0(vpic->num_components == vslice->num_components);
-
-    put_bits(&pbc, 16, 6 + 2 * vslice->num_components); // Ls
-    put_bits(&pbc, 8, vslice->num_components); // Ns
-
-    for (i = 0; i < vslice->num_components; i++) {
-        put_bits(&pbc, 8, vslice->components[i].component_selector); // Csj
-        put_bits(&pbc, 4, vslice->components[i].dc_table_selector); // Tdj
-        put_bits(&pbc, 4, vslice->components[i].ac_table_selector); // Taj
-    }
-
-    put_bits(&pbc, 8, 0); // Ss
-    put_bits(&pbc, 8, 63); // Se
-    put_bits(&pbc, 4, 0); // Ah
-    put_bits(&pbc, 4, 0); // Al
-
-    *data_len = put_bits_count(&pbc);
-    flush_put_bits(&pbc);
-
-    return 0;
+    CodedBitstreamFragment  *frag = &priv->current_fragment;
+    int err;
+
+    if (priv->jfif) {
+        err = ff_cbs_insert_unit_content(priv->cbc, frag, -1,
+                                         JPEG_MARKER_APPN + 0,
+                                         &priv->jfif_header, NULL);
+        if (err < 0)
+            goto fail;
+    }
+
+    err = ff_cbs_insert_unit_content(priv->cbc, frag, -1,
+                                     JPEG_MARKER_DQT,
+                                     &priv->quant_tables, NULL);
+    if (err < 0)
+        goto fail;
+
+    err = ff_cbs_insert_unit_content(priv->cbc, frag, -1,
+                                     JPEG_MARKER_SOF0,
+                                     &priv->frame_header, NULL);
+    if (err < 0)
+        goto fail;
+
+    if (priv->huffman) {
+        err = ff_cbs_insert_unit_content(priv->cbc, frag, -1,
+                                         JPEG_MARKER_DHT,
+                                         &priv->huffman_tables, NULL);
+        if (err < 0)
+            goto fail;
+    }
+
+    err = ff_cbs_insert_unit_content(priv->cbc, frag, -1,
+                                     JPEG_MARKER_SOS,
+                                     &priv->scan, NULL);
+    if (err < 0)
+        goto fail;
+
+    err = ff_cbs_write_fragment_data(priv->cbc, frag);
+    if (err < 0) {
+        av_log(avctx, AV_LOG_ERROR, "Failed to write image header.\n");
+        goto fail;
+    }
+
+    if (*data_len < 8 * frag->data_size) {
+        av_log(avctx, AV_LOG_ERROR, "Image header too large: "
+               "%zu < %zu.\n", *data_len, 8 * frag->data_size);
+        err = AVERROR(ENOSPC);
+        goto fail;
+    }
+
+    // Remove the EOI at the end of the fragment.
+    memcpy(data, frag->data, frag->data_size - 2);
+    *data_len = 8 * (frag->data_size - 2);
+
+    err = 0;
+fail:
+    ff_cbs_fragment_uninit(priv->cbc, frag);
+    return err;
 }
 
 static int vaapi_encode_mjpeg_write_extra_buffer(AVCodecContext *avctx,
                                                  VAAPIEncodePicture *pic,
                                                  int index, int *type,
                                                  char *data, size_t *data_len)
 {
     VAAPIEncodeMJPEGContext *priv = avctx->priv_data;
+    int t, i, k;
 
     if (index == 0) {
         // Write quantisation tables.
-        if (*data_len < sizeof(priv->quant_tables))
-            return AVERROR(EINVAL);
-        *type = VAQMatrixBufferType;
-        memcpy(data, &priv->quant_tables,
-               *data_len = sizeof(priv->quant_tables));
+        JPEGRawFrameHeader                     *fh = &priv->frame_header;
+        JPEGRawQuantisationTableSpecification *dqt = &priv->quant_tables;
+        VAQMatrixBufferJPEG *quant;
+
+        if (*data_len < sizeof(*quant))
+            return AVERROR(ENOSPC);
+        *type     = VAQMatrixBufferType;
+        *data_len = sizeof(*quant);
+
+        quant = (VAQMatrixBufferJPEG*)data;
+        memset(quant, 0, sizeof(*quant));
+
+        quant->load_lum_quantiser_matrix = 1;
+        for (i = 0; i < 64; i++)
+            quant->lum_quantiser_matrix[i] = dqt->table[fh->Tq[0]].Q[i];
+
+        if (fh->Nf > 1) {
+            quant->load_chroma_quantiser_matrix = 1;
+            for (i = 0; i < 64; i++)
+                quant->chroma_quantiser_matrix[i] =
+                    dqt->table[fh->Tq[1]].Q[i];
+        }
 
     } else if (index == 1) {
         // Write huffman tables.
-        if (*data_len < sizeof(priv->huffman_tables))
-            return AVERROR(EINVAL);
-        *type = VAHuffmanTableBufferType;
-        memcpy(data, &priv->huffman_tables,
-               *data_len = sizeof(priv->huffman_tables));
+        JPEGRawScanHeader                 *sh = &priv->scan.header;
+        JPEGRawHuffmanTableSpecification *dht = &priv->huffman_tables;
+        VAHuffmanTableBufferJPEGBaseline *huff;
+
+        if (*data_len < sizeof(*huff))
+            return AVERROR(ENOSPC);
+        *type     = VAHuffmanTableBufferType;
+        *data_len = sizeof(*huff);
+
+        huff = (VAHuffmanTableBufferJPEGBaseline*)data;
+        memset(huff, 0, sizeof(*huff));
+
+        for (t = 0; t < 1 + (sh->Ns > 1); t++) {
+            const JPEGRawHuffmanTable *ht;
+
+            huff->load_huffman_table[t] = 1;
+
+            ht = &dht->table[2 * t];
+            for (i = k = 0; i < 16; i++)
+                k += (huff->huffman_table[t].num_dc_codes[i] = ht->L[i]);
+            av_assert0(k <= sizeof(huff->huffman_table[t].dc_values));
+            for (i = 0; i < k; i++)
+                huff->huffman_table[t].dc_values[i] = ht->V[i];
+
+            ht = &dht->table[2 * t + 1];
+            for (i = k = 0; i < 16; i++)
+                k += (huff->huffman_table[t].num_ac_codes[i] = ht->L[i]);
+            av_assert0(k <= sizeof(huff->huffman_table[t].ac_values));
+            for (i = 0; i < k; i++)
+                huff->huffman_table[t].ac_values[i] = ht->V[i];
+        }
 
     } else {
         return AVERROR_EOF;
     }
 
     return 0;
 }
 
 static int vaapi_encode_mjpeg_init_picture_params(AVCodecContext *avctx,
                                                   VAAPIEncodePicture *pic)
 {
     VAAPIEncodeMJPEGContext *priv = avctx->priv_data;
+    JPEGRawFrameHeader        *fh = &priv->frame_header;
+    JPEGRawScanHeader         *sh = &priv->scan.header;
     VAEncPictureParameterBufferJPEG *vpic = pic->codec_picture_params;
+    const AVPixFmtDescriptor *desc;
+    const uint8_t *components;
+    int t, i, quant_scale, len;
 
-    vpic->reconstructed_picture = pic->recon_surface;
-    vpic->coded_buf = pic->output_buffer;
-
-    vpic->picture_width  = avctx->width;
-    vpic->picture_height = avctx->height;
-
-    vpic->pic_flags.bits.profile      = 0;
-    vpic->pic_flags.bits.progressive  = 0;
-    vpic->pic_flags.bits.huffman      = 1;
-    vpic->pic_flags.bits.interleaved  = 0;
-    vpic->pic_flags.bits.differential = 0;
-
-    vpic->sample_bit_depth = 8;
-    vpic->num_scan = 1;
-
-    vpic->num_components = 3;
-
-    vpic->component_id[0] = 1;
-    vpic->component_id[1] = 2;
-    vpic->component_id[2] = 3;
-
-    priv->component_subsample_h[0] = 2;
-    priv->component_subsample_v[0] = 2;
-    priv->component_subsample_h[1] = 1;
-    priv->component_subsample_v[1] = 1;
-    priv->component_subsample_h[2] = 1;
-    priv->component_subsample_v[2] = 1;
-
-    vpic->quantiser_table_selector[0] = 0;
-    vpic->quantiser_table_selector[1] = 1;
-    vpic->quantiser_table_selector[2] = 1;
-
-    vpic->quality = priv->quality;
+    desc = av_pix_fmt_desc_get(priv->common.input_frames->sw_format);
+    av_assert0(desc);
+    if (desc->flags & AV_PIX_FMT_FLAG_RGB)
+        components = (uint8_t[3]) { 'R', 'G', 'B' };
+    else
+        components = (uint8_t[3]) { 1, 2, 3 };
+
+    // Frame header.
+
+    fh->P  = 8;
+    fh->Y  = avctx->height;
+    fh->X  = avctx->width;
+    fh->Nf = desc->nb_components;
+
+    for (i = 0; i < fh->Nf; i++) {
+        fh->C[i] = components[i];
+        fh->H[i] = 1 + (i == 0 ? desc->log2_chroma_w : 0);
+        fh->V[i] = 1 + (i == 0 ? desc->log2_chroma_h : 0);
+
+        fh->Tq[i] = !!i;
+    }
+
+    fh->Lf = 8 + 3 * fh->Nf;
+
+    // JFIF header.
+    if (priv->jfif) {
+        JPEGRawApplicationData *app = &priv->jfif_header;
+        AVRational sar = pic->input_image->sample_aspect_ratio;
+        int sar_w, sar_h;
+        PutByteContext pbc;
+
+        bytestream2_init_writer(&pbc, priv->jfif_data,
+                                sizeof(priv->jfif_data));
+
+        bytestream2_put_buffer(&pbc, "JFIF", 5);
+        bytestream2_put_be16(&pbc, 0x0102);
+        bytestream2_put_byte(&pbc, 0);
+
+        av_reduce(&sar_w, &sar_h, sar.num, sar.den, 65535);
+        if (sar_w && sar_h) {
+            bytestream2_put_be16(&pbc, sar_w);
+            bytestream2_put_be16(&pbc, sar_h);
+        } else {
+            bytestream2_put_be16(&pbc, 1);
+            bytestream2_put_be16(&pbc, 1);
+        }
+
+        bytestream2_put_byte(&pbc, 0);
+        bytestream2_put_byte(&pbc, 0);
+
+        av_assert0(bytestream2_get_bytes_left_p(&pbc) == 0);
+
+        app->Lp     = 2 + sizeof(priv->jfif_data);
+        app->Ap     = priv->jfif_data;
+        app->Ap_ref = NULL;
+    }
+
+    // Quantisation tables.
+
+    if (priv->quality < 50)
+        quant_scale = 5000 / priv->quality;
+    else
+        quant_scale = 200 - 2 * priv->quality;
+
+    len = 2;
+
+    for (t = 0; t < 1 + (fh->Nf > 1); t++) {
+        JPEGRawQuantisationTable *quant = &priv->quant_tables.table[t];
+        const uint8_t *data = t == 0 ?
+            vaapi_encode_mjpeg_quant_luminance :
+            vaapi_encode_mjpeg_quant_chrominance;
+
+        quant->Pq = 0;
+        quant->Tq = t;
+        for (i = 0; i < 64; i++)
+            quant->Q[i] = av_clip(data[i] * quant_scale / 100, 1, 255);
+
+        len += 65;
+    }
+
+    priv->quant_tables.Lq = len;
+
+    // Huffman tables.
+
+    len = 2;
+
+    for (t = 0; t < 2 + 2 * (fh->Nf > 1); t++) {
+        JPEGRawHuffmanTable *huff = &priv->huffman_tables.table[t];
+        const uint8_t *lengths, *values;
+        int k;
+
+        switch (t) {
+        case 0:
+            lengths = avpriv_mjpeg_bits_dc_luminance + 1;
+            values  = avpriv_mjpeg_val_dc;
+            break;
+        case 1:
+            lengths = avpriv_mjpeg_bits_ac_luminance + 1;
+            values  = avpriv_mjpeg_val_ac_luminance;
+            break;
+        case 2:
+            lengths = avpriv_mjpeg_bits_dc_chrominance + 1;
+            values  = avpriv_mjpeg_val_dc;
+            break;
+        case 3:
+            lengths = avpriv_mjpeg_bits_ac_chrominance + 1;
+            values  = avpriv_mjpeg_val_ac_chrominance;
+            break;
+        }
+
+        huff->Tc = t % 2;
+        huff->Th = t / 2;
+
+        for (i = k = 0; i < 16; i++)
+            k += (huff->L[i] = lengths[i]);
+
+        for (i = 0; i < k; i++)
+            huff->V[i] = values[i];
+
+        len += 17 + k;
+    }
+
+    priv->huffman_tables.Lh = len;
+
+    // Scan header.
+
+    sh->Ns = fh->Nf;
+
+    for (i = 0; i < fh->Nf; i++) {
+        sh->Cs[i] = fh->C[i];
+        sh->Td[i] = i > 0;
+        sh->Ta[i] = i > 0;
+    }
+
+    sh->Ss = 0;
+    sh->Se = 63;
+    sh->Ah = 0;
+    sh->Al = 0;
+
+    sh->Ls = 6 + 2 * sh->Ns;
+
+    *vpic = (VAEncPictureParameterBufferJPEG) {
+        .reconstructed_picture = pic->recon_surface,
+        .coded_buf             = pic->output_buffer,
+
+        .picture_width  = fh->X,
+        .picture_height = fh->Y,
+
+        .pic_flags.bits = {
+            .profile      = 0,
+            .progressive  = 0,
+            .huffman      = 1,
+            .interleaved  = 0,
+            .differential = 0,
+        },
+
+        .sample_bit_depth = fh->P,
+        .num_scan         = 1,
+        .num_components   = fh->Nf,
+
+        // The driver modifies the provided quantisation tables according
+        // to this quality value; the middle value of 50 makes that the
+        // identity so that they are used unchanged.
+        .quality = 50,
+    };
+
+    for (i = 0; i < fh->Nf; i++) {
+        vpic->component_id[i]             = fh->C[i];
+        vpic->quantiser_table_selector[i] = fh->Tq[i];
+    }
 
     pic->nb_slices = 1;
@@ -315,17 +411,20 @@ static int vaapi_encode_mjpeg_init_slice_params(AVCodecContext *avctx,
                                                 VAAPIEncodePicture *pic,
                                                 VAAPIEncodeSlice *slice)
 {
-    VAEncPictureParameterBufferJPEG *vpic = pic->codec_picture_params;
+    VAAPIEncodeMJPEGContext *priv = avctx->priv_data;
+    JPEGRawScanHeader         *sh = &priv->scan.header;
     VAEncSliceParameterBufferJPEG *vslice = slice->codec_slice_params;
     int i;
 
-    vslice->restart_interval = 0;
-
-    vslice->num_components = vpic->num_components;
-    for (i = 0; i < vslice->num_components; i++) {
-        vslice->components[i].component_selector = i + 1;
-        vslice->components[i].dc_table_selector = (i > 0);
-        vslice->components[i].ac_table_selector = (i > 0);
+    *vslice = (VAEncSliceParameterBufferJPEG) {
+        .restart_interval = 0,
+        .num_components   = sh->Ns,
+    };
+
+    for (i = 0; i < sh->Ns; i++) {
+        vslice->components[i].component_selector = sh->Cs[i];
+        vslice->components[i].dc_table_selector  = sh->Td[i];
+        vslice->components[i].ac_table_selector  = sh->Ta[i];
     }
 
     return 0;
@@ -335,6 +434,7 @@ static av_cold int vaapi_encode_mjpeg_configure(AVCodecContext *avctx)
 {
     VAAPIEncodeContext       *ctx = avctx->priv_data;
     VAAPIEncodeMJPEGContext *priv = avctx->priv_data;
+    int err;
 
     priv->quality = avctx->global_quality;
     if (priv->quality < 1 || priv->quality > 100) {
@@ -354,14 +454,22 @@ static av_cold int vaapi_encode_mjpeg_configure(AVCodecContext *avctx)
         ctx->va_packed_headers |= VA_ENC_PACKED_HEADER_SLICE;
     }
 
-    vaapi_encode_mjpeg_init_tables(avctx);
+    err = ff_cbs_init(&priv->cbc, AV_CODEC_ID_MJPEG, avctx);
+    if (err < 0)
+        return err;
 
     return 0;
 }
 
 static const VAAPIEncodeProfile vaapi_encode_mjpeg_profiles[] = {
+    { FF_PROFILE_MJPEG_HUFFMAN_BASELINE_DCT,
+      8, 1, 0, 0, VAProfileJPEGBaseline },
     { FF_PROFILE_MJPEG_HUFFMAN_BASELINE_DCT,
       8, 3, 1, 1, VAProfileJPEGBaseline },
+    { FF_PROFILE_MJPEG_HUFFMAN_BASELINE_DCT,
+      8, 3, 1, 0, VAProfileJPEGBaseline },
+    { FF_PROFILE_MJPEG_HUFFMAN_BASELINE_DCT,
+      8, 3, 0, 0, VAProfileJPEGBaseline },
     { FF_PROFILE_UNKNOWN }
 };
@@ -398,6 +506,30 @@ static av_cold int vaapi_encode_mjpeg_init(AVCodecContext *avctx)
     return ff_vaapi_encode_init(avctx);
 }
 
+static av_cold int vaapi_encode_mjpeg_close(AVCodecContext *avctx)
+{
+    VAAPIEncodeMJPEGContext *priv = avctx->priv_data;
+
+    ff_cbs_close(&priv->cbc);
+
+    return ff_vaapi_encode_close(avctx);
+}
+
+#define OFFSET(x) offsetof(VAAPIEncodeMJPEGContext, x)
+#define FLAGS (AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM)
+static const AVOption vaapi_encode_mjpeg_options[] = {
+    VAAPI_ENCODE_COMMON_OPTIONS,
+    { "jfif", "Include JFIF header",
+      OFFSET(jfif), AV_OPT_TYPE_BOOL,
+      { .i64 = 0 }, 0, 1, FLAGS },
+    { "huffman", "Include huffman tables",
+      OFFSET(huffman), AV_OPT_TYPE_BOOL,
+      { .i64 = 1 }, 0, 1, FLAGS },
+    { NULL },
+};
+
 static const AVCodecDefault vaapi_encode_mjpeg_defaults[] = {
     { "global_quality", "80" },
     { "b", "0" },
@@ -408,6 +540,7 @@ static const AVCodecDefault vaapi_encode_mjpeg_defaults[] = {
 
 static const AVClass vaapi_encode_mjpeg_class = {
     .class_name = "mjpeg_vaapi",
    .item_name  = av_default_item_name,
+    .option     = vaapi_encode_mjpeg_options,
     .version    = LIBAVUTIL_VERSION_INT,
 };
@@ -419,7 +552,7 @@ AVCodec ff_mjpeg_vaapi_encoder = {
     .priv_data_size = sizeof(VAAPIEncodeMJPEGContext),
     .init           = &vaapi_encode_mjpeg_init,
     .encode2        = &ff_vaapi_encode2,
-    .close          = &ff_vaapi_encode_close,
+    .close          = &vaapi_encode_mjpeg_close,
     .priv_class     = &vaapi_encode_mjpeg_class,
     .capabilities   = AV_CODEC_CAP_HARDWARE,
     .defaults       = vaapi_encode_mjpeg_defaults,
