Move av_find_best_pix_fmt_of_2() from avcodec to avutil

This avoids a dependency of libavfilter on libavcodec

See Ticket 3592
Fixes Ticket 2784

Signed-off-by: Michael Niedermayer <michaelni@gmx.at>
Michael Niedermayer
parent 8ff72924cf
commit 617e866e25
Changed files:
  1. libavcodec/avcodec.h (55 lines changed)
  2. libavcodec/imgconvert.c (188 lines changed)
  3. libavfilter/avfiltergraph.c (3 lines changed)
  4. libavutil/Makefile (1 line changed)
  5. libavutil/pixdesc.c (223 lines changed)
  6. libavutil/pixdesc.h (50 lines changed)
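For callers the migration is mechanical: drop the libavcodec include and call the av_-prefixed function now declared in libavutil/pixdesc.h, as the avfiltergraph.c hunk below does. A minimal sketch, assuming a hypothetical wrapper function (only the API call itself comes from this patch):

#include "libavutil/pixdesc.h"   /* now declares av_find_best_pix_fmt_of_2() */

/* Pick the less lossy of two candidate output formats for a given source.
 * Previously this required libavcodec/avcodec.h and
 * avcodec_find_best_pix_fmt_of_2(); the signature is unchanged. */
static enum AVPixelFormat pick_better(enum AVPixelFormat a, enum AVPixelFormat b,
                                      enum AVPixelFormat src, int has_alpha)
{
    return av_find_best_pix_fmt_of_2(a, b, src, has_alpha, NULL);
}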

@@ -4690,30 +4690,8 @@ void avcodec_get_chroma_sub_sample(enum AVPixelFormat pix_fmt, int *h_shift, int
*/
unsigned int avcodec_pix_fmt_to_codec_tag(enum AVPixelFormat pix_fmt);
#define FF_LOSS_RESOLUTION 0x0001 /**< loss due to resolution change */
#define FF_LOSS_DEPTH 0x0002 /**< loss due to color depth change */
#define FF_LOSS_COLORSPACE 0x0004 /**< loss due to color space conversion */
#define FF_LOSS_ALPHA 0x0008 /**< loss of alpha bits */
#define FF_LOSS_COLORQUANT 0x0010 /**< loss due to color quantization */
#define FF_LOSS_CHROMA 0x0020 /**< loss of chroma (e.g. RGB to gray conversion) */
/**
* Compute what kind of losses will occur when converting from one specific
* pixel format to another.
* When converting from one pixel format to another, information loss may occur.
* For example, when converting from RGB24 to GRAY, the color information will
* be lost. Similarly, other losses occur when converting from some formats to
* other formats. These losses can involve loss of chroma, but also loss of
* resolution, loss of color depth, loss due to the color space conversion, loss
* of the alpha bits or loss due to color quantization.
* avcodec_get_pix_fmt_loss() informs you about the various types of losses
* which will occur when converting from one pixel format to another.
*
* @param[in] dst_pix_fmt destination pixel format
* @param[in] src_pix_fmt source pixel format
* @param[in] has_alpha Whether the source pixel format alpha channel is used.
* @return Combination of flags informing you what kind of losses will occur
* (maximum loss for an invalid dst_pix_fmt).
/**
* @deprecated see av_get_pix_fmt_loss()
*/
int avcodec_get_pix_fmt_loss(enum AVPixelFormat dst_pix_fmt, enum AVPixelFormat src_pix_fmt,
int has_alpha);
@@ -4740,34 +4718,7 @@ enum AVPixelFormat avcodec_find_best_pix_fmt_of_list(const enum AVPixelFormat *p
int has_alpha, int *loss_ptr);
/**
* Find the best pixel format to convert to given a certain source pixel
* format and a selection of two destination pixel formats. When converting from
* one pixel format to another, information loss may occur. For example, when converting
* from RGB24 to GRAY, the color information will be lost. Similarly, other losses occur when
* converting from some formats to other formats. avcodec_find_best_pix_fmt_of_2() selects which of
* the given pixel formats should be used to suffer the least amount of loss.
*
* If one of the destination formats is AV_PIX_FMT_NONE the other pixel format (if valid) will be
* returned.
*
* @code
* src_pix_fmt = AV_PIX_FMT_YUV420P;
* dst_pix_fmt1= AV_PIX_FMT_RGB24;
* dst_pix_fmt2= AV_PIX_FMT_GRAY8;
* dst_pix_fmt3= AV_PIX_FMT_RGB8;
* loss= FF_LOSS_CHROMA; // don't care about chroma loss, so chroma loss will be ignored.
* dst_pix_fmt = avcodec_find_best_pix_fmt_of_2(dst_pix_fmt1, dst_pix_fmt2, src_pix_fmt, alpha, &loss);
* dst_pix_fmt = avcodec_find_best_pix_fmt_of_2(dst_pix_fmt, dst_pix_fmt3, src_pix_fmt, alpha, &loss);
* @endcode
*
* @param[in] dst_pix_fmt1 One of the two destination pixel formats to choose from
* @param[in] dst_pix_fmt2 The other of the two destination pixel formats to choose from
* @param[in] src_pix_fmt Source pixel format
* @param[in] has_alpha Whether the source pixel format alpha channel is used.
* @param[in, out] loss_ptr Combination of loss flags. In: selects which of the losses to ignore, i.e.
* NULL or value of zero means we care about all losses. Out: the loss
* that occurs when converting from src to selected dst pixel format.
* @return The best pixel format to convert to or -1 if none was found.
* @deprecated see av_find_best_pix_fmt_of_2()
*/
enum AVPixelFormat avcodec_find_best_pix_fmt_of_2(enum AVPixelFormat dst_pix_fmt1, enum AVPixelFormat dst_pix_fmt2,
enum AVPixelFormat src_pix_fmt, int has_alpha, int *loss_ptr);

@@ -40,12 +40,6 @@
#include "libavutil/pixdesc.h"
#include "libavutil/imgutils.h"
#define FF_COLOR_NA -1
#define FF_COLOR_RGB 0 /**< RGB color space */
#define FF_COLOR_GRAY 1 /**< gray color space */
#define FF_COLOR_YUV 2 /**< YUV color space. 16 <= Y <= 235, 16 <= U, V <= 240 */
#define FF_COLOR_YUV_JPEG 3 /**< YUV color space. 0 <= Y <= 255, 0 <= U, V <= 255 */
#if HAVE_MMX_EXTERNAL
#define deinterlace_line_inplace ff_deinterlace_line_inplace_mmx
#define deinterlace_line ff_deinterlace_line_mmx
@@ -54,10 +48,6 @@
#define deinterlace_line deinterlace_line_c
#endif
#define pixdesc_has_alpha(pixdesc) \
((pixdesc)->nb_components == 2 || (pixdesc)->nb_components == 4 || (pixdesc)->flags & AV_PIX_FMT_FLAG_PAL)
void avcodec_get_chroma_sub_sample(enum AVPixelFormat pix_fmt, int *h_shift, int *v_shift)
{
const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt);
@@ -66,189 +56,17 @@ void avcodec_get_chroma_sub_sample(enum AVPixelFormat pix_fmt, int *h_shift, int
*v_shift = desc->log2_chroma_h;
}
static int get_color_type(const AVPixFmtDescriptor *desc) {
if (desc->flags & AV_PIX_FMT_FLAG_PAL)
return FF_COLOR_RGB;
if(desc->nb_components == 1 || desc->nb_components == 2)
return FF_COLOR_GRAY;
if(desc->name && !strncmp(desc->name, "yuvj", 4))
return FF_COLOR_YUV_JPEG;
if(desc->flags & AV_PIX_FMT_FLAG_RGB)
return FF_COLOR_RGB;
if(desc->nb_components == 0)
return FF_COLOR_NA;
return FF_COLOR_YUV;
}
static int get_pix_fmt_depth(int *min, int *max, enum AVPixelFormat pix_fmt)
{
const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt);
int i;
if (!desc || !desc->nb_components) {
*min = *max = 0;
return AVERROR(EINVAL);
}
*min = INT_MAX, *max = -INT_MAX;
for (i = 0; i < desc->nb_components; i++) {
*min = FFMIN(desc->comp[i].depth_minus1+1, *min);
*max = FFMAX(desc->comp[i].depth_minus1+1, *max);
}
return 0;
}
static int get_pix_fmt_score(enum AVPixelFormat dst_pix_fmt,
enum AVPixelFormat src_pix_fmt,
unsigned *lossp, unsigned consider)
{
const AVPixFmtDescriptor *src_desc = av_pix_fmt_desc_get(src_pix_fmt);
const AVPixFmtDescriptor *dst_desc = av_pix_fmt_desc_get(dst_pix_fmt);
int src_color, dst_color;
int src_min_depth, src_max_depth, dst_min_depth, dst_max_depth;
int ret, loss, i, nb_components;
int score = INT_MAX - 1;
if (dst_pix_fmt >= AV_PIX_FMT_NB || dst_pix_fmt <= AV_PIX_FMT_NONE)
return ~0;
/* compute loss */
*lossp = loss = 0;
if (dst_pix_fmt == src_pix_fmt)
return INT_MAX;
if ((ret = get_pix_fmt_depth(&src_min_depth, &src_max_depth, src_pix_fmt)) < 0)
return ret;
if ((ret = get_pix_fmt_depth(&dst_min_depth, &dst_max_depth, dst_pix_fmt)) < 0)
return ret;
src_color = get_color_type(src_desc);
dst_color = get_color_type(dst_desc);
if (dst_pix_fmt == AV_PIX_FMT_PAL8)
nb_components = FFMIN(src_desc->nb_components, 4);
else
nb_components = FFMIN(src_desc->nb_components, dst_desc->nb_components);
for (i = 0; i < nb_components; i++) {
int depth_minus1 = (dst_pix_fmt == AV_PIX_FMT_PAL8) ? 7/nb_components : dst_desc->comp[i].depth_minus1;
if (src_desc->comp[i].depth_minus1 > depth_minus1 && (consider & FF_LOSS_DEPTH)) {
loss |= FF_LOSS_DEPTH;
score -= 65536 >> depth_minus1;
}
}
if (consider & FF_LOSS_RESOLUTION) {
if (dst_desc->log2_chroma_w > src_desc->log2_chroma_w) {
loss |= FF_LOSS_RESOLUTION;
score -= 256 << dst_desc->log2_chroma_w;
}
if (dst_desc->log2_chroma_h > src_desc->log2_chroma_h) {
loss |= FF_LOSS_RESOLUTION;
score -= 256 << dst_desc->log2_chroma_h;
}
// don't favor 422 over 420 if downsampling is needed, because 420 has much better support on the decoder side
if (dst_desc->log2_chroma_w == 1 && src_desc->log2_chroma_w == 0 &&
dst_desc->log2_chroma_h == 1 && src_desc->log2_chroma_h == 0 ) {
score += 512;
}
}
if(consider & FF_LOSS_COLORSPACE)
switch(dst_color) {
case FF_COLOR_RGB:
if (src_color != FF_COLOR_RGB &&
src_color != FF_COLOR_GRAY)
loss |= FF_LOSS_COLORSPACE;
break;
case FF_COLOR_GRAY:
if (src_color != FF_COLOR_GRAY)
loss |= FF_LOSS_COLORSPACE;
break;
case FF_COLOR_YUV:
if (src_color != FF_COLOR_YUV)
loss |= FF_LOSS_COLORSPACE;
break;
case FF_COLOR_YUV_JPEG:
if (src_color != FF_COLOR_YUV_JPEG &&
src_color != FF_COLOR_YUV &&
src_color != FF_COLOR_GRAY)
loss |= FF_LOSS_COLORSPACE;
break;
default:
/* fail safe test */
if (src_color != dst_color)
loss |= FF_LOSS_COLORSPACE;
break;
}
if(loss & FF_LOSS_COLORSPACE)
score -= (nb_components * 65536) >> FFMIN(dst_desc->comp[0].depth_minus1, src_desc->comp[0].depth_minus1);
if (dst_color == FF_COLOR_GRAY &&
src_color != FF_COLOR_GRAY && (consider & FF_LOSS_CHROMA)) {
loss |= FF_LOSS_CHROMA;
score -= 2 * 65536;
}
if (!pixdesc_has_alpha(dst_desc) && (pixdesc_has_alpha(src_desc) && (consider & FF_LOSS_ALPHA))) {
loss |= FF_LOSS_ALPHA;
score -= 65536;
}
if (dst_pix_fmt == AV_PIX_FMT_PAL8 && (consider & FF_LOSS_COLORQUANT) &&
(src_pix_fmt != AV_PIX_FMT_PAL8 && (src_color != FF_COLOR_GRAY || (pixdesc_has_alpha(src_desc) && (consider & FF_LOSS_ALPHA))))) {
loss |= FF_LOSS_COLORQUANT;
score -= 65536;
}
*lossp = loss;
return score;
}
int avcodec_get_pix_fmt_loss(enum AVPixelFormat dst_pix_fmt,
enum AVPixelFormat src_pix_fmt,
int has_alpha)
{
int loss;
int ret = get_pix_fmt_score(dst_pix_fmt, src_pix_fmt, &loss, has_alpha ? ~0 : ~FF_LOSS_ALPHA);
if (ret < 0)
return ret;
return loss;
return av_get_pix_fmt_loss(dst_pix_fmt, src_pix_fmt, has_alpha);
}
enum AVPixelFormat avcodec_find_best_pix_fmt_of_2(enum AVPixelFormat dst_pix_fmt1, enum AVPixelFormat dst_pix_fmt2,
enum AVPixelFormat src_pix_fmt, int has_alpha, int *loss_ptr)
{
enum AVPixelFormat dst_pix_fmt;
int loss1, loss2, loss_mask;
const AVPixFmtDescriptor *desc1 = av_pix_fmt_desc_get(dst_pix_fmt1);
const AVPixFmtDescriptor *desc2 = av_pix_fmt_desc_get(dst_pix_fmt2);
int score1, score2;
loss_mask= loss_ptr?~*loss_ptr:~0; /* use loss mask if provided */
if(!has_alpha)
loss_mask &= ~FF_LOSS_ALPHA;
dst_pix_fmt = AV_PIX_FMT_NONE;
score1 = get_pix_fmt_score(dst_pix_fmt1, src_pix_fmt, &loss1, loss_mask);
score2 = get_pix_fmt_score(dst_pix_fmt2, src_pix_fmt, &loss2, loss_mask);
if (score1 == score2) {
if(av_get_padded_bits_per_pixel(desc2) != av_get_padded_bits_per_pixel(desc1)) {
dst_pix_fmt = av_get_padded_bits_per_pixel(desc2) < av_get_padded_bits_per_pixel(desc1) ? dst_pix_fmt2 : dst_pix_fmt1;
} else {
dst_pix_fmt = desc2->nb_components < desc1->nb_components ? dst_pix_fmt2 : dst_pix_fmt1;
}
} else {
dst_pix_fmt = score1 < score2 ? dst_pix_fmt2 : dst_pix_fmt1;
}
if (loss_ptr)
*loss_ptr = avcodec_get_pix_fmt_loss(dst_pix_fmt, src_pix_fmt, has_alpha);
return dst_pix_fmt;
return av_find_best_pix_fmt_of_2(dst_pix_fmt1, dst_pix_fmt2, src_pix_fmt, has_alpha, loss_ptr);
}
#if AV_HAVE_INCOMPATIBLE_LIBAV_ABI
@@ -657,7 +475,7 @@ int main(void){
av_log(NULL, AV_LOG_INFO, "%3d unused pixel format values\n", skip);
skip = 0;
}
av_log(NULL, AV_LOG_INFO, "pix fmt %s yuv_plan:%d avg_bpp:%d colortype:%d\n", desc->name, is_yuv_planar(desc), av_get_padded_bits_per_pixel(desc), get_color_type(desc));
av_log(NULL, AV_LOG_INFO, "pix fmt %s yuv_plan:%d avg_bpp:%d\n", desc->name, is_yuv_planar(desc), av_get_padded_bits_per_pixel(desc));
if ((!(desc->flags & AV_PIX_FMT_FLAG_ALPHA)) != (desc->nb_components != 2 && desc->nb_components != 4)) {
av_log(NULL, AV_LOG_ERROR, "Alpha flag mismatch\n");
err = 1;

@@ -31,7 +31,6 @@
#include "libavutil/internal.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "libavcodec/avcodec.h" // avcodec_find_best_pix_fmt_of_2()
#include "avfilter.h"
#include "formats.h"
@@ -628,7 +627,7 @@ static int pick_format(AVFilterLink *link, AVFilterLink *ref)
int i;
for (i=0; i<link->in_formats->nb_formats; i++) {
enum AVPixelFormat p = link->in_formats->formats[i];
best= avcodec_find_best_pix_fmt_of_2(best, p, ref->format, has_alpha, NULL);
best= av_find_best_pix_fmt_of_2(best, p, ref->format, has_alpha, NULL);
}
av_log(link->src,AV_LOG_DEBUG, "picking %s out of %d ref:%s alpha:%d\n",
av_get_pix_fmt_name(best), link->in_formats->nb_formats,

@@ -164,6 +164,7 @@ TESTPROGS = adler32 \
opt \
pca \
parseutils \
pixdesc \
random_seed \
rational \
ripemd \

@@ -2082,3 +2082,226 @@ enum AVPixelFormat av_pix_fmt_swap_endianness(enum AVPixelFormat pix_fmt)
return get_pix_fmt_internal(name);
}
#define FF_COLOR_NA -1
#define FF_COLOR_RGB 0 /**< RGB color space */
#define FF_COLOR_GRAY 1 /**< gray color space */
#define FF_COLOR_YUV 2 /**< YUV color space. 16 <= Y <= 235, 16 <= U, V <= 240 */
#define FF_COLOR_YUV_JPEG 3 /**< YUV color space. 0 <= Y <= 255, 0 <= U, V <= 255 */
#define pixdesc_has_alpha(pixdesc) \
((pixdesc)->nb_components == 2 || (pixdesc)->nb_components == 4 || (pixdesc)->flags & AV_PIX_FMT_FLAG_PAL)
static int get_color_type(const AVPixFmtDescriptor *desc) {
if (desc->flags & AV_PIX_FMT_FLAG_PAL)
return FF_COLOR_RGB;
if(desc->nb_components == 1 || desc->nb_components == 2)
return FF_COLOR_GRAY;
if(desc->name && !strncmp(desc->name, "yuvj", 4))
return FF_COLOR_YUV_JPEG;
if(desc->flags & AV_PIX_FMT_FLAG_RGB)
return FF_COLOR_RGB;
if(desc->nb_components == 0)
return FF_COLOR_NA;
return FF_COLOR_YUV;
}
static int get_pix_fmt_depth(int *min, int *max, enum AVPixelFormat pix_fmt)
{
const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(pix_fmt);
int i;
if (!desc || !desc->nb_components) {
*min = *max = 0;
return AVERROR(EINVAL);
}
*min = INT_MAX, *max = -INT_MAX;
for (i = 0; i < desc->nb_components; i++) {
*min = FFMIN(desc->comp[i].depth_minus1+1, *min);
*max = FFMAX(desc->comp[i].depth_minus1+1, *max);
}
return 0;
}
static int get_pix_fmt_score(enum AVPixelFormat dst_pix_fmt,
enum AVPixelFormat src_pix_fmt,
unsigned *lossp, unsigned consider)
{
const AVPixFmtDescriptor *src_desc = av_pix_fmt_desc_get(src_pix_fmt);
const AVPixFmtDescriptor *dst_desc = av_pix_fmt_desc_get(dst_pix_fmt);
int src_color, dst_color;
int src_min_depth, src_max_depth, dst_min_depth, dst_max_depth;
int ret, loss, i, nb_components;
int score = INT_MAX - 1;
if (dst_pix_fmt >= AV_PIX_FMT_NB || dst_pix_fmt <= AV_PIX_FMT_NONE)
return ~0;
/* compute loss */
*lossp = loss = 0;
if (dst_pix_fmt == src_pix_fmt)
return INT_MAX;
if ((ret = get_pix_fmt_depth(&src_min_depth, &src_max_depth, src_pix_fmt)) < 0)
return ret;
if ((ret = get_pix_fmt_depth(&dst_min_depth, &dst_max_depth, dst_pix_fmt)) < 0)
return ret;
src_color = get_color_type(src_desc);
dst_color = get_color_type(dst_desc);
if (dst_pix_fmt == AV_PIX_FMT_PAL8)
nb_components = FFMIN(src_desc->nb_components, 4);
else
nb_components = FFMIN(src_desc->nb_components, dst_desc->nb_components);
for (i = 0; i < nb_components; i++) {
int depth_minus1 = (dst_pix_fmt == AV_PIX_FMT_PAL8) ? 7/nb_components : dst_desc->comp[i].depth_minus1;
if (src_desc->comp[i].depth_minus1 > depth_minus1 && (consider & FF_LOSS_DEPTH)) {
loss |= FF_LOSS_DEPTH;
score -= 65536 >> depth_minus1;
}
}
if (consider & FF_LOSS_RESOLUTION) {
if (dst_desc->log2_chroma_w > src_desc->log2_chroma_w) {
loss |= FF_LOSS_RESOLUTION;
score -= 256 << dst_desc->log2_chroma_w;
}
if (dst_desc->log2_chroma_h > src_desc->log2_chroma_h) {
loss |= FF_LOSS_RESOLUTION;
score -= 256 << dst_desc->log2_chroma_h;
}
// don't favor 422 over 420 if downsampling is needed, because 420 has much better support on the decoder side
if (dst_desc->log2_chroma_w == 1 && src_desc->log2_chroma_w == 0 &&
dst_desc->log2_chroma_h == 1 && src_desc->log2_chroma_h == 0 ) {
score += 512;
}
}
if(consider & FF_LOSS_COLORSPACE)
switch(dst_color) {
case FF_COLOR_RGB:
if (src_color != FF_COLOR_RGB &&
src_color != FF_COLOR_GRAY)
loss |= FF_LOSS_COLORSPACE;
break;
case FF_COLOR_GRAY:
if (src_color != FF_COLOR_GRAY)
loss |= FF_LOSS_COLORSPACE;
break;
case FF_COLOR_YUV:
if (src_color != FF_COLOR_YUV)
loss |= FF_LOSS_COLORSPACE;
break;
case FF_COLOR_YUV_JPEG:
if (src_color != FF_COLOR_YUV_JPEG &&
src_color != FF_COLOR_YUV &&
src_color != FF_COLOR_GRAY)
loss |= FF_LOSS_COLORSPACE;
break;
default:
/* fail safe test */
if (src_color != dst_color)
loss |= FF_LOSS_COLORSPACE;
break;
}
if(loss & FF_LOSS_COLORSPACE)
score -= (nb_components * 65536) >> FFMIN(dst_desc->comp[0].depth_minus1, src_desc->comp[0].depth_minus1);
if (dst_color == FF_COLOR_GRAY &&
src_color != FF_COLOR_GRAY && (consider & FF_LOSS_CHROMA)) {
loss |= FF_LOSS_CHROMA;
score -= 2 * 65536;
}
if (!pixdesc_has_alpha(dst_desc) && (pixdesc_has_alpha(src_desc) && (consider & FF_LOSS_ALPHA))) {
loss |= FF_LOSS_ALPHA;
score -= 65536;
}
if (dst_pix_fmt == AV_PIX_FMT_PAL8 && (consider & FF_LOSS_COLORQUANT) &&
(src_pix_fmt != AV_PIX_FMT_PAL8 && (src_color != FF_COLOR_GRAY || (pixdesc_has_alpha(src_desc) && (consider & FF_LOSS_ALPHA))))) {
loss |= FF_LOSS_COLORQUANT;
score -= 65536;
}
*lossp = loss;
return score;
}
int av_get_pix_fmt_loss(enum AVPixelFormat dst_pix_fmt,
enum AVPixelFormat src_pix_fmt,
int has_alpha)
{
int loss;
int ret = get_pix_fmt_score(dst_pix_fmt, src_pix_fmt, &loss, has_alpha ? ~0 : ~FF_LOSS_ALPHA);
if (ret < 0)
return ret;
return loss;
}
enum AVPixelFormat av_find_best_pix_fmt_of_2(enum AVPixelFormat dst_pix_fmt1, enum AVPixelFormat dst_pix_fmt2,
enum AVPixelFormat src_pix_fmt, int has_alpha, int *loss_ptr)
{
enum AVPixelFormat dst_pix_fmt;
int loss1, loss2, loss_mask;
const AVPixFmtDescriptor *desc1 = av_pix_fmt_desc_get(dst_pix_fmt1);
const AVPixFmtDescriptor *desc2 = av_pix_fmt_desc_get(dst_pix_fmt2);
int score1, score2;
loss_mask= loss_ptr?~*loss_ptr:~0; /* use loss mask if provided */
if(!has_alpha)
loss_mask &= ~FF_LOSS_ALPHA;
dst_pix_fmt = AV_PIX_FMT_NONE;
score1 = get_pix_fmt_score(dst_pix_fmt1, src_pix_fmt, &loss1, loss_mask);
score2 = get_pix_fmt_score(dst_pix_fmt2, src_pix_fmt, &loss2, loss_mask);
if (score1 == score2) {
if(av_get_padded_bits_per_pixel(desc2) != av_get_padded_bits_per_pixel(desc1)) {
dst_pix_fmt = av_get_padded_bits_per_pixel(desc2) < av_get_padded_bits_per_pixel(desc1) ? dst_pix_fmt2 : dst_pix_fmt1;
} else {
dst_pix_fmt = desc2->nb_components < desc1->nb_components ? dst_pix_fmt2 : dst_pix_fmt1;
}
} else {
dst_pix_fmt = score1 < score2 ? dst_pix_fmt2 : dst_pix_fmt1;
}
if (loss_ptr)
*loss_ptr = av_get_pix_fmt_loss(dst_pix_fmt, src_pix_fmt, has_alpha);
return dst_pix_fmt;
}
#ifdef TEST
int main(void){
int i;
int err=0;
int skip = 0;
for (i=0; i<AV_PIX_FMT_NB*2; i++) {
const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(i);
if(!desc || !desc->name) {
skip ++;
continue;
}
if (skip) {
av_log(NULL, AV_LOG_INFO, "%3d unused pixel format values\n", skip);
skip = 0;
}
av_log(NULL, AV_LOG_INFO, "pix fmt %s avg_bpp:%d colortype:%d\n", desc->name, av_get_padded_bits_per_pixel(desc), get_color_type(desc));
if ((!(desc->flags & AV_PIX_FMT_FLAG_ALPHA)) != (desc->nb_components != 2 && desc->nb_components != 4)) {
av_log(NULL, AV_LOG_ERROR, "Alpha flag mismatch\n");
err = 1;
}
}
return err;
}
#endif

@@ -255,7 +255,7 @@ enum AVPixelFormat av_pix_fmt_desc_get_id(const AVPixFmtDescriptor *desc);
* Utility function to access log2_chroma_w and log2_chroma_h from
* the pixel format AVPixFmtDescriptor.
*
* See avcodec_get_chroma_sub_sample() for a function that asserts a
* See av_get_chroma_sub_sample() for a function that asserts a
* valid pixel format instead of returning an error code.
* It's recommended that you use avcodec_get_chroma_sub_sample unless
* you do check the return code!
@@ -287,5 +287,53 @@ void ff_check_pixfmt_descriptors(void);
*/
enum AVPixelFormat av_pix_fmt_swap_endianness(enum AVPixelFormat pix_fmt);
#define FF_LOSS_RESOLUTION 0x0001 /**< loss due to resolution change */
#define FF_LOSS_DEPTH 0x0002 /**< loss due to color depth change */
#define FF_LOSS_COLORSPACE 0x0004 /**< loss due to color space conversion */
#define FF_LOSS_ALPHA 0x0008 /**< loss of alpha bits */
#define FF_LOSS_COLORQUANT 0x0010 /**< loss due to color quantization */
#define FF_LOSS_CHROMA 0x0020 /**< loss of chroma (e.g. RGB to gray conversion) */
/**
* Compute what kind of losses will occur when converting from one specific
* pixel format to another.
* When converting from one pixel format to another, information loss may occur.
* For example, when converting from RGB24 to GRAY, the color information will
* be lost. Similarly, other losses occur when converting from some formats to
* other formats. These losses can involve loss of chroma, but also loss of
* resolution, loss of color depth, loss due to the color space conversion, loss
* of the alpha bits or loss due to color quantization.
* av_get_pix_fmt_loss() informs you about the various types of losses
* which will occur when converting from one pixel format to another.
*
* @param[in] dst_pix_fmt destination pixel format
* @param[in] src_pix_fmt source pixel format
* @param[in] has_alpha Whether the source pixel format alpha channel is used.
* @return Combination of flags informing you what kind of losses will occur
* (maximum loss for an invalid dst_pix_fmt).
*/
int av_get_pix_fmt_loss(enum AVPixelFormat dst_pix_fmt,
enum AVPixelFormat src_pix_fmt,
int has_alpha);
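As an illustration (example code, not part of the patch; the function name is hypothetical), querying the losses of an RGB24 to YUV420P conversion with the new avutil entry point:

#include "libavutil/log.h"
#include "libavutil/pixdesc.h"

/* RGB24 -> YUV420P is expected to report FF_LOSS_RESOLUTION (chroma
 * subsampling) and FF_LOSS_COLORSPACE (RGB to YUV); has_alpha is 0
 * because RGB24 carries no alpha channel. */
static void show_rgb24_to_yuv420p_loss(void)
{
    int loss = av_get_pix_fmt_loss(AV_PIX_FMT_YUV420P, AV_PIX_FMT_RGB24, 0);
    if (loss & FF_LOSS_RESOLUTION)
        av_log(NULL, AV_LOG_INFO, "chroma planes will be subsampled\n");
    if (loss & FF_LOSS_COLORSPACE)
        av_log(NULL, AV_LOG_INFO, "a colorspace conversion is needed\n");
}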
/**
* Find the best pixel format to convert to given a certain source pixel
* format and a selection of two destination pixel formats. When converting from
* one pixel format to another, information loss may occur. For example, when converting
* from RGB24 to GRAY, the color information will be lost. Similarly, other losses occur when
* converting from some formats to other formats. av_find_best_pix_fmt_of_2() selects which of
* the given pixel formats should be used to suffer the least amount of loss.
*
* If one of the destination formats is AV_PIX_FMT_NONE the other pixel format (if valid) will be
* returned.
*
* @param[in] dst_pix_fmt1 One of the two destination pixel formats to choose from
* @param[in] dst_pix_fmt2 The other of the two destination pixel formats to choose from
* @param[in] src_pix_fmt Source pixel format
* @param[in] has_alpha Whether the source pixel format alpha channel is used.
* @param[in, out] loss_ptr Combination of loss flags. In: selects which of the losses to ignore, i.e.
* NULL or value of zero means we care about all losses. Out: the loss
* that occurs when converting from src to selected dst pixel format.
* @return The best pixel format to convert to or -1 if none was found.
*/
enum AVPixelFormat av_find_best_pix_fmt_of_2(enum AVPixelFormat dst_pix_fmt1, enum AVPixelFormat dst_pix_fmt2,
enum AVPixelFormat src_pix_fmt, int has_alpha, int *loss_ptr);
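The calling pattern is unchanged from the deprecated avcodec helper; a minimal sketch adapted from the example in the old avcodec.h documentation (the wrapper function name is illustrative):

#include "libavutil/pixdesc.h"

/* Choose the least lossy of three candidate destination formats for a
 * YUV420P source; chroma loss is declared acceptable via loss_ptr. */
static enum AVPixelFormat pick_dst_for_yuv420p(void)
{
    enum AVPixelFormat src = AV_PIX_FMT_YUV420P;
    int loss = FF_LOSS_CHROMA; /* in: losses we do not care about */
    enum AVPixelFormat dst;

    dst = av_find_best_pix_fmt_of_2(AV_PIX_FMT_RGB24, AV_PIX_FMT_GRAY8, src, 0, &loss);
    dst = av_find_best_pix_fmt_of_2(dst, AV_PIX_FMT_RGB8, src, 0, &loss);
    return dst; /* loss now holds the losses of the chosen conversion */
}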
#endif /* AVUTIL_PIXDESC_H */
