@@ -83,7 +83,7 @@ void ff_h264_draw_horiz_band(const H264Context *h, H264SliceContext *sl,
                              int y, int height)
 {
     AVCodecContext *avctx = h->avctx;
-    const AVFrame *src = &h->cur_pic.f;
+    const AVFrame *src = h->cur_pic.f;
     const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(avctx->pix_fmt);
     int vshift = desc->log2_chroma_h;
     const int field_pic = h->picture_structure != PICT_FRAME;
@@ -590,9 +590,15 @@ static int h264_init_context(AVCodecContext *avctx, H264Context *h)
         return AVERROR(ENOMEM);
     }
 
-    for (i = 0; i < H264_MAX_PICTURE_COUNT; i++)
-        av_frame_unref(&h->DPB[i].f);
-    av_frame_unref(&h->cur_pic.f);
+    for (i = 0; i < H264_MAX_PICTURE_COUNT; i++) {
+        h->DPB[i].f = av_frame_alloc();
+        if (!h->DPB[i].f)
+            return AVERROR(ENOMEM);
+    }
+
+    h->cur_pic.f = av_frame_alloc();
+    if (!h->cur_pic.f)
+        return AVERROR(ENOMEM);
 
     for (i = 0; i < h->nb_slice_ctx; i++)
         h->slice_ctx[i].h264 = h;
@@ -682,7 +688,7 @@ static void decode_postinit(H264Context *h, int setup_finished)
     int i, pics, out_of_order, out_idx;
     int invalid = 0, cnt = 0;
 
-    h->cur_pic_ptr->f.pict_type = h->pict_type;
+    h->cur_pic_ptr->f->pict_type = h->pict_type;
 
     if (h->next_output_pic)
         return;
@@ -697,8 +703,8 @@ static void decode_postinit(H264Context *h, int setup_finished)
         return;
     }
 
-    cur->f.interlaced_frame = 0;
-    cur->f.repeat_pict = 0;
+    cur->f->interlaced_frame = 0;
+    cur->f->repeat_pict = 0;
 
     /* Signal interlacing information externally. */
     /* Prioritize picture timing SEI information over used
@@ -710,55 +716,55 @@ static void decode_postinit(H264Context *h, int setup_finished)
             break;
         case SEI_PIC_STRUCT_TOP_FIELD:
         case SEI_PIC_STRUCT_BOTTOM_FIELD:
-            cur->f.interlaced_frame = 1;
+            cur->f->interlaced_frame = 1;
             break;
         case SEI_PIC_STRUCT_TOP_BOTTOM:
         case SEI_PIC_STRUCT_BOTTOM_TOP:
             if (FIELD_OR_MBAFF_PICTURE(h))
-                cur->f.interlaced_frame = 1;
+                cur->f->interlaced_frame = 1;
             else
                 // try to flag soft telecine progressive
-                cur->f.interlaced_frame = h->prev_interlaced_frame;
+                cur->f->interlaced_frame = h->prev_interlaced_frame;
             break;
         case SEI_PIC_STRUCT_TOP_BOTTOM_TOP:
         case SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM:
             /* Signal the possibility of telecined film externally
             * (pic_struct 5,6). From these hints, let the applications
             * decide if they apply deinterlacing. */
-            cur->f.repeat_pict = 1;
+            cur->f->repeat_pict = 1;
             break;
         case SEI_PIC_STRUCT_FRAME_DOUBLING:
-            cur->f.repeat_pict = 2;
+            cur->f->repeat_pict = 2;
             break;
         case SEI_PIC_STRUCT_FRAME_TRIPLING:
-            cur->f.repeat_pict = 4;
+            cur->f->repeat_pict = 4;
             break;
         }
 
         if ((h->sei_ct_type & 3) &&
             h->sei_pic_struct <= SEI_PIC_STRUCT_BOTTOM_TOP)
-            cur->f.interlaced_frame = (h->sei_ct_type & (1 << 1)) != 0;
+            cur->f->interlaced_frame = (h->sei_ct_type & (1 << 1)) != 0;
     } else {
         /* Derive interlacing flag from used decoding process. */
-        cur->f.interlaced_frame = FIELD_OR_MBAFF_PICTURE(h);
+        cur->f->interlaced_frame = FIELD_OR_MBAFF_PICTURE(h);
     }
-    h->prev_interlaced_frame = cur->f.interlaced_frame;
+    h->prev_interlaced_frame = cur->f->interlaced_frame;
 
     if (cur->field_poc[0] != cur->field_poc[1]) {
         /* Derive top_field_first from field pocs. */
-        cur->f.top_field_first = cur->field_poc[0] < cur->field_poc[1];
+        cur->f->top_field_first = cur->field_poc[0] < cur->field_poc[1];
     } else {
-        if (cur->f.interlaced_frame || h->sps.pic_struct_present_flag) {
+        if (cur->f->interlaced_frame || h->sps.pic_struct_present_flag) {
             /* Use picture timing SEI information. Even if it is a
              * information of a past frame, better than nothing. */
             if (h->sei_pic_struct == SEI_PIC_STRUCT_TOP_BOTTOM ||
                 h->sei_pic_struct == SEI_PIC_STRUCT_TOP_BOTTOM_TOP)
-                cur->f.top_field_first = 1;
+                cur->f->top_field_first = 1;
             else
-                cur->f.top_field_first = 0;
+                cur->f->top_field_first = 0;
         } else {
             /* Most likely progressive */
-            cur->f.top_field_first = 0;
+            cur->f->top_field_first = 0;
         }
     }
 
@@ -767,7 +773,7 @@ static void decode_postinit(H264Context *h, int setup_finished)
         h->frame_packing_arrangement_type <= 6 &&
         h->content_interpretation_type > 0 &&
         h->content_interpretation_type < 3) {
-        AVStereo3D *stereo = av_stereo3d_create_side_data(&cur->f);
+        AVStereo3D *stereo = av_stereo3d_create_side_data(cur->f);
         if (!stereo)
             return;
 
@@ -805,7 +811,7 @@ static void decode_postinit(H264Context *h, int setup_finished)
     if (h->sei_display_orientation_present &&
         (h->sei_anticlockwise_rotation || h->sei_hflip || h->sei_vflip)) {
         double angle = h->sei_anticlockwise_rotation * 360 / (double) (1 << 16);
-        AVFrameSideData *rotation = av_frame_new_side_data(&cur->f,
+        AVFrameSideData *rotation = av_frame_new_side_data(cur->f,
                                                            AV_FRAME_DATA_DISPLAYMATRIX,
                                                            sizeof(int32_t) * 9);
         if (!rotation)
@@ -859,13 +865,13 @@ static void decode_postinit(H264Context *h, int setup_finished)
         cnt += out->poc < h->last_pocs[i];
         invalid += out->poc == INT_MIN;
     }
-    if (!h->mmco_reset && !cur->f.key_frame &&
+    if (!h->mmco_reset && !cur->f->key_frame &&
         cnt + invalid == MAX_DELAYED_PIC_COUNT && cnt > 0) {
         h->mmco_reset = 2;
         if (pics > 1)
             h->delayed_pic[pics - 2]->mmco_reset = 2;
     }
-    if (h->mmco_reset || cur->f.key_frame) {
+    if (h->mmco_reset || cur->f->key_frame) {
         for (i = 0; i < MAX_DELAYED_PIC_COUNT; i++)
             h->last_pocs[i] = INT_MIN;
         cnt = 0;
@@ -876,16 +882,16 @@ static void decode_postinit(H264Context *h, int setup_finished)
     for (i = 1; i < MAX_DELAYED_PIC_COUNT &&
                 h->delayed_pic[i] &&
                 !h->delayed_pic[i - 1]->mmco_reset &&
-                !h->delayed_pic[i]->f.key_frame;
+                !h->delayed_pic[i]->f->key_frame;
          i++)
         if (h->delayed_pic[i]->poc < out->poc) {
            out = h->delayed_pic[i];
            out_idx = i;
        }
     if (h->avctx->has_b_frames == 0 &&
-        (h->delayed_pic[0]->f.key_frame || h->mmco_reset))
+        (h->delayed_pic[0]->f->key_frame || h->mmco_reset))
         h->next_outputed_poc = INT_MIN;
-    out_of_order = !out->f.key_frame && !h->mmco_reset &&
+    out_of_order = !out->f->key_frame && !h->mmco_reset &&
                    (out->poc < h->next_outputed_poc);
 
     if (h->sps.bitstream_restriction_flag &&
@@ -899,7 +905,7 @@ static void decode_postinit(H264Context *h, int setup_finished)
     } else if (h->low_delay &&
                ((h->next_outputed_poc != INT_MIN &&
                  out->poc > h->next_outputed_poc + 2) ||
-                cur->f.pict_type == AV_PICTURE_TYPE_B)) {
+                cur->f->pict_type == AV_PICTURE_TYPE_B)) {
         h->low_delay = 0;
         h->avctx->has_b_frames++;
     }
@@ -924,7 +930,7 @@ static void decode_postinit(H264Context *h, int setup_finished)
             h->next_outputed_poc = INT_MIN;
         }
     } else {
-        if (out_idx == 0 && pics > 1 && h->delayed_pic[0]->f.key_frame) {
+        if (out_idx == 0 && pics > 1 && h->delayed_pic[0]->f->key_frame) {
             h->next_outputed_poc = INT_MIN;
         } else {
             h->next_outputed_poc = out->poc;
@@ -1445,7 +1451,7 @@ again:
                                     ((1 << h->sps.log2_max_frame_num) - 1);
             }
 
-            h->cur_pic_ptr->f.key_frame |=
+            h->cur_pic_ptr->f->key_frame |=
                 (h->nal_unit_type == NAL_IDR_SLICE) ||
                 (h->sei_recovery_frame_cnt >= 0);
 
@@ -1626,7 +1632,7 @@ out:
         out_idx = 0;
         for (i = 1;
              h->delayed_pic[i] &&
-             !h->delayed_pic[i]->f.key_frame &&
+             !h->delayed_pic[i]->f->key_frame &&
              !h->delayed_pic[i]->mmco_reset;
              i++)
             if (h->delayed_pic[i]->poc < out->poc) {
@@ -1638,7 +1644,7 @@ out:
             h->delayed_pic[i] = h->delayed_pic[i + 1];
 
         if (out) {
-            ret = output_frame(h, pict, &out->f);
+            ret = output_frame(h, pict, out->f);
             if (ret < 0)
                 return ret;
             *got_frame = 1;
@@ -1674,9 +1680,9 @@ out:
     if (h->next_output_pic && ((avctx->flags & CODEC_FLAG_OUTPUT_CORRUPT) ||
                                h->next_output_pic->recovered)) {
         if (!h->next_output_pic->recovered)
-            h->next_output_pic->f.flags |= AV_FRAME_FLAG_CORRUPT;
+            h->next_output_pic->f->flags |= AV_FRAME_FLAG_CORRUPT;
 
-        ret = output_frame(h, pict, &h->next_output_pic->f);
+        ret = output_frame(h, pict, h->next_output_pic->f);
         if (ret < 0)
             return ret;
         *got_frame = 1;
@@ -1694,8 +1700,10 @@ av_cold void ff_h264_free_context(H264Context *h)
 
     ff_h264_free_tables(h);
 
-    for (i = 0; i < H264_MAX_PICTURE_COUNT; i++)
+    for (i = 0; i < H264_MAX_PICTURE_COUNT; i++) {
         ff_h264_unref_picture(h, &h->DPB[i]);
+        av_frame_free(&h->DPB[i].f);
+    }
 
     h->cur_pic_ptr = NULL;
 
@@ -1718,6 +1726,7 @@ static av_cold int h264_decode_end(AVCodecContext *avctx)
     ff_h264_free_context(h);
 
     ff_h264_unref_picture(h, &h->cur_pic);
+    av_frame_free(&h->cur_pic.f);
 
     return 0;
 }
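
For reference, a minimal standalone sketch (not part of the patch) of the AVFrame lifecycle the hunks above move to: each picture owns a heap-allocated AVFrame pointer that is allocated once at init, unreferenced between uses, and freed at teardown. It uses only the public libavutil calls that appear in the diff (av_frame_alloc, av_frame_unref, av_frame_free); struct pic, MAX_PICS, pool_init and pool_free are illustrative names, not part of the FFmpeg tree.

#include <libavutil/error.h>
#include <libavutil/frame.h>

#define MAX_PICS 4          /* illustrative pool size, not H264_MAX_PICTURE_COUNT */

struct pic {                /* stand-in for H264Picture: the frame is a pointer */
    AVFrame *f;
};

static int pool_init(struct pic *dpb, int n)
{
    for (int i = 0; i < n; i++) {
        dpb[i].f = av_frame_alloc();     /* allocate the AVFrame shell once */
        if (!dpb[i].f)
            return AVERROR(ENOMEM);      /* caller cleans up via pool_free() */
    }
    return 0;
}

static void pool_free(struct pic *dpb, int n)
{
    for (int i = 0; i < n; i++) {
        if (!dpb[i].f)
            continue;
        av_frame_unref(dpb[i].f);        /* drop buffer refs, keep the shell */
        av_frame_free(&dpb[i].f);        /* free the shell, pointer set to NULL */
    }
}

int main(void)
{
    struct pic dpb[MAX_PICS] = { 0 };
    int ret = pool_init(dpb, MAX_PICS);
    pool_free(dpb, MAX_PICS);            /* safe even after a partial init */
    return ret < 0;
}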