avcodec/pgssubdec: Use dedicated pointer for accesses

Improves readability and decreases codesize.

Signed-off-by: Andreas Rheinhardt <andreas.rheinhardt@outlook.com>
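
For context: the refactoring this commit applies in the diff below is to take one dedicated pointer to ctx->presentation.objects[i] at the top of the loop body and route every field access through it, instead of repeating the indexed expression on each line. A minimal sketch of the pattern, using hypothetical struct and field names rather than the decoder's actual PGSSubObjectRef type:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical stand-ins for the decoder's types, for illustration only. */
    typedef struct Object {
        uint16_t id;
        uint16_t x, y;
    } Object;

    typedef struct Presentation {
        Object objects[64];
        int    object_count;
    } Presentation;

    static void fill_object(Presentation *p, int i, uint16_t id, uint16_t x, uint16_t y)
    {
        Object *const object = &p->objects[i];  /* dedicated pointer, taken once */

        object->id = id;  /* instead of p->objects[i].id = id; and so on per field */
        object->x  = x;
        object->y  = y;
    }

    int main(void)
    {
        Presentation p = { .object_count = 1 };
        fill_object(&p, 0, 42, 100, 50);
        printf("object 0: id=%d x=%d y=%d\n", p.objects[0].id, p.objects[0].x, p.objects[0].y);
        return 0;
    }

Each assignment then reads as a short member access through one base pointer, which is the readability and code-size improvement the commit message refers to.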
Author: Andreas Rheinhardt
commit d59f454ed2 (parent 58228ab9b9)
1 changed file: libavcodec/pgssubdec.c (66 changed lines)

@@ -443,6 +443,7 @@ static int parse_presentation_segment(AVCodecContext *avctx,
     for (i = 0; i < ctx->presentation.object_count; i++)
     {
+        PGSSubObjectRef *const object = &ctx->presentation.objects[i];
         if (buf_end - buf < 8) {
             av_log(avctx, AV_LOG_ERROR, "Insufficent space for object\n");
@@ -450,32 +451,29 @@ static int parse_presentation_segment(AVCodecContext *avctx,
             return AVERROR_INVALIDDATA;
         }
-        ctx->presentation.objects[i].id = bytestream_get_be16(&buf);
-        ctx->presentation.objects[i].window_id = bytestream_get_byte(&buf);
-        ctx->presentation.objects[i].composition_flag = bytestream_get_byte(&buf);
-        ctx->presentation.objects[i].x = bytestream_get_be16(&buf);
-        ctx->presentation.objects[i].y = bytestream_get_be16(&buf);
+        object->id = bytestream_get_be16(&buf);
+        object->window_id = bytestream_get_byte(&buf);
+        object->composition_flag = bytestream_get_byte(&buf);
+        object->x = bytestream_get_be16(&buf);
+        object->y = bytestream_get_be16(&buf);
         // If cropping
-        if (ctx->presentation.objects[i].composition_flag & 0x80) {
-            ctx->presentation.objects[i].crop_x = bytestream_get_be16(&buf);
-            ctx->presentation.objects[i].crop_y = bytestream_get_be16(&buf);
-            ctx->presentation.objects[i].crop_w = bytestream_get_be16(&buf);
-            ctx->presentation.objects[i].crop_h = bytestream_get_be16(&buf);
+        if (object->composition_flag & 0x80) {
+            object->crop_x = bytestream_get_be16(&buf);
+            object->crop_y = bytestream_get_be16(&buf);
+            object->crop_w = bytestream_get_be16(&buf);
+            object->crop_h = bytestream_get_be16(&buf);
         }
         ff_dlog(avctx, "Subtitle Placement x=%d, y=%d\n",
-                ctx->presentation.objects[i].x, ctx->presentation.objects[i].y);
+                object->x, object->y);

-        if (ctx->presentation.objects[i].x > avctx->width ||
-            ctx->presentation.objects[i].y > avctx->height) {
+        if (object->x > avctx->width || object->y > avctx->height) {
             av_log(avctx, AV_LOG_ERROR, "Subtitle out of video bounds. x = %d, y = %d, video width = %d, video height = %d.\n",
-                   ctx->presentation.objects[i].x,
-                   ctx->presentation.objects[i].y,
+                   object->x, object->y,
                    avctx->width, avctx->height);
-            ctx->presentation.objects[i].x = 0;
-            ctx->presentation.objects[i].y = 0;
+            object->y = object->x = 0;
             if (avctx->err_recognition & AV_EF_EXPLODE) {
                 return AVERROR_INVALIDDATA;
             }
@@ -531,13 +529,13 @@ static int display_end_segment(AVCodecContext *avctx, void *data,
         return AVERROR_INVALIDDATA;
     }
     for (i = 0; i < ctx->presentation.object_count; i++) {
+        AVSubtitleRect *const rect = av_mallocz(sizeof(*rect));
         PGSSubObject *object;
-        sub->rects[i] = av_mallocz(sizeof(*sub->rects[0]));
-        if (!sub->rects[i])
+        if (!rect)
             return AVERROR(ENOMEM);
-        sub->num_rects++;
-        sub->rects[i]->type = SUBTITLE_BITMAP;
+        sub->rects[sub->num_rects++] = rect;
+        rect->type = SUBTITLE_BITMAP;
         /* Process bitmap */
         object = find_object(ctx->presentation.objects[i].id, &ctx->objects);
@@ -551,16 +549,16 @@ static int display_end_segment(AVCodecContext *avctx, void *data,
             continue;
         }
         if (ctx->presentation.objects[i].composition_flag & 0x40)
-            sub->rects[i]->flags |= AV_SUBTITLE_FLAG_FORCED;
+            rect->flags |= AV_SUBTITLE_FLAG_FORCED;

-        sub->rects[i]->x = ctx->presentation.objects[i].x;
-        sub->rects[i]->y = ctx->presentation.objects[i].y;
+        rect->x = ctx->presentation.objects[i].x;
+        rect->y = ctx->presentation.objects[i].y;
         if (object->rle) {
-            sub->rects[i]->w = object->w;
-            sub->rects[i]->h = object->h;
-            sub->rects[i]->linesize[0] = object->w;
+            rect->w = object->w;
+            rect->h = object->h;
+            rect->linesize[0] = object->w;
             if (object->rle_remaining_len) {
                 av_log(avctx, AV_LOG_ERROR, "RLE data length %u is %u bytes shorter than expected\n",
@@ -568,25 +566,25 @@ static int display_end_segment(AVCodecContext *avctx, void *data,
                 if (avctx->err_recognition & AV_EF_EXPLODE)
                     return AVERROR_INVALIDDATA;
             }
-            ret = decode_rle(avctx, sub->rects[i], object->rle, object->rle_data_len);
+            ret = decode_rle(avctx, rect, object->rle, object->rle_data_len);
             if (ret < 0) {
                 if ((avctx->err_recognition & AV_EF_EXPLODE) ||
                     ret == AVERROR(ENOMEM)) {
                     return ret;
                 }
-                sub->rects[i]->w = 0;
-                sub->rects[i]->h = 0;
+                rect->w = 0;
+                rect->h = 0;
                 continue;
             }
         }
         /* Allocate memory for colors */
-        sub->rects[i]->nb_colors = 256;
-        sub->rects[i]->data[1] = av_mallocz(AVPALETTE_SIZE);
-        if (!sub->rects[i]->data[1])
+        rect->nb_colors = 256;
+        rect->data[1] = av_mallocz(AVPALETTE_SIZE);
+        if (!rect->data[1])
             return AVERROR(ENOMEM);
         if (!ctx->forced_subs_only || ctx->presentation.objects[i].composition_flag & 0x40)
-            memcpy(sub->rects[i]->data[1], palette->clut, sub->rects[i]->nb_colors * sizeof(uint32_t));
+            memcpy(rect->data[1], palette->clut, rect->nb_colors * sizeof(uint32_t));
     }
     return 1;
 }
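
Besides the pointer renames, the display_end_segment() hunks also change how each rect is stored: instead of allocating into sub->rects[i] and incrementing sub->num_rects separately, the new code allocates into a local rect, checks it, and publishes it with sub->rects[sub->num_rects++] = rect in one step. A minimal sketch of that allocate-check-publish idiom, with hypothetical simplified types standing in for AVSubtitle and AVSubtitleRect:

    #include <stdio.h>
    #include <stdlib.h>

    typedef struct Rect { int w, h; } Rect;  /* stand-in for AVSubtitleRect */

    typedef struct Sub {                     /* stand-in for AVSubtitle */
        Rect    **rects;                     /* assumed preallocated with enough slots */
        unsigned  num_rects;                 /* counts only rects actually stored */
    } Sub;

    /* Allocate one rect; publish it and bump the count only on success. */
    static int append_rect(Sub *sub)
    {
        Rect *const rect = calloc(1, sizeof(*rect));
        if (!rect)
            return -1;                        /* nothing was published */
        sub->rects[sub->num_rects++] = rect;  /* publish and count in one step */
        return 0;
    }

    int main(void)
    {
        Sub sub = { .rects = calloc(4, sizeof(Rect *)), .num_rects = 0 };
        if (!sub.rects || append_rect(&sub) < 0)
            return 1;
        printf("stored %u rect(s)\n", sub.num_rects);
        return 0;
    }

Publishing only after the NULL check keeps num_rects in step with the number of entries actually written, which is what the sub->rects[sub->num_rects++] = rect line in the diff expresses directly.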
