diff --git a/avplay.c b/avplay.c
index 225f81acbc..93a0e1b5a8 100644
--- a/avplay.c
+++ b/avplay.c
@@ -103,6 +103,7 @@ typedef struct VideoPicture {
     SDL_Overlay *bmp;
     int width, height; /* source height & width */
     int allocated;
+    int reallocate;
     enum PixelFormat pix_fmt;
 
 #if CONFIG_AVFILTER
@@ -1339,7 +1340,7 @@ static int queue_picture(VideoState *is, AVFrame *src_frame, double pts, int64_t
     vp = &is->pictq[is->pictq_windex];
 
     /* alloc or resize hardware picture buffer */
-    if (!vp->bmp ||
+    if (!vp->bmp || vp->reallocate ||
 #if CONFIG_AVFILTER
         vp->width  != is->out_video_filter->inputs[0]->w ||
         vp->height != is->out_video_filter->inputs[0]->h) {
@@ -1349,7 +1350,8 @@ static int queue_picture(VideoState *is, AVFrame *src_frame, double pts, int64_t
 #endif
         SDL_Event event;
 
-        vp->allocated = 0;
+        vp->allocated  = 0;
+        vp->reallocate = 0;
 
         /* the allocation must be done in the main thread to avoid
            locking problems */
@@ -2681,6 +2683,12 @@ static void stream_cycle_channel(VideoState *is, int codec_type)
 
 static void toggle_full_screen(void)
 {
     is_full_screen = !is_full_screen;
+#if defined(__APPLE__) && SDL_VERSION_ATLEAST(1, 2, 14)
+    /* OSX needs to reallocate the SDL overlays */
+    for (int i = 0; i < VIDEO_PICTURE_QUEUE_SIZE; i++) {
+        cur_stream->pictq[i].reallocate = 1;
+    }
+#endif
     video_open(cur_stream);
 }
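
Background note, not part of the patch: on OS X with SDL >= 1.2.14, toggling fullscreen re-runs SDL_SetVideoMode(), and an SDL_Overlay created against the previous video surface should not be reused afterwards. The new reallocate flag only forces queue_picture() into its existing "alloc or resize" branch, so the overlay is torn down and rebuilt on the main thread after video_open() has switched surfaces. The standalone SDL 1.2 sketch below illustrates that behaviour; the helper name recreate_overlay and the 640x480 size are made up for the demo and do not come from avplay.c.

    /* Illustrative sketch only; assumed build line: gcc demo.c `sdl-config --cflags --libs` */
    #include <SDL.h>

    static SDL_Surface *screen;
    static SDL_Overlay *bmp;

    /* Drop the old YUV overlay and create a fresh one against the current
     * video surface, mirroring what avplay's main-thread allocation path
     * ends up doing when the reallocate flag is set. */
    static void recreate_overlay(int w, int h)
    {
        if (bmp)
            SDL_FreeYUVOverlay(bmp);
        bmp = SDL_CreateYUVOverlay(w, h, SDL_YV12_OVERLAY, screen);
    }

    int main(void)
    {
        int fullscreen = 0;

        if (SDL_Init(SDL_INIT_VIDEO) < 0)
            return 1;

        screen = SDL_SetVideoMode(640, 480, 0, SDL_HWSURFACE);
        if (!screen)
            return 1;
        recreate_overlay(640, 480);

        /* Toggling fullscreen calls SDL_SetVideoMode() again; on OS X with
         * SDL >= 1.2.14 the old overlay must be recreated against the new
         * surface before it is used again. */
        fullscreen = !fullscreen;
        screen = SDL_SetVideoMode(640, 480, 0,
                                  SDL_HWSURFACE | (fullscreen ? SDL_FULLSCREEN : 0));
        if (!screen)
            return 1;
        recreate_overlay(640, 480);

        SDL_FreeYUVOverlay(bmp);
        SDL_Quit();
        return 0;
    }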