@@ -31,6 +31,8 @@
 #undef NDEBUG
 #include <assert.h>
 
+#define PERM_RWP AV_PERM_WRITE | AV_PERM_PRESERVE | AV_PERM_REUSE
+
 #define CHECK(j)\
     {   int score = FFABS(cur[mrefs-1+(j)] - cur[prefs-1-(j)])\
                   + FFABS(cur[mrefs +(j)] - cur[prefs -(j)])\
@@ -47,19 +49,19 @@
     int temporal_diff0 = FFABS(prev2[0] - next2[0]); \
     int temporal_diff1 =(FFABS(prev[mrefs] - c) + FFABS(prev[prefs] - e) )>>1; \
     int temporal_diff2 =(FFABS(next[mrefs] - c) + FFABS(next[prefs] - e) )>>1; \
-    int diff = FFMAX3(temporal_diff0>>1, temporal_diff1, temporal_diff2); \
-    int spatial_pred = (c+e)>>1; \
-    int spatial_score = FFABS(cur[mrefs-1] - cur[prefs-1]) + FFABS(c-e) \
-                      + FFABS(cur[mrefs+1] - cur[prefs+1]) - 1; \
+    int diff = FFMAX3(temporal_diff0 >> 1, temporal_diff1, temporal_diff2); \
+    int spatial_pred = (c+e) >> 1; \
+    int spatial_score = FFABS(cur[mrefs - 1] - cur[prefs - 1]) + FFABS(c-e) \
+                      + FFABS(cur[mrefs + 1] - cur[prefs + 1]) - 1; \
 \
     CHECK(-1) CHECK(-2) }} }} \
     CHECK( 1) CHECK( 2) }} }} \
 \
     if (mode < 2) { \
-        int b = (prev2[2*mrefs] + next2[2*mrefs])>>1; \
-        int f = (prev2[2*prefs] + next2[2*prefs])>>1; \
-        int max = FFMAX3(d-e, d-c, FFMIN(b-c, f-e)); \
-        int min = FFMIN3(d-e, d-c, FFMAX(b-c, f-e)); \
+        int b = (prev2[2 * mrefs] + next2[2 * mrefs])>>1; \
+        int f = (prev2[2 * prefs] + next2[2 * prefs])>>1; \
+        int max = FFMAX3(d - e, d - c, FFMIN(b - c, f - e)); \
+        int min = FFMIN3(d - e, d - c, FFMAX(b - c, f - e)); \
 \
         diff = FFMAX3(diff, min, -max); \
     } \
@@ -92,7 +94,8 @@ static void filter_line_c(uint8_t *dst,
 
 static void filter_line_c_16bit(uint16_t *dst,
                                 uint16_t *prev, uint16_t *cur, uint16_t *next,
-                                int w, int prefs, int mrefs, int parity, int mode)
+                                int w, int prefs, int mrefs, int parity,
+                                int mode)
 {
     int x;
     uint16_t *prev2 = parity ? prev : cur ;
@@ -123,15 +126,18 @@ static void filter(AVFilterContext *ctx, AVFilterBufferRef *dstpic,
 
         for (y = 0; y < h; y++) {
             if ((y ^ parity) & 1) {
-                uint8_t *prev = &yadif->prev->data[i][y*refs];
-                uint8_t *cur  = &yadif->cur ->data[i][y*refs];
-                uint8_t *next = &yadif->next->data[i][y*refs];
-                uint8_t *dst  = &dstpic->data[i][y*dstpic->linesize[i]];
-                int mode = y==1 || y+2==h ? 2 : yadif->mode;
-                yadif->filter_line(dst, prev, cur, next, w, y+1<h ? refs : -refs, y ? -refs : refs, parity ^ tff, mode);
+                uint8_t *prev = &yadif->prev->data[i][y * refs];
+                uint8_t *cur  = &yadif->cur ->data[i][y * refs];
+                uint8_t *next = &yadif->next->data[i][y * refs];
+                uint8_t *dst  = &dstpic->data[i][y * dstpic->linesize[i]];
+                int mode = y == 1 || y + 2 == h ? 2 : yadif->mode;
+                yadif->filter_line(dst, prev, cur, next, w,
+                                   y + 1 < h ? refs : -refs,
+                                   y ? -refs : refs,
+                                   parity ^ tff, mode);
             } else {
-                memcpy(&dstpic->data[i][y*dstpic->linesize[i]],
-                       &yadif->cur->data[i][y*refs], w*df);
+                memcpy(&dstpic->data[i][y * dstpic->linesize[i]],
+                       &yadif->cur->data[i][y * refs], w * df);
             }
         }
     }
@@ -139,11 +145,12 @@ static void filter(AVFilterContext *ctx, AVFilterBufferRef *dstpic,
     emms_c();
 }
 
-static AVFilterBufferRef *get_video_buffer(AVFilterLink *link, int perms, int w, int h)
+static AVFilterBufferRef *get_video_buffer(AVFilterLink *link, int perms,
+                                           int w, int h)
 {
     AVFilterBufferRef *picref;
     int width = FFALIGN(w, 32);
-    int height= FFALIGN(h+2, 32);
+    int height = FFALIGN(h + 2, 32);
     int i;
 
     picref = ff_default_get_video_buffer(link, perms, width, height);
@@ -160,19 +167,18 @@ static AVFilterBufferRef *get_video_buffer(AVFilterLink *link, int perms, int w,
 static int return_frame(AVFilterContext *ctx, int is_second)
 {
     YADIFContext *yadif = ctx->priv;
-    AVFilterLink *link= ctx->outputs[0];
+    AVFilterLink *link = ctx->outputs[0];
     int tff, ret;
 
     if (yadif->parity == -1) {
         tff = yadif->cur->video->interlaced ?
               yadif->cur->video->top_field_first : 1;
     } else {
-        tff = yadif->parity^1;
+        tff = yadif->parity ^ 1;
     }
 
     if (is_second) {
-        yadif->out = ff_get_video_buffer(link, AV_PERM_WRITE | AV_PERM_PRESERVE |
-                                         AV_PERM_REUSE, link->w, link->h);
+        yadif->out = ff_get_video_buffer(link, PERM_RWP, link->w, link->h);
         if (!yadif->out)
             return AVERROR(ENOMEM);
 
@@ -241,15 +247,17 @@ static int start_frame(AVFilterLink *link, AVFilterBufferRef *picref)
         !(yadif->prev = avfilter_ref_buffer(yadif->cur, AV_PERM_READ)))
         return AVERROR(ENOMEM);
 
-    yadif->out = ff_get_video_buffer(ctx->outputs[0], AV_PERM_WRITE | AV_PERM_PRESERVE |
-                                     AV_PERM_REUSE, link->w, link->h);
+    yadif->out = ff_get_video_buffer(ctx->outputs[0], PERM_RWP,
+                                     link->w, link->h);
     if (!yadif->out)
         return AVERROR(ENOMEM);
 
     avfilter_copy_buffer_ref_props(yadif->out, yadif->cur);
     yadif->out->video->interlaced = 0;
+
     if (yadif->out->pts != AV_NOPTS_VALUE)
         yadif->out->pts *= 2;
+
     return ff_start_frame(ctx->outputs[0], yadif->out);
 }
 
@@ -291,7 +299,9 @@ static int request_frame(AVFilterLink *link)
         ret = ff_request_frame(link->src->inputs[0]);
 
         if (ret == AVERROR_EOF && yadif->next) {
-            AVFilterBufferRef *next = avfilter_ref_buffer(yadif->next, AV_PERM_READ);
+            AVFilterBufferRef *next =
+                avfilter_ref_buffer(yadif->next, AV_PERM_READ);
+
             if (!next)
                 return AVERROR(ENOMEM);
 
@@ -320,7 +330,8 @@ static int poll_frame(AVFilterLink *link)
     if (val <= 0)
         return val;
 
-    if (val==1 && !yadif->next) { //FIXME change API to not requre this red tape
+    //FIXME change API to not requre this red tape
+    if (val == 1 && !yadif->next) {
         if ((ret = ff_request_frame(link->src->inputs[0])) < 0)
             return ret;
         val = ff_poll_frame(link->src->inputs[0]);
@@ -383,14 +394,17 @@ static av_cold int init(AVFilterContext *ctx, const char *args)
     yadif->auto_enable = 0;
     yadif->csp = NULL;
 
-    if (args) sscanf(args, "%d:%d:%d", &yadif->mode, &yadif->parity, &yadif->auto_enable);
+    if (args)
+        sscanf(args, "%d:%d:%d",
+               &yadif->mode, &yadif->parity, &yadif->auto_enable);
 
     yadif->filter_line = filter_line_c;
 
    if (HAVE_MMX)
        ff_yadif_init_x86(yadif);
 
-    av_log(ctx, AV_LOG_VERBOSE, "mode:%d parity:%d auto_enable:%d\n", yadif->mode, yadif->parity, yadif->auto_enable);
+    av_log(ctx, AV_LOG_VERBOSE, "mode:%d parity:%d auto_enable:%d\n",
+           yadif->mode, yadif->parity, yadif->auto_enable);
 
     return 0;
 }