00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016
00017
00018
00019
00020
00021 #include "libavutil/cpu.h"
00022 #include "libavutil/common.h"
00023 #include "avfilter.h"
00024 #include "yadif.h"
00025
00026 #undef NDEBUG
00027 #include <assert.h>
00028
typedef struct {
    /* Output mode.  Bit 0 selects field-rate output (two output frames per
     * input frame, see poll_frame()/frame_pending); mode >= 2 disables the
     * temporal clamp check in filter_line_c(). */
    int mode;

    /* Field parity: 0/1 force a field order, -1 = auto-detect from the
     * frame's top_field_first flag (see return_frame()). */
    int parity;

    /* Nonzero while the second field of the current frame still has to be
     * output; consumed by start_frame()/request_frame()/poll_frame(). */
    int frame_pending;

    /* Three-frame sliding window over the input stream; all refs are owned
     * by the filter and released in uninit()/start_frame(). */
    AVFilterBufferRef *cur;
    AVFilterBufferRef *next;
    AVFilterBufferRef *prev;
    /* Output picture currently being produced. */
    AVFilterBufferRef *out;
    /* Per-line deinterlacer, selected in init() (C fallback or x86 SIMD). */
    void (*filter_line)(uint8_t *dst,
                        uint8_t *prev, uint8_t *cur, uint8_t *next,
                        int w, int refs, int parity, int mode);
} YADIFContext;
00055
/* Interpolate one missing line of the field being reconstructed.
 *
 * dst:    destination line
 * prev/cur/next: pointers to the SAME line in the previous/current/next frame
 * w:      line width in pixels
 * refs:   plane line stride in bytes
 * parity: selects which neighbouring frames form the temporal prediction
 * mode:   yadif mode; mode < 2 enables the temporal clamp check
 *
 * The code reads one line above/below (cur[-refs]/cur[+refs]) and, for
 * mode < 2, two lines away (prev2/next2[±2*refs]); callers must provide
 * padded buffers (see get_video_buffer()). */
static void filter_line_c(uint8_t *dst,
                          uint8_t *prev, uint8_t *cur, uint8_t *next,
                          int w, int refs, int parity, int mode)
{
    int x;
    /* prev2/next2: the two frames averaged for the temporal prediction,
     * chosen by field parity. */
    uint8_t *prev2 = parity ? prev : cur ;
    uint8_t *next2 = parity ? cur : next;
    for (x = 0; x < w; x++) {
        int c = cur[-refs];                    /* pixel directly above */
        int d = (prev2[0] + next2[0])>>1;      /* temporal prediction */
        int e = cur[+refs];                    /* pixel directly below */
        int temporal_diff0 = FFABS(prev2[0] - next2[0]);
        int temporal_diff1 =(FFABS(prev[-refs] - c) + FFABS(prev[+refs] - e) )>>1;
        int temporal_diff2 =(FFABS(next[-refs] - c) + FFABS(next[+refs] - e) )>>1;
        int diff = FFMAX3(temporal_diff0>>1, temporal_diff1, temporal_diff2);
        /* Start with the vertical average; the -1 bias below makes the
         * straight direction win ties against the diagonal candidates. */
        int spatial_pred = (c+e)>>1;
        int spatial_score = FFABS(cur[-refs-1] - cur[+refs-1]) + FFABS(c-e)
                          + FFABS(cur[-refs+1] - cur[+refs+1]) - 1;

/* Edge-directed interpolation: score diagonal direction j and keep the best.
 * NOTE: the macro deliberately ends with two UNCLOSED braces; the trailing
 * "}} }}" at each use site closes them, so the second CHECK in a row only
 * executes when the first improved the score (early exit from the search).
 * Do not "fix" the brace balance. */
#define CHECK(j)\
    {   int score = FFABS(cur[-refs-1+(j)] - cur[+refs-1-(j)])\
                  + FFABS(cur[-refs  +(j)] - cur[+refs  -(j)])\
                  + FFABS(cur[-refs+1+(j)] - cur[+refs+1-(j)]);\
        if (score < spatial_score) {\
            spatial_score= score;\
            spatial_pred= (cur[-refs  +(j)] + cur[+refs  -(j)])>>1;\

        CHECK(-1) CHECK(-2) }} }}
        CHECK( 1) CHECK( 2) }} }}

        if (mode < 2) {
            /* Temporal clamp: widen the allowed range using the lines two
             * rows away so fast motion does not get over-smoothed. */
            int b = (prev2[-2*refs] + next2[-2*refs])>>1;
            int f = (prev2[+2*refs] + next2[+2*refs])>>1;
#if 0
            int a = cur[-3*refs];
            int g = cur[+3*refs];
            int max = FFMAX3(d-e, d-c, FFMIN3(FFMAX(b-c,f-e),FFMAX(b-c,b-a),FFMAX(f-g,f-e)) );
            int min = FFMIN3(d-e, d-c, FFMAX3(FFMIN(b-c,f-e),FFMIN(b-c,b-a),FFMIN(f-g,f-e)) );
#else
            int max = FFMAX3(d-e, d-c, FFMIN(b-c, f-e));
            int min = FFMIN3(d-e, d-c, FFMAX(b-c, f-e));
#endif

            diff = FFMAX3(diff, min, -max);
        }

        /* Clamp the spatial prediction into the temporally allowed band. */
        if (spatial_pred > d + diff)
            spatial_pred = d + diff;
        else if (spatial_pred < d - diff)
            spatial_pred = d - diff;

        dst[0] = spatial_pred;

        dst++;
        cur++;
        prev++;
        next++;
        prev2++;
        next2++;
    }
}
00117
00118 static void filter(AVFilterContext *ctx, AVFilterBufferRef *dstpic,
00119 int parity, int tff)
00120 {
00121 YADIFContext *yadif = ctx->priv;
00122 int y, i;
00123
00124 for (i = 0; i < 3; i++) {
00125 int is_chroma = !!i;
00126 int w = dstpic->video->w >> is_chroma;
00127 int h = dstpic->video->h >> is_chroma;
00128 int refs = yadif->cur->linesize[i];
00129
00130 for (y = 0; y < h; y++) {
00131 if ((y ^ parity) & 1) {
00132 uint8_t *prev = &yadif->prev->data[i][y*refs];
00133 uint8_t *cur = &yadif->cur ->data[i][y*refs];
00134 uint8_t *next = &yadif->next->data[i][y*refs];
00135 uint8_t *dst = &dstpic->data[i][y*dstpic->linesize[i]];
00136 yadif->filter_line(dst, prev, cur, next, w, refs, parity ^ tff, yadif->mode);
00137 } else {
00138 memcpy(&dstpic->data[i][y*dstpic->linesize[i]],
00139 &yadif->cur->data[i][y*refs], w);
00140 }
00141 }
00142 }
00143 #if HAVE_MMX
00144 __asm__ volatile("emms \n\t" : : : "memory");
00145 #endif
00146 }
00147
00148 static AVFilterBufferRef *get_video_buffer(AVFilterLink *link, int perms, int w, int h)
00149 {
00150 AVFilterBufferRef *picref;
00151 int width = FFALIGN(w, 32);
00152 int height= FFALIGN(h+6, 32);
00153 int i;
00154
00155 picref = avfilter_default_get_video_buffer(link, perms, width, height);
00156
00157 picref->video->w = w;
00158 picref->video->h = h;
00159
00160 for (i = 0; i < 3; i++)
00161 picref->data[i] += 3 * picref->linesize[i];
00162
00163 return picref;
00164 }
00165
/* Produce one deinterlaced output frame from the buffered window.
 * is_second is nonzero when emitting the second field of the current input
 * frame (field-rate mode, yadif->mode & 1). */
static void return_frame(AVFilterContext *ctx, int is_second)
{
    YADIFContext *yadif = ctx->priv;
    AVFilterLink *link= ctx->outputs[0];
    int tff;

    /* Field order: honour a user-forced parity, otherwise follow the
     * frame's top_field_first flag (assume top-first when not interlaced). */
    if (yadif->parity == -1) {
        tff = yadif->cur->video->interlaced ?
              yadif->cur->video->top_field_first : 1;
    } else {
        tff = yadif->parity^1;
    }

    /* The first field reuses yadif->out allocated in start_frame();
     * the second field needs a fresh output picture. */
    if (is_second)
        yadif->out = avfilter_get_video_buffer(link, AV_PERM_WRITE | AV_PERM_PRESERVE |
                                               AV_PERM_REUSE, link->w, link->h);

    filter(ctx, yadif->out, tff ^ !is_second, tff);

    if (is_second) {
        if (yadif->next->pts != AV_NOPTS_VALUE &&
            yadif->cur->pts != AV_NOPTS_VALUE) {
            /* (a & b) + ((a ^ b) >> 1) is the overflow-safe average of two
             * int64 timestamps: the second field sits halfway between the
             * cur and next frame timestamps. */
            yadif->out->pts =
                (yadif->next->pts&yadif->cur->pts) +
                ((yadif->next->pts^yadif->cur->pts)>>1);
        } else {
            yadif->out->pts = AV_NOPTS_VALUE;
        }
        /* start_frame() was already sent for the first field; send it here
         * for the freshly allocated second-field picture. */
        avfilter_start_frame(ctx->outputs[0], yadif->out);
    }
    avfilter_draw_slice(ctx->outputs[0], 0, link->h, 1);
    avfilter_end_frame(ctx->outputs[0]);

    /* In field-rate mode a second call is still owed for this frame. */
    yadif->frame_pending = (yadif->mode&1) && !is_second;
}
00201
/* Input callback: shift the new picture into the prev/cur/next window and,
 * once enough frames are buffered, open the output frame for cur. */
static void start_frame(AVFilterLink *link, AVFilterBufferRef *picref)
{
    AVFilterContext *ctx = link->dst;
    YADIFContext *yadif = ctx->priv;

    /* Flush a pending second field before accepting new input. */
    if (yadif->frame_pending)
        return_frame(ctx, 1);

    /* Advance the window; the filter owns all three references. */
    if (yadif->prev)
        avfilter_unref_buffer(yadif->prev);
    yadif->prev = yadif->cur;
    yadif->cur  = yadif->next;
    yadif->next = picref;

    /* Only one frame buffered so far: nothing can be output yet. */
    if (!yadif->cur)
        return;

    /* Very first frame: duplicate cur as prev so filtering always has a
     * full three-frame window. */
    if (!yadif->prev)
        yadif->prev = avfilter_ref_buffer(yadif->cur, AV_PERM_READ);

    yadif->out = avfilter_get_video_buffer(ctx->outputs[0], AV_PERM_WRITE | AV_PERM_PRESERVE |
                                           AV_PERM_REUSE, link->w, link->h);

    avfilter_copy_buffer_ref_props(yadif->out, yadif->cur);
    /* The output of a deinterlacer is progressive by definition. */
    yadif->out->video->interlaced = 0;
    avfilter_start_frame(ctx->outputs[0], yadif->out);
}
00229
00230 static void end_frame(AVFilterLink *link)
00231 {
00232 AVFilterContext *ctx = link->dst;
00233 YADIFContext *yadif = ctx->priv;
00234
00235 if (!yadif->out)
00236 return;
00237
00238 return_frame(ctx, 0);
00239 }
00240
00241 static int request_frame(AVFilterLink *link)
00242 {
00243 AVFilterContext *ctx = link->src;
00244 YADIFContext *yadif = ctx->priv;
00245
00246 if (yadif->frame_pending) {
00247 return_frame(ctx, 1);
00248 return 0;
00249 }
00250
00251 do {
00252 int ret;
00253
00254 if ((ret = avfilter_request_frame(link->src->inputs[0])))
00255 return ret;
00256 } while (!yadif->cur);
00257
00258 return 0;
00259 }
00260
00261 static int poll_frame(AVFilterLink *link)
00262 {
00263 YADIFContext *yadif = link->src->priv;
00264 int ret, val;
00265
00266 if (yadif->frame_pending)
00267 return 1;
00268
00269 val = avfilter_poll_frame(link->src->inputs[0]);
00270
00271 if (val==1 && !yadif->next) {
00272 if ((ret = avfilter_request_frame(link->src->inputs[0])) < 0)
00273 return ret;
00274 val = avfilter_poll_frame(link->src->inputs[0]);
00275 }
00276 assert(yadif->next);
00277
00278 return val * ((yadif->mode&1)+1);
00279 }
00280
00281 static av_cold void uninit(AVFilterContext *ctx)
00282 {
00283 YADIFContext *yadif = ctx->priv;
00284
00285 if (yadif->prev) avfilter_unref_buffer(yadif->prev);
00286 if (yadif->cur ) avfilter_unref_buffer(yadif->cur );
00287 if (yadif->next) avfilter_unref_buffer(yadif->next);
00288 }
00289
00290 static int query_formats(AVFilterContext *ctx)
00291 {
00292 static const enum PixelFormat pix_fmts[] = {
00293 PIX_FMT_YUV420P,
00294 PIX_FMT_GRAY8,
00295 PIX_FMT_NONE
00296 };
00297
00298 avfilter_set_common_formats(ctx, avfilter_make_format_list(pix_fmts));
00299
00300 return 0;
00301 }
00302
/* Parse the "mode:parity" option string and select the fastest available
 * line filter for the host CPU. */
static av_cold int init(AVFilterContext *ctx, const char *args, void *opaque)
{
    YADIFContext *yadif = ctx->priv;
    av_unused int cpu_flags = av_get_cpu_flags();

    /* Defaults: frame-rate output, auto-detected field order. */
    yadif->mode = 0;
    yadif->parity = -1;

    /* Values keep their defaults if parsing fails (sscanf unchecked). */
    if (args) sscanf(args, "%d:%d", &yadif->mode, &yadif->parity);

    /* C fallback first, then the best matching SIMD implementation. */
    yadif->filter_line = filter_line_c;
    /* NOTE(review): the SSE2 branch is compile-guarded by HAVE_SSE, not
     * HAVE_SSE2 — presumably because configure only defines HAVE_SSE in
     * this tree; confirm before changing the macro. */
    if (HAVE_SSSE3 && cpu_flags & AV_CPU_FLAG_SSSE3)
        yadif->filter_line = ff_yadif_filter_line_ssse3;
    else if (HAVE_SSE && cpu_flags & AV_CPU_FLAG_SSE2)
        yadif->filter_line = ff_yadif_filter_line_sse2;
    else if (HAVE_MMX && cpu_flags & AV_CPU_FLAG_MMX)
        yadif->filter_line = ff_yadif_filter_line_mmx;

    av_log(ctx, AV_LOG_INFO, "mode:%d parity:%d\n", yadif->mode, yadif->parity);

    return 0;
}
00325
/* Input slices are ignored: the filter needs whole frames, so all output
 * is produced from end_frame() via return_frame(). */
static void null_draw_slice(AVFilterLink *link, int y, int h, int slice_dir) { }
00327
/* Filter descriptor: one video input (buffering three frames via the
 * custom get_video_buffer/start_frame/end_frame callbacks) and one video
 * output driven by poll_frame/request_frame. */
AVFilter avfilter_vf_yadif = {
    .name          = "yadif",
    .description   = NULL_IF_CONFIG_SMALL("Deinterlace the input image"),

    .priv_size     = sizeof(YADIFContext),
    .init          = init,
    .uninit        = uninit,
    .query_formats = query_formats,

    .inputs    = (AVFilterPad[]) {{ .name             = "default",
                                    .type             = AVMEDIA_TYPE_VIDEO,
                                    .start_frame      = start_frame,
                                    /* padded buffers for out-of-frame reads */
                                    .get_video_buffer = get_video_buffer,
                                    /* slices ignored; see null_draw_slice */
                                    .draw_slice       = null_draw_slice,
                                    .end_frame        = end_frame, },
                                  { .name = NULL}},

    .outputs   = (AVFilterPad[]) {{ .name             = "default",
                                    .type             = AVMEDIA_TYPE_VIDEO,
                                    .poll_frame       = poll_frame,
                                    .request_frame    = request_frame, },
                                  { .name = NULL}},
};