ffmpeg / libavfilter / vf_yadif.c @ 2657fd0c


/*
 * Copyright (C) 2006-2010 Michael Niedermayer <michaelni@gmx.at>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with FFmpeg; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 */

#include "libavutil/cpu.h"
#include "libavutil/common.h"
#include "libavutil/pixdesc.h"
#include "avfilter.h"
#include "yadif.h"

#undef NDEBUG
#include <assert.h>

typedef struct {
    /**
     * 0: send 1 frame for each frame
     * 1: send 1 frame for each field
     * 2: like 0 but skips spatial interlacing check
     * 3: like 1 but skips spatial interlacing check
     */
    int mode;

    /**
     *  0: bottom field first
     *  1: top field first
     * -1: auto-detection
     */
    int parity;

    int frame_pending;

    AVFilterBufferRef *cur;
    AVFilterBufferRef *next;
    AVFilterBufferRef *prev;
    AVFilterBufferRef *out;
    void (*filter_line)(uint8_t *dst,
                        uint8_t *prev, uint8_t *cur, uint8_t *next,
                        int w, int prefs, int mrefs, int parity, int mode);

    const AVPixFmtDescriptor *csp;
} YADIFContext;

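/*
 * Deinterlace one line of one plane (8-bit samples).
 *
 * dst            output line
 * prev/cur/next  the same line in the previous, current and next frames
 * w              number of pixels to process
 * prefs/mrefs    byte offsets to the line below/above within the plane
 * parity         selects which neighbouring field pair is used for the
 *                temporal prediction
 * mode           as in YADIFContext.mode; modes >= 2 skip the spatial
 *                interlacing check
 *
 * For each pixel, a spatial prediction is taken along the edge direction
 * with the lowest spatial score and then clamped to the range allowed by
 * the temporal neighbours.
 */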
static void filter_line_c(uint8_t *dst,
                          uint8_t *prev, uint8_t *cur, uint8_t *next,
                          int w, int prefs, int mrefs, int parity, int mode)
{
    int x;
    uint8_t *prev2 = parity ? prev : cur ;
    uint8_t *next2 = parity ? cur  : next;
    for (x = 0;  x < w; x++) {
        int c = cur[mrefs];
        int d = (prev2[0] + next2[0])>>1;
        int e = cur[prefs];
        int temporal_diff0 = FFABS(prev2[0] - next2[0]);
        int temporal_diff1 =(FFABS(prev[mrefs] - c) + FFABS(prev[prefs] - e) )>>1;
        int temporal_diff2 =(FFABS(next[mrefs] - c) + FFABS(next[prefs] - e) )>>1;
        int diff = FFMAX3(temporal_diff0>>1, temporal_diff1, temporal_diff2);
        int spatial_pred = (c+e)>>1;
        int spatial_score = FFABS(cur[mrefs-1] - cur[prefs-1]) + FFABS(c-e)
                          + FFABS(cur[mrefs+1] - cur[prefs+1]) - 1;

#define CHECK(j)\
    {   int score = FFABS(cur[mrefs-1+(j)] - cur[prefs-1-(j)])\
                  + FFABS(cur[mrefs  +(j)] - cur[prefs  -(j)])\
                  + FFABS(cur[mrefs+1+(j)] - cur[prefs+1-(j)]);\
        if (score < spatial_score) {\
            spatial_score= score;\
            spatial_pred= (cur[mrefs  +(j)] + cur[prefs  -(j)])>>1;\

        CHECK(-1) CHECK(-2) }} }}
        CHECK( 1) CHECK( 2) }} }}

        if (mode < 2) {
            int b = (prev2[2*mrefs] + next2[2*mrefs])>>1;
            int f = (prev2[2*prefs] + next2[2*prefs])>>1;
#if 0
            int a = cur[-3*refs];
            int g = cur[+3*refs];
            int max = FFMAX3(d-e, d-c, FFMIN3(FFMAX(b-c,f-e),FFMAX(b-c,b-a),FFMAX(f-g,f-e)) );
            int min = FFMIN3(d-e, d-c, FFMAX3(FFMIN(b-c,f-e),FFMIN(b-c,b-a),FFMIN(f-g,f-e)) );
#else
            int max = FFMAX3(d-e, d-c, FFMIN(b-c, f-e));
            int min = FFMIN3(d-e, d-c, FFMAX(b-c, f-e));
#endif

            diff = FFMAX3(diff, min, -max);
        }

        if (spatial_pred > d + diff)
           spatial_pred = d + diff;
        else if (spatial_pred < d - diff)
           spatial_pred = d - diff;

        dst[0] = spatial_pred;

        dst++;
        cur++;
        prev++;
        next++;
        prev2++;
        next2++;
    }
}

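/*
 * Same algorithm as filter_line_c(), but operating on 16-bit samples;
 * prefs/mrefs arrive as byte offsets and are converted to element offsets.
 */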
static void filter_line_c_16bit(uint16_t *dst,
                                uint16_t *prev, uint16_t *cur, uint16_t *next,
                                int w, int prefs, int mrefs, int parity, int mode)
{
    int x;
    uint16_t *prev2 = parity ? prev : cur ;
    uint16_t *next2 = parity ? cur  : next;
    mrefs /= 2;
    prefs /= 2;
    for (x = 0;  x < w; x++) {
        int c = cur[mrefs];
        int d = (prev2[0] + next2[0])>>1;
        int e = cur[prefs];
        int temporal_diff0 = FFABS(prev2[0] - next2[0]);
        int temporal_diff1 =(FFABS(prev[mrefs] - c) + FFABS(prev[prefs] - e) )>>1;
        int temporal_diff2 =(FFABS(next[mrefs] - c) + FFABS(next[prefs] - e) )>>1;
        int diff = FFMAX3(temporal_diff0>>1, temporal_diff1, temporal_diff2);
        int spatial_pred = (c+e)>>1;
        int spatial_score = FFABS(cur[mrefs-1] - cur[prefs-1]) + FFABS(c-e)
                          + FFABS(cur[mrefs+1] - cur[prefs+1]) - 1;

#define CHECK(j)\
    {   int score = FFABS(cur[mrefs-1+j] - cur[prefs-1-j])\
                  + FFABS(cur[mrefs  +j] - cur[prefs  -j])\
                  + FFABS(cur[mrefs+1+j] - cur[prefs+1-j]);\
        if (score < spatial_score) {\
            spatial_score= score;\
            spatial_pred= (cur[mrefs  +j] + cur[prefs  -j])>>1;\

        CHECK(-1) CHECK(-2) }} }}
        CHECK( 1) CHECK( 2) }} }}

        if (mode < 2) {
            int b = (prev2[2*mrefs] + next2[2*mrefs])>>1;
            int f = (prev2[2*prefs] + next2[2*prefs])>>1;
#if 0
            int a = cur[-3*refs];
            int g = cur[+3*refs];
            int max = FFMAX3(d-e, d-c, FFMIN3(FFMAX(b-c,f-e),FFMAX(b-c,b-a),FFMAX(f-g,f-e)) );
            int min = FFMIN3(d-e, d-c, FFMAX3(FFMIN(b-c,f-e),FFMIN(b-c,b-a),FFMIN(f-g,f-e)) );
#else
            int max = FFMAX3(d-e, d-c, FFMIN(b-c, f-e));
            int min = FFMIN3(d-e, d-c, FFMAX(b-c, f-e));
#endif

            diff = FFMAX3(diff, min, -max);
        }

        if (spatial_pred > d + diff)
           spatial_pred = d + diff;
        else if (spatial_pred < d - diff)
           spatial_pred = d - diff;

        dst[0] = spatial_pred;

        dst++;
        cur++;
        prev++;
        next++;
        prev2++;
        next2++;
    }
}

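/*
 * Deinterlace one whole frame into dstpic: lines selected by 'parity' are
 * interpolated with filter_line(), the remaining lines are copied unchanged
 * from the current input frame; 'tff' is the top-field-first flag.
 */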
static void filter(AVFilterContext *ctx, AVFilterBufferRef *dstpic,
                   int parity, int tff)
{
    YADIFContext *yadif = ctx->priv;
    int y, i;

    for (i = 0; i < yadif->csp->nb_components; i++) {
        int w = dstpic->video->w;
        int h = dstpic->video->h;
        int refs = yadif->cur->linesize[i];
        int df = (yadif->csp->comp[i].depth_minus1+1) / 8;

        if (i) {
        /* Why is this not part of the per-plane description thing? */
            w >>= yadif->csp->log2_chroma_w;
            h >>= yadif->csp->log2_chroma_h;
        }

        for (y = 0; y < h; y++) {
            if ((y ^ parity) & 1) {
                uint8_t *prev = &yadif->prev->data[i][y*refs];
                uint8_t *cur  = &yadif->cur ->data[i][y*refs];
                uint8_t *next = &yadif->next->data[i][y*refs];
                uint8_t *dst  = &dstpic->data[i][y*dstpic->linesize[i]];
                int     mode  = y==1 || y+2==h ? 2 : yadif->mode;
                yadif->filter_line(dst, prev, cur, next, w, y+1<h ? refs : -refs, y ? -refs : refs, parity ^ tff, mode);
            } else {
                memcpy(&dstpic->data[i][y*dstpic->linesize[i]],
                       &yadif->cur->data[i][y*refs], w*df);
            }
        }
    }
#if HAVE_MMX
    __asm__ volatile("emms \n\t" : : : "memory");
#endif
}

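/*
 * Allocate a padded buffer (width and height rounded up, at least one extra
 * line at top and bottom) and offset the data pointers by one line so that
 * filter_line() can safely read one line above and below the image.
 */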
static AVFilterBufferRef *get_video_buffer(AVFilterLink *link, int perms, int w, int h)
{
    AVFilterBufferRef *picref;
    int width = FFALIGN(w, 32);
    int height= FFALIGN(h+2, 32);
    int i;

    picref = avfilter_default_get_video_buffer(link, perms, width, height);

    picref->video->w = w;
    picref->video->h = h;

    for (i = 0; i < 3; i++)
        picref->data[i] += picref->linesize[i];

    return picref;
}

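/*
 * Emit one deinterlaced frame on the output link. is_second is set when this
 * is the second field of the current input frame (modes 1/3); in that case a
 * new output buffer is allocated and its pts is set to the midpoint between
 * the current and next input pts.
 */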
static void return_frame(AVFilterContext *ctx, int is_second)
{
    YADIFContext *yadif = ctx->priv;
    AVFilterLink *link= ctx->outputs[0];
    int tff;

    if (yadif->parity == -1) {
        tff = yadif->cur->video->interlaced ?
            yadif->cur->video->top_field_first : 1;
    } else {
        tff = yadif->parity^1;
    }

    if (is_second)
        yadif->out = avfilter_get_video_buffer(link, AV_PERM_WRITE | AV_PERM_PRESERVE |
                                               AV_PERM_REUSE, link->w, link->h);

    if (!yadif->csp)
        yadif->csp = &av_pix_fmt_descriptors[link->format];
    if (yadif->csp->comp[0].depth_minus1 == 15)
        yadif->filter_line = filter_line_c_16bit;

    filter(ctx, yadif->out, tff ^ !is_second, tff);

    if (is_second) {
        if (yadif->next->pts != AV_NOPTS_VALUE &&
            yadif->cur->pts != AV_NOPTS_VALUE) {
            yadif->out->pts =
                (yadif->next->pts&yadif->cur->pts) +
                ((yadif->next->pts^yadif->cur->pts)>>1);
        } else {
            yadif->out->pts = AV_NOPTS_VALUE;
        }
        avfilter_start_frame(ctx->outputs[0], yadif->out);
    }
    avfilter_draw_slice(ctx->outputs[0], 0, link->h, 1);
    avfilter_end_frame(ctx->outputs[0]);

    yadif->frame_pending = (yadif->mode&1) && !is_second;
}

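/*
 * Input start_frame callback: shift the prev/cur/next window, request an
 * output buffer for the deinterlaced frame and forward start_frame on the
 * output link with the interlaced flag cleared. The very first input frame
 * is duplicated into 'prev'.
 */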
static void start_frame(AVFilterLink *link, AVFilterBufferRef *picref)
{
    AVFilterContext *ctx = link->dst;
    YADIFContext *yadif = ctx->priv;

    if (yadif->frame_pending)
        return_frame(ctx, 1);

    if (yadif->prev)
        avfilter_unref_buffer(yadif->prev);
    yadif->prev = yadif->cur;
    yadif->cur  = yadif->next;
    yadif->next = picref;

    if (!yadif->cur)
        return;

    if (!yadif->prev)
        yadif->prev = avfilter_ref_buffer(yadif->cur, AV_PERM_READ);

    yadif->out = avfilter_get_video_buffer(ctx->outputs[0], AV_PERM_WRITE | AV_PERM_PRESERVE |
                                       AV_PERM_REUSE, link->w, link->h);

    avfilter_copy_buffer_ref_props(yadif->out, yadif->cur);
    yadif->out->video->interlaced = 0;
    avfilter_start_frame(ctx->outputs[0], yadif->out);
}

static void end_frame(AVFilterLink *link)
{
    AVFilterContext *ctx = link->dst;
    YADIFContext *yadif = ctx->priv;

    if (!yadif->out)
        return;

    return_frame(ctx, 0);
}

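/*
 * Output request_frame callback: if a second field is pending, emit it;
 * otherwise keep requesting frames from the input until one has been
 * buffered in 'cur'.
 */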
static int request_frame(AVFilterLink *link)
{
    AVFilterContext *ctx = link->src;
    YADIFContext *yadif = ctx->priv;

    if (yadif->frame_pending) {
        return_frame(ctx, 1);
        return 0;
    }

    do {
        int ret;

        if ((ret = avfilter_request_frame(link->src->inputs[0])))
            return ret;
    } while (!yadif->cur);

    return 0;
}

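/*
 * Output poll_frame callback: report how many frames can be produced. In
 * field mode (modes 1/3) every buffered input frame yields two output frames.
 */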
static int poll_frame(AVFilterLink *link)
{
    YADIFContext *yadif = link->src->priv;
    int ret, val;

    if (yadif->frame_pending)
        return 1;

    val = avfilter_poll_frame(link->src->inputs[0]);

    if (val==1 && !yadif->next) { //FIXME change API to not require this red tape
        if ((ret = avfilter_request_frame(link->src->inputs[0])) < 0)
            return ret;
        val = avfilter_poll_frame(link->src->inputs[0]);
    }
    assert(yadif->next || !val);

    return val * ((yadif->mode&1)+1);
}

static av_cold void uninit(AVFilterContext *ctx)
{
    YADIFContext *yadif = ctx->priv;

    if (yadif->prev) avfilter_unref_buffer(yadif->prev);
    if (yadif->cur ) avfilter_unref_buffer(yadif->cur );
    if (yadif->next) avfilter_unref_buffer(yadif->next);
}

static int query_formats(AVFilterContext *ctx)
{
    static const enum PixelFormat pix_fmts[] = {
        PIX_FMT_YUV420P,
        PIX_FMT_YUV422P,
        PIX_FMT_YUV444P,
        PIX_FMT_YUV410P,
        PIX_FMT_YUV411P,
        PIX_FMT_GRAY8,
        PIX_FMT_YUVJ420P,
        PIX_FMT_YUVJ422P,
        PIX_FMT_YUVJ444P,
        AV_NE( PIX_FMT_GRAY16BE, PIX_FMT_GRAY16LE ),
        PIX_FMT_YUV440P,
        PIX_FMT_YUVJ440P,
        AV_NE( PIX_FMT_YUV420P16BE, PIX_FMT_YUV420P16LE ),
        AV_NE( PIX_FMT_YUV422P16BE, PIX_FMT_YUV422P16LE ),
        AV_NE( PIX_FMT_YUV444P16BE, PIX_FMT_YUV444P16LE ),
        PIX_FMT_NONE
    };

    avfilter_set_common_formats(ctx, avfilter_make_format_list(pix_fmts));

    return 0;
}

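/*
 * Parse the "mode:parity" filter arguments and pick the fastest available
 * filter_line implementation (SSSE3/SSE2/MMX, falling back to C).
 */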
static av_cold int init(AVFilterContext *ctx, const char *args, void *opaque)
{
    YADIFContext *yadif = ctx->priv;
    av_unused int cpu_flags = av_get_cpu_flags();

    yadif->mode = 0;
    yadif->parity = -1;
    yadif->csp = NULL;

    if (args) sscanf(args, "%d:%d", &yadif->mode, &yadif->parity);

    yadif->filter_line = filter_line_c;
    if (HAVE_SSSE3 && cpu_flags & AV_CPU_FLAG_SSSE3)
        yadif->filter_line = ff_yadif_filter_line_ssse3;
    else if (HAVE_SSE && cpu_flags & AV_CPU_FLAG_SSE2)
        yadif->filter_line = ff_yadif_filter_line_sse2;
    else if (HAVE_MMX && cpu_flags & AV_CPU_FLAG_MMX)
        yadif->filter_line = ff_yadif_filter_line_mmx;

    av_log(ctx, AV_LOG_INFO, "mode:%d parity:%d\n", yadif->mode, yadif->parity);

    return 0;
}

static void null_draw_slice(AVFilterLink *link, int y, int h, int slice_dir) { }

AVFilter avfilter_vf_yadif = {
    .name          = "yadif",
    .description   = NULL_IF_CONFIG_SMALL("Deinterlace the input image"),

    .priv_size     = sizeof(YADIFContext),
    .init          = init,
    .uninit        = uninit,
    .query_formats = query_formats,

    .inputs    = (AVFilterPad[]) {{ .name             = "default",
                                    .type             = AVMEDIA_TYPE_VIDEO,
                                    .start_frame      = start_frame,
                                    .get_video_buffer = get_video_buffer,
                                    .draw_slice       = null_draw_slice,
                                    .end_frame        = end_frame, },
                                  { .name = NULL}},

    .outputs   = (AVFilterPad[]) {{ .name             = "default",
                                    .type             = AVMEDIA_TYPE_VIDEO,
                                    .poll_frame       = poll_frame,
                                    .request_frame    = request_frame, },
                                  { .name = NULL}},
};