Statistics
| Branch: | Revision:

ffmpeg / libavfilter / vf_yadif.c @ eb79c528

History | View | Annotate | Download (11.8 KB)

1
/*
2
 * Copyright (C) 2006-2010 Michael Niedermayer <michaelni@gmx.at>
3
 *               2010 James Darnley <james.darnley@gmail.com>
4
 * This file is part of FFmpeg.
5
 *
6
 * FFmpeg is free software; you can redistribute it and/or modify
7
 * it under the terms of the GNU General Public License as published by
8
 * the Free Software Foundation; either version 2 of the License, or
9
 * (at your option) any later version.
10
 *
11
 * FFmpeg is distributed in the hope that it will be useful,
12
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14
 * GNU General Public License for more details.
15
 *
16
 * You should have received a copy of the GNU General Public License along
17
 * with FFmpeg; if not, write to the Free Software Foundation, Inc.,
18
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
19
 */
20

    
21
#include "libavutil/cpu.h"
22
#include "libavutil/common.h"
23
#include "libavutil/pixdesc.h"
24
#include "avfilter.h"
25
#include "yadif.h"
26

    
27
#undef NDEBUG
28
#include <assert.h>
29

    
30
typedef struct {
    /**
     * 0: send 1 frame for each frame
     * 1: send 1 frame for each field
     * 2: like 0 but skips spatial interlacing check
     * 3: like 1 but skips spatial interlacing check
     */
    int mode;

    /**
     *  0: bottom field first
     *  1: top field first
     * -1: auto-detection
     */
    int parity;

    /* set while the second field of the current frame pair is still owed
     * to the output (only nonzero in the field-rate modes, 1 and 3) */
    int frame_pending;

    /* sliding three-frame window over the input stream */
    AVFilterBufferRef *cur;
    AVFilterBufferRef *next;
    AVFilterBufferRef *prev;
    /* output frame currently being assembled */
    AVFilterBufferRef *out;
    /* Deinterlace one line: dst receives the interpolated pixels; prev/cur/
     * next point at the same line in the three window frames; prefs/mrefs
     * are the byte offsets to the lines below/above.  Also reused for the
     * 16-bit path (filter_line_c_16bit takes uint16_t*), assigned through
     * this 8-bit-typed pointer in return_frame(). */
    void (*filter_line)(uint8_t *dst,
                        uint8_t *prev, uint8_t *cur, uint8_t *next,
                        int w, int prefs, int mrefs, int parity, int mode);

    /* pixel format descriptor of the link format; filled lazily on the
     * first call to return_frame() */
    const AVPixFmtDescriptor *csp;
} YADIFContext;
58

    
59
/* Edge-directed check for diagonal (j): score the absolute differences of
 * three pixel pairs across the missing line along that direction; if the
 * score beats the best so far, adopt the average along the direction as
 * the spatial prediction.
 * NOTE: the "{" and "if (...) {" are deliberately left unclosed -- the
 * matching "}} }}" sequences inside FILTER close them, chaining the
 * CHECK(-1)/CHECK(-2) (and +1/+2) invocations as nested ifs so a wider
 * diagonal is only tried when the narrower one already won. */
#define CHECK(j)\
    {   int score = FFABS(cur[mrefs-1+(j)] - cur[prefs-1-(j)])\
                  + FFABS(cur[mrefs  +(j)] - cur[prefs  -(j)])\
                  + FFABS(cur[mrefs+1+(j)] - cur[prefs+1-(j)]);\
        if (score < spatial_score) {\
            spatial_score= score;\
            spatial_pred= (cur[mrefs  +(j)] + cur[prefs  -(j)])>>1;\
66

    
67
/*
 * Core yadif kernel, shared (via textual expansion) by the 8-bit and
 * 16-bit C line filters.  Expects in scope: x, w, dst, prev, cur, next,
 * prev2, next2, prefs (offset to the line below), mrefs (offset to the
 * line above), and mode.
 *
 * Per output pixel:
 *  - d is the temporal prediction (average of prev2/next2 on this line);
 *  - diff estimates local temporal activity from the three frame pairs;
 *  - spatial_pred starts as the vertical average (c+e)>>1 and is refined
 *    by the CHECK() edge-direction search;
 *  - unless mode >= 2 (spatial interlacing check skipped), diff is
 *    tightened using the b/f lines two rows away;
 *  - finally spatial_pred is clamped to d +/- diff and stored.
 * All six pointers are advanced by one element per iteration.
 */
#define FILTER \
    for (x = 0;  x < w; x++) { \
        int c = cur[mrefs]; \
        int d = (prev2[0] + next2[0])>>1; \
        int e = cur[prefs]; \
        int temporal_diff0 = FFABS(prev2[0] - next2[0]); \
        int temporal_diff1 =(FFABS(prev[mrefs] - c) + FFABS(prev[prefs] - e) )>>1; \
        int temporal_diff2 =(FFABS(next[mrefs] - c) + FFABS(next[prefs] - e) )>>1; \
        int diff = FFMAX3(temporal_diff0>>1, temporal_diff1, temporal_diff2); \
        int spatial_pred = (c+e)>>1; \
        int spatial_score = FFABS(cur[mrefs-1] - cur[prefs-1]) + FFABS(c-e) \
                          + FFABS(cur[mrefs+1] - cur[prefs+1]) - 1; \
 \
        CHECK(-1) CHECK(-2) }} }} \
        CHECK( 1) CHECK( 2) }} }} \
 \
        if (mode < 2) { \
            int b = (prev2[2*mrefs] + next2[2*mrefs])>>1; \
            int f = (prev2[2*prefs] + next2[2*prefs])>>1; \
            int max = FFMAX3(d-e, d-c, FFMIN(b-c, f-e)); \
            int min = FFMIN3(d-e, d-c, FFMAX(b-c, f-e)); \
 \
            diff = FFMAX3(diff, min, -max); \
        } \
 \
        if (spatial_pred > d + diff) \
           spatial_pred = d + diff; \
        else if (spatial_pred < d - diff) \
           spatial_pred = d - diff; \
 \
        dst[0] = spatial_pred; \
 \
        dst++; \
        cur++; \
        prev++; \
        next++; \
        prev2++; \
        next2++; \
    }
106

    
107
/**
 * C reference implementation of the yadif line filter for 8-bit samples.
 * The variable names (x, prev2, next2) are required by the FILTER macro.
 */
static void filter_line_c(uint8_t *dst,
                          uint8_t *prev, uint8_t *cur, uint8_t *next,
                          int w, int prefs, int mrefs, int parity, int mode)
{
    int x;
    /* parity selects which neighbouring frame shares the field being
     * rebuilt; prev2/next2 supply the second temporal reference pair */
    uint8_t *prev2 = parity ? prev : cur ;
    uint8_t *next2 = parity ? cur  : next;

    FILTER
}
117

    
118
/**
 * C line filter for 16-bit samples.  Signature mirrors filter_line_c but
 * with uint16_t pointers; it is installed through the 8-bit-typed
 * filter_line function pointer in return_frame().
 */
static void filter_line_c_16bit(uint16_t *dst,
                                uint16_t *prev, uint16_t *cur, uint16_t *next,
                                int w, int prefs, int mrefs, int parity, int mode)
{
    int x;
    uint16_t *prev2 = parity ? prev : cur ;
    uint16_t *next2 = parity ? cur  : next;
    /* callers pass byte strides; convert to uint16_t element strides */
    mrefs /= 2;
    prefs /= 2;

    FILTER
}
130

    
131
/**
 * Deinterlace one output picture from the prev/cur/next frame window.
 *
 * @param dstpic destination frame: lines of the kept field are copied
 *               from cur, lines of the missing field are interpolated
 * @param parity which field to rebuild (lines where (y ^ parity) & 1)
 * @param tff    nonzero if the top field is first in time
 */
static void filter(AVFilterContext *ctx, AVFilterBufferRef *dstpic,
                   int parity, int tff)
{
    YADIFContext *yadif = ctx->priv;
    int y, i;

    for (i = 0; i < yadif->csp->nb_components; i++) {
        int w = dstpic->video->w;
        int h = dstpic->video->h;
        int refs = yadif->cur->linesize[i];
        /* bytes per sample: 1 for depth 8, 2 for depth 16; query_formats
         * only admits those two depths (9-15 bit would truncate here) */
        int df = (yadif->csp->comp[i].depth_minus1+1) / 8;

        if (i) {
        /* Why is this not part of the per-plane description thing? */
            w >>= yadif->csp->log2_chroma_w;
            h >>= yadif->csp->log2_chroma_h;
        }

        for (y = 0; y < h; y++) {
            if ((y ^ parity) & 1) {
                /* line of the missing field: interpolate it */
                uint8_t *prev = &yadif->prev->data[i][y*refs];
                uint8_t *cur  = &yadif->cur ->data[i][y*refs];
                uint8_t *next = &yadif->next->data[i][y*refs];
                uint8_t *dst  = &dstpic->data[i][y*dstpic->linesize[i]];
                /* next to the top/bottom border force mode 2 so the
                 * spatial interlacing check, which needs lines two rows
                 * away, is skipped */
                int     mode  = y==1 || y+2==h ? 2 : yadif->mode;
                /* mirror the vertical references at the frame edges */
                yadif->filter_line(dst, prev, cur, next, w, y+1<h ? refs : -refs, y ? -refs : refs, parity ^ tff, mode);
            } else {
                /* line of the kept field: plain copy from cur */
                memcpy(&dstpic->data[i][y*dstpic->linesize[i]],
                       &yadif->cur->data[i][y*refs], w*df);
            }
        }
    }
#if HAVE_MMX
    /* reset the FPU state after the MMX line-filter implementations */
    __asm__ volatile("emms \n\t" : : : "memory");
#endif
}
167

    
168
/**
 * Allocate an input buffer with padding so the line filter may read one
 * row above and below the picture: the allocation is rounded up and each
 * plane pointer is shifted down by one line, making row -1 addressable.
 */
static AVFilterBufferRef *get_video_buffer(AVFilterLink *link, int perms, int w, int h)
{
    const int padded_w = FFALIGN(w, 32);
    const int padded_h = FFALIGN(h + 2, 32);
    int plane;
    AVFilterBufferRef *buf =
        avfilter_default_get_video_buffer(link, perms, padded_w, padded_h);

    /* report the requested (unpadded) dimensions to the caller */
    buf->video->w = w;
    buf->video->h = h;

    for (plane = 0; plane < 3; plane++)
        buf->data[plane] += buf->linesize[plane];

    return buf;
}
185

    
186
/**
 * Filter and emit one output frame on the output link.
 *
 * @param is_second nonzero when emitting the second field of a frame pair
 *                  (field-rate modes); the first field reuses yadif->out
 *                  allocated in start_frame(), the second allocates its own
 */
static void return_frame(AVFilterContext *ctx, int is_second)
{
    YADIFContext *yadif = ctx->priv;
    AVFilterLink *link= ctx->outputs[0];
    int tff;

    if (yadif->parity == -1) {
        /* auto parity: trust the frame's flag, default to top-field-first
         * when the frame is not marked interlaced */
        tff = yadif->cur->video->interlaced ?
            yadif->cur->video->top_field_first : 1;
    } else {
        tff = yadif->parity^1;
    }

    if (is_second)
        yadif->out = avfilter_get_video_buffer(link, AV_PERM_WRITE | AV_PERM_PRESERVE |
                                               AV_PERM_REUSE, link->w, link->h);

    /* lazily bind the pixel format descriptor and, for 16-bit input,
     * swap in the 16-bit C line filter */
    if (!yadif->csp)
        yadif->csp = &av_pix_fmt_descriptors[link->format];
    if (yadif->csp->comp[0].depth_minus1 == 15)
        yadif->filter_line = filter_line_c_16bit;

    filter(ctx, yadif->out, tff ^ !is_second, tff);

    if (is_second) {
        if (yadif->next->pts != AV_NOPTS_VALUE &&
            yadif->cur->pts != AV_NOPTS_VALUE) {
            /* (a & b) + ((a ^ b) >> 1) is the overflow-free average of two
             * int64 timestamps: the second field lands halfway between the
             * current and next frame's pts */
            yadif->out->pts =
                (yadif->next->pts&yadif->cur->pts) +
                ((yadif->next->pts^yadif->cur->pts)>>1);
        } else {
            yadif->out->pts = AV_NOPTS_VALUE;
        }
        avfilter_start_frame(ctx->outputs[0], yadif->out);
    }
    avfilter_draw_slice(ctx->outputs[0], 0, link->h, 1);
    avfilter_end_frame(ctx->outputs[0]);

    /* in field-rate modes (odd mode values) the second field is still owed */
    yadif->frame_pending = (yadif->mode&1) && !is_second;
}
226

    
227
/**
 * Input start_frame callback: rotates the incoming picture into the
 * prev/cur/next window and opens the output frame for the current one.
 */
static void start_frame(AVFilterLink *link, AVFilterBufferRef *picref)
{
    AVFilterContext *ctx = link->dst;
    YADIFContext *s = ctx->priv;

    /* a pending second field must go out before the window moves on */
    if (s->frame_pending)
        return_frame(ctx, 1);

    /* slide the three-frame window */
    if (s->prev)
        avfilter_unref_buffer(s->prev);
    s->prev = s->cur;
    s->cur  = s->next;
    s->next = picref;

    /* window not yet full: nothing to filter */
    if (!s->cur)
        return;

    /* first frame of the stream: duplicate cur as prev */
    if (!s->prev)
        s->prev = avfilter_ref_buffer(s->cur, AV_PERM_READ);

    s->out = avfilter_get_video_buffer(ctx->outputs[0],
                                       AV_PERM_WRITE | AV_PERM_PRESERVE | AV_PERM_REUSE,
                                       link->w, link->h);

    avfilter_copy_buffer_ref_props(s->out, s->cur);
    s->out->video->interlaced = 0;
    avfilter_start_frame(ctx->outputs[0], s->out);
}
254

    
255
/**
 * Input end_frame callback: emits the first field of the frame pair,
 * unless the window is still filling and no output frame was opened.
 */
static void end_frame(AVFilterLink *link)
{
    AVFilterContext *ctx = link->dst;
    YADIFContext *s = ctx->priv;

    if (s->out)
        return_frame(ctx, 0);
}
265

    
266
/**
 * Output request_frame callback.  Either flushes a pending second field,
 * or pulls input frames until the window holds a current frame (the pull
 * always happens at least once, mirroring the input request semantics).
 *
 * @return 0 on success, a negative error code from the input otherwise
 */
static int request_frame(AVFilterLink *link)
{
    AVFilterContext *ctx = link->src;
    YADIFContext *s = ctx->priv;

    if (s->frame_pending) {
        return_frame(ctx, 1);
        return 0;
    }

    for (;;) {
        int err = avfilter_request_frame(ctx->inputs[0]);
        if (err)
            return err;
        if (s->cur)
            break;
    }

    return 0;
}
285

    
286
/**
 * Output poll_frame callback: reports how many frames can be produced.
 * Field-rate modes (odd mode values) yield two output frames per input.
 */
static int poll_frame(AVFilterLink *link)
{
    YADIFContext *s = link->src->priv;
    int ret, available;

    /* the second field of the current pair is ready to go */
    if (s->frame_pending)
        return 1;

    available = avfilter_poll_frame(link->src->inputs[0]);

    /* FIXME: change the API so this pre-fetch red tape is not required */
    if (available == 1 && !s->next) {
        if ((ret = avfilter_request_frame(link->src->inputs[0])) < 0)
            return ret;
        available = avfilter_poll_frame(link->src->inputs[0]);
    }
    assert(s->next || !available);

    return available * ((s->mode & 1) + 1);
}
305

    
306
/**
 * Release the buffer references held in the prev/cur/next window.
 * The NULL guards are kept (this API vintage may not accept NULL), and
 * the pointers are cleared afterwards so the context never holds
 * dangling references if uninit runs more than once.
 */
static av_cold void uninit(AVFilterContext *ctx)
{
    YADIFContext *yadif = ctx->priv;

    if (yadif->prev) avfilter_unref_buffer(yadif->prev);
    if (yadif->cur ) avfilter_unref_buffer(yadif->cur );
    if (yadif->next) avfilter_unref_buffer(yadif->next);
    yadif->prev = yadif->cur = yadif->next = NULL;
}
314

    
315
/**
 * Declare the pixel formats yadif supports: planar 8-bit YUV/gray
 * layouts plus the native-endian 16-bit variants.
 */
static int query_formats(AVFilterContext *ctx)
{
    static const enum PixelFormat supported[] = {
        PIX_FMT_YUV420P,  PIX_FMT_YUV422P,  PIX_FMT_YUV444P,
        PIX_FMT_YUV410P,  PIX_FMT_YUV411P,  PIX_FMT_GRAY8,
        PIX_FMT_YUVJ420P, PIX_FMT_YUVJ422P, PIX_FMT_YUVJ444P,
        AV_NE( PIX_FMT_GRAY16BE,    PIX_FMT_GRAY16LE ),
        PIX_FMT_YUV440P,  PIX_FMT_YUVJ440P,
        AV_NE( PIX_FMT_YUV420P16BE, PIX_FMT_YUV420P16LE ),
        AV_NE( PIX_FMT_YUV422P16BE, PIX_FMT_YUV422P16LE ),
        AV_NE( PIX_FMT_YUV444P16BE, PIX_FMT_YUV444P16LE ),
        PIX_FMT_NONE
    };

    avfilter_set_common_formats(ctx, avfilter_make_format_list(supported));

    return 0;
}
340

    
341
/**
 * Filter init: parse the "mode:parity" option string and select the
 * fastest available 8-bit line filter for this CPU.
 */
static av_cold int init(AVFilterContext *ctx, const char *args, void *opaque)
{
    YADIFContext *yadif = ctx->priv;
    av_unused int cpu_flags = av_get_cpu_flags();

    /* defaults: frame-rate output, automatic field-order detection */
    yadif->mode = 0;
    yadif->parity = -1;
    yadif->csp = NULL;

    /* either value may be omitted; unmatched fields keep their defaults */
    if (args) sscanf(args, "%d:%d", &yadif->mode, &yadif->parity);

    /* return_frame() later swaps in the 16-bit C filter if needed */
    yadif->filter_line = filter_line_c;
    if (HAVE_SSSE3 && cpu_flags & AV_CPU_FLAG_SSSE3)
        yadif->filter_line = ff_yadif_filter_line_ssse3;
    /* NOTE(review): the compile-time gate is HAVE_SSE while the runtime
     * flag checked is SSE2 -- looks inconsistent; confirm which macros
     * this tree's configure actually defines before changing it */
    else if (HAVE_SSE && cpu_flags & AV_CPU_FLAG_SSE2)
        yadif->filter_line = ff_yadif_filter_line_sse2;
    else if (HAVE_MMX && cpu_flags & AV_CPU_FLAG_MMX)
        yadif->filter_line = ff_yadif_filter_line_mmx;

    av_log(ctx, AV_LOG_INFO, "mode:%d parity:%d\n", yadif->mode, yadif->parity);

    return 0;
}
364

    
365
/* Incoming slices are intentionally ignored: yadif needs whole frames and
 * does all its drawing from end_frame()/return_frame(). */
static void null_draw_slice(AVFilterLink *link, int y, int h, int slice_dir) { }
366

    
367
/* Filter registration: one video input (frames buffered via
 * start_frame/end_frame, custom padded buffers, slices ignored) and one
 * video output driving the pull side through poll/request_frame. */
AVFilter avfilter_vf_yadif = {
    .name          = "yadif",
    .description   = NULL_IF_CONFIG_SMALL("Deinterlace the input image"),

    .priv_size     = sizeof(YADIFContext),
    .init          = init,
    .uninit        = uninit,
    .query_formats = query_formats,

    .inputs    = (AVFilterPad[]) {{ .name             = "default",
                                    .type             = AVMEDIA_TYPE_VIDEO,
                                    .start_frame      = start_frame,
                                    .get_video_buffer = get_video_buffer,
                                    .draw_slice       = null_draw_slice,
                                    .end_frame        = end_frame, },
                                  { .name = NULL}},

    .outputs   = (AVFilterPad[]) {{ .name             = "default",
                                    .type             = AVMEDIA_TYPE_VIDEO,
                                    .poll_frame       = poll_frame,
                                    .request_frame    = request_frame, },
                                  { .name = NULL}},
};
};