ffmpeg / libavfilter / vf_yadif.c @ 88312a4d

/*
 * Copyright (C) 2006-2010 Michael Niedermayer <michaelni@gmx.at>
 *
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with Libav; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 */

#include "libavutil/cpu.h"
#include "libavutil/common.h"
#include "libavutil/pixdesc.h"
#include "avfilter.h"
#include "yadif.h"

#undef NDEBUG
#include <assert.h>

typedef struct {
    /**
     * 0: send 1 frame for each frame
     * 1: send 1 frame for each field
     * 2: like 0 but skips spatial interlacing check
     * 3: like 1 but skips spatial interlacing check
     */
    int mode;

    /**
     *  0: top field first
     *  1: bottom field first
     * -1: auto-detection
     */
    int parity;

    int frame_pending;

    AVFilterBufferRef *cur;
    AVFilterBufferRef *next;
    AVFilterBufferRef *prev;
    AVFilterBufferRef *out;
    void (*filter_line)(uint8_t *dst,
                        uint8_t *prev, uint8_t *cur, uint8_t *next,
                        int w, int prefs, int mrefs, int parity, int mode);

    const AVPixFmtDescriptor *csp;
} YADIFContext;

static void filter_line_c(uint8_t *dst,
                          uint8_t *prev, uint8_t *cur, uint8_t *next,
                          int w, int prefs, int mrefs, int parity, int mode)
{
    int x;
    uint8_t *prev2 = parity ? prev : cur ;
    uint8_t *next2 = parity ? cur  : next;
    for (x = 0;  x < w; x++) {
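        /* c and e are the pixels directly above and below the output pixel in
         * the current frame; d is the temporal average at the output position.
         * The temporal_diff values measure local motion and, via diff, bound
         * how far the spatial prediction may stray from the temporal one. */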
        int c = cur[mrefs];
        int d = (prev2[0] + next2[0])>>1;
        int e = cur[prefs];
        int temporal_diff0 = FFABS(prev2[0] - next2[0]);
        int temporal_diff1 =(FFABS(prev[mrefs] - c) + FFABS(prev[prefs] - e) )>>1;
        int temporal_diff2 =(FFABS(next[mrefs] - c) + FFABS(next[prefs] - e) )>>1;
        int diff = FFMAX3(temporal_diff0>>1, temporal_diff1, temporal_diff2);
        int spatial_pred = (c+e)>>1;
        int spatial_score = FFABS(cur[mrefs-1] - cur[prefs-1]) + FFABS(c-e)
                          + FFABS(cur[mrefs+1] - cur[prefs+1]) - 1;

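        /* Edge-directed interpolation: CHECK(j) scores the diagonal at offset j
         * by summing absolute differences of three mirrored pixel pairs, and the
         * best-scoring direction supplies the spatial prediction. */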
#define CHECK(j)\
    {   int score = FFABS(cur[mrefs-1+(j)] - cur[prefs-1-(j)])\
                  + FFABS(cur[mrefs  +(j)] - cur[prefs  -(j)])\
                  + FFABS(cur[mrefs+1+(j)] - cur[prefs+1-(j)]);\
        if (score < spatial_score) {\
            spatial_score= score;\
            spatial_pred= (cur[mrefs  +(j)] + cur[prefs  -(j)])>>1;\

        CHECK(-1) CHECK(-2) }} }}
        CHECK( 1) CHECK( 2) }} }}

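        /* In modes 0 and 1, b and f (temporal averages two lines above and
         * below) are used to relax the allowed deviation "diff" where the
         * vertical neighbourhood looks consistent, so static detail is not
         * smoothed away by the clamp below; modes 2 and 3 skip this check. */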
        if (mode < 2) {
            int b = (prev2[2*mrefs] + next2[2*mrefs])>>1;
            int f = (prev2[2*prefs] + next2[2*prefs])>>1;
#if 0
            int a = cur[-3*refs];
            int g = cur[+3*refs];
            int max = FFMAX3(d-e, d-c, FFMIN3(FFMAX(b-c,f-e),FFMAX(b-c,b-a),FFMAX(f-g,f-e)) );
            int min = FFMIN3(d-e, d-c, FFMAX3(FFMIN(b-c,f-e),FFMIN(b-c,b-a),FFMIN(f-g,f-e)) );
#else
            int max = FFMAX3(d-e, d-c, FFMIN(b-c, f-e));
            int min = FFMIN3(d-e, d-c, FFMAX(b-c, f-e));
#endif

            diff = FFMAX3(diff, min, -max);
        }

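        /* Clamp the spatial prediction to the temporally plausible range
         * [d - diff, d + diff] around the temporal average d. */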
        if (spatial_pred > d + diff)
           spatial_pred = d + diff;
        else if (spatial_pred < d - diff)
           spatial_pred = d - diff;

        dst[0] = spatial_pred;

        dst++;
        cur++;
        prev++;
        next++;
        prev2++;
        next2++;
    }
}

static void filter(AVFilterContext *ctx, AVFilterBufferRef *dstpic,
                   int parity, int tff)
{
    YADIFContext *yadif = ctx->priv;
    int y, i;

    for (i = 0; i < yadif->csp->nb_components; i++) {
        int w = dstpic->video->w;
        int h = dstpic->video->h;
        int refs = yadif->cur->linesize[i];

        if (i) {
        /* Why is this not part of the per-plane description thing? */
            w >>= yadif->csp->log2_chroma_w;
            h >>= yadif->csp->log2_chroma_h;
        }

        for (y = 0; y < h; y++) {
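            /* Lines whose parity matches the missing field are interpolated;
             * lines of the kept field are copied through unchanged.  On the
             * second and second-to-last lines, mode 2 is forced because the
             * checks two lines away would reference lines outside the image. */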
            if ((y ^ parity) & 1) {
                uint8_t *prev = &yadif->prev->data[i][y*refs];
                uint8_t *cur  = &yadif->cur ->data[i][y*refs];
                uint8_t *next = &yadif->next->data[i][y*refs];
                uint8_t *dst  = &dstpic->data[i][y*dstpic->linesize[i]];
                int     mode  = y==1 || y+2==h ? 2 : yadif->mode;
                yadif->filter_line(dst, prev, cur, next, w, y+1<h ? refs : -refs, y ? -refs : refs, parity ^ tff, mode);
            } else {
                memcpy(&dstpic->data[i][y*dstpic->linesize[i]],
                       &yadif->cur->data[i][y*refs], w);
            }
        }
    }
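    /* The MMX filter_line implementation leaves the CPU in MMX state, so emms
     * is issued before any floating-point code may run again. */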
#if HAVE_MMX
    __asm__ volatile("emms \n\t" : : : "memory");
#endif
}

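/* Over-allocate the frame (dimensions rounded up, plus an extra line above and
 * below) and shift data[] down by one line, so that taps falling just outside
 * the visible area still stay inside the allocation. */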
static AVFilterBufferRef *get_video_buffer(AVFilterLink *link, int perms, int w, int h)
{
    AVFilterBufferRef *picref;
    int width = FFALIGN(w, 32);
    int height= FFALIGN(h+2, 32);
    int i;

    picref = avfilter_default_get_video_buffer(link, perms, width, height);

    picref->video->w = w;
    picref->video->h = h;

    for (i = 0; i < 3; i++)
        picref->data[i] += picref->linesize[i];

    return picref;
}

static void return_frame(AVFilterContext *ctx, int is_second)
{
    YADIFContext *yadif = ctx->priv;
    AVFilterLink *link= ctx->outputs[0];
    int tff;

    if (yadif->parity == -1) {
        tff = yadif->cur->video->interlaced ?
            yadif->cur->video->top_field_first : 1;
    } else {
        tff = yadif->parity^1;
    }

    if (is_second)
        yadif->out = avfilter_get_video_buffer(link, AV_PERM_WRITE | AV_PERM_PRESERVE |
                                               AV_PERM_REUSE, link->w, link->h);

    if (!yadif->csp)
        yadif->csp = &av_pix_fmt_descriptors[link->format];

    filter(ctx, yadif->out, tff ^ !is_second, tff);

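    /* For the second field, the output timestamp is the midpoint of cur and
     * next: (a & b) + ((a ^ b) >> 1) averages two 64-bit pts values without
     * risking overflow. */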
    if (is_second) {
        if (yadif->next->pts != AV_NOPTS_VALUE &&
            yadif->cur->pts != AV_NOPTS_VALUE) {
            yadif->out->pts =
                (yadif->next->pts&yadif->cur->pts) +
                ((yadif->next->pts^yadif->cur->pts)>>1);
        } else {
            yadif->out->pts = AV_NOPTS_VALUE;
        }
        avfilter_start_frame(ctx->outputs[0], yadif->out);
    }
    avfilter_draw_slice(ctx->outputs[0], 0, link->h, 1);
    avfilter_end_frame(ctx->outputs[0]);

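    /* In field-rate modes a second output field is still owed for this input
     * frame once the first one has been sent. */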
    yadif->frame_pending = (yadif->mode&1) && !is_second;
}

static void start_frame(AVFilterLink *link, AVFilterBufferRef *picref)
{
    AVFilterContext *ctx = link->dst;
    YADIFContext *yadif = ctx->priv;

    if (yadif->frame_pending)
        return_frame(ctx, 1);

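    /* Rotate the three-frame window: the incoming picref becomes "next" and
     * filtering is centred on the new "cur". */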
    if (yadif->prev)
        avfilter_unref_buffer(yadif->prev);
    yadif->prev = yadif->cur;
    yadif->cur  = yadif->next;
    yadif->next = picref;

    if (!yadif->cur)
        return;

    if (!yadif->prev)
        yadif->prev = avfilter_ref_buffer(yadif->cur, AV_PERM_READ);

    yadif->out = avfilter_get_video_buffer(ctx->outputs[0], AV_PERM_WRITE | AV_PERM_PRESERVE |
                                       AV_PERM_REUSE, link->w, link->h);

    avfilter_copy_buffer_ref_props(yadif->out, yadif->cur);
    yadif->out->video->interlaced = 0;
    avfilter_start_frame(ctx->outputs[0], yadif->out);
}

static void end_frame(AVFilterLink *link)
{
    AVFilterContext *ctx = link->dst;
    YADIFContext *yadif = ctx->priv;

    if (!yadif->out)
        return;

    return_frame(ctx, 0);
}

static int request_frame(AVFilterLink *link)
{
    AVFilterContext *ctx = link->src;
    YADIFContext *yadif = ctx->priv;

    if (yadif->frame_pending) {
        return_frame(ctx, 1);
        return 0;
    }

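    /* Pull frames from upstream until at least one has been buffered as "cur";
     * the filter needs a prev/cur/next window before it can produce output. */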
    do {
        int ret;

        if ((ret = avfilter_request_frame(link->src->inputs[0])))
            return ret;
    } while (!yadif->cur);

    return 0;
}

static int poll_frame(AVFilterLink *link)
{
    YADIFContext *yadif = link->src->priv;
    int ret, val;

    if (yadif->frame_pending)
        return 1;

    val = avfilter_poll_frame(link->src->inputs[0]);

    if (val==1 && !yadif->next) { //FIXME change API to not require this red tape
        if ((ret = avfilter_request_frame(link->src->inputs[0])) < 0)
            return ret;
        val = avfilter_poll_frame(link->src->inputs[0]);
    }
    assert(yadif->next || !val);

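    /* In field-rate modes (1 and 3) every available input frame produces two
     * output frames, so the poll count is doubled. */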
    return val * ((yadif->mode&1)+1);
}

static av_cold void uninit(AVFilterContext *ctx)
{
    YADIFContext *yadif = ctx->priv;

    if (yadif->prev) avfilter_unref_buffer(yadif->prev);
    if (yadif->cur ) avfilter_unref_buffer(yadif->cur );
    if (yadif->next) avfilter_unref_buffer(yadif->next);
}

static int query_formats(AVFilterContext *ctx)
{
    static const enum PixelFormat pix_fmts[] = {
        PIX_FMT_YUV420P,
        PIX_FMT_YUV422P,
        PIX_FMT_YUV444P,
        PIX_FMT_YUV410P,
        PIX_FMT_YUV411P,
        PIX_FMT_GRAY8,
        PIX_FMT_YUVJ420P,
        PIX_FMT_YUVJ422P,
        PIX_FMT_YUVJ444P,
        PIX_FMT_YUV440P,
        PIX_FMT_YUVJ440P,
        PIX_FMT_NONE
    };

    avfilter_set_common_formats(ctx, avfilter_make_format_list(pix_fmts));

    return 0;
}

static av_cold int init(AVFilterContext *ctx, const char *args, void *opaque)
{
    YADIFContext *yadif = ctx->priv;
    av_unused int cpu_flags = av_get_cpu_flags();

    yadif->mode = 0;
    yadif->parity = -1;
    yadif->csp = NULL;

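    /* Options are parsed as "mode:parity", e.g. "yadif=1:-1" in a filtergraph
     * selects field-rate output with automatic parity detection. */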
    if (args) sscanf(args, "%d:%d", &yadif->mode, &yadif->parity);

    yadif->filter_line = filter_line_c;
    if (HAVE_SSSE3 && cpu_flags & AV_CPU_FLAG_SSSE3)
        yadif->filter_line = ff_yadif_filter_line_ssse3;
    else if (HAVE_SSE && cpu_flags & AV_CPU_FLAG_SSE2)
        yadif->filter_line = ff_yadif_filter_line_sse2;
    else if (HAVE_MMX && cpu_flags & AV_CPU_FLAG_MMX)
        yadif->filter_line = ff_yadif_filter_line_mmx;

    av_log(ctx, AV_LOG_INFO, "mode:%d parity:%d\n", yadif->mode, yadif->parity);

    return 0;
}

static void null_draw_slice(AVFilterLink *link, int y, int h, int slice_dir) { }

AVFilter avfilter_vf_yadif = {
    .name          = "yadif",
    .description   = NULL_IF_CONFIG_SMALL("Deinterlace the input image"),

    .priv_size     = sizeof(YADIFContext),
    .init          = init,
    .uninit        = uninit,
    .query_formats = query_formats,

    .inputs    = (AVFilterPad[]) {{ .name             = "default",
                                    .type             = AVMEDIA_TYPE_VIDEO,
                                    .start_frame      = start_frame,
                                    .get_video_buffer = get_video_buffer,
                                    .draw_slice       = null_draw_slice,
                                    .end_frame        = end_frame, },
                                  { .name = NULL}},

    .outputs   = (AVFilterPad[]) {{ .name             = "default",
                                    .type             = AVMEDIA_TYPE_VIDEO,
                                    .poll_frame       = poll_frame,
                                    .request_frame    = request_frame, },
                                  { .name = NULL}},
};