ffmpeg / libavcodec / h264pred.c @ 5ada2524

/*
 * H.26L/H.264/AVC/JVT/14496-10/... encoder/decoder
 * Copyright (c) 2003 Michael Niedermayer <michaelni@gmx.at>
 *
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * H.264 / AVC / MPEG4 part10 prediction functions.
 * @author Michael Niedermayer <michaelni@gmx.at>
 */

#include "h264pred.h"
#include "h264pred_template.c"

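/* VP8 4x4 vertical/horizontal prediction: unlike the plain H.264 predictors,
 * the edge samples (including the top-left sample) are first smoothed with a
 * (1,2,1) filter and the filtered row/column is then replicated across the
 * block. */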
static void pred4x4_vertical_vp8_c(uint8_t *src, const uint8_t *topright, int stride){
    const int lt= src[-1-1*stride];
    LOAD_TOP_EDGE
    LOAD_TOP_RIGHT_EDGE
    uint32_t v = PACK_4U8((lt + 2*t0 + t1 + 2) >> 2,
                          (t0 + 2*t1 + t2 + 2) >> 2,
                          (t1 + 2*t2 + t3 + 2) >> 2,
                          (t2 + 2*t3 + t4 + 2) >> 2);

    AV_WN32A(src+0*stride, v);
    AV_WN32A(src+1*stride, v);
    AV_WN32A(src+2*stride, v);
    AV_WN32A(src+3*stride, v);
}

static void pred4x4_horizontal_vp8_c(uint8_t *src, const uint8_t *topright, int stride){
    const int lt= src[-1-1*stride];
    LOAD_LEFT_EDGE

    AV_WN32A(src+0*stride, ((lt + 2*l0 + l1 + 2) >> 2)*0x01010101);
    AV_WN32A(src+1*stride, ((l0 + 2*l1 + l2 + 2) >> 2)*0x01010101);
    AV_WN32A(src+2*stride, ((l1 + 2*l2 + l3 + 2) >> 2)*0x01010101);
    AV_WN32A(src+3*stride, ((l2 + 2*l3 + l3 + 2) >> 2)*0x01010101);
}

static void pred4x4_down_left_svq3_c(uint8_t *src, const uint8_t *topright, int stride){
    LOAD_TOP_EDGE
    LOAD_LEFT_EDGE
    const av_unused int unu0= t0;
    const av_unused int unu1= l0;

    src[0+0*stride]=(l1 + t1)>>1;
    src[1+0*stride]=
    src[0+1*stride]=(l2 + t2)>>1;
    src[2+0*stride]=
    src[1+1*stride]=
    src[0+2*stride]=
    src[3+0*stride]=
    src[2+1*stride]=
    src[1+2*stride]=
    src[0+3*stride]=
    src[3+1*stride]=
    src[2+2*stride]=
    src[1+3*stride]=
    src[3+2*stride]=
    src[2+3*stride]=
    src[3+3*stride]=(l3 + t3)>>1;
}

static void pred4x4_down_left_rv40_c(uint8_t *src, const uint8_t *topright, int stride){
    LOAD_TOP_EDGE
    LOAD_TOP_RIGHT_EDGE
    LOAD_LEFT_EDGE
    LOAD_DOWN_LEFT_EDGE

    src[0+0*stride]=(t0 + t2 + 2*t1 + 2 + l0 + l2 + 2*l1 + 2)>>3;
    src[1+0*stride]=
    src[0+1*stride]=(t1 + t3 + 2*t2 + 2 + l1 + l3 + 2*l2 + 2)>>3;
    src[2+0*stride]=
    src[1+1*stride]=
    src[0+2*stride]=(t2 + t4 + 2*t3 + 2 + l2 + l4 + 2*l3 + 2)>>3;
    src[3+0*stride]=
    src[2+1*stride]=
    src[1+2*stride]=
    src[0+3*stride]=(t3 + t5 + 2*t4 + 2 + l3 + l5 + 2*l4 + 2)>>3;
    src[3+1*stride]=
    src[2+2*stride]=
    src[1+3*stride]=(t4 + t6 + 2*t5 + 2 + l4 + l6 + 2*l5 + 2)>>3;
    src[3+2*stride]=
    src[2+3*stride]=(t5 + t7 + 2*t6 + 2 + l5 + l7 + 2*l6 + 2)>>3;
    src[3+3*stride]=(t6 + t7 + 1 + l6 + l7 + 1)>>2;
}

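/* "nodown" variant of the RV40 down-left prediction: used when the samples
 * below-left of the block are not available, so the bottom-most left sample
 * (l3) is replicated in place of l4..l7. */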
static void pred4x4_down_left_rv40_nodown_c(uint8_t *src, const uint8_t *topright, int stride){
    LOAD_TOP_EDGE
    LOAD_TOP_RIGHT_EDGE
    LOAD_LEFT_EDGE

    src[0+0*stride]=(t0 + t2 + 2*t1 + 2 + l0 + l2 + 2*l1 + 2)>>3;
    src[1+0*stride]=
    src[0+1*stride]=(t1 + t3 + 2*t2 + 2 + l1 + l3 + 2*l2 + 2)>>3;
    src[2+0*stride]=
    src[1+1*stride]=
    src[0+2*stride]=(t2 + t4 + 2*t3 + 2 + l2 + 3*l3 + 2)>>3;
    src[3+0*stride]=
    src[2+1*stride]=
    src[1+2*stride]=
    src[0+3*stride]=(t3 + t5 + 2*t4 + 2 + l3*4 + 2)>>3;
    src[3+1*stride]=
    src[2+2*stride]=
    src[1+3*stride]=(t4 + t6 + 2*t5 + 2 + l3*4 + 2)>>3;
    src[3+2*stride]=
    src[2+3*stride]=(t5 + t7 + 2*t6 + 2 + l3*4 + 2)>>3;
    src[3+3*stride]=(t6 + t7 + 1 + 2*l3 + 1)>>2;
}

static void pred4x4_vertical_left_rv40(uint8_t *src, const uint8_t *topright, int stride,
                                       const int l0, const int l1, const int l2, const int l3, const int l4){
    LOAD_TOP_EDGE
    LOAD_TOP_RIGHT_EDGE

    src[0+0*stride]=(2*t0 + 2*t1 + l1 + 2*l2 + l3 + 4)>>3;
    src[1+0*stride]=
    src[0+2*stride]=(t1 + t2 + 1)>>1;
    src[2+0*stride]=
    src[1+2*stride]=(t2 + t3 + 1)>>1;
    src[3+0*stride]=
    src[2+2*stride]=(t3 + t4+ 1)>>1;
    src[3+2*stride]=(t4 + t5+ 1)>>1;
    src[0+1*stride]=(t0 + 2*t1 + t2 + l2 + 2*l3 + l4 + 4)>>3;
    src[1+1*stride]=
    src[0+3*stride]=(t1 + 2*t2 + t3 + 2)>>2;
    src[2+1*stride]=
    src[1+3*stride]=(t2 + 2*t3 + t4 + 2)>>2;
    src[3+1*stride]=
    src[2+3*stride]=(t3 + 2*t4 + t5 + 2)>>2;
    src[3+3*stride]=(t4 + 2*t5 + t6 + 2)>>2;
}

static void pred4x4_vertical_left_rv40_c(uint8_t *src, const uint8_t *topright, int stride){
    LOAD_LEFT_EDGE
    LOAD_DOWN_LEFT_EDGE

    pred4x4_vertical_left_rv40(src, topright, stride, l0, l1, l2, l3, l4);
}

static void pred4x4_vertical_left_rv40_nodown_c(uint8_t *src, const uint8_t *topright, int stride){
    LOAD_LEFT_EDGE

    pred4x4_vertical_left_rv40(src, topright, stride, l0, l1, l2, l3, l3);
}

static void pred4x4_vertical_left_vp8_c(uint8_t *src, const uint8_t *topright, int stride){
    LOAD_TOP_EDGE
    LOAD_TOP_RIGHT_EDGE

    src[0+0*stride]=(t0 + t1 + 1)>>1;
    src[1+0*stride]=
    src[0+2*stride]=(t1 + t2 + 1)>>1;
    src[2+0*stride]=
    src[1+2*stride]=(t2 + t3 + 1)>>1;
    src[3+0*stride]=
    src[2+2*stride]=(t3 + t4 + 1)>>1;
    src[0+1*stride]=(t0 + 2*t1 + t2 + 2)>>2;
    src[1+1*stride]=
    src[0+3*stride]=(t1 + 2*t2 + t3 + 2)>>2;
    src[2+1*stride]=
    src[1+3*stride]=(t2 + 2*t3 + t4 + 2)>>2;
    src[3+1*stride]=
    src[2+3*stride]=(t3 + 2*t4 + t5 + 2)>>2;
    src[3+2*stride]=(t4 + 2*t5 + t6 + 2)>>2;
    src[3+3*stride]=(t5 + 2*t6 + t7 + 2)>>2;
}

static void pred4x4_horizontal_up_rv40_c(uint8_t *src, const uint8_t *topright, int stride){
    LOAD_LEFT_EDGE
    LOAD_DOWN_LEFT_EDGE
    LOAD_TOP_EDGE
    LOAD_TOP_RIGHT_EDGE

    src[0+0*stride]=(t1 + 2*t2 + t3 + 2*l0 + 2*l1 + 4)>>3;
    src[1+0*stride]=(t2 + 2*t3 + t4 + l0 + 2*l1 + l2 + 4)>>3;
    src[2+0*stride]=
    src[0+1*stride]=(t3 + 2*t4 + t5 + 2*l1 + 2*l2 + 4)>>3;
    src[3+0*stride]=
    src[1+1*stride]=(t4 + 2*t5 + t6 + l1 + 2*l2 + l3 + 4)>>3;
    src[2+1*stride]=
    src[0+2*stride]=(t5 + 2*t6 + t7 + 2*l2 + 2*l3 + 4)>>3;
    src[3+1*stride]=
    src[1+2*stride]=(t6 + 3*t7 + l2 + 3*l3 + 4)>>3;
    src[3+2*stride]=
    src[1+3*stride]=(l3 + 2*l4 + l5 + 2)>>2;
    src[0+3*stride]=
    src[2+2*stride]=(t6 + t7 + l3 + l4 + 2)>>2;
    src[2+3*stride]=(l4 + l5 + 1)>>1;
    src[3+3*stride]=(l4 + 2*l5 + l6 + 2)>>2;
}

static void pred4x4_horizontal_up_rv40_nodown_c(uint8_t *src, const uint8_t *topright, int stride){
    LOAD_LEFT_EDGE
    LOAD_TOP_EDGE
    LOAD_TOP_RIGHT_EDGE

    src[0+0*stride]=(t1 + 2*t2 + t3 + 2*l0 + 2*l1 + 4)>>3;
    src[1+0*stride]=(t2 + 2*t3 + t4 + l0 + 2*l1 + l2 + 4)>>3;
    src[2+0*stride]=
    src[0+1*stride]=(t3 + 2*t4 + t5 + 2*l1 + 2*l2 + 4)>>3;
    src[3+0*stride]=
    src[1+1*stride]=(t4 + 2*t5 + t6 + l1 + 2*l2 + l3 + 4)>>3;
    src[2+1*stride]=
    src[0+2*stride]=(t5 + 2*t6 + t7 + 2*l2 + 2*l3 + 4)>>3;
    src[3+1*stride]=
    src[1+2*stride]=(t6 + 3*t7 + l2 + 3*l3 + 4)>>3;
    src[3+2*stride]=
    src[1+3*stride]=l3;
    src[0+3*stride]=
    src[2+2*stride]=(t6 + t7 + 2*l3 + 2)>>2;
    src[2+3*stride]=
    src[3+3*stride]=l3;
}

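/* VP8 "TrueMotion" (TM) prediction (see also the 8x8 and 16x16 variants
 * below): each sample is predicted as left + top - topleft, clamped to the
 * 0..255 range through the ff_cropTbl lookup table. */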
static void pred4x4_tm_vp8_c(uint8_t *src, const uint8_t *topright, int stride){
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP - src[-1-stride];
    uint8_t *top = src-stride;
    int y;

    for (y = 0; y < 4; y++) {
        uint8_t *cm_in = cm + src[-1];
        src[0] = cm_in[top[0]];
        src[1] = cm_in[top[1]];
        src[2] = cm_in[top[2]];
        src[3] = cm_in[top[3]];
        src += stride;
    }
}

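/* SVQ3 and RV40 reuse the H.264 plane predictor through
 * pred16x16_plane_compat_c(); the two trailing flags select the
 * codec-specific coefficient scaling. */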
static void pred16x16_plane_svq3_c(uint8_t *src, int stride){
    pred16x16_plane_compat_c(src, stride, 1, 0);
}

static void pred16x16_plane_rv40_c(uint8_t *src, int stride){
    pred16x16_plane_compat_c(src, stride, 0, 1);
}

static void pred16x16_tm_vp8_c(uint8_t *src, int stride){
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP - src[-1-stride];
    uint8_t *top = src-stride;
    int y;

    for (y = 0; y < 16; y++) {
        uint8_t *cm_in = cm + src[-1];
        src[0]  = cm_in[top[0]];
        src[1]  = cm_in[top[1]];
        src[2]  = cm_in[top[2]];
        src[3]  = cm_in[top[3]];
        src[4]  = cm_in[top[4]];
        src[5]  = cm_in[top[5]];
        src[6]  = cm_in[top[6]];
        src[7]  = cm_in[top[7]];
        src[8]  = cm_in[top[8]];
        src[9]  = cm_in[top[9]];
        src[10] = cm_in[top[10]];
        src[11] = cm_in[top[11]];
        src[12] = cm_in[top[12]];
        src[13] = cm_in[top[13]];
        src[14] = cm_in[top[14]];
        src[15] = cm_in[top[15]];
        src += stride;
    }
}

static void pred8x8_left_dc_rv40_c(uint8_t *src, int stride){
    int i;
    int dc0;

    dc0=0;
    for(i=0;i<8; i++)
        dc0+= src[-1+i*stride];
    dc0= 0x01010101*((dc0 + 4)>>3);

    for(i=0; i<8; i++){
        ((uint32_t*)(src+i*stride))[0]=
        ((uint32_t*)(src+i*stride))[1]= dc0;
    }
}

static void pred8x8_top_dc_rv40_c(uint8_t *src, int stride){
    int i;
    int dc0;

    dc0=0;
    for(i=0;i<8; i++)
        dc0+= src[i-stride];
    dc0= 0x01010101*((dc0 + 4)>>3);

    for(i=0; i<8; i++){
        ((uint32_t*)(src+i*stride))[0]=
        ((uint32_t*)(src+i*stride))[1]= dc0;
    }
}

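/* RV40 8x8 DC prediction: the eight top and eight left neighbours are
 * averaged into a single DC value for the whole block, whereas the plain
 * H.264 predictor computes a separate DC per 4x4 quadrant. */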
static void pred8x8_dc_rv40_c(uint8_t *src, int stride){
    int i;
    int dc0=0;

    for(i=0;i<4; i++){
        dc0+= src[-1+i*stride] + src[i-stride];
        dc0+= src[4+i-stride];
        dc0+= src[-1+(i+4)*stride];
    }
    dc0= 0x01010101*((dc0 + 8)>>4);

    for(i=0; i<4; i++){
        ((uint32_t*)(src+i*stride))[0]= dc0;
        ((uint32_t*)(src+i*stride))[1]= dc0;
    }
    for(i=4; i<8; i++){
        ((uint32_t*)(src+i*stride))[0]= dc0;
        ((uint32_t*)(src+i*stride))[1]= dc0;
    }
}

static void pred8x8_tm_vp8_c(uint8_t *src, int stride){
    uint8_t *cm = ff_cropTbl + MAX_NEG_CROP - src[-1-stride];
    uint8_t *top = src-stride;
    int y;

    for (y = 0; y < 8; y++) {
        uint8_t *cm_in = cm + src[-1];
        src[0] = cm_in[top[0]];
        src[1] = cm_in[top[1]];
        src[2] = cm_in[top[2]];
        src[3] = cm_in[top[3]];
        src[4] = cm_in[top[4]];
        src[5] = cm_in[top[5]];
        src[6] = cm_in[top[6]];
        src[7] = cm_in[top[7]];
        src += stride;
    }
}

/**
 * Set the intra prediction function pointers.
 */
void ff_h264_pred_init(H264PredContext *h, int codec_id){
//    MpegEncContext * const s = &h->s;

    if(codec_id != CODEC_ID_RV40){
        if(codec_id == CODEC_ID_VP8) {
            h->pred4x4[VERT_PRED       ]= pred4x4_vertical_vp8_c;
            h->pred4x4[HOR_PRED        ]= pred4x4_horizontal_vp8_c;
        } else {
            h->pred4x4[VERT_PRED       ]= pred4x4_vertical_c;
            h->pred4x4[HOR_PRED        ]= pred4x4_horizontal_c;
        }
        h->pred4x4[DC_PRED             ]= pred4x4_dc_c;
        if(codec_id == CODEC_ID_SVQ3)
            h->pred4x4[DIAG_DOWN_LEFT_PRED ]= pred4x4_down_left_svq3_c;
        else
            h->pred4x4[DIAG_DOWN_LEFT_PRED ]= pred4x4_down_left_c;
        h->pred4x4[DIAG_DOWN_RIGHT_PRED]= pred4x4_down_right_c;
        h->pred4x4[VERT_RIGHT_PRED     ]= pred4x4_vertical_right_c;
        h->pred4x4[HOR_DOWN_PRED       ]= pred4x4_horizontal_down_c;
        if (codec_id == CODEC_ID_VP8) {
            h->pred4x4[VERT_LEFT_PRED  ]= pred4x4_vertical_left_vp8_c;
        } else
            h->pred4x4[VERT_LEFT_PRED  ]= pred4x4_vertical_left_c;
        h->pred4x4[HOR_UP_PRED         ]= pred4x4_horizontal_up_c;
        if(codec_id != CODEC_ID_VP8) {
            h->pred4x4[LEFT_DC_PRED    ]= pred4x4_left_dc_c;
            h->pred4x4[TOP_DC_PRED     ]= pred4x4_top_dc_c;
            h->pred4x4[DC_128_PRED     ]= pred4x4_128_dc_c;
        } else {
            h->pred4x4[TM_VP8_PRED     ]= pred4x4_tm_vp8_c;
            h->pred4x4[DC_127_PRED     ]= pred4x4_127_dc_c;
            h->pred4x4[DC_129_PRED     ]= pred4x4_129_dc_c;
            h->pred4x4[VERT_VP8_PRED   ]= pred4x4_vertical_c;
            h->pred4x4[HOR_VP8_PRED    ]= pred4x4_horizontal_c;
        }
    }else{
        h->pred4x4[VERT_PRED           ]= pred4x4_vertical_c;
        h->pred4x4[HOR_PRED            ]= pred4x4_horizontal_c;
        h->pred4x4[DC_PRED             ]= pred4x4_dc_c;
        h->pred4x4[DIAG_DOWN_LEFT_PRED ]= pred4x4_down_left_rv40_c;
        h->pred4x4[DIAG_DOWN_RIGHT_PRED]= pred4x4_down_right_c;
        h->pred4x4[VERT_RIGHT_PRED     ]= pred4x4_vertical_right_c;
        h->pred4x4[HOR_DOWN_PRED       ]= pred4x4_horizontal_down_c;
        h->pred4x4[VERT_LEFT_PRED      ]= pred4x4_vertical_left_rv40_c;
        h->pred4x4[HOR_UP_PRED         ]= pred4x4_horizontal_up_rv40_c;
        h->pred4x4[LEFT_DC_PRED        ]= pred4x4_left_dc_c;
        h->pred4x4[TOP_DC_PRED         ]= pred4x4_top_dc_c;
        h->pred4x4[DC_128_PRED         ]= pred4x4_128_dc_c;
        h->pred4x4[DIAG_DOWN_LEFT_PRED_RV40_NODOWN]= pred4x4_down_left_rv40_nodown_c;
        h->pred4x4[HOR_UP_PRED_RV40_NODOWN]= pred4x4_horizontal_up_rv40_nodown_c;
        h->pred4x4[VERT_LEFT_PRED_RV40_NODOWN]= pred4x4_vertical_left_rv40_nodown_c;
    }

    h->pred8x8l[VERT_PRED           ]= pred8x8l_vertical_c;
    h->pred8x8l[HOR_PRED            ]= pred8x8l_horizontal_c;
    h->pred8x8l[DC_PRED             ]= pred8x8l_dc_c;
    h->pred8x8l[DIAG_DOWN_LEFT_PRED ]= pred8x8l_down_left_c;
    h->pred8x8l[DIAG_DOWN_RIGHT_PRED]= pred8x8l_down_right_c;
    h->pred8x8l[VERT_RIGHT_PRED     ]= pred8x8l_vertical_right_c;
    h->pred8x8l[HOR_DOWN_PRED       ]= pred8x8l_horizontal_down_c;
    h->pred8x8l[VERT_LEFT_PRED      ]= pred8x8l_vertical_left_c;
    h->pred8x8l[HOR_UP_PRED         ]= pred8x8l_horizontal_up_c;
    h->pred8x8l[LEFT_DC_PRED        ]= pred8x8l_left_dc_c;
    h->pred8x8l[TOP_DC_PRED         ]= pred8x8l_top_dc_c;
    h->pred8x8l[DC_128_PRED         ]= pred8x8l_128_dc_c;

    h->pred8x8[VERT_PRED8x8   ]= pred8x8_vertical_c;
    h->pred8x8[HOR_PRED8x8    ]= pred8x8_horizontal_c;
    if (codec_id != CODEC_ID_VP8) {
        h->pred8x8[PLANE_PRED8x8]= pred8x8_plane_c;
    } else
        h->pred8x8[PLANE_PRED8x8]= pred8x8_tm_vp8_c;
    if(codec_id != CODEC_ID_RV40 && codec_id != CODEC_ID_VP8){
        h->pred8x8[DC_PRED8x8     ]= pred8x8_dc_c;
        h->pred8x8[LEFT_DC_PRED8x8]= pred8x8_left_dc_c;
        h->pred8x8[TOP_DC_PRED8x8 ]= pred8x8_top_dc_c;
        h->pred8x8[ALZHEIMER_DC_L0T_PRED8x8 ]= pred8x8_mad_cow_dc_l0t;
        h->pred8x8[ALZHEIMER_DC_0LT_PRED8x8 ]= pred8x8_mad_cow_dc_0lt;
        h->pred8x8[ALZHEIMER_DC_L00_PRED8x8 ]= pred8x8_mad_cow_dc_l00;
        h->pred8x8[ALZHEIMER_DC_0L0_PRED8x8 ]= pred8x8_mad_cow_dc_0l0;
    }else{
        h->pred8x8[DC_PRED8x8     ]= pred8x8_dc_rv40_c;
        h->pred8x8[LEFT_DC_PRED8x8]= pred8x8_left_dc_rv40_c;
        h->pred8x8[TOP_DC_PRED8x8 ]= pred8x8_top_dc_rv40_c;
        if (codec_id == CODEC_ID_VP8) {
            h->pred8x8[DC_127_PRED8x8]= pred8x8_127_dc_c;
            h->pred8x8[DC_129_PRED8x8]= pred8x8_129_dc_c;
        }
    }
    h->pred8x8[DC_128_PRED8x8 ]= pred8x8_128_dc_c;

    h->pred16x16[DC_PRED8x8     ]= pred16x16_dc_c;
    h->pred16x16[VERT_PRED8x8   ]= pred16x16_vertical_c;
    h->pred16x16[HOR_PRED8x8    ]= pred16x16_horizontal_c;
    switch(codec_id){
    case CODEC_ID_SVQ3:
       h->pred16x16[PLANE_PRED8x8  ]= pred16x16_plane_svq3_c;
       break;
    case CODEC_ID_RV40:
       h->pred16x16[PLANE_PRED8x8  ]= pred16x16_plane_rv40_c;
       break;
    case CODEC_ID_VP8:
       h->pred16x16[PLANE_PRED8x8  ]= pred16x16_tm_vp8_c;
       h->pred16x16[DC_127_PRED8x8]= pred16x16_127_dc_c;
       h->pred16x16[DC_129_PRED8x8]= pred16x16_129_dc_c;
       break;
    default:
       h->pred16x16[PLANE_PRED8x8  ]= pred16x16_plane_c;
       break;
    }
    h->pred16x16[LEFT_DC_PRED8x8]= pred16x16_left_dc_c;
    h->pred16x16[TOP_DC_PRED8x8 ]= pred16x16_top_dc_c;
    h->pred16x16[DC_128_PRED8x8 ]= pred16x16_128_dc_c;

    //special lossless h/v prediction for h264
    h->pred4x4_add  [VERT_PRED   ]= pred4x4_vertical_add_c;
    h->pred4x4_add  [ HOR_PRED   ]= pred4x4_horizontal_add_c;
    h->pred8x8l_add [VERT_PRED   ]= pred8x8l_vertical_add_c;
    h->pred8x8l_add [ HOR_PRED   ]= pred8x8l_horizontal_add_c;
    h->pred8x8_add  [VERT_PRED8x8]= pred8x8_vertical_add_c;
    h->pred8x8_add  [ HOR_PRED8x8]= pred8x8_horizontal_add_c;
    h->pred16x16_add[VERT_PRED8x8]= pred16x16_vertical_add_c;
    h->pred16x16_add[ HOR_PRED8x8]= pred16x16_horizontal_add_c;

    if (ARCH_ARM) ff_h264_pred_init_arm(h, codec_id);
    if (HAVE_MMX) ff_h264_pred_init_x86(h, codec_id);
}
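
/*
 * Usage sketch: a decoder owns an H264PredContext, fills it once with
 * ff_h264_pred_init() and then dispatches through the tables, roughly:
 *
 *     H264PredContext hpc;
 *     ff_h264_pred_init(&hpc, CODEC_ID_H264);
 *     hpc.pred16x16[DC_PRED8x8](dst, linesize);
 *
 * where dst points to the top-left sample of the block inside the picture,
 * linesize is the distance in bytes between rows, and the left/top neighbour
 * samples are assumed to have been decoded already.
 */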