
/*
 * Alpha optimized DSP utils
 * Copyright (c) 2002 Falk Hueffner <falk@debian.org>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#ifndef FFMPEG_ASM_H
#define FFMPEG_ASM_H
#include <inttypes.h>

/* Evaluates to nonzero when compiling with GCC version >= maj.min;
 * always 0 on non-GCC compilers. */
#if defined __GNUC__
# define GNUC_PREREQ(maj, min) \
        ((__GNUC__ << 16) + __GNUC_MINOR__ >= ((maj) << 16) + (min))
#else
# define GNUC_PREREQ(maj, min) 0
#endif

/* Branch-prediction hints: normalize the condition to 0/1 and tell the
 * compiler which outcome is expected; plain pass-through on old GCC. */
#if GNUC_PREREQ(2,96)
# define likely(x)      __builtin_expect((x) != 0, 1)
# define unlikely(x)    __builtin_expect((x) != 0, 0)
#else
# define likely(x)      (x)
# define unlikely(x)    (x)
#endif

/* Feature bits as reported by the Alpha "amask" instruction. */
#define AMASK_BWX (1 << 0)  /* byte/word extension */
#define AMASK_FIX (1 << 1)  /* FP extension */
#define AMASK_CIX (1 << 2)  /* count extension */
#define AMASK_MVI (1 << 8)  /* multimedia (MVI) extension */

    
/* Replicate the low byte of x across all eight byte lanes of a 64-bit
 * word.  Higher bytes are OR-merged in by the shifts, so the caller is
 * expected to pass a value with only the low 8 bits set. */
static inline uint64_t BYTE_VEC(uint64_t x)
{
    x |= x <<  8;
    x |= x << 16;
    x |= x << 32;
    return x;
}
/* Replicate the low 16-bit word of x across all four word lanes of a
 * 64-bit value.  As with BYTE_VEC, higher bits are OR-merged in, so pass
 * a value with only the low 16 bits set. */
static inline uint64_t WORD_VEC(uint64_t x)
{
    x |= x << 16;
    x |= x << 32;
    return x;
}
/* Sign-extend the low 16 bits of x (cf. the Alpha "sextw" instruction). */
#define sextw(x) ((int16_t) (x))

#ifdef __GNUC__
/* Aligned 64-bit/32-bit loads and stores.  The union access tells GCC the
 * stored element type, keeping these safe under type-based aliasing. */
#define ldq(p)                                                  \
    (((union {                                                  \
        uint64_t __l;                                           \
        __typeof__(*(p)) __s[sizeof (uint64_t) / sizeof *(p)];  \
    } *) (p))->__l)
#define ldl(p)                                                  \
    (((union {                                                  \
        int32_t __l;                                            \
        __typeof__(*(p)) __s[sizeof (int32_t) / sizeof *(p)];   \
    } *) (p))->__l)
#define stq(l, p)                                                       \
    do {                                                                \
        (((union {                                                      \
            uint64_t __l;                                               \
            __typeof__(*(p)) __s[sizeof (uint64_t) / sizeof *(p)];      \
        } *) (p))->__l) = l;                                            \
    } while (0)
#define stl(l, p)                                                       \
    do {                                                                \
        (((union {                                                      \
            int32_t __l;                                                \
            __typeof__(*(p)) __s[sizeof (int32_t) / sizeof *(p)];       \
        } *) (p))->__l) = l;                                            \
    } while (0)
/* Unaligned access helpers: ldq_u masks the address down to the enclosing
 * aligned quadword (like the hardware ldq_u); uldq does a full unaligned
 * 64-bit load via a packed struct. */
struct unaligned_long { uint64_t l; } __attribute__((packed));
#define ldq_u(p)        (*(const uint64_t *) (((uint64_t) (p)) & ~7ul))
#define uldq(a)         (((const struct unaligned_long *) (a))->l)

#if GNUC_PREREQ(3,3)
/* GCC >= 3.3: use the compiler built-ins for Alpha instructions. */
#define prefetch(p)     __builtin_prefetch((p), 0, 1)
#define prefetch_en(p)  __builtin_prefetch((p), 0, 0)
#define prefetch_m(p)   __builtin_prefetch((p), 1, 1)
#define prefetch_men(p) __builtin_prefetch((p), 1, 0)
#define cmpbge          __builtin_alpha_cmpbge
/* Avoid warnings.  */
#define extql(a, b)     __builtin_alpha_extql(a, (uint64_t) (b))
#define extwl(a, b)     __builtin_alpha_extwl(a, (uint64_t) (b))
#define extqh(a, b)     __builtin_alpha_extqh(a, (uint64_t) (b))
#define zap             __builtin_alpha_zap
#define zapnot          __builtin_alpha_zapnot
#define amask           __builtin_alpha_amask
#define implver         __builtin_alpha_implver
#define rpcc            __builtin_alpha_rpcc
#else
/* Older GCC: emit the instructions via inline asm.  Loads into $31/$f31
 * are architectural no-ops used here as prefetch hints. */
#define prefetch(p)     asm volatile("ldl $31,%0"  : : "m"(*(const char *) (p)) : "memory")
#define prefetch_en(p)  asm volatile("ldq $31,%0"  : : "m"(*(const char *) (p)) : "memory")
#define prefetch_m(p)   asm volatile("lds $f31,%0" : : "m"(*(const char *) (p)) : "memory")
#define prefetch_men(p) asm volatile("ldt $f31,%0" : : "m"(*(const char *) (p)) : "memory")
#define cmpbge(a, b) ({ uint64_t __r; asm ("cmpbge  %r1,%2,%0"  : "=r" (__r) : "rJ"  (a), "rI" (b)); __r; })
#define extql(a, b)  ({ uint64_t __r; asm ("extql   %r1,%2,%0"  : "=r" (__r) : "rJ"  (a), "rI" (b)); __r; })
#define extwl(a, b)  ({ uint64_t __r; asm ("extwl   %r1,%2,%0"  : "=r" (__r) : "rJ"  (a), "rI" (b)); __r; })
#define extqh(a, b)  ({ uint64_t __r; asm ("extqh   %r1,%2,%0"  : "=r" (__r) : "rJ"  (a), "rI" (b)); __r; })
#define zap(a, b)    ({ uint64_t __r; asm ("zap     %r1,%2,%0"  : "=r" (__r) : "rJ"  (a), "rI" (b)); __r; })
#define zapnot(a, b) ({ uint64_t __r; asm ("zapnot  %r1,%2,%0"  : "=r" (__r) : "rJ"  (a), "rI" (b)); __r; })
#define amask(a)     ({ uint64_t __r; asm ("amask   %1,%0"      : "=r" (__r) : "rI"  (a));           __r; })
#define implver()    ({ uint64_t __r; asm ("implver %0"         : "=r" (__r));                       __r; })
#define rpcc()       ({ uint64_t __r; asm volatile ("rpcc %0"   : "=r" (__r));                       __r; })
#endif
#define wh64(p) asm volatile("wh64 (%0)" : : "r"(p) : "memory")

#if GNUC_PREREQ(3,3) && defined(__alpha_max__)
/* MVI (MAX) instructions as built-ins when compiled with -mmax. */
#define minub8  __builtin_alpha_minub8
#define minsb8  __builtin_alpha_minsb8
#define minuw4  __builtin_alpha_minuw4
#define minsw4  __builtin_alpha_minsw4
#define maxub8  __builtin_alpha_maxub8
#define maxsb8  __builtin_alpha_maxsb8
#define maxuw4  __builtin_alpha_maxuw4
#define maxsw4  __builtin_alpha_maxsw4
#define perr    __builtin_alpha_perr
#define pklb    __builtin_alpha_pklb
#define pkwb    __builtin_alpha_pkwb
#define unpkbl  __builtin_alpha_unpkbl
#define unpkbw  __builtin_alpha_unpkbw
#else
/* Same MVI instructions via inline asm; ".arch ev6" lets the assembler
 * accept them even when targeting an older baseline CPU. */
#define minub8(a, b) ({ uint64_t __r; asm (".arch ev6; minub8  %r1,%2,%0"  : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define minsb8(a, b) ({ uint64_t __r; asm (".arch ev6; minsb8  %r1,%2,%0"  : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define minuw4(a, b) ({ uint64_t __r; asm (".arch ev6; minuw4  %r1,%2,%0"  : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define minsw4(a, b) ({ uint64_t __r; asm (".arch ev6; minsw4  %r1,%2,%0"  : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define maxub8(a, b) ({ uint64_t __r; asm (".arch ev6; maxub8  %r1,%2,%0"  : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define maxsb8(a, b) ({ uint64_t __r; asm (".arch ev6; maxsb8  %r1,%2,%0"  : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define maxuw4(a, b) ({ uint64_t __r; asm (".arch ev6; maxuw4  %r1,%2,%0"  : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define maxsw4(a, b) ({ uint64_t __r; asm (".arch ev6; maxsw4  %r1,%2,%0"  : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define perr(a, b)   ({ uint64_t __r; asm (".arch ev6; perr    %r1,%r2,%0" : "=r" (__r) : "%rJ" (a), "rJ" (b)); __r; })
#define pklb(a)      ({ uint64_t __r; asm (".arch ev6; pklb    %r1,%0"     : "=r" (__r) : "rJ"  (a));           __r; })
#define pkwb(a)      ({ uint64_t __r; asm (".arch ev6; pkwb    %r1,%0"     : "=r" (__r) : "rJ"  (a));           __r; })
#define unpkbl(a)    ({ uint64_t __r; asm (".arch ev6; unpkbl  %r1,%0"     : "=r" (__r) : "rJ"  (a));           __r; })
#define unpkbw(a)    ({ uint64_t __r; asm (".arch ev6; unpkbw  %r1,%0"     : "=r" (__r) : "rJ"  (a));           __r; })
#endif

#elif defined(__DECC)           /* Digital/Compaq/hp "ccc" compiler */

#include <c_asm.h>
#define ldq(p) (*(const uint64_t *) (p))
#define ldl(p) (*(const int32_t *)  (p))
#define stq(l, p) do { *(uint64_t *) (p) = (l); } while (0)
#define stl(l, p) do { *(int32_t *)  (p) = (l); } while (0)
#define ldq_u(a)     asm ("ldq_u   %v0,0(%a0)", a)
#define uldq(a)      (*(const __unaligned uint64_t *) (a))
#define cmpbge(a, b) asm ("cmpbge  %a0,%a1,%v0", a, b)
#define extql(a, b)  asm ("extql   %a0,%a1,%v0", a, b)
#define extwl(a, b)  asm ("extwl   %a0,%a1,%v0", a, b)
#define extqh(a, b)  asm ("extqh   %a0,%a1,%v0", a, b)
#define zap(a, b)    asm ("zap     %a0,%a1,%v0", a, b)
#define zapnot(a, b) asm ("zapnot  %a0,%a1,%v0", a, b)
#define amask(a)     asm ("amask   %a0,%v0", a)
#define implver()    asm ("implver %v0")
#define rpcc()       asm ("rpcc           %v0")
#define minub8(a, b) asm ("minub8  %a0,%a1,%v0", a, b)
#define minsb8(a, b) asm ("minsb8  %a0,%a1,%v0", a, b)
#define minuw4(a, b) asm ("minuw4  %a0,%a1,%v0", a, b)
#define minsw4(a, b) asm ("minsw4  %a0,%a1,%v0", a, b)
#define maxub8(a, b) asm ("maxub8  %a0,%a1,%v0", a, b)
#define maxsb8(a, b) asm ("maxsb8  %a0,%a1,%v0", a, b)
#define maxuw4(a, b) asm ("maxuw4  %a0,%a1,%v0", a, b)
#define maxsw4(a, b) asm ("maxsw4  %a0,%a1,%v0", a, b)
#define perr(a, b)   asm ("perr    %a0,%a1,%v0", a, b)
#define pklb(a)      asm ("pklb    %a0,%v0", a)
#define pkwb(a)      asm ("pkwb    %a0,%v0", a)
#define unpkbl(a)    asm ("unpkbl  %a0,%v0", a)
#define unpkbw(a)    asm ("unpkbw  %a0,%v0", a)
#define wh64(a)      asm ("wh64    %a0", a)

#else
#error "Unknown compiler!"
#endif

#endif /* FFMPEG_ASM_H */