/*
 * Alpha optimized DSP utils
 * Copyright (c) 2002 Falk Hueffner <falk@debian.org>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#ifndef AVCODEC_ALPHA_ASM_H
#define AVCODEC_ALPHA_ASM_H

#include <inttypes.h>

/* True iff the compiler is GNU C >= maj.min; always 0 elsewhere. */
#if defined __GNUC__
# define GNUC_PREREQ(maj, min) \
        ((__GNUC__ << 16) + __GNUC_MINOR__ >= ((maj) << 16) + (min))
#else
# define GNUC_PREREQ(maj, min) 0
#endif

/* Branch prediction hints (__builtin_expect exists since gcc 2.96);
 * plain pass-through on older/other compilers. */
#if GNUC_PREREQ(2,96)
# define likely(x) __builtin_expect((x) != 0, 1)
# define unlikely(x) __builtin_expect((x) != 0, 0)
#else
# define likely(x) (x)
# define unlikely(x) (x)
#endif

/* Feature bits for the Alpha "amask" instruction (byte/word ext.,
 * FP extensions, count ext., motion video instructions).
 * NOTE(review): amask reports implemented extensions as CLEARED
 * bits in its result -- confirm against the Alpha Architecture
 * Handbook before relying on polarity. */
#define AMASK_BWX (1 << 0)
#define AMASK_FIX (1 << 1)
#define AMASK_CIX (1 << 2)
#define AMASK_MVI (1 << 8)
/**
 * Replicate the low byte of x into all eight byte lanes of a
 * 64-bit word, e.g. 0xAB -> 0xABABABABABABABAB.
 *
 * The doubling shifts assume the upper 56 bits of x are zero on
 * entry; callers pass a single byte value.
 *
 * (Function braces were lost in extraction; restored here.)
 */
static inline uint64_t BYTE_VEC(uint64_t x)
{
    x |= x <<  8;
    x |= x << 16;
    x |= x << 32;
    return x;
}
/**
 * Replicate the low 16-bit word of x into all four word lanes of a
 * 64-bit value, e.g. 0x1234 -> 0x1234123412341234.
 *
 * Assumes the upper 48 bits of x are zero on entry.
 *
 * (Function braces were lost in extraction; restored here.)
 */
static inline uint64_t WORD_VEC(uint64_t x)
{
    x |= x << 16;
    x |= x << 32;
    return x;
}
/* Sign-extend the low 16 bits of x (mirrors the Alpha sextw insn). */
#define sextw(x) ((int16_t) (x))

#ifdef __GNUC__
/* Aligned 64-/32-bit loads (ldq/ldl) and stores (stq/stl).
 * The access goes through a union holding both the wide integer and
 * an array of the pointee's real type, which makes the type pun
 * legal under GCC's aliasing rules instead of a plain cast. */
#define ldq(p)                                                          \
    (((union {                                                          \
        uint64_t __l;                                                   \
        __typeof__(*(p)) __s[sizeof (uint64_t) / sizeof *(p)];          \
    } *) (p))->__l)
#define ldl(p)                                                          \
    (((union {                                                          \
        int32_t __l;                                                    \
        __typeof__(*(p)) __s[sizeof (int32_t) / sizeof *(p)];           \
    } *) (p))->__l)
#define stq(l, p)                                                       \
    do {                                                                \
        (((union {                                                      \
            uint64_t __l;                                               \
            __typeof__(*(p)) __s[sizeof (uint64_t) / sizeof *(p)];      \
        } *) (p))->__l) = l;                                            \
    } while (0)
#define stl(l, p)                                                       \
    do {                                                                \
        (((union {                                                      \
            int32_t __l;                                                \
            __typeof__(*(p)) __s[sizeof (int32_t) / sizeof *(p)];       \
        } *) (p))->__l) = l;                                            \
    } while (0)
/* Unaligned helpers: ldq_u mimics the Alpha ldq_u insn by loading the
 * aligned quadword containing p (address rounded down to 8 bytes);
 * uldq reads a full unaligned 64-bit value via a packed struct, which
 * tells GCC it may not assume natural alignment. */
struct unaligned_long { uint64_t l; } __attribute__((packed));
#define ldq_u(p)        (*(const uint64_t *) (((uint64_t) (p)) & ~7ul))
#define uldq(a)         (((const struct unaligned_long *) (a))->l)
#if GNUC_PREREQ(3,3)
/* gcc >= 3.3: express the instructions via builtins so the compiler
 * can schedule and optimize around them.
 * __builtin_prefetch(addr, rw, locality): rw 0 = read / 1 = write,
 * locality 0 = no temporal reuse expected .. 3 = high reuse. */
#define prefetch(p)     __builtin_prefetch((p), 0, 1)
#define prefetch_en(p)  __builtin_prefetch((p), 0, 0)
#define prefetch_m(p)   __builtin_prefetch((p), 1, 1)
#define prefetch_men(p) __builtin_prefetch((p), 1, 0)
#define cmpbge __builtin_alpha_cmpbge
/* Avoid warnings: the builtins take an integer shift operand, so cast
 * in case a pointer is passed as b. */
#define extql(a, b) __builtin_alpha_extql(a, (uint64_t) (b))
#define extwl(a, b) __builtin_alpha_extwl(a, (uint64_t) (b))
#define extqh(a, b) __builtin_alpha_extqh(a, (uint64_t) (b))
#define zap    __builtin_alpha_zap
#define zapnot __builtin_alpha_zapnot
#define amask  __builtin_alpha_amask
#define implver __builtin_alpha_implver
#define rpcc    __builtin_alpha_rpcc
#else
/* Older gcc: emit the instructions directly.  Prefetches are loads to
 * the zero registers ($31 / $f31), which architecturally discard the
 * result.  Constraint "rJ" additionally allows the zero register,
 * "rI" an Alpha 8-bit literal.  rpcc is volatile because the cycle
 * counter changes between calls. */
#define prefetch(p)     __asm__ volatile("ldl $31,%0"  : : "m"(*(const char *) (p)) : "memory")
#define prefetch_en(p)  __asm__ volatile("ldq $31,%0"  : : "m"(*(const char *) (p)) : "memory")
#define prefetch_m(p)   __asm__ volatile("lds $f31,%0" : : "m"(*(const char *) (p)) : "memory")
#define prefetch_men(p) __asm__ volatile("ldt $f31,%0" : : "m"(*(const char *) (p)) : "memory")
#define cmpbge(a, b) ({ uint64_t __r; __asm__ ("cmpbge %r1,%2,%0" : "=r" (__r) : "rJ" (a), "rI" (b)); __r; })
#define extql(a, b)  ({ uint64_t __r; __asm__ ("extql  %r1,%2,%0" : "=r" (__r) : "rJ" (a), "rI" (b)); __r; })
#define extwl(a, b)  ({ uint64_t __r; __asm__ ("extwl  %r1,%2,%0" : "=r" (__r) : "rJ" (a), "rI" (b)); __r; })
#define extqh(a, b)  ({ uint64_t __r; __asm__ ("extqh  %r1,%2,%0" : "=r" (__r) : "rJ" (a), "rI" (b)); __r; })
#define zap(a, b)    ({ uint64_t __r; __asm__ ("zap    %r1,%2,%0" : "=r" (__r) : "rJ" (a), "rI" (b)); __r; })
#define zapnot(a, b) ({ uint64_t __r; __asm__ ("zapnot %r1,%2,%0" : "=r" (__r) : "rJ" (a), "rI" (b)); __r; })
#define amask(a)     ({ uint64_t __r; __asm__ ("amask  %1,%0"     : "=r" (__r) : "rI" (a)); __r; })
#define implver()    ({ uint64_t __r; __asm__ ("implver %0"       : "=r" (__r)); __r; })
#define rpcc()       ({ uint64_t __r; __asm__ volatile ("rpcc %0" : "=r" (__r)); __r; })
#endif
/* Write hint: the 64-byte block containing p is about to be fully
 * overwritten, so its current contents need not be fetched. */
#define wh64(p) __asm__ volatile("wh64 (%0)" : : "r"(p) : "memory")
/* MVI (motion video) instructions: byte/word min/max, pixel error,
 * and pack/unpack.  Builtins are only available when gcc >= 3.3 is
 * targeting a MAX-capable CPU (__alpha_max__); otherwise emit the
 * instructions via inline asm, with ".arch ev6" so the assembler
 * accepts them regardless of its default target.  The "%" in "%rJ"
 * marks the operand pair as commutative. */
#if GNUC_PREREQ(3,3) && defined(__alpha_max__)
#define minub8  __builtin_alpha_minub8
#define minsb8  __builtin_alpha_minsb8
#define minuw4  __builtin_alpha_minuw4
#define minsw4  __builtin_alpha_minsw4
#define maxub8  __builtin_alpha_maxub8
#define maxsb8  __builtin_alpha_maxsb8
#define maxuw4  __builtin_alpha_maxuw4
#define maxsw4  __builtin_alpha_maxsw4
#define perr    __builtin_alpha_perr
#define pklb    __builtin_alpha_pklb
#define pkwb    __builtin_alpha_pkwb
#define unpkbl  __builtin_alpha_unpkbl
#define unpkbw  __builtin_alpha_unpkbw
#else
#define minub8(a, b) ({ uint64_t __r; __asm__ (".arch ev6; minub8  %r1,%2,%0"  : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define minsb8(a, b) ({ uint64_t __r; __asm__ (".arch ev6; minsb8  %r1,%2,%0"  : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define minuw4(a, b) ({ uint64_t __r; __asm__ (".arch ev6; minuw4  %r1,%2,%0"  : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define minsw4(a, b) ({ uint64_t __r; __asm__ (".arch ev6; minsw4  %r1,%2,%0"  : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define maxub8(a, b) ({ uint64_t __r; __asm__ (".arch ev6; maxub8  %r1,%2,%0"  : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define maxsb8(a, b) ({ uint64_t __r; __asm__ (".arch ev6; maxsb8  %r1,%2,%0"  : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define maxuw4(a, b) ({ uint64_t __r; __asm__ (".arch ev6; maxuw4  %r1,%2,%0"  : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define maxsw4(a, b) ({ uint64_t __r; __asm__ (".arch ev6; maxsw4  %r1,%2,%0"  : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define perr(a, b)   ({ uint64_t __r; __asm__ (".arch ev6; perr    %r1,%r2,%0" : "=r" (__r) : "%rJ" (a), "rJ" (b)); __r; })
#define pklb(a)      ({ uint64_t __r; __asm__ (".arch ev6; pklb    %r1,%0"     : "=r" (__r) : "rJ" (a)); __r; })
#define pkwb(a)      ({ uint64_t __r; __asm__ (".arch ev6; pkwb    %r1,%0"     : "=r" (__r) : "rJ" (a)); __r; })
#define unpkbl(a)    ({ uint64_t __r; __asm__ (".arch ev6; unpkbl  %r1,%0"     : "=r" (__r) : "rJ" (a)); __r; })
#define unpkbw(a)    ({ uint64_t __r; __asm__ (".arch ev6; unpkbw  %r1,%0"     : "=r" (__r) : "rJ" (a)); __r; })
#endif
#elif defined(__DECC)           /* Digital/Compaq/hp "ccc" compiler */

#include <c_asm.h>
/* ccc: plain dereferences for aligned access, the __unaligned
 * qualifier for unaligned loads, and its own asm() syntax (%aN are
 * the arguments, %v0 the return value) for the instruction
 * wrappers. */
#define ldq(p)     (*(const uint64_t *) (p))
#define ldl(p)     (*(const int32_t *)  (p))
#define stq(l, p)  do { *(uint64_t *) (p) = (l); } while (0)
#define stl(l, p)  do { *(int32_t *)  (p) = (l); } while (0)
#define ldq_u(a)     asm ("ldq_u   %v0,0(%a0)", a)
#define uldq(a)    (*(const __unaligned uint64_t *) (a))
#define cmpbge(a, b) asm ("cmpbge  %a0,%a1,%v0", a, b)
#define extql(a, b)  asm ("extql   %a0,%a1,%v0", a, b)
#define extwl(a, b)  asm ("extwl   %a0,%a1,%v0", a, b)
#define extqh(a, b)  asm ("extqh   %a0,%a1,%v0", a, b)
#define zap(a, b)    asm ("zap     %a0,%a1,%v0", a, b)
#define zapnot(a, b) asm ("zapnot  %a0,%a1,%v0", a, b)
#define amask(a)     asm ("amask   %a0,%v0", a)
#define implver()    asm ("implver %v0")
#define rpcc()       asm ("rpcc    %v0")
#define minub8(a, b) asm ("minub8  %a0,%a1,%v0", a, b)
#define minsb8(a, b) asm ("minsb8  %a0,%a1,%v0", a, b)
#define minuw4(a, b) asm ("minuw4  %a0,%a1,%v0", a, b)
#define minsw4(a, b) asm ("minsw4  %a0,%a1,%v0", a, b)
#define maxub8(a, b) asm ("maxub8  %a0,%a1,%v0", a, b)
#define maxsb8(a, b) asm ("maxsb8  %a0,%a1,%v0", a, b)
#define maxuw4(a, b) asm ("maxuw4  %a0,%a1,%v0", a, b)
#define maxsw4(a, b) asm ("maxsw4  %a0,%a1,%v0", a, b)
#define perr(a, b)   asm ("perr    %a0,%a1,%v0", a, b)
#define pklb(a)      asm ("pklb    %a0,%v0", a)
#define pkwb(a)      asm ("pkwb    %a0,%v0", a)
#define unpkbl(a)    asm ("unpkbl  %a0,%v0", a)
#define unpkbw(a)    asm ("unpkbw  %a0,%v0", a)
#define wh64(a)      asm ("wh64    %a0", a)

#else
#error "Unknown compiler!"
#endif

#endif /* AVCODEC_ALPHA_ASM_H */