1 #ifndef _ASM_IA64_INTEL_INTRIN_H
2 #define _ASM_IA64_INTEL_INTRIN_H
/*
 * Intel Compiler Intrinsics
 *
 * Copyright (C) 2002,2003 Jun Nakajima <jun.nakajima@intel.com>
 * Copyright (C) 2002,2003 Suresh Siddha <suresh.b.siddha@intel.com>
 */
#include <asm/types.h>

/*
 * Prefetch intrinsics.  lfhint selects the temporal-locality hint and
 * should be one of the ia64_lfhint_* constants defined at the bottom of
 * this file.  The _excl forms prefetch for exclusive access, the _fault
 * forms are the faulting variants.
 */
void __lfetch(int lfhint, void *y);
void __lfetch_excl(int lfhint, void *y);
void __lfetch_fault(int lfhint, void *y);
void __lfetch_fault_excl(int lfhint, void *y);
/* In the following, whichFloatReg should be an integer from 0-127
 * naming the floating-point register to load into / store from. */
void __ldfs(const int whichFloatReg, void *src);
void __ldfd(const int whichFloatReg, void *src);
void __ldfe(const int whichFloatReg, void *src);
void __ldf8(const int whichFloatReg, void *src);
void __ldf_fill(const int whichFloatReg, void *src);

void __stfs(void *dst, const int whichFloatReg);
void __stfd(void *dst, const int whichFloatReg);
void __stfe(void *dst, const int whichFloatReg);
void __stf8(void *dst, const int whichFloatReg);
void __stf_spill(void *dst, const int whichFloatReg);
29 void __st1_rel(void *dst
, const __s8 value
);
30 void __st2_rel(void *dst
, const __s16 value
);
31 void __st4_rel(void *dst
, const __s32 value
);
32 void __st8_rel(void *dst
, const __s64 value
);
33 __u8
__ld1_acq(void *src
);
34 __u16
__ld2_acq(void *src
);
35 __u32
__ld4_acq(void *src
);
36 __u64
__ld8_acq(void *src
);
38 __u64
__fetchadd4_acq(__u32
*addend
, const int increment
);
39 __u64
__fetchadd4_rel(__u32
*addend
, const int increment
);
40 __u64
__fetchadd8_acq(__u64
*addend
, const int increment
);
41 __u64
__fetchadd8_rel(__u64
*addend
, const int increment
);
43 __u64
__getf_exp(double d
);
45 /* OS Related Itanium(R) Intrinsics */
47 /* The names to use for whichReg and whichIndReg below come from
48 the include file asm/ia64regs.h */
50 __u64
__getIndReg(const int whichIndReg
, __s64 index
);
51 __u64
__getReg(const int whichReg
);
53 void __setIndReg(const int whichIndReg
, __s64 index
, __u64 value
);
54 void __setReg(const int whichReg
, __u64 value
);
59 void __itcd(__s64 pa
);
60 void __itci(__s64 pa
);
61 void __itrd(__s64 whichTransReg
, __s64 pa
);
62 void __itri(__s64 whichTransReg
, __s64 pa
);
63 void __ptce(__s64 va
);
64 void __ptcl(__s64 va
, __s64 pagesz
);
65 void __ptcg(__s64 va
, __s64 pagesz
);
66 void __ptcga(__s64 va
, __s64 pagesz
);
67 void __ptri(__s64 va
, __s64 pagesz
);
68 void __ptrd(__s64 va
, __s64 pagesz
);
70 void __invala_gr(const int whichGeneralReg
/* 0-127 */ );
71 void __invala_fr(const int whichFloatReg
/* 0-127 */ );
72 void __nop(const int);
73 void __fc(__u64
*addr
);
82 /* Intrinsics for implementing get/put_user macros */
83 void __st_user(const char *tableName
, __u64 addr
, char size
, char relocType
, __u64 val
);
84 void __ld_user(const char *tableName
, __u64 addr
, char size
, char relocType
);
/* This intrinsic does not generate code, it creates a barrier across which
 * the compiler will not schedule data access instructions.
 */
void __memory_barrier(void);
94 __u64
_m64_mux1(__u64 a
, const int n
);
97 /* Lock and Atomic Operation Related Intrinsics */
98 __u64
_InterlockedExchange8(volatile __u8
*trgt
, __u8 value
);
99 __u64
_InterlockedExchange16(volatile __u16
*trgt
, __u16 value
);
100 __s64
_InterlockedExchange(volatile __u32
*trgt
, __u32 value
);
101 __s64
_InterlockedExchange64(volatile __u64
*trgt
, __u64 value
);
103 __u64
_InterlockedCompareExchange8_rel(volatile __u8
*dest
, __u64 xchg
, __u64 comp
);
104 __u64
_InterlockedCompareExchange8_acq(volatile __u8
*dest
, __u64 xchg
, __u64 comp
);
105 __u64
_InterlockedCompareExchange16_rel(volatile __u16
*dest
, __u64 xchg
, __u64 comp
);
106 __u64
_InterlockedCompareExchange16_acq(volatile __u16
*dest
, __u64 xchg
, __u64 comp
);
107 __u64
_InterlockedCompareExchange_rel(volatile __u32
*dest
, __u64 xchg
, __u64 comp
);
108 __u64
_InterlockedCompareExchange_acq(volatile __u32
*dest
, __u64 xchg
, __u64 comp
);
109 __u64
_InterlockedCompareExchange64_rel(volatile __u64
*dest
, __u64 xchg
, __u64 comp
);
110 __u64
_InterlockedCompareExchange64_acq(volatile __u64
*dest
, __u64 xchg
, __u64 comp
);
112 __s64
_m64_dep_mi(const int v
, __s64 s
, const int p
, const int len
);
113 __s64
_m64_shrp(__s64 a
, __s64 b
, const int count
);
114 __s64
_m64_popcnt(__s64 a
);
#define ia64_barrier()		__memory_barrier()

#define ia64_stop()	/* Nothing: As of now stop bit is generated for each intrinsic */

#define ia64_getreg		__getReg
#define ia64_setreg		__setReg

/* Permutation selectors for the second argument of ia64_mux1. */
#define ia64_mux1_brcst		0
#define ia64_mux1_mix		8
#define ia64_mux1_shuf		9
#define ia64_mux1_alt		10
#define ia64_mux1_rev		11

#define ia64_mux1		_m64_mux1
#define ia64_popcnt		_m64_popcnt
#define ia64_getf_exp		__getf_exp
#define ia64_shrp		_m64_shrp
/* Map the generic ia64_* names used by the kernel onto the Intel
 * compiler intrinsic names. */
#define ia64_tpa		__tpa
#define ia64_invala		__invala
#define ia64_invala_gr		__invala_gr
#define ia64_invala_fr		__invala_fr
#define ia64_nop		__nop
#define ia64_sum		__sum
#define ia64_ssm		__ssm
#define ia64_rum		__rum
#define ia64_rsm		__rsm

#define ia64_ldfs		__ldfs
#define ia64_ldfd		__ldfd
#define ia64_ldfe		__ldfe
#define ia64_ldf8		__ldf8
#define ia64_ldf_fill		__ldf_fill

#define ia64_stfs		__stfs
#define ia64_stfd		__stfd
#define ia64_stfe		__stfe
#define ia64_stf8		__stf8
#define ia64_stf_spill		__stf_spill

#define ia64_mfa		__mfa

#define ia64_fetchadd4_acq	__fetchadd4_acq
#define ia64_fetchadd4_rel	__fetchadd4_rel
#define ia64_fetchadd8_acq	__fetchadd8_acq
#define ia64_fetchadd8_rel	__fetchadd8_rel

#define ia64_xchg1		_InterlockedExchange8
#define ia64_xchg2		_InterlockedExchange16
#define ia64_xchg4		_InterlockedExchange
#define ia64_xchg8		_InterlockedExchange64

#define ia64_cmpxchg1_rel	_InterlockedCompareExchange8_rel
#define ia64_cmpxchg1_acq	_InterlockedCompareExchange8_acq
#define ia64_cmpxchg2_rel	_InterlockedCompareExchange16_rel
#define ia64_cmpxchg2_acq	_InterlockedCompareExchange16_acq
#define ia64_cmpxchg4_rel	_InterlockedCompareExchange_rel
#define ia64_cmpxchg4_acq	_InterlockedCompareExchange_acq
#define ia64_cmpxchg8_rel	_InterlockedCompareExchange64_rel
#define ia64_cmpxchg8_acq	_InterlockedCompareExchange64_acq
/* Indirect register-file accessors; the _IA64_REG_INDR_* selectors come
 * from asm/ia64regs.h (see comment near __getIndReg above). */
#define __ia64_set_dbr(index, val)	\
		__setIndReg(_IA64_REG_INDR_DBR, index, val)
#define ia64_set_ibr(index, val)	\
		__setIndReg(_IA64_REG_INDR_IBR, index, val)
#define ia64_set_pkr(index, val)	\
		__setIndReg(_IA64_REG_INDR_PKR, index, val)
#define ia64_set_pmc(index, val)	\
		__setIndReg(_IA64_REG_INDR_PMC, index, val)
#define ia64_set_pmd(index, val)	\
		__setIndReg(_IA64_REG_INDR_PMD, index, val)
#define ia64_set_rr(index, val)		\
		__setIndReg(_IA64_REG_INDR_RR, index, val)

#define ia64_get_cpuid(index)	__getIndReg(_IA64_REG_INDR_CPUID, index)
#define __ia64_get_dbr(index)	__getIndReg(_IA64_REG_INDR_DBR, index)
#define ia64_get_ibr(index)	__getIndReg(_IA64_REG_INDR_IBR, index)
#define ia64_get_pkr(index)	__getIndReg(_IA64_REG_INDR_PKR, index)
#define ia64_get_pmc(index)	__getIndReg(_IA64_REG_INDR_PMC, index)
#define ia64_get_pmd(index)	__getIndReg(_IA64_REG_INDR_PMD, index)
#define ia64_get_rr(index)	__getIndReg(_IA64_REG_INDR_RR, index)
#define ia64_srlz_d		__dsrlz
#define ia64_srlz_i		__isrlz

/* These expand to nothing for this compiler. */
#define ia64_dv_serialize_data()
#define ia64_dv_serialize_instruction()

#define ia64_st1_rel		__st1_rel
#define ia64_st2_rel		__st2_rel
#define ia64_st4_rel		__st4_rel
#define ia64_st8_rel		__st8_rel

#define ia64_ld1_acq		__ld1_acq
#define ia64_ld2_acq		__ld2_acq
#define ia64_ld4_acq		__ld4_acq
#define ia64_ld8_acq		__ld8_acq

#define ia64_sync_i		__synci
#define ia64_thash		__thash
#define ia64_ttag		__ttag
#define ia64_itcd		__itcd
#define ia64_itci		__itci
#define ia64_itrd		__itrd
#define ia64_itri		__itri
#define ia64_ptce		__ptce
#define ia64_ptcl		__ptcl
#define ia64_ptcg		__ptcg
#define ia64_ptcga		__ptcga
#define ia64_ptri		__ptri
#define ia64_ptrd		__ptrd
#define ia64_dep_mi		_m64_dep_mi

/* Values for lfhint in __lfetch and __lfetch_fault */
#define ia64_lfhint_none	0
#define ia64_lfhint_nt1		1
#define ia64_lfhint_nt2		2
#define ia64_lfhint_nta		3

#define ia64_lfetch		__lfetch
#define ia64_lfetch_excl	__lfetch_excl
#define ia64_lfetch_fault	__lfetch_fault
#define ia64_lfetch_fault_excl	__lfetch_fault_excl
247 #define ia64_intrin_local_irq_restore(x) \
250 ia64_ssm(IA64_PSR_I); \
253 ia64_rsm(IA64_PSR_I); \
257 #endif /* _ASM_IA64_INTEL_INTRIN_H */