/* sysdeps/unix/sysv/linux/x86/hle.h */
/* Shared RTM header.  Emulate TSX intrinsics for compilers and assemblers
   that do not support the intrinsics and instructions yet.  */
#ifndef _HLE_H
#define _HLE_H 1

#ifdef __ASSEMBLER__
.macro XBEGIN target
	/* Hand-encoded "xbegin" with a rel32 displacement to TARGET,
	   measured from the end of the instruction (label 1 below).  */
	.byte 0xc7,0xf8
	.long \target-1f
1:
.endm
.macro XEND
	.byte 0x0f,0x01,0xd5
.endm

.macro XABORT code
	.byte 0xc6,0xf8,\code
.endm

.macro XTEST
	.byte 0x0f,0x01,0xd6
.endm
#endif
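/* Illustrative only (not part of the original header): the assembler
   macros above would typically bracket a transactional region, with the
   XBEGIN target naming the fallback path taken on abort.  The label
   .Lfallback and the surrounding code are assumptions for this sketch:

	XBEGIN	.Lfallback	# start transaction; control lands here on abort
	...			# speculative (transactional) work
	XEND			# commit
	...
   .Lfallback:
	...			# take the lock the conventional way
 */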
/* Official RTM intrinsics interface matching gcc/icc, but works
   on older gcc-compatible compilers and binutils.
   We should somehow detect if the compiler supports it, because
   it may be able to generate slightly better code.  */
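/* For reference (an assumption about the toolchain, not part of this
   header): a TSX-aware compiler such as GCC >= 4.8 built with -mrtm
   provides equivalent intrinsics directly, roughly:

     #include <immintrin.h>
     unsigned int status = _xbegin ();
     if (status == _XBEGIN_STARTED) { ... _xend (); }

   The hand-encoded versions below avoid that toolchain requirement.  */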
#define _XBEGIN_STARTED		(~0u)
#define _XABORT_EXPLICIT	(1 << 0)
#define _XABORT_RETRY		(1 << 1)
#define _XABORT_CONFLICT	(1 << 2)
#define _XABORT_CAPACITY	(1 << 3)
#define _XABORT_DEBUG		(1 << 4)
#define _XABORT_NESTED		(1 << 5)
#define _XABORT_CODE(x)		(((x) >> 24) & 0xff)

#define _ABORT_LOCK_BUSY	0xff
#define _ABORT_LOCK_IS_LOCKED	0xfe
#define _ABORT_NESTED_TRYLOCK	0xfd
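/* Example (illustrative, not from the original header): a transaction
   aborted with _xabort (_ABORT_LOCK_BUSY) resumes at its _xbegin with a
   status word in which _XABORT_EXPLICIT is set and
   _XABORT_CODE (status) == _ABORT_LOCK_BUSY, letting the fallback path
   distinguish "lock was busy" from hardware-initiated aborts such as
   _XABORT_CONFLICT or _XABORT_CAPACITY.  */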
#ifndef __ASSEMBLER__

#define __force_inline __attribute__((__always_inline__)) inline
static __force_inline int _xbegin(void)
{
  int ret = _XBEGIN_STARTED;
  /* Hand-encoded "xbegin" with a zero displacement: on abort, execution
     resumes right after the instruction with the abort status in EAX.  */
  asm volatile (".byte 0xc7,0xf8 ; .long 0" : "+a" (ret) :: "memory");
  return ret;
}
static __force_inline void _xend(void)
{
  asm volatile (".byte 0x0f,0x01,0xd5" ::: "memory");
}
static __force_inline void _xabort(const unsigned int status)
{
  asm volatile (".byte 0xc6,0xf8,%P0" :: "i" (status) : "memory");
}
static __force_inline int _xtest(void)
{
  unsigned char out;
  asm volatile (".byte 0x0f,0x01,0xd6 ; setnz %0" : "=r" (out) :: "memory");
  return out;
}
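/* Illustrative sketch, not part of the original header: a minimal
   lock-elision attempt built on the intrinsics above.  The function
   name, the "futex" lock word, and the convention that 0 means
   "unlocked" are assumptions made for this example.  */
static __force_inline int __hle_example_try_elide (int *futex)
{
  if (_xbegin () == _XBEGIN_STARTED)
    {
      /* Transaction started.  Proceed only if the lock looks free;
	 otherwise abort explicitly so the caller falls back to the
	 real lock.  */
      if (*futex == 0)
	return 1;
      _xabort (_ABORT_LOCK_BUSY);
    }
  /* The transaction aborted or could not start.  */
  return 0;
}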
#endif
#endif