// tinycc.git: lib/stdatomic.c
// for libtcc1, avoid including files that are not part of tcc
// #include <stdint.h>
#define uint8_t unsigned char
#define uint16_t unsigned short
#define uint32_t unsigned int
#define uint64_t unsigned long long
#define bool _Bool
#define false 0
#define true 1
#define __ATOMIC_RELAXED 0
#define __ATOMIC_CONSUME 1
#define __ATOMIC_ACQUIRE 2
#define __ATOMIC_RELEASE 3
#define __ATOMIC_ACQ_REL 4
#define __ATOMIC_SEQ_CST 5
typedef __SIZE_TYPE__ size_t;
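/* Compare-and-swap: on i386/x86_64 it is implemented right here with a
 * "lock cmpxchg" instruction; on other targets only an extern declaration
 * is emitted and the definition has to come from elsewhere. */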
#if defined __i386__ || defined __x86_64__
#define ATOMIC_COMPARE_EXCHANGE(TYPE, MODE, SUFFIX) \
    bool __atomic_compare_exchange_##MODE \
        (volatile void *atom, void *ref, TYPE xchg, \
         bool weak, int success_memorder, int failure_memorder) \
    { \
        TYPE rv; \
        TYPE cmp = *(TYPE *)ref; \
        __asm__ volatile( \
            "lock cmpxchg" SUFFIX " %2,%1\n" \
            : "=a" (rv), "+m" (*(TYPE *)atom) \
            : "q" (xchg), "0" (cmp) \
            : "memory" \
        ); \
        *(TYPE *)ref = rv; \
        return (rv == cmp); \
    }
#else
#define ATOMIC_COMPARE_EXCHANGE(TYPE, MODE, SUFFIX) \
    extern bool __atomic_compare_exchange_##MODE \
        (volatile void *atom, void *ref, TYPE xchg, \
         bool weak, int success_memorder, int failure_memorder);
#endif
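/* Load/store helpers: plain volatile accesses.  They rely on naturally
 * aligned loads and stores of these sizes being atomic on the supported
 * targets; the memorder argument is accepted for ABI compatibility but
 * ignored (no explicit fence is emitted here). */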
#define ATOMIC_LOAD(TYPE, MODE) \
    TYPE __atomic_load_##MODE(const volatile void *atom, int memorder) \
    { \
        return *(volatile TYPE *)atom; \
    }

#define ATOMIC_STORE(TYPE, MODE) \
    void __atomic_store_##MODE(volatile void *atom, TYPE value, int memorder) \
    { \
        *(volatile TYPE *)atom = value; \
    }
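/* Generic read-modify-write via a CAS loop: load the current value, compute
 * OP from the observed value (cmp) and the operand (value), then try to
 * publish the result with a compare-and-swap.  On failure cmp is refreshed
 * with the value actually seen and the loop retries.  RET selects the return
 * value: cmp (old value) for the fetch_<op> variants, xchg (new value) for
 * the <op>_fetch variants. */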
#define ATOMIC_GEN_OP(TYPE, MODE, NAME, OP, RET) \
    TYPE __atomic_##NAME##_##MODE(volatile void *atom, TYPE value, int memorder) \
    { \
        TYPE xchg, cmp; \
        __atomic_load((TYPE *)atom, (TYPE *)&cmp, __ATOMIC_RELAXED); \
        do { \
            xchg = (OP); \
        } while (!__atomic_compare_exchange((TYPE *)atom, &cmp, &xchg, true, \
                                            __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST)); \
        return RET; \
    }
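/* For illustration (a sketch, not part of the original source): the
 * expansion of ATOMIC_GEN_OP(uint32_t, 4, fetch_add, (cmp + value), cmp)
 * is roughly
 *
 *     uint32_t __atomic_fetch_add_4(volatile void *atom, uint32_t value, int memorder)
 *     {
 *         uint32_t xchg, cmp;
 *         __atomic_load((uint32_t *)atom, (uint32_t *)&cmp, __ATOMIC_RELAXED);
 *         do {
 *             xchg = (cmp + value);
 *         } while (!__atomic_compare_exchange((uint32_t *)atom, &cmp, &xchg, true,
 *                                             __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST));
 *         return cmp;   // fetch_add returns the old value
 *     }
 */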
#define ATOMIC_EXCHANGE(TYPE, MODE) \
    ATOMIC_GEN_OP(TYPE, MODE, exchange, value, cmp)
#define ATOMIC_ADD_FETCH(TYPE, MODE) \
    ATOMIC_GEN_OP(TYPE, MODE, add_fetch, (cmp + value), xchg)
#define ATOMIC_SUB_FETCH(TYPE, MODE) \
    ATOMIC_GEN_OP(TYPE, MODE, sub_fetch, (cmp - value), xchg)
#define ATOMIC_AND_FETCH(TYPE, MODE) \
    ATOMIC_GEN_OP(TYPE, MODE, and_fetch, (cmp & value), xchg)
#define ATOMIC_OR_FETCH(TYPE, MODE) \
    ATOMIC_GEN_OP(TYPE, MODE, or_fetch, (cmp | value), xchg)
#define ATOMIC_XOR_FETCH(TYPE, MODE) \
    ATOMIC_GEN_OP(TYPE, MODE, xor_fetch, (cmp ^ value), xchg)
#define ATOMIC_NAND_FETCH(TYPE, MODE) \
    ATOMIC_GEN_OP(TYPE, MODE, nand_fetch, ~(cmp & value), xchg)
#define ATOMIC_FETCH_ADD(TYPE, MODE) \
    ATOMIC_GEN_OP(TYPE, MODE, fetch_add, (cmp + value), cmp)
#define ATOMIC_FETCH_SUB(TYPE, MODE) \
    ATOMIC_GEN_OP(TYPE, MODE, fetch_sub, (cmp - value), cmp)
#define ATOMIC_FETCH_AND(TYPE, MODE) \
    ATOMIC_GEN_OP(TYPE, MODE, fetch_and, (cmp & value), cmp)
#define ATOMIC_FETCH_OR(TYPE, MODE) \
    ATOMIC_GEN_OP(TYPE, MODE, fetch_or, (cmp | value), cmp)
#define ATOMIC_FETCH_XOR(TYPE, MODE) \
    ATOMIC_GEN_OP(TYPE, MODE, fetch_xor, (cmp ^ value), cmp)
#define ATOMIC_FETCH_NAND(TYPE, MODE) \
    ATOMIC_GEN_OP(TYPE, MODE, fetch_nand, ~(cmp & value), cmp)
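/* ATOMIC_GEN stamps out the complete operation set for one operand size:
 * store, load, compare-exchange and all read-modify-write variants.  MODE
 * becomes the numeric size suffix in each function name, SUFFIX the x86
 * operand-size letter used by the cmpxchg template above. */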
#define ATOMIC_GEN(TYPE, SIZE, SUFFIX) \
    ATOMIC_STORE(TYPE, SIZE) \
    ATOMIC_LOAD(TYPE, SIZE) \
    ATOMIC_COMPARE_EXCHANGE(TYPE, SIZE, SUFFIX) \
    ATOMIC_EXCHANGE(TYPE, SIZE) \
    ATOMIC_ADD_FETCH(TYPE, SIZE) \
    ATOMIC_SUB_FETCH(TYPE, SIZE) \
    ATOMIC_AND_FETCH(TYPE, SIZE) \
    ATOMIC_OR_FETCH(TYPE, SIZE) \
    ATOMIC_XOR_FETCH(TYPE, SIZE) \
    ATOMIC_NAND_FETCH(TYPE, SIZE) \
    ATOMIC_FETCH_ADD(TYPE, SIZE) \
    ATOMIC_FETCH_SUB(TYPE, SIZE) \
    ATOMIC_FETCH_AND(TYPE, SIZE) \
    ATOMIC_FETCH_OR(TYPE, SIZE) \
    ATOMIC_FETCH_XOR(TYPE, SIZE) \
    ATOMIC_FETCH_NAND(TYPE, SIZE)
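/* Instantiate the family for each supported width.  The _1/_2/_4/_8 names
 * follow the usual libatomic naming scheme, i.e. the libcalls a compiler
 * falls back to when it does not inline an __atomic builtin.  The 64-bit
 * set is only built where an 8-byte compare-and-swap is available. */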
ATOMIC_GEN(uint8_t, 1, "b")
ATOMIC_GEN(uint16_t, 2, "w")
ATOMIC_GEN(uint32_t, 4, "l")
#if defined __x86_64__ || defined __aarch64__ || defined __riscv
ATOMIC_GEN(uint64_t, 8, "q")
#endif
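/* Fences.  A signal fence only has to order the current thread against a
 * signal handler on the same thread, so a call to this empty, non-inlined
 * function already acts as the required compiler barrier.  A thread fence
 * needs a real full memory barrier, emitted per architecture below; raw
 * ".int" encodings are used where the assembler mnemonic may not be
 * available. */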
void __atomic_signal_fence (int memorder)
{
}

void __atomic_thread_fence (int memorder)
{
#if defined __i386__
    __asm__ volatile("lock orl $0, (%esp)");
#elif defined __x86_64__
    __asm__ volatile("lock orq $0, (%rsp)");
#elif defined __arm__
    __asm__ volatile(".int 0xee070fba"); // mcr p15, 0, r0, c7, c10, 5
#elif defined __aarch64__
    __asm__ volatile(".int 0xd5033bbf"); // dmb ish
#elif defined __riscv
    __asm__ volatile(".int 0x0ff0000f"); // fence iorw,iorw
#endif
}
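/* Report whether objects of the given size are always lock-free: 1-, 2- and
 * 4-byte objects always are here, 8-byte objects only where the 64-bit
 * family above is compiled in.  The ptr argument (a potential alignment
 * hint) is ignored. */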
bool __atomic_is_lock_free(size_t size, void *ptr)
{
    bool ret;

    switch (size) {
    case 1: ret = true; break;
    case 2: ret = true; break;
    case 4: ret = true; break;
#if defined __x86_64__ || defined __aarch64__ || defined __riscv
    case 8: ret = true; break;
#else
    case 8: ret = false; break;
#endif
    default: ret = false; break;
    }
    return ret;
}
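/* Usage sketch (an assumption about how these entry points are reached, not
 * part of the original source): a compiler that lowers C11 atomics to
 * size-suffixed libcalls would turn
 *
 *     _Atomic unsigned int counter;
 *     atomic_fetch_add(&counter, 1);
 *
 * into something like
 *
 *     __atomic_fetch_add_4(&counter, 1, __ATOMIC_SEQ_CST);
 *
 * which is the function produced by ATOMIC_GEN(uint32_t, 4, "l") above. */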