// { dg-options "-O2 -fnon-call-exceptions" }
struct A { A (); ~A (); };
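// Weak compare-exchange with relaxed ordering: if *d equals f, store 1 into
// *d; e records whether the exchange succeeded.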
int e = __atomic_compare_exchange_n (d, &f, 1, 1, __ATOMIC_RELAXED, __ATOMIC_RELAXED);
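// Bit test-and-set: OR the mask into *a and test that bit in the fetched
// old value.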
unsigned int mask = (1u << bit);
return (__sync_fetch_and_or (a, mask) & mask) != 0;
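// Same test-and-set pattern, split across temporaries and using a relaxed
// __atomic_fetch_or instead of the __sync builtin.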
unsigned int mask = (1u << bit);
unsigned int t1 = __atomic_fetch_or (a, mask, __ATOMIC_RELAXED);
unsigned int t2 = t1 & mask;
f3 (long int *a, int bit)
unsigned long int mask = (1ul << bit);
return (__atomic_fetch_or (a, mask, __ATOMIC_SEQ_CST) & mask) == 0;
unsigned int mask = (1u << 7);
return (__sync_fetch_and_or (a, mask) & mask) != 0;
unsigned int mask = (1u << 13);
return (__atomic_fetch_or (a, mask, __ATOMIC_RELAXED) & mask) != 0;
unsigned int mask = (1u << 0);
return (__atomic_fetch_or (a, mask, __ATOMIC_SEQ_CST) & mask) != 0;
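// Bit test-and-complement: XOR the mask into *a and test that bit in the
// fetched old value.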
unsigned int mask = (1u << bit);
if ((__sync_fetch_and_xor (a, mask) & mask) != 0)
unsigned int mask = (1u << bit);
if ((__atomic_fetch_xor (a, mask, __ATOMIC_RELAXED) & mask) == 0)
unsigned int mask = (1u << bit);
return (__atomic_fetch_xor (a, mask, __ATOMIC_SEQ_CST) & mask) != 0;
unsigned int mask = (1u << 7);
return (__sync_fetch_and_xor (a, mask) & mask) != 0;
unsigned int mask = (1u << 13);
return (__atomic_fetch_xor (a, mask, __ATOMIC_RELAXED) & mask) != 0;
unsigned int mask = (1u << 0);
return (__atomic_fetch_xor (a, mask, __ATOMIC_SEQ_CST) & mask) != 0;
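// Bit test-and-reset: AND the complemented mask into *a and test that bit in
// the fetched old value.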
f13 (int *a, int bit)
unsigned int mask = (1u << bit);
return (__sync_fetch_and_and (a, ~mask) & mask) != 0;
f14 (int *a, int bit)
unsigned int mask = (1u << bit);
return (__atomic_fetch_and (a, ~mask, __ATOMIC_RELAXED) & mask) != 0;
f15 (int *a, int bit)
unsigned int mask = (1u << bit);
return (__atomic_fetch_and (a, ~mask, __ATOMIC_SEQ_CST) & mask) != 0;
unsigned int mask = (1u << 7);
return (__sync_fetch_and_and (a, ~mask) & mask) != 0;
unsigned int mask = (1u << 13);
return (__atomic_fetch_and (a, ~mask, __ATOMIC_RELAXED) & mask) != 0;
unsigned int mask = (1u << 0);
return (__atomic_fetch_and (a, ~mask, __ATOMIC_SEQ_CST) & mask) != 0;
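// __atomic_xor_fetch returns the new value, so the tested bit reflects the
// state after the complement.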
f19 (unsigned long int *a, int bit)
unsigned long int mask = (1ul << bit);
return (__atomic_xor_fetch (a, mask, __ATOMIC_SEQ_CST) & mask) != 0;
f20 (unsigned long int *a)
unsigned long int mask = (1ul << 7);
return (__atomic_xor_fetch (a, mask, __ATOMIC_SEQ_CST) & mask) == 0;
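// These return the masked value itself rather than a boolean comparison.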
f21 (int *a, int bit)
unsigned int mask = (1u << bit);
return (__sync_fetch_and_or (a, mask) & mask);
f22 (unsigned long int *a)
unsigned long int mask = (1ul << 7);
return (__atomic_xor_fetch (a, mask, __ATOMIC_SEQ_CST) & mask);
f23 (unsigned long int *a)
unsigned long int mask = (1ul << 7);
return (__atomic_fetch_xor (a, mask, __ATOMIC_SEQ_CST) & mask);
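// Same bit test-and-set pattern on unsigned short operands.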
f24 (unsigned short int *a)
unsigned short int mask = (1u << 7);
return (__sync_fetch_and_or (a, mask) & mask) != 0;
f25 (unsigned short int *a)
unsigned short int mask = (1u << 7);
return (__atomic_fetch_or (a, mask, __ATOMIC_SEQ_CST) & mask) != 0;