gcc/config/arm/linux-atomic.c
/* Linux-specific atomic operations for ARM EABI.
   Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
   Contributed by CodeSourcery.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

Under Section 7 of GPL version 3, you are granted additional
permissions described in the GCC Runtime Library Exception, version
3.1, as published by the Free Software Foundation.

You should have received a copy of the GNU General Public License and
a copy of the GCC Runtime Library Exception along with this program;
see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
<http://www.gnu.org/licenses/>.  */
/* Kernel helper for compare-and-exchange.  */
typedef int (__kernel_cmpxchg_t) (int oldval, int newval, int *ptr);
#define __kernel_cmpxchg (*(__kernel_cmpxchg_t *) 0xffff0fc0)
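/* The helper returns zero when *ptr was equal to oldval and has been
   updated to newval, and non-zero otherwise, so callers retry until it
   reports success.  */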
/* Kernel helper for memory barrier.  */
typedef void (__kernel_dmb_t) (void);
#define __kernel_dmb (*(__kernel_dmb_t *) 0xffff0fa0)
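/* The kernel supplies whatever barrier the running CPU actually needs,
   so calling through this fixed address keeps this code independent of
   the architecture version it ends up running on.  */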
/* Note: we implement byte, short and int versions of atomic operations using
   the above kernel helpers, but there is no support for "long long" (64-bit)
   operations as yet.  */
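/* As a rough sketch of how these entry points are reached: when GCC
   cannot expand a __sync builtin inline for the target, a call such as

       __sync_fetch_and_add (&counter, 1);

   (with 'counter' a hypothetical int variable) is emitted as a call to
   the matching out-of-line function below, here __sync_fetch_and_add_4,
   which loops on __kernel_cmpxchg until the update applies atomically.  */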
#define HIDDEN __attribute__ ((visibility ("hidden")))

#ifdef __ARMEL__
#define INVERT_MASK_1 0
#define INVERT_MASK_2 0
#else
#define INVERT_MASK_1 24
#define INVERT_MASK_2 16
#endif

#define MASK_1 0xffu
#define MASK_2 0xffffu
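/* The subword helpers below operate on the aligned word containing the
   byte or halfword: MASK_<n> selects the subword once it has been shifted
   into place, and INVERT_MASK_<n> corrects the bit offset on big-endian
   targets, where the addressed byte sits at the high end of the word.  */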
#define FETCH_AND_OP_WORD(OP, PFX_OP, INF_OP) \
  int HIDDEN \
  __sync_fetch_and_##OP##_4 (int *ptr, int val) \
  { \
    int failure, tmp; \
    do { \
      tmp = *ptr; \
      failure = __kernel_cmpxchg (tmp, PFX_OP (tmp INF_OP val), ptr); \
    } while (failure != 0); \
    return tmp; \
  }
FETCH_AND_OP_WORD (add,   , +)
FETCH_AND_OP_WORD (sub,   , -)
FETCH_AND_OP_WORD (or,    , |)
FETCH_AND_OP_WORD (and,   , &)
FETCH_AND_OP_WORD (xor,   , ^)
FETCH_AND_OP_WORD (nand, ~, &)
#define NAME_oldval(OP, WIDTH) __sync_fetch_and_##OP##_##WIDTH
#define NAME_newval(OP, WIDTH) __sync_##OP##_and_fetch_##WIDTH
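/* The RETURN argument of SUBWORD_SYNC_OP doubles as part of the function
   name (via NAME_oldval / NAME_newval) and as the value returned: passing
   'oldval' builds __sync_fetch_and_<op>_<width>, passing 'newval' builds
   __sync_<op>_and_fetch_<width>.  */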
/* Implement both __sync_<op>_and_fetch and __sync_fetch_and_<op> for
   subword-sized quantities.  */
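/* Each subword operation loads the containing aligned word, applies the
   operation to the selected byte or halfword only, and retries the
   word-wide compare-and-exchange until no other writer has intervened.  */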
#define SUBWORD_SYNC_OP(OP, PFX_OP, INF_OP, TYPE, WIDTH, RETURN) \
  TYPE HIDDEN \
  NAME##_##RETURN (OP, WIDTH) (TYPE *ptr, TYPE val) \
  { \
    int *wordptr = (int *) ((unsigned int) ptr & ~3); \
    unsigned int mask, shift, oldval, newval; \
    int failure; \
    shift = (((unsigned int) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH; \
    mask = MASK_##WIDTH << shift; \
    do { \
      oldval = *wordptr; \
      newval = ((PFX_OP (((oldval & mask) >> shift) \
                         INF_OP (unsigned int) val)) << shift) & mask; \
      newval |= oldval & ~mask; \
      failure = __kernel_cmpxchg (oldval, newval, wordptr); \
    } while (failure != 0); \
    return (RETURN & mask) >> shift; \
  }
SUBWORD_SYNC_OP (add,   , +, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (sub,   , -, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (or,    , |, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (and,   , &, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (xor,   , ^, unsigned short, 2, oldval)
SUBWORD_SYNC_OP (nand, ~, &, unsigned short, 2, oldval)

SUBWORD_SYNC_OP (add,   , +, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (sub,   , -, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (or,    , |, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (and,   , &, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (xor,   , ^, unsigned char, 1, oldval)
SUBWORD_SYNC_OP (nand, ~, &, unsigned char, 1, oldval)
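/* Like FETCH_AND_OP_WORD, but the updated value rather than the
   previous one is returned.  */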
#define OP_AND_FETCH_WORD(OP, PFX_OP, INF_OP) \
  int HIDDEN \
  __sync_##OP##_and_fetch_4 (int *ptr, int val) \
  { \
    int tmp, failure; \
    do { \
      tmp = *ptr; \
      failure = __kernel_cmpxchg (tmp, PFX_OP (tmp INF_OP val), ptr); \
    } while (failure != 0); \
    return PFX_OP (tmp INF_OP val); \
  }
OP_AND_FETCH_WORD (add,   , +)
OP_AND_FETCH_WORD (sub,   , -)
OP_AND_FETCH_WORD (or,    , |)
OP_AND_FETCH_WORD (and,   , &)
OP_AND_FETCH_WORD (xor,   , ^)
OP_AND_FETCH_WORD (nand, ~, &)

SUBWORD_SYNC_OP (add,   , +, unsigned short, 2, newval)
SUBWORD_SYNC_OP (sub,   , -, unsigned short, 2, newval)
SUBWORD_SYNC_OP (or,    , |, unsigned short, 2, newval)
SUBWORD_SYNC_OP (and,   , &, unsigned short, 2, newval)
SUBWORD_SYNC_OP (xor,   , ^, unsigned short, 2, newval)
SUBWORD_SYNC_OP (nand, ~, &, unsigned short, 2, newval)

SUBWORD_SYNC_OP (add,   , +, unsigned char, 1, newval)
SUBWORD_SYNC_OP (sub,   , -, unsigned char, 1, newval)
SUBWORD_SYNC_OP (or,    , |, unsigned char, 1, newval)
SUBWORD_SYNC_OP (and,   , &, unsigned char, 1, newval)
SUBWORD_SYNC_OP (xor,   , ^, unsigned char, 1, newval)
SUBWORD_SYNC_OP (nand, ~, &, unsigned char, 1, newval)
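/* __sync_val_compare_and_swap: if *ptr equals oldval, store newval there;
   either way, return the value that was originally in *ptr.  */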
int HIDDEN
__sync_val_compare_and_swap_4 (int *ptr, int oldval, int newval)
{
  int actual_oldval, fail;

  while (1)
    {
      actual_oldval = *ptr;

      if (__builtin_expect (oldval != actual_oldval, 0))
        return actual_oldval;

      fail = __kernel_cmpxchg (actual_oldval, newval, ptr);

      if (__builtin_expect (!fail, 1))
        return oldval;
    }
}
#define SUBWORD_VAL_CAS(TYPE, WIDTH) \
  TYPE HIDDEN \
  __sync_val_compare_and_swap_##WIDTH (TYPE *ptr, TYPE oldval, \
                                       TYPE newval) \
  { \
    int *wordptr = (int *)((unsigned int) ptr & ~3), fail; \
    unsigned int mask, shift, actual_oldval, actual_newval; \
    shift = (((unsigned int) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH; \
    mask = MASK_##WIDTH << shift; \
    while (1) \
      { \
        actual_oldval = *wordptr; \
        if (__builtin_expect (((actual_oldval & mask) >> shift) != \
                              (unsigned int) oldval, 0)) \
          return (actual_oldval & mask) >> shift; \
        actual_newval = (actual_oldval & ~mask) \
                        | (((unsigned int) newval << shift) & mask); \
        fail = __kernel_cmpxchg (actual_oldval, actual_newval, \
                                 wordptr); \
        if (__builtin_expect (!fail, 1)) \
          return oldval; \
      } \
  }
SUBWORD_VAL_CAS (unsigned short, 2)
SUBWORD_VAL_CAS (unsigned char,  1)

typedef unsigned char bool;
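/* A minimal 'bool' for use below (this file does not include
   <stdbool.h>); _Bool occupies a single byte on ARM EABI.  */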
bool HIDDEN
__sync_bool_compare_and_swap_4 (int *ptr, int oldval, int newval)
{
  int failure = __kernel_cmpxchg (oldval, newval, ptr);
  return (failure == 0);
}
#define SUBWORD_BOOL_CAS(TYPE, WIDTH) \
  bool HIDDEN \
  __sync_bool_compare_and_swap_##WIDTH (TYPE *ptr, TYPE oldval, \
                                        TYPE newval) \
  { \
    TYPE actual_oldval \
      = __sync_val_compare_and_swap_##WIDTH (ptr, oldval, newval); \
    return (oldval == actual_oldval); \
  }
SUBWORD_BOOL_CAS (unsigned short, 2)
SUBWORD_BOOL_CAS (unsigned char,  1)
void HIDDEN
__sync_synchronize (void)
{
  __kernel_dmb ();
}
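/* __sync_lock_test_and_set is implemented here as an unconditional
   atomic exchange: val is stored into *ptr and the previous contents
   are returned.  */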
int HIDDEN
__sync_lock_test_and_set_4 (int *ptr, int val)
{
  int failure, oldval;

  do {
    oldval = *ptr;
    failure = __kernel_cmpxchg (oldval, val, ptr);
  } while (failure != 0);

  return oldval;
}
#define SUBWORD_TEST_AND_SET(TYPE, WIDTH) \
  TYPE HIDDEN \
  __sync_lock_test_and_set_##WIDTH (TYPE *ptr, TYPE val) \
  { \
    int failure; \
    unsigned int oldval, newval, shift, mask; \
    int *wordptr = (int *) ((unsigned int) ptr & ~3); \
    shift = (((unsigned int) ptr & 3) << 3) ^ INVERT_MASK_##WIDTH; \
    mask = MASK_##WIDTH << shift; \
    do { \
      oldval = *wordptr; \
      newval = (oldval & ~mask) \
               | (((unsigned int) val << shift) & mask); \
      failure = __kernel_cmpxchg (oldval, newval, wordptr); \
    } while (failure != 0); \
    return (oldval & mask) >> shift; \
  }
SUBWORD_TEST_AND_SET (unsigned short, 2)
SUBWORD_TEST_AND_SET (unsigned char,  1)
#define SYNC_LOCK_RELEASE(TYPE, WIDTH) \
  void HIDDEN \
  __sync_lock_release_##WIDTH (TYPE *ptr) \
  { \
    /* All writes before this point must be seen before we release \
       the lock itself.  */ \
    __kernel_dmb (); \
    *ptr = 0; \
  }
SYNC_LOCK_RELEASE (int, 4)
SYNC_LOCK_RELEASE (short, 2)
SYNC_LOCK_RELEASE (char, 1)