nptl/sysdeps/unix/sysv/linux/sh/lowlevellock.h

/* Copyright (C) 2003, 2004, 2006, 2007, 2008 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, write to the Free
   Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
   02111-1307 USA.  */

#ifndef _LOWLEVELLOCK_H
#define _LOWLEVELLOCK_H	1

#ifndef __ASSEMBLER__
#include <time.h>
#include <sys/param.h>
#include <bits/pthreadtypes.h>
#include <kernel-features.h>
#endif

#define SYS_futex		240
#define FUTEX_WAIT		0
#define FUTEX_WAKE		1
#define FUTEX_CMP_REQUEUE	4
#define FUTEX_WAKE_OP		5
#define FUTEX_LOCK_PI		6
#define FUTEX_UNLOCK_PI		7
#define FUTEX_TRYLOCK_PI	8
#define FUTEX_WAIT_BITSET	9
#define FUTEX_WAKE_BITSET	10
#define FUTEX_PRIVATE_FLAG	128

#define FUTEX_OP_CLEAR_WAKE_IF_GT_ONE	((4 << 24) | 1)

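/* The literal above is the FUTEX_WAKE_OP operation word meaning "store 0
   into the second futex word and wake it if its old value was greater
   than 1".  A minimal sketch of how the same value could be built with
   the generic encoding macros from <linux/futex.h> (illustration only;
   this header deliberately keeps the precomputed literal, and the _ALT
   name below is hypothetical):  */
#if 0
# include <linux/futex.h>
/* FUTEX_OP (op, oparg, cmp, cmparg) packs op << 28 | cmp << 24
   | oparg << 12 | cmparg, so with FUTEX_OP_SET == 0 and
   FUTEX_OP_CMP_GT == 4 the value below equals (4 << 24) | 1.  */
# define FUTEX_OP_CLEAR_WAKE_IF_GT_ONE_ALT \
  FUTEX_OP (FUTEX_OP_SET, 0, FUTEX_OP_CMP_GT, 1)
#endif
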
/* Values for 'private' parameter of locking macros.  Yes, the
   definition seems to be backwards.  But it is not.  The bit will be
   reversed before passing to the system call.  */
#define LLL_PRIVATE	0
#define LLL_SHARED	FUTEX_PRIVATE_FLAG

#if !defined NOT_IN_libc || defined IS_IN_rtld
/* In libc.so or ld.so all futexes are private.  */
# ifdef __ASSUME_PRIVATE_FUTEX
#  define __lll_private_flag(fl, private) \
  ((fl) | FUTEX_PRIVATE_FLAG)
# else
#  define __lll_private_flag(fl, private) \
  ((fl) | THREAD_GETMEM (THREAD_SELF, header.private_futex))
# endif
#else
# ifdef __ASSUME_PRIVATE_FUTEX
#  define __lll_private_flag(fl, private) \
  (((fl) | FUTEX_PRIVATE_FLAG) ^ (private))
# else
#  define __lll_private_flag(fl, private) \
  (__builtin_constant_p (private) \
   ? ((private) == 0 \
      ? ((fl) | THREAD_GETMEM (THREAD_SELF, header.private_futex)) \
      : (fl)) \
   : ((fl) | (((private) ^ FUTEX_PRIVATE_FLAG) \
	      & THREAD_GETMEM (THREAD_SELF, header.private_futex))))
# endif
#endif

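/* Worked example of why the "backwards" LLL_PRIVATE/LLL_SHARED values
   come out right (illustration only; it assumes the
   __ASSUME_PRIVATE_FUTEX variant outside libc.so/ld.so, i.e. the XOR
   definition above):  */
#if 0
/* Private caller: XOR with 0 keeps the private bit that was just ORed
   in, yielding FUTEX_WAIT | FUTEX_PRIVATE_FLAG.  */
int __op_private = __lll_private_flag (FUTEX_WAIT, LLL_PRIVATE);
/* Shared caller: XOR with FUTEX_PRIVATE_FLAG strips the bit again,
   yielding plain FUTEX_WAIT for a process-shared futex.  */
int __op_shared = __lll_private_flag (FUTEX_WAIT, LLL_SHARED);
#endif
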
#ifndef __ASSEMBLER__

/* Initializer for compatibility lock.  */
#define LLL_LOCK_INITIALIZER		(0)
#define LLL_LOCK_INITIALIZER_LOCKED	(1)
#define LLL_LOCK_INITIALIZER_WAITERS	(2)

extern int __lll_lock_wait_private (int val, int *__futex)
  attribute_hidden;
extern int __lll_lock_wait (int val, int *__futex, int private)
  attribute_hidden;
extern int __lll_timedlock_wait (int val, int *__futex,
				 const struct timespec *abstime, int private)
  attribute_hidden;
extern int __lll_robust_lock_wait (int val, int *__futex, int private)
  attribute_hidden;
extern int __lll_robust_timedlock_wait (int val, int *__futex,
					const struct timespec *abstime,
					int private)
  attribute_hidden;
extern int __lll_unlock_wake_private (int *__futex) attribute_hidden;
extern int __lll_unlock_wake (int *__futex, int private) attribute_hidden;

#define lll_trylock(futex) \
  ({ unsigned char __result; \
     __asm __volatile ("\
	.align 2\n\
	mova 1f,r0\n\
	nop\n\
	mov r15,r1\n\
	mov #-8,r15\n\
     0: mov.l @%1,r2\n\
	cmp/eq r2,%3\n\
	bf 1f\n\
	mov.l %2,@%1\n\
     1: mov r1,r15\n\
	mov #-1,%0\n\
	negc %0,%0"\
	: "=r" (__result) \
	: "r" (&(futex)), \
	  "r" (LLL_LOCK_INITIALIZER_LOCKED), \
	  "r" (LLL_LOCK_INITIALIZER) \
	: "r0", "r1", "r2", "t", "memory"); \
     __result; })

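/* The mova/mov r15,r1/mov #-8,r15 prologue above sets up the SH "gUSA"
   restartable atomic sequence: r0 points at the 1: label, r1 saves the
   real stack pointer, and r15 temporarily holds minus the length of the
   critical section, so in effect the kernel restarts the sequence from
   the 0: label if the thread is preempted inside it.  A plain C sketch
   of the resulting semantics (illustration only; the sketch name is
   hypothetical and GCC's __sync builtin stands in for the asm):  */
#if 0
/* Returns 0 if the 0 -> 1 transition succeeded, nonzero if the lock
   was already taken.  */
# define lll_trylock_sketch(futex) \
  (__sync_val_compare_and_swap (&(futex), LLL_LOCK_INITIALIZER, \
				LLL_LOCK_INITIALIZER_LOCKED) \
   != LLL_LOCK_INITIALIZER)
#endif
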
#define lll_robust_trylock(futex, id) \
  ({ unsigned char __result; \
     __asm __volatile ("\
	.align 2\n\
	mova 1f,r0\n\
	nop\n\
	mov r15,r1\n\
	mov #-8,r15\n\
     0: mov.l @%1,r2\n\
	cmp/eq r2,%3\n\
	bf 1f\n\
	mov.l %2,@%1\n\
     1: mov r1,r15\n\
	mov #-1,%0\n\
	negc %0,%0"\
	: "=r" (__result) \
	: "r" (&(futex)), \
	  "r" (id), \
	  "r" (LLL_LOCK_INITIALIZER) \
	: "r0", "r1", "r2", "t", "memory"); \
     __result; })

#define lll_cond_trylock(futex) \
  ({ unsigned char __result; \
     __asm __volatile ("\
	.align 2\n\
	mova 1f,r0\n\
	nop\n\
	mov r15,r1\n\
	mov #-8,r15\n\
     0: mov.l @%1,r2\n\
	cmp/eq r2,%3\n\
	bf 1f\n\
	mov.l %2,@%1\n\
     1: mov r1,r15\n\
	mov #-1,%0\n\
	negc %0,%0"\
	: "=r" (__result) \
	: "r" (&(futex)), \
	  "r" (LLL_LOCK_INITIALIZER_WAITERS), \
	  "r" (LLL_LOCK_INITIALIZER) \
	: "r0", "r1", "r2", "t", "memory"); \
     __result; })

#define lll_lock(futex, private) \
  (void) ({ int __result, *__futex = &(futex); \
	    __asm __volatile ("\
		.align 2\n\
		mova 1f,r0\n\
		nop\n\
		mov r15,r1\n\
		mov #-8,r15\n\
	     0: mov.l @%2,%0\n\
		tst %0,%0\n\
		bf 1f\n\
		mov.l %1,@%2\n\
	     1: mov r1,r15"\
		: "=&r" (__result) : "r" (1), "r" (__futex) \
		: "r0", "r1", "t", "memory"); \
	    if (__result) \
	      { \
		if (__builtin_constant_p (private) \
		    && (private) == LLL_PRIVATE) \
		  __lll_lock_wait_private (__result, __futex); \
		else \
		  __lll_lock_wait (__result, __futex, (private)); \
	      }})

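/* Plain C sketch of the protocol behind lll_lock (illustration only;
   __lll_lock_sketch is a hypothetical helper and GCC's __sync builtin
   replaces the gUSA sequence).  The futex word uses 0 = unlocked,
   1 = locked without waiters, 2 = locked with possible waiters.  */
#if 0
static inline void
__lll_lock_sketch (int *futex, int private)
{
  /* Fast path: try the uncontended 0 -> 1 transition.  */
  int old = __sync_val_compare_and_swap (futex, 0, 1);
  if (old != 0)
    /* Contended: the slow path marks the word as 2 and sleeps with
       FUTEX_WAIT until it acquires the lock.  */
    __lll_lock_wait (old, futex, private);
}
#endif
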
#define lll_robust_lock(futex, id, private) \
  ({ int __result, *__futex = &(futex); \
     __asm __volatile ("\
	.align 2\n\
	mova 1f,r0\n\
	nop\n\
	mov r15,r1\n\
	mov #-8,r15\n\
     0: mov.l @%2,%0\n\
	tst %0,%0\n\
	bf 1f\n\
	mov.l %1,@%2\n\
     1: mov r1,r15"\
	: "=&r" (__result) : "r" (id), "r" (__futex) \
	: "r0", "r1", "t", "memory"); \
     if (__result) \
       __result = __lll_robust_lock_wait (__result, __futex, private); \
     __result; })

/* Special version of lll_mutex_lock which causes the unlock function to
   always wake up waiters.  */
#define lll_cond_lock(futex, private) \
  (void) ({ int __result, *__futex = &(futex); \
	    __asm __volatile ("\
		.align 2\n\
		mova 1f,r0\n\
		nop\n\
		mov r15,r1\n\
		mov #-8,r15\n\
	     0: mov.l @%2,%0\n\
		tst %0,%0\n\
		bf 1f\n\
		mov.l %1,@%2\n\
	     1: mov r1,r15"\
		: "=&r" (__result) : "r" (2), "r" (__futex) \
		: "r0", "r1", "t", "memory"); \
	    if (__result) \
	      __lll_lock_wait (__result, __futex, private); })

#define lll_robust_cond_lock(futex, id, private) \
  ({ int __result, *__futex = &(futex); \
     __asm __volatile ("\
	.align 2\n\
	mova 1f,r0\n\
	nop\n\
	mov r15,r1\n\
	mov #-8,r15\n\
     0: mov.l @%2,%0\n\
	tst %0,%0\n\
	bf 1f\n\
	mov.l %1,@%2\n\
     1: mov r1,r15"\
	: "=&r" (__result) : "r" (id | FUTEX_WAITERS), "r" (__futex) \
	: "r0", "r1", "t", "memory"); \
     if (__result) \
       __result = __lll_robust_lock_wait (__result, __futex, private); \
     __result; })

#define lll_timedlock(futex, timeout, private) \
  ({ int __result, *__futex = &(futex); \
     __asm __volatile ("\
	.align 2\n\
	mova 1f,r0\n\
	nop\n\
	mov r15,r1\n\
	mov #-8,r15\n\
     0: mov.l @%2,%0\n\
	tst %0,%0\n\
	bf 1f\n\
	mov.l %1,@%2\n\
     1: mov r1,r15"\
	: "=&r" (__result) : "r" (1), "r" (__futex) \
	: "r0", "r1", "t", "memory"); \
     if (__result) \
       __result = __lll_timedlock_wait (__result, __futex, timeout, private); \
     __result; })

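/* Usage sketch (illustration only; 'lock' is a hypothetical variable):
   the timeout is an absolute CLOCK_REALTIME timespec, as for
   pthread_mutex_timedlock, and the slow path turns it into a relative
   FUTEX_WAIT timeout.  */
#if 0
int lock = LLL_LOCK_INITIALIZER;
static int
timedlock_sketch (void)
{
  struct timespec abstime;
  clock_gettime (CLOCK_REALTIME, &abstime);
  abstime.tv_sec += 2;		/* Give up roughly two seconds from now.  */
  /* Returns 0 on success, ETIMEDOUT once the deadline has passed.  */
  return lll_timedlock (lock, &abstime, LLL_PRIVATE);
}
#endif
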
#define lll_robust_timedlock(futex, timeout, id, private) \
  ({ int __result, *__futex = &(futex); \
     __asm __volatile ("\
	.align 2\n\
	mova 1f,r0\n\
	nop\n\
	mov r15,r1\n\
	mov #-8,r15\n\
     0: mov.l @%2,%0\n\
	tst %0,%0\n\
	bf 1f\n\
	mov.l %1,@%2\n\
     1: mov r1,r15"\
	: "=&r" (__result) : "r" (id), "r" (__futex) \
	: "r0", "r1", "t", "memory"); \
     if (__result) \
       __result = __lll_robust_timedlock_wait (__result, __futex, \
					       timeout, private); \
     __result; })

#define lll_unlock(futex, private) \
  (void) ({ int __result, *__futex = &(futex); \
	    __asm __volatile ("\
		.align 2\n\
		mova 1f,r0\n\
		mov r15,r1\n\
		mov #-6,r15\n\
	     0: mov.l @%1,%0\n\
		add #-1,%0\n\
		mov.l %0,@%1\n\
	     1: mov r1,r15"\
		: "=&r" (__result) : "r" (__futex) \
		: "r0", "r1", "memory"); \
	    if (__result) \
	      { \
		if (__builtin_constant_p (private) \
		    && (private) == LLL_PRIVATE) \
		  __lll_unlock_wake_private (__futex); \
		else \
		  __lll_unlock_wake (__futex, (private)); \
	      }})

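/* Plain C sketch of the unlock fast path above (illustration only;
   __lll_unlock_sketch is a hypothetical helper and GCC's __sync builtin
   replaces the gUSA sequence):  */
#if 0
static inline void
__lll_unlock_sketch (int *futex, int private)
{
  /* Atomically decrement the futex word; 1 -> 0 needs no syscall.  */
  if (__sync_sub_and_fetch (futex, 1) != 0)
    /* The word was 2, i.e. there may be waiters: the slow path resets
       it to 0 and issues a FUTEX_WAKE.  */
    __lll_unlock_wake (futex, private);
}
#endif
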
#define lll_robust_unlock(futex, private) \
  (void) ({ int __result, *__futex = &(futex); \
	    __asm __volatile ("\
		.align 2\n\
		mova 1f,r0\n\
		mov r15,r1\n\
		mov #-6,r15\n\
	     0: mov.l @%1,%0\n\
		and %2,%0\n\
		mov.l %0,@%1\n\
	     1: mov r1,r15"\
		: "=&r" (__result) : "r" (__futex), "r" (FUTEX_WAITERS) \
		: "r0", "r1", "memory"); \
	    if (__result) \
	      __lll_unlock_wake (__futex, private); })

#define lll_robust_dead(futex, private) \
  (void) ({ int __ignore, *__futex = &(futex); \
	    __asm __volatile ("\
		.align 2\n\
		mova 1f,r0\n\
		mov r15,r1\n\
		mov #-6,r15\n\
	     0: mov.l @%1,%0\n\
		or %2,%0\n\
		mov.l %0,@%1\n\
	     1: mov r1,r15"\
		: "=&r" (__ignore) : "r" (__futex), "r" (FUTEX_OWNER_DIED) \
		: "r0", "r1", "memory"); \
	    lll_futex_wake (__futex, 1, private); })

# ifdef NEED_SYSCALL_INST_PAD
#  define SYSCALL_WITH_INST_PAD "\
	trapa #0x14; or r0,r0; or r0,r0; or r0,r0; or r0,r0; or r0,r0"
# else
#  define SYSCALL_WITH_INST_PAD "\
	trapa #0x14"
# endif

#define lll_futex_wait(futex, val, private) \
  lll_futex_timed_wait (futex, val, NULL, private)

#define lll_futex_timed_wait(futex, val, timeout, private) \
  ({ \
    int __status; \
    register unsigned long __r3 asm ("r3") = SYS_futex; \
    register unsigned long __r4 asm ("r4") = (unsigned long) (futex); \
    register unsigned long __r5 asm ("r5") \
      = __lll_private_flag (FUTEX_WAIT, private); \
    register unsigned long __r6 asm ("r6") = (unsigned long) (val); \
    register unsigned long __r7 asm ("r7") = (unsigned long) (timeout); \
    __asm __volatile (SYSCALL_WITH_INST_PAD \
		      : "=z" (__status) \
		      : "r" (__r3), "r" (__r4), "r" (__r5), \
			"r" (__r6), "r" (__r7) \
		      : "memory", "t"); \
    __status; \
  })

#define lll_futex_wake(futex, nr, private) \
  do { \
    int __ignore; \
    register unsigned long __r3 asm ("r3") = SYS_futex; \
    register unsigned long __r4 asm ("r4") = (unsigned long) (futex); \
    register unsigned long __r5 asm ("r5") \
      = __lll_private_flag (FUTEX_WAKE, private); \
    register unsigned long __r6 asm ("r6") = (unsigned long) (nr); \
    register unsigned long __r7 asm ("r7") = 0; \
    __asm __volatile (SYSCALL_WITH_INST_PAD \
		      : "=z" (__ignore) \
		      : "r" (__r3), "r" (__r4), "r" (__r5), \
			"r" (__r6), "r" (__r7) \
		      : "memory", "t"); \
  } while (0)

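/* Usage sketch (illustration only; 'flag' and the two functions are
   hypothetical): the two macros above implement the usual futex
   protocol.  The waiter passes the value it just observed, so the
   kernel refuses to sleep if the word has changed in the meantime and
   no wake-up is lost.  */
#if 0
static int flag;			/* hypothetical shared word */

static void
waiter (void)
{
  /* Block while the flag is still 0; re-check after every wake-up.  */
  while (flag == 0)
    lll_futex_wait (&flag, 0, LLL_SHARED);
}

static void
waker (void)
{
  /* Publish the new value, then wake one sleeping thread.  */
  flag = 1;
  lll_futex_wake (&flag, 1, LLL_SHARED);
}
#endif
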
#define lll_islocked(futex) \
  (futex != LLL_LOCK_INITIALIZER)

/* The kernel notifies a process which uses CLONE_CLEARTID via futex
   wakeup when the clone terminates.  The memory location contains the
   thread ID while the clone is running and is reset to zero
   afterwards.  */

#define lll_wait_tid(tid) \
  do { \
    __typeof (tid) *__tid = &(tid); \
    while (*__tid != 0) \
      lll_futex_wait (__tid, *__tid, LLL_SHARED); \
  } while (0)

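/* Sketch of how the TID futex is used for joining a thread
   (illustration only; 'child_tid' and join_sketch are hypothetical, and
   pthread_join really goes through the thread descriptor's tid field).
   The word is registered with the kernel via CLONE_CHILD_CLEARTID at
   thread creation, so the kernel zeroes it and wakes the futex when the
   thread exits, which ends the loop above.  */
#if 0
extern int child_tid;		/* set to the new TID by clone */

static void
join_sketch (void)
{
  lll_wait_tid (child_tid);	/* returns once the thread has exited */
}
#endif
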
extern int __lll_timedwait_tid (int *tid, const struct timespec *abstime)
  attribute_hidden;
#define lll_timedwait_tid(tid, abstime) \
  ({ \
    int __result = 0; \
    if (tid != 0) \
      { \
	if (abstime->tv_nsec < 0 || abstime->tv_nsec >= 1000000000) \
	  __result = EINVAL; \
	else \
	  __result = __lll_timedwait_tid (&tid, abstime); \
      } \
    __result; })

#endif  /* !__ASSEMBLER__ */

#endif  /* lowlevellock.h */