2.9
[glibc/nacl-glibc.git] / nptl / sysdeps / unix / sysv / linux / sh / lowlevellock.h
blob88e94b9905b3ce6b53b291d473353bd64edc1473
1 /* Copyright (C) 2003, 2004, 2006, 2007 Free Software Foundation, Inc.
2 This file is part of the GNU C Library.
4 The GNU C Library is free software; you can redistribute it and/or
5 modify it under the terms of the GNU Lesser General Public
6 License as published by the Free Software Foundation; either
7 version 2.1 of the License, or (at your option) any later version.
9 The GNU C Library is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 Lesser General Public License for more details.
14 You should have received a copy of the GNU Lesser General Public
15 License along with the GNU C Library; if not, write to the Free
16 Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
17 02111-1307 USA. */
19 #ifndef _LOWLEVELLOCK_H
20 #define _LOWLEVELLOCK_H 1
22 #ifndef __ASSEMBLER__
23 #include <time.h>
24 #include <sys/param.h>
25 #include <bits/pthreadtypes.h>
26 #include <kernel-features.h>
27 #endif
/* Linux/SH system-call number for futex(2) and the futex operation
   codes used below.  NOTE(review): these mirror the kernel ABI
   (<linux/futex.h>); confirm they match the target kernel headers.  */
29 #define SYS_futex 240
30 #define FUTEX_WAIT 0
31 #define FUTEX_WAKE 1
32 #define FUTEX_CMP_REQUEUE 4
33 #define FUTEX_WAKE_OP 5
34 #define FUTEX_LOCK_PI 6
35 #define FUTEX_UNLOCK_PI 7
36 #define FUTEX_TRYLOCK_PI 8
/* Bit OR'ed into an operation code to request a process-private futex.  */
37 #define FUTEX_PRIVATE_FLAG 128
/* Encoded FUTEX_WAKE_OP argument: "wake second futex if its old value
   was greater than one, after clearing it".  */
39 #define FUTEX_OP_CLEAR_WAKE_IF_GT_ONE ((4 << 24) | 1)
41 /* Values for 'private' parameter of locking macros. Yes, the
42 definition seems to be backwards. But it is not. The bit will be
43 reversed before passing to the system call. */
44 #define LLL_PRIVATE 0
45 #define LLL_SHARED FUTEX_PRIVATE_FLAG
/* __lll_private_flag (fl, private) combines a futex operation code FL
   with the right FUTEX_PRIVATE_FLAG setting for PRIVATE (LLL_PRIVATE
   or LLL_SHARED, encoded "backwards" as noted above).  Three cases:  */
48 #if !defined NOT_IN_libc || defined IS_IN_rtld
49 /* In libc.so or ld.so all futexes are private. */
50 # ifdef __ASSUME_PRIVATE_FUTEX
/* Kernel is known to support private futexes: set the bit directly.  */
51 # define __lll_private_flag(fl, private) \
52 ((fl) | FUTEX_PRIVATE_FLAG)
53 # else
/* Otherwise OR in the per-thread cached flag (FUTEX_PRIVATE_FLAG if
   the running kernel supports private futexes, else 0).
   NOTE(review): header.private_futex is presumably set up at startup
   from a kernel-capability probe — confirm in the TLS setup code.  */
54 # define __lll_private_flag(fl, private) \
55 ((fl) | THREAD_GETMEM (THREAD_SELF, header.private_futex))
56 # endif
57 #else
58 # ifdef __ASSUME_PRIVATE_FUTEX
/* XOR flips the backwards encoding: LLL_PRIVATE (0) yields the flag
   set, LLL_SHARED (FUTEX_PRIVATE_FLAG) clears it.  */
59 # define __lll_private_flag(fl, private) \
60 (((fl) | FUTEX_PRIVATE_FLAG) ^ (private))
61 # else
/* Same, but masked with the per-thread capability flag; the constant
   cases are special-cased so the compiler can fold them.  */
62 # define __lll_private_flag(fl, private) \
63 (__builtin_constant_p (private) \
64 ? ((private) == 0 \
65 ? ((fl) | THREAD_GETMEM (THREAD_SELF, header.private_futex)) \
66 : (fl)) \
67 : ((fl) | (((private) ^ FUTEX_PRIVATE_FLAG) \
68 & THREAD_GETMEM (THREAD_SELF, header.private_futex))))
69 # endif
70 #endif
72 #ifndef __ASSEMBLER__
74 /* Initializer for compatibility lock.  Lock states: 0 = unlocked,
   1 = locked with no waiters, 2 = locked with (possible) waiters.  */
75 #define LLL_LOCK_INITIALIZER (0)
76 #define LLL_LOCK_INITIALIZER_LOCKED (1)
77 #define LLL_LOCK_INITIALIZER_WAITERS (2)
/* Slow-path helpers (presumably defined in lowlevellock.c / sysdep
   code): called by the macros below only when the fast path saw
   contention.  VAL is the value observed in the futex word.  */
79 extern int __lll_lock_wait_private (int val, int *__futex)
80 attribute_hidden;
81 extern int __lll_lock_wait (int val, int *__futex, int private)
82 attribute_hidden;
83 extern int __lll_timedlock_wait (int val, int *__futex,
84 const struct timespec *abstime, int private)
85 attribute_hidden;
86 extern int __lll_robust_lock_wait (int val, int *__futex, int private)
87 attribute_hidden;
88 extern int __lll_robust_timedlock_wait (int val, int *__futex,
89 const struct timespec *abstime,
90 int private)
91 attribute_hidden;
92 extern int __lll_unlock_wake_private (int *__futex) attribute_hidden;
93 extern int __lll_unlock_wake (int *__futex, int private) attribute_hidden;
/* Non-blocking acquire: atomically change FUTEX from
   LLL_LOCK_INITIALIZER (0) to LLL_LOCK_INITIALIZER_LOCKED (1).
   Evaluates to 0 on success, 1 if the lock was already held.
   NOTE(review): the mova/mov-r15 prologue looks like the SH "gUSA"
   restartable atomic sequence (r0 = end address, r1 = saved SP,
   r15 = negative sequence length; the kernel restarts the sequence on
   preemption) — confirm against the Linux SH port.  cmp/eq sets the
   T bit when the loaded value equals LLL_LOCK_INITIALIZER; the final
   `mov #-1,%0; negc %0,%0' computes 1 - T, i.e. 0 on success.  */
95 #define lll_trylock(futex) \
96 ({ unsigned char __result; \
97 __asm __volatile ("\
98 .align 2\n\
99 mova 1f,r0\n\
100 nop\n\
101 mov r15,r1\n\
102 mov #-8,r15\n\
103 0: mov.l @%1,r2\n\
104 cmp/eq r2,%3\n\
105 bf 1f\n\
106 mov.l %2,@%1\n\
107 1: mov r1,r15\n\
108 mov #-1,%0\n\
109 negc %0,%0"\
110 : "=r" (__result) \
111 : "r" (&(futex)), \
112 "r" (LLL_LOCK_INITIALIZER_LOCKED), \
113 "r" (LLL_LOCK_INITIALIZER) \
114 : "r0", "r1", "r2", "t", "memory"); \
115 __result; })
/* Robust-mutex variant of lll_trylock: identical atomic sequence, but
   on success stores ID (the caller's thread id, used by the robust
   futex protocol) instead of the plain LOCKED value.  Evaluates to 0
   on success, 1 if the lock was already held.  */
117 #define lll_robust_trylock(futex, id) \
118 ({ unsigned char __result; \
119 __asm __volatile ("\
120 .align 2\n\
121 mova 1f,r0\n\
122 nop\n\
123 mov r15,r1\n\
124 mov #-8,r15\n\
125 0: mov.l @%1,r2\n\
126 cmp/eq r2,%3\n\
127 bf 1f\n\
128 mov.l %2,@%1\n\
129 1: mov r1,r15\n\
130 mov #-1,%0\n\
131 negc %0,%0"\
132 : "=r" (__result) \
133 : "r" (&(futex)), \
134 "r" (id), \
135 "r" (LLL_LOCK_INITIALIZER) \
136 : "r0", "r1", "r2", "t", "memory"); \
137 __result; })
/* Like lll_trylock, but on success stores LLL_LOCK_INITIALIZER_WAITERS
   (2), so the matching unlock will always issue a futex wakeup.
   Evaluates to 0 on success, 1 if the lock was already held.  */
139 #define lll_cond_trylock(futex) \
140 ({ unsigned char __result; \
141 __asm __volatile ("\
142 .align 2\n\
143 mova 1f,r0\n\
144 nop\n\
145 mov r15,r1\n\
146 mov #-8,r15\n\
147 0: mov.l @%1,r2\n\
148 cmp/eq r2,%3\n\
149 bf 1f\n\
150 mov.l %2,@%1\n\
151 1: mov r1,r15\n\
152 mov #-1,%0\n\
153 negc %0,%0"\
154 : "=r" (__result) \
155 : "r" (&(futex)), \
156 "r" (LLL_LOCK_INITIALIZER_WAITERS), \
157 "r" (LLL_LOCK_INITIALIZER) \
158 : "r0", "r1", "r2", "t", "memory"); \
159 __result; })
/* Blocking acquire.  Fast path: atomically store 1 if the futex word
   is 0 (tst sets T on zero; bf skips the store otherwise); __result
   receives the previously observed value.  On contention
   (__result != 0) fall back to the __lll_lock_wait* slow path, using
   the _private variant when PRIVATE is the compile-time constant
   LLL_PRIVATE.  NOTE(review): the embedded upstream numbering jumps
   176->178 and 182->186, so the original brace/`})' closer lines are
   missing from this extraction — restore them from upstream before
   use.  */
161 #define lll_lock(futex, private) \
162 (void) ({ int __result, *__futex = &(futex); \
163 __asm __volatile ("\
164 .align 2\n\
165 mova 1f,r0\n\
166 nop\n\
167 mov r15,r1\n\
168 mov #-8,r15\n\
169 0: mov.l @%2,%0\n\
170 tst %0,%0\n\
171 bf 1f\n\
172 mov.l %1,@%2\n\
173 1: mov r1,r15"\
174 : "=&r" (__result) : "r" (1), "r" (__futex) \
175 : "r0", "r1", "t", "memory"); \
176 if (__result) \
178 if (__builtin_constant_p (private) \
179 && (private) == LLL_PRIVATE) \
180 __lll_lock_wait_private (__result, __futex); \
181 else \
182 __lll_lock_wait (__result, __futex, (private)); \
/* Robust blocking acquire: store the caller's thread ID on an
   uncontended acquire; otherwise delegate to __lll_robust_lock_wait.
   Evaluates to 0 on success or the slow path's return value.  */
186 #define lll_robust_lock(futex, id, private) \
187 ({ int __result, *__futex = &(futex); \
188 __asm __volatile ("\
189 .align 2\n\
190 mova 1f,r0\n\
191 nop\n\
192 mov r15,r1\n\
193 mov #-8,r15\n\
194 0: mov.l @%2,%0\n\
195 tst %0,%0\n\
196 bf 1f\n\
197 mov.l %1,@%2\n\
198 1: mov r1,r15"\
199 : "=&r" (__result) : "r" (id), "r" (__futex) \
200 : "r0", "r1", "t", "memory"); \
201 if (__result) \
202 __result = __lll_robust_lock_wait (__result, __futex, private); \
203 __result; })
205 /* Special version of lll_mutex_lock which causes the unlock function to
206 always wakeup waiters.  Stores 2 (WAITERS) instead of 1 on the
   uncontended path; contention goes straight to __lll_lock_wait.  */
207 #define lll_cond_lock(futex, private) \
208 (void) ({ int __result, *__futex = &(futex); \
209 __asm __volatile ("\
210 .align 2\n\
211 mova 1f,r0\n\
212 nop\n\
213 mov r15,r1\n\
214 mov #-8,r15\n\
215 0: mov.l @%2,%0\n\
216 tst %0,%0\n\
217 bf 1f\n\
218 mov.l %1,@%2\n\
219 1: mov r1,r15"\
220 : "=&r" (__result) : "r" (2), "r" (__futex) \
221 : "r0", "r1", "t", "memory"); \
222 if (__result) \
223 __lll_lock_wait (__result, __futex, private); })
/* Robust analogue of lll_cond_lock: stores the thread ID with the
   waiters bit pre-set so unlock always wakes.  NOTE(review):
   FUTEX_WAITERS is not defined in this header chunk — presumably it
   comes from another lowlevellock/robust header; confirm.  */
225 #define lll_robust_cond_lock(futex, id, private) \
226 ({ int __result, *__futex = &(futex); \
227 __asm __volatile ("\
228 .align 2\n\
229 mova 1f,r0\n\
230 nop\n\
231 mov r15,r1\n\
232 mov #-8,r15\n\
233 0: mov.l @%2,%0\n\
234 tst %0,%0\n\
235 bf 1f\n\
236 mov.l %1,@%2\n\
237 1: mov r1,r15"\
238 : "=&r" (__result) : "r" (id | FUTEX_WAITERS), "r" (__futex) \
239 : "r0", "r1", "t", "memory"); \
240 if (__result) \
241 __result = __lll_robust_lock_wait (__result, __futex, private); \
242 __result; })
/* Blocking acquire with an absolute deadline: same fast path as
   lll_lock; on contention __lll_timedlock_wait blocks until TIMEOUT
   (a struct timespec *) and its result (0 or an error code such as
   ETIMEDOUT from the slow path) becomes the macro's value.  */
244 #define lll_timedlock(futex, timeout, private) \
245 ({ int __result, *__futex = &(futex); \
246 __asm __volatile ("\
247 .align 2\n\
248 mova 1f,r0\n\
249 nop\n\
250 mov r15,r1\n\
251 mov #-8,r15\n\
252 0: mov.l @%2,%0\n\
253 tst %0,%0\n\
254 bf 1f\n\
255 mov.l %1,@%2\n\
256 1: mov r1,r15"\
257 : "=&r" (__result) : "r" (1), "r" (__futex) \
258 : "r0", "r1", "t", "memory"); \
259 if (__result) \
260 __result = __lll_timedlock_wait (__result, __futex, timeout, private); \
261 __result; })
/* Robust acquire with an absolute deadline: stores the thread ID on
   the uncontended path, otherwise defers to
   __lll_robust_timedlock_wait; evaluates to 0 or the slow path's
   return value.  */
263 #define lll_robust_timedlock(futex, timeout, id, private) \
264 ({ int __result, *__futex = &(futex); \
265 __asm __volatile ("\
266 .align 2\n\
267 mova 1f,r0\n\
268 nop\n\
269 mov r15,r1\n\
270 mov #-8,r15\n\
271 0: mov.l @%2,%0\n\
272 tst %0,%0\n\
273 bf 1f\n\
274 mov.l %1,@%2\n\
275 1: mov r1,r15"\
276 : "=&r" (__result) : "r" (id), "r" (__futex) \
277 : "r0", "r1", "t", "memory"); \
278 if (__result) \
279 __result = __lll_robust_timedlock_wait (__result, __futex, \
280 timeout, private); \
281 __result; })
/* Release: atomically decrement the futex word (6-byte restartable
   sequence, hence `mov #-6,r15' and no nop).  __result is the
   decremented value; nonzero means the word was > 1, i.e. waiters may
   exist, so call __lll_unlock_wake* (the _private variant when
   PRIVATE is the compile-time constant LLL_PRIVATE).  NOTE(review):
   the embedded upstream numbering jumps 296->298 and 302->306, so the
   original brace/`})' closer lines are missing from this extraction —
   restore them from upstream before use.  */
283 #define lll_unlock(futex, private) \
284 (void) ({ int __result, *__futex = &(futex); \
285 __asm __volatile ("\
286 .align 2\n\
287 mova 1f,r0\n\
288 mov r15,r1\n\
289 mov #-6,r15\n\
290 0: mov.l @%1,%0\n\
291 add #-1,%0\n\
292 mov.l %0,@%1\n\
293 1: mov r1,r15"\
294 : "=&r" (__result) : "r" (__futex) \
295 : "r0", "r1", "memory"); \
296 if (__result) \
298 if (__builtin_constant_p (private) \
299 && (private) == LLL_PRIVATE) \
300 __lll_unlock_wake_private (__futex); \
301 else \
302 __lll_unlock_wake (__futex, (private)); \
/* Robust release: atomically mask the futex word with FUTEX_WAITERS
   (clearing the owner TID and any other bits, keeping only the
   waiters flag).  __result is the masked value; if the waiters bit
   was set, wake them via __lll_unlock_wake.  */
306 #define lll_robust_unlock(futex, private) \
307 (void) ({ int __result, *__futex = &(futex); \
308 __asm __volatile ("\
309 .align 2\n\
310 mova 1f,r0\n\
311 mov r15,r1\n\
312 mov #-6,r15\n\
313 0: mov.l @%1,%0\n\
314 and %2,%0\n\
315 mov.l %0,@%1\n\
316 1: mov r1,r15"\
317 : "=&r" (__result) : "r" (__futex), "r" (FUTEX_WAITERS) \
318 : "r0", "r1", "memory"); \
319 if (__result) \
320 __lll_unlock_wake (__futex, private); })
/* Mark a robust mutex whose owner died: atomically OR the
   FUTEX_OWNER_DIED bit into the futex word, then wake one waiter so
   it can observe the flag.  NOTE(review): FUTEX_OWNER_DIED is not
   defined in this header chunk — presumably provided elsewhere;
   confirm.  */
322 #define lll_robust_dead(futex, private) \
323 (void) ({ int __ignore, *__futex = &(futex); \
324 __asm __volatile ("\
325 .align 2\n\
326 mova 1f,r0\n\
327 mov r15,r1\n\
328 mov #-6,r15\n\
329 0: mov.l @%1,%0\n\
330 or %2,%0\n\
331 mov.l %0,@%1\n\
332 1: mov r1,r15"\
333 : "=&r" (__ignore) : "r" (__futex), "r" (FUTEX_OWNER_DIED) \
334 : "r0", "r1", "memory"); \
335 lll_futex_wake (__futex, 1, private); })
/* System-call trap for the futex syscalls below.  The padded variant
   follows `trapa' with five `or r0,r0' no-ops; NOTE(review): this is
   presumably a workaround for a CPU erratum on certain SH parts
   (hence NEED_SYSCALL_INST_PAD) — confirm against the SH sysdep
   notes.  */
337 # ifdef NEED_SYSCALL_INST_PAD
338 # define SYSCALL_WITH_INST_PAD "\
339 trapa #0x14; or r0,r0; or r0,r0; or r0,r0; or r0,r0; or r0,r0"
340 # else
341 # define SYSCALL_WITH_INST_PAD "\
342 trapa #0x14"
343 # endif
/* futex(FUTEX_WAIT): block while *FUTEX == VAL, without (wait) or
   with (timed_wait) a timeout.  Arguments are marshalled into the
   SH syscall registers r3 (syscall nr) and r4..r7; the kernel's
   return value comes back in r0 ("=z").  The operation code gets the
   private-futex flag applied via __lll_private_flag.  NOTE(review):
   the embedded upstream numbering jumps 363->367, so the closing `})'
   line of lll_futex_timed_wait is missing from this extraction —
   restore it from upstream before use.  */
345 #define lll_futex_wait(futex, val, private) \
346 lll_futex_timed_wait (futex, val, NULL, private)
349 #define lll_futex_timed_wait(futex, val, timeout, private) \
350 ({ \
351 int __status; \
352 register unsigned long __r3 asm ("r3") = SYS_futex; \
353 register unsigned long __r4 asm ("r4") = (unsigned long) (futex); \
354 register unsigned long __r5 asm ("r5") \
355 = __lll_private_flag (FUTEX_WAIT, private); \
356 register unsigned long __r6 asm ("r6") = (unsigned long) (val); \
357 register unsigned long __r7 asm ("r7") = (timeout); \
358 __asm __volatile (SYSCALL_WITH_INST_PAD \
359 : "=z" (__status) \
360 : "r" (__r3), "r" (__r4), "r" (__r5), \
361 "r" (__r6), "r" (__r7) \
362 : "memory", "t"); \
363 __status; \
/* futex(FUTEX_WAKE): wake up to NR threads waiting on FUTEX.  Same
   register-marshalling scheme as lll_futex_timed_wait; the syscall's
   return value is deliberately ignored.  */
367 #define lll_futex_wake(futex, nr, private) \
368 do { \
369 int __ignore; \
370 register unsigned long __r3 asm ("r3") = SYS_futex; \
371 register unsigned long __r4 asm ("r4") = (unsigned long) (futex); \
372 register unsigned long __r5 asm ("r5") \
373 = __lll_private_flag (FUTEX_WAKE, private); \
374 register unsigned long __r6 asm ("r6") = (unsigned long) (nr); \
375 register unsigned long __r7 asm ("r7") = 0; \
376 __asm __volatile (SYSCALL_WITH_INST_PAD \
377 : "=z" (__ignore) \
378 : "r" (__r3), "r" (__r4), "r" (__r5), \
379 "r" (__r6), "r" (__r7) \
380 : "memory", "t"); \
381 } while (0)
/* Nonzero iff the lock is held, i.e. the futex word is not in the
   released state (LLL_LOCK_INITIALIZER).  The FUTEX argument is
   parenthesized so that expressions such as `*p' or casts expand
   safely inside the comparison (CERT PRE01-C).  */
384 #define lll_islocked(futex) \
385 ((futex) != LLL_LOCK_INITIALIZER)
387 /* The kernel notifies a process which uses CLONE_CLEARTID via futex
388 wakeup when the clone terminates. The memory location contains the
389 thread ID while the clone is running and is reset to zero
390 afterwards. */
/* Busy/futex loop: re-check TID and FUTEX_WAIT on it until the kernel
   clears it to zero.  Always LLL_SHARED — the TID word may be watched
   across processes.  */
392 #define lll_wait_tid(tid) \
393 do { \
394 __typeof (tid) *__tid = &(tid); \
395 while (*__tid != 0) \
396 lll_futex_wait (__tid, *__tid, LLL_SHARED); \
397 } while (0)
/* Timed variant of lll_wait_tid: returns 0 once TID is cleared,
   EINVAL for an out-of-range tv_nsec, or the slow path's result
   (e.g. a timeout indication) otherwise.  NOTE(review): the embedded
   upstream numbering jumps 404->406 and 409->411, so the brace lines
   around the if/else are missing from this extraction — restore them
   from upstream before use.  */
399 extern int __lll_timedwait_tid (int *tid, const struct timespec *abstime)
400 attribute_hidden;
401 #define lll_timedwait_tid(tid, abstime) \
402 ({ \
403 int __result = 0; \
404 if (tid != 0) \
406 if (abstime->tv_nsec < 0 || abstime->tv_nsec >= 1000000000) \
407 __result = EINVAL; \
408 else \
409 __result = __lll_timedwait_tid (&tid, abstime); \
411 __result; })
413 #endif /* !__ASSEMBLER__ */
415 #endif /* lowlevellock.h */