glibc.git: nptl/sysdeps/unix/sysv/linux/sh/lowlevellock.h
/* Copyright (C) 2003, 2004, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */
#ifndef _LOWLEVELLOCK_H
#define _LOWLEVELLOCK_H	1

#ifndef __ASSEMBLER__
#include <time.h>
#include <sys/param.h>
#include <bits/pthreadtypes.h>
#include <kernel-features.h>
#endif
#define SYS_futex		240
#define FUTEX_WAIT		0
#define FUTEX_WAKE		1
#define FUTEX_CMP_REQUEUE	4
#define FUTEX_WAKE_OP		5
#define FUTEX_LOCK_PI		6
#define FUTEX_UNLOCK_PI		7
#define FUTEX_TRYLOCK_PI	8
#define FUTEX_WAIT_BITSET	9
#define FUTEX_WAKE_BITSET	10
#define FUTEX_PRIVATE_FLAG	128
#define FUTEX_CLOCK_REALTIME	256

#define FUTEX_BITSET_MATCH_ANY	0xffffffff

#define FUTEX_OP_CLEAR_WAKE_IF_GT_ONE	((4 << 24) | 1)
/* Values for 'private' parameter of locking macros.  Yes, the
   definition seems to be backwards.  But it is not.  The bit will be
   reversed before passing to the system call.  */
#define LLL_PRIVATE	0
#define LLL_SHARED	FUTEX_PRIVATE_FLAG
#if !defined NOT_IN_libc || defined IS_IN_rtld
/* In libc.so or ld.so all futexes are private.  */
# ifdef __ASSUME_PRIVATE_FUTEX
#  define __lll_private_flag(fl, private) \
  ((fl) | FUTEX_PRIVATE_FLAG)
# else
#  define __lll_private_flag(fl, private) \
  ((fl) | THREAD_GETMEM (THREAD_SELF, header.private_futex))
# endif
#else
# ifdef __ASSUME_PRIVATE_FUTEX
#  define __lll_private_flag(fl, private) \
  (((fl) | FUTEX_PRIVATE_FLAG) ^ (private))
# else
#  define __lll_private_flag(fl, private) \
  (__builtin_constant_p (private) \
   ? ((private) == 0 \
      ? ((fl) | THREAD_GETMEM (THREAD_SELF, header.private_futex)) \
      : (fl)) \
   : ((fl) | (((private) ^ FUTEX_PRIVATE_FLAG) \
	      & THREAD_GETMEM (THREAD_SELF, header.private_futex))))
# endif
#endif
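
/* For example, with __ASSUME_PRIVATE_FUTEX defined and outside of
   libc.so/ld.so:
     __lll_private_flag (FUTEX_WAIT, LLL_PRIVATE) == FUTEX_WAIT | FUTEX_PRIVATE_FLAG
     __lll_private_flag (FUTEX_WAIT, LLL_SHARED)  == FUTEX_WAIT
   i.e. LLL_SHARED (== FUTEX_PRIVATE_FLAG) cancels the bit again via the XOR,
   which is why the LLL_* definitions above look backwards.  */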
#ifndef __ASSEMBLER__

/* Initializer for compatibility lock.  */
#define LLL_LOCK_INITIALIZER		(0)
#define LLL_LOCK_INITIALIZER_LOCKED	(1)
#define LLL_LOCK_INITIALIZER_WAITERS	(2)

extern int __lll_lock_wait_private (int val, int *__futex)
  attribute_hidden;
extern int __lll_lock_wait (int val, int *__futex, int private)
  attribute_hidden;
extern int __lll_timedlock_wait (int val, int *__futex,
				 const struct timespec *abstime, int private)
  attribute_hidden;
extern int __lll_robust_lock_wait (int val, int *__futex, int private)
  attribute_hidden;
extern int __lll_robust_timedlock_wait (int val, int *__futex,
					const struct timespec *abstime,
					int private)
  attribute_hidden;
extern int __lll_unlock_wake_private (int *__futex) attribute_hidden;
extern int __lll_unlock_wake (int *__futex, int private) attribute_hidden;
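
/* The asm blocks below use the SH "gUSA" restartable atomic sequence:
   r1 saves the stack pointer, r15 is loaded with the negated length of the
   critical region (-8 or -6 bytes here) and r0 (via "mova 1f,r0") points
   just past it, so the kernel can detect and restart the sequence if it is
   interrupted.  The trylock macros return 0 when the lock was acquired and
   nonzero when it was already held: "cmp/eq" sets the T flag on success and
   the "mov #-1 / negc" pair converts T into the 0/1 result.  */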
#define lll_trylock(futex) \
  ({ unsigned char __result; \
     __asm __volatile ("\
	.align 2\n\
	mova 1f,r0\n\
	nop\n\
	mov r15,r1\n\
	mov #-8,r15\n\
     0: mov.l @%1,r2\n\
	cmp/eq r2,%3\n\
	bf 1f\n\
	mov.l %2,@%1\n\
     1: mov r1,r15\n\
	mov #-1,%0\n\
	negc %0,%0"\
	: "=r" (__result) \
	: "r" (&(futex)), \
	  "r" (LLL_LOCK_INITIALIZER_LOCKED), \
	  "r" (LLL_LOCK_INITIALIZER) \
	: "r0", "r1", "r2", "t", "memory"); \
     __result; })
#define lll_robust_trylock(futex, id) \
  ({ unsigned char __result; \
     __asm __volatile ("\
	.align 2\n\
	mova 1f,r0\n\
	nop\n\
	mov r15,r1\n\
	mov #-8,r15\n\
     0: mov.l @%1,r2\n\
	cmp/eq r2,%3\n\
	bf 1f\n\
	mov.l %2,@%1\n\
     1: mov r1,r15\n\
	mov #-1,%0\n\
	negc %0,%0"\
	: "=r" (__result) \
	: "r" (&(futex)), \
	  "r" (id), \
	  "r" (LLL_LOCK_INITIALIZER) \
	: "r0", "r1", "r2", "t", "memory"); \
     __result; })
#define lll_cond_trylock(futex) \
  ({ unsigned char __result; \
     __asm __volatile ("\
	.align 2\n\
	mova 1f,r0\n\
	nop\n\
	mov r15,r1\n\
	mov #-8,r15\n\
     0: mov.l @%1,r2\n\
	cmp/eq r2,%3\n\
	bf 1f\n\
	mov.l %2,@%1\n\
     1: mov r1,r15\n\
	mov #-1,%0\n\
	negc %0,%0"\
	: "=r" (__result) \
	: "r" (&(futex)), \
	  "r" (LLL_LOCK_INITIALIZER_WAITERS), \
	  "r" (LLL_LOCK_INITIALIZER) \
	: "r0", "r1", "r2", "t", "memory"); \
     __result; })
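
/* A lock word is 0 (unlocked), 1 (locked, no waiters) or 2 (locked, possibly
   with waiters), matching the LLL_LOCK_INITIALIZER_* values above.  lll_lock
   stores 1 only if the word was 0; otherwise it falls back to the
   out-of-line __lll_lock_wait* helpers, which mark the lock contended and
   block on the futex until it can be taken.  */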
#define lll_lock(futex, private) \
  (void) ({ int __result, *__futex = &(futex); \
	    __asm __volatile ("\
	       .align 2\n\
	       mova 1f,r0\n\
	       nop\n\
	       mov r15,r1\n\
	       mov #-8,r15\n\
	     0: mov.l @%2,%0\n\
	       tst %0,%0\n\
	       bf 1f\n\
	       mov.l %1,@%2\n\
	     1: mov r1,r15"\
		: "=&r" (__result) : "r" (1), "r" (__futex) \
		: "r0", "r1", "t", "memory"); \
	    if (__result) \
	      { \
		if (__builtin_constant_p (private) \
		    && (private) == LLL_PRIVATE) \
		  __lll_lock_wait_private (__result, __futex); \
		else \
		  __lll_lock_wait (__result, __futex, (private)); \
	      } \
	    })
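
/* The robust lock variants store the owner's thread ID (id) in the futex
   word instead of 1, following the kernel's robust-futex protocol: the
   FUTEX_WAITERS bit marks contention and FUTEX_OWNER_DIED marks a lock
   whose owner died without unlocking (see lll_robust_dead below).  */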
#define lll_robust_lock(futex, id, private) \
  ({ int __result, *__futex = &(futex); \
     __asm __volatile ("\
	.align 2\n\
	mova 1f,r0\n\
	nop\n\
	mov r15,r1\n\
	mov #-8,r15\n\
     0: mov.l @%2,%0\n\
	tst %0,%0\n\
	bf 1f\n\
	mov.l %1,@%2\n\
     1: mov r1,r15"\
	: "=&r" (__result) : "r" (id), "r" (__futex) \
	: "r0", "r1", "t", "memory"); \
     if (__result) \
       __result = __lll_robust_lock_wait (__result, __futex, private); \
     __result; })
/* Special version of lll_lock which causes the unlock function to
   always wakeup waiters.  */
#define lll_cond_lock(futex, private) \
  (void) ({ int __result, *__futex = &(futex); \
	    __asm __volatile ("\
	       .align 2\n\
	       mova 1f,r0\n\
	       nop\n\
	       mov r15,r1\n\
	       mov #-8,r15\n\
	     0: mov.l @%2,%0\n\
	       tst %0,%0\n\
	       bf 1f\n\
	       mov.l %1,@%2\n\
	     1: mov r1,r15"\
		: "=&r" (__result) : "r" (2), "r" (__futex) \
		: "r0", "r1", "t", "memory"); \
	    if (__result) \
	      __lll_lock_wait (__result, __futex, private); })
#define lll_robust_cond_lock(futex, id, private) \
  ({ int __result, *__futex = &(futex); \
     __asm __volatile ("\
	.align 2\n\
	mova 1f,r0\n\
	nop\n\
	mov r15,r1\n\
	mov #-8,r15\n\
     0: mov.l @%2,%0\n\
	tst %0,%0\n\
	bf 1f\n\
	mov.l %1,@%2\n\
     1: mov r1,r15"\
	: "=&r" (__result) : "r" (id | FUTEX_WAITERS), "r" (__futex) \
	: "r0", "r1", "t", "memory"); \
     if (__result) \
       __result = __lll_robust_lock_wait (__result, __futex, private); \
     __result; })
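
/* The timed variants take an absolute timeout and return 0 on success or an
   error code (e.g. ETIMEDOUT) from the out-of-line __lll_timedlock_wait /
   __lll_robust_timedlock_wait helpers.  */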
#define lll_timedlock(futex, timeout, private) \
  ({ int __result, *__futex = &(futex); \
     __asm __volatile ("\
	.align 2\n\
	mova 1f,r0\n\
	nop\n\
	mov r15,r1\n\
	mov #-8,r15\n\
     0: mov.l @%2,%0\n\
	tst %0,%0\n\
	bf 1f\n\
	mov.l %1,@%2\n\
     1: mov r1,r15"\
	: "=&r" (__result) : "r" (1), "r" (__futex) \
	: "r0", "r1", "t", "memory"); \
     if (__result) \
       __result = __lll_timedlock_wait (__result, __futex, timeout, private); \
     __result; })
#define lll_robust_timedlock(futex, timeout, id, private) \
  ({ int __result, *__futex = &(futex); \
     __asm __volatile ("\
	.align 2\n\
	mova 1f,r0\n\
	nop\n\
	mov r15,r1\n\
	mov #-8,r15\n\
     0: mov.l @%2,%0\n\
	tst %0,%0\n\
	bf 1f\n\
	mov.l %1,@%2\n\
     1: mov r1,r15"\
	: "=&r" (__result) : "r" (id), "r" (__futex) \
	: "r0", "r1", "t", "memory"); \
     if (__result) \
       __result = __lll_robust_timedlock_wait (__result, __futex, \
						timeout, private); \
     __result; })
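
/* Unlocking decrements the futex word in a gUSA sequence; a nonzero result
   means the old value was 2 (waiters may exist), so a waiter is woken via
   __lll_unlock_wake / __lll_unlock_wake_private.  */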
#define lll_unlock(futex, private) \
  (void) ({ int __result, *__futex = &(futex); \
	    __asm __volatile ("\
	       .align 2\n\
	       mova 1f,r0\n\
	       mov r15,r1\n\
	       mov #-6,r15\n\
	     0: mov.l @%1,%0\n\
	       add #-1,%0\n\
	       mov.l %0,@%1\n\
	     1: mov r1,r15"\
		: "=&r" (__result) : "r" (__futex) \
		: "r0", "r1", "memory"); \
	    if (__result) \
	      { \
		if (__builtin_constant_p (private) \
		    && (private) == LLL_PRIVATE) \
		  __lll_unlock_wake_private (__futex); \
		else \
		  __lll_unlock_wake (__futex, (private)); \
	      } \
	    })
#define lll_robust_unlock(futex, private) \
  (void) ({ int __result, *__futex = &(futex); \
	    __asm __volatile ("\
	       .align 2\n\
	       mova 1f,r0\n\
	       mov r15,r1\n\
	       mov #-6,r15\n\
	     0: mov.l @%1,%0\n\
	       and %2,%0\n\
	       mov.l %0,@%1\n\
	     1: mov r1,r15"\
		: "=&r" (__result) : "r" (__futex), "r" (FUTEX_WAITERS) \
		: "r0", "r1", "memory"); \
	    if (__result) \
	      __lll_unlock_wake (__futex, private); })
#define lll_robust_dead(futex, private) \
  (void) ({ int __ignore, *__futex = &(futex); \
	    __asm __volatile ("\
	       .align 2\n\
	       mova 1f,r0\n\
	       mov r15,r1\n\
	       mov #-6,r15\n\
	     0: mov.l @%1,%0\n\
	       or %2,%0\n\
	       mov.l %0,@%1\n\
	     1: mov r1,r15"\
		: "=&r" (__ignore) : "r" (__futex), "r" (FUTEX_OWNER_DIED) \
		: "r0", "r1", "memory"); \
	    lll_futex_wake (__futex, 1, private); })
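
/* System calls are issued with "trapa #0x14"; configurations that define
   NEED_SYSCALL_INST_PAD additionally pad the trap with "or r0,r0" no-ops.  */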
# ifdef NEED_SYSCALL_INST_PAD
#  define SYSCALL_WITH_INST_PAD "\
	trapa #0x14; or r0,r0; or r0,r0; or r0,r0; or r0,r0; or r0,r0"
# else
#  define SYSCALL_WITH_INST_PAD "\
	trapa #0x14"
# endif
#define lll_futex_wait(futex, val, private) \
  lll_futex_timed_wait (futex, val, NULL, private)

#define lll_futex_timed_wait(futex, val, timeout, private) \
  ({ \
    int __status; \
    register unsigned long __r3 asm ("r3") = SYS_futex; \
    register unsigned long __r4 asm ("r4") = (unsigned long) (futex); \
    register unsigned long __r5 asm ("r5") \
      = __lll_private_flag (FUTEX_WAIT, private); \
    register unsigned long __r6 asm ("r6") = (unsigned long) (val); \
    register unsigned long __r7 asm ("r7") = (unsigned long) (timeout); \
    __asm __volatile (SYSCALL_WITH_INST_PAD \
		      : "=z" (__status) \
		      : "r" (__r3), "r" (__r4), "r" (__r5), \
			"r" (__r6), "r" (__r7) \
		      : "memory", "t"); \
    __status; \
  })
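
/* Linux/SH syscall convention: the call number goes in r3, the arguments in
   r4-r7, and the return value comes back in r0 (gcc constraint "z").  */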
#define lll_futex_wake(futex, nr, private) \
  do { \
    int __ignore; \
    register unsigned long __r3 asm ("r3") = SYS_futex; \
    register unsigned long __r4 asm ("r4") = (unsigned long) (futex); \
    register unsigned long __r5 asm ("r5") \
      = __lll_private_flag (FUTEX_WAKE, private); \
    register unsigned long __r6 asm ("r6") = (unsigned long) (nr); \
    register unsigned long __r7 asm ("r7") = 0; \
    __asm __volatile (SYSCALL_WITH_INST_PAD \
		      : "=z" (__ignore) \
		      : "r" (__r3), "r" (__r4), "r" (__r5), \
			"r" (__r6), "r" (__r7) \
		      : "memory", "t"); \
  } while (0)
#define lll_islocked(futex) \
  (futex != LLL_LOCK_INITIALIZER)
/* The kernel notifies a process which uses CLONE_CLEARTID via futex
   wakeup when the clone terminates.  The memory location contains the
   thread ID while the clone is running and is reset to zero
   afterwards.  */
#define lll_wait_tid(tid) \
  do { \
    __typeof (tid) *__tid = &(tid); \
    while (*__tid != 0) \
      lll_futex_wait (__tid, *__tid, LLL_SHARED); \
  } while (0)
extern int __lll_timedwait_tid (int *tid, const struct timespec *abstime)
  attribute_hidden;

#define lll_timedwait_tid(tid, abstime) \
  ({ \
    int __result = 0; \
    if (tid != 0) \
      { \
	if (abstime->tv_nsec < 0 || abstime->tv_nsec >= 1000000000) \
	  __result = EINVAL; \
	else \
	  __result = __lll_timedwait_tid (&tid, abstime); \
      } \
    __result; })
#endif	/* !__ASSEMBLER__ */

#endif	/* lowlevellock.h */