NPTL: SH: silence warning
[uclibc-ng.git] / libpthread/nptl/sysdeps/unix/sysv/linux/sh/lowlevellock.h
/* Copyright (C) 2003, 2004, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */
#ifndef _LOWLEVELLOCK_H
#define _LOWLEVELLOCK_H	1

#ifndef __ASSEMBLER__
#include <time.h>
#include <sys/param.h>
#include <bits/pthreadtypes.h>
#include <bits/kernel-features.h>
#endif

#define FUTEX_WAIT		0
#define FUTEX_WAKE		1
#define FUTEX_CMP_REQUEUE	4
#define FUTEX_WAKE_OP		5
#define FUTEX_LOCK_PI		6
#define FUTEX_UNLOCK_PI		7
#define FUTEX_TRYLOCK_PI	8
#define FUTEX_WAIT_BITSET	9
#define FUTEX_WAKE_BITSET	10
#define FUTEX_PRIVATE_FLAG	128
#define FUTEX_CLOCK_REALTIME	256

#define FUTEX_BITSET_MATCH_ANY	0xffffffff

#define FUTEX_OP_CLEAR_WAKE_IF_GT_ONE	((4 << 24) | 1)

/* Values for 'private' parameter of locking macros.  Yes, the
   definition seems to be backwards.  But it is not.  The bit will be
   reversed before passing to the system call.  */
#define LLL_PRIVATE	0
#define LLL_SHARED	FUTEX_PRIVATE_FLAG
#if !defined NOT_IN_libc || defined IS_IN_rtld
/* In libc.so or ld.so all futexes are private.  */
# ifdef __ASSUME_PRIVATE_FUTEX
#  define __lll_private_flag(fl, private) \
  ((fl) | FUTEX_PRIVATE_FLAG)
# else
#  define __lll_private_flag(fl, private) \
  ((fl) | THREAD_GETMEM (THREAD_SELF, header.private_futex))
# endif
#else
# ifdef __ASSUME_PRIVATE_FUTEX
#  define __lll_private_flag(fl, private) \
  (((fl) | FUTEX_PRIVATE_FLAG) ^ (private))
# else
#  define __lll_private_flag(fl, private) \
  (__builtin_constant_p (private) \
   ? ((private) == 0 \
      ? ((fl) | THREAD_GETMEM (THREAD_SELF, header.private_futex)) \
      : (fl)) \
   : ((fl) | (((private) ^ FUTEX_PRIVATE_FLAG) \
              & THREAD_GETMEM (THREAD_SELF, header.private_futex))))
# endif
#endif
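
/* For example, outside libc/ld.so with __ASSUME_PRIVATE_FUTEX defined,
   __lll_private_flag (FUTEX_WAIT, LLL_SHARED) evaluates to
   (FUTEX_WAIT | FUTEX_PRIVATE_FLAG) ^ FUTEX_PRIVATE_FLAG, i.e. plain
   FUTEX_WAIT, while __lll_private_flag (FUTEX_WAIT, LLL_PRIVATE) yields
   FUTEX_WAIT | FUTEX_PRIVATE_FLAG.  This is the bit reversal the comment
   above LLL_PRIVATE refers to.  */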
#ifndef __ASSEMBLER__

/* Initializer for compatibility lock.  */
#define LLL_LOCK_INITIALIZER		(0)
#define LLL_LOCK_INITIALIZER_LOCKED	(1)
#define LLL_LOCK_INITIALIZER_WAITERS	(2)

extern int __lll_lock_wait_private (int val, int *__futex)
     attribute_hidden;
extern int __lll_lock_wait (int val, int *__futex, int private)
     attribute_hidden;
extern int __lll_timedlock_wait (int val, int *__futex,
                                 const struct timespec *abstime, int private)
     attribute_hidden;
extern int __lll_robust_lock_wait (int val, int *__futex, int private)
     attribute_hidden;
extern int __lll_robust_timedlock_wait (int val, int *__futex,
                                        const struct timespec *abstime,
                                        int private)
     attribute_hidden;
extern int __lll_unlock_wake_private (int *__futex) attribute_hidden;
extern int __lll_unlock_wake (int *__futex, int private) attribute_hidden;
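
/* The inline assembly below uses the SH "gUSA" (g User Space Atomicity)
   convention for restartable atomic sequences, since classic SH cores lack
   atomic read-modify-write instructions.  Roughly: "mova 1f,r0" loads the
   address of the end label into r0, r1 saves the stack pointer, and r15 is
   set to the negated byte length of the critical sequence (-8 or -6 here).
   If the kernel preempts the task while r15 holds such a small negative
   value, it restarts execution at the start of the sequence (r0 + r15), so
   the load/modify/store appears atomic.  "mov r1,r15" at the end label
   restores the stack pointer and leaves the atomic region.  */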
#define lll_trylock(futex) \
  ({ unsigned char __ret; \
     __asm__ __volatile__ ("\
	.align 2\n\
	mova 1f,r0\n\
	nop\n\
	mov r15,r1\n\
	mov #-8,r15\n\
     0: mov.l @%1,r2\n\
	cmp/eq r2,%3\n\
	bf 1f\n\
	mov.l %2,@%1\n\
     1: mov r1,r15\n\
	mov #-1,%0\n\
	negc %0,%0"\
	: "=r" (__ret) \
	: "r" (&(futex)), \
	  "r" (LLL_LOCK_INITIALIZER_LOCKED), \
	  "r" (LLL_LOCK_INITIALIZER) \
	: "r0", "r1", "r2", "t", "memory"); \
     __ret; })
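
/* lll_trylock yields 0 if the futex was LLL_LOCK_INITIALIZER and has just
   been set to LLL_LOCK_INITIALIZER_LOCKED, and 1 otherwise: "cmp/eq" sets
   the T flag on a match and "mov #-1,%0; negc %0,%0" computes 1 - T.  The
   robust and condvar variants below use the same encoding, only with a
   different value stored into the futex.  */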
#define lll_robust_trylock(futex, id) \
  ({ unsigned char __ret; \
     __asm__ __volatile__ ("\
	.align 2\n\
	mova 1f,r0\n\
	nop\n\
	mov r15,r1\n\
	mov #-8,r15\n\
     0: mov.l @%1,r2\n\
	cmp/eq r2,%3\n\
	bf 1f\n\
	mov.l %2,@%1\n\
     1: mov r1,r15\n\
	mov #-1,%0\n\
	negc %0,%0"\
	: "=r" (__ret) \
	: "r" (&(futex)), \
	  "r" (id), \
	  "r" (LLL_LOCK_INITIALIZER) \
	: "r0", "r1", "r2", "t", "memory"); \
     __ret; })
#define lll_cond_trylock(futex) \
  ({ unsigned char __ret; \
     __asm__ __volatile__ ("\
	.align 2\n\
	mova 1f,r0\n\
	nop\n\
	mov r15,r1\n\
	mov #-8,r15\n\
     0: mov.l @%1,r2\n\
	cmp/eq r2,%3\n\
	bf 1f\n\
	mov.l %2,@%1\n\
     1: mov r1,r15\n\
	mov #-1,%0\n\
	negc %0,%0"\
	: "=r" (__ret) \
	: "r" (&(futex)), \
	  "r" (LLL_LOCK_INITIALIZER_WAITERS), \
	  "r" (LLL_LOCK_INITIALIZER) \
	: "r0", "r1", "r2", "t", "memory"); \
     __ret; })
#define lll_lock(futex, private) \
  (void) ({ int __ret, *__futex = &(futex); \
	    __asm__ __volatile__ ("\
	.align 2\n\
	mova 1f,r0\n\
	nop\n\
	mov r15,r1\n\
	mov #-8,r15\n\
     0: mov.l @%2,%0\n\
	tst %0,%0\n\
	bf 1f\n\
	mov.l %1,@%2\n\
     1: mov r1,r15"\
	: "=&r" (__ret) : "r" (1), "r" (__futex) \
	: "r0", "r1", "t", "memory"); \
	    if (__ret) \
	      { \
		if (__builtin_constant_p (private) \
		    && (private) == LLL_PRIVATE) \
		  __lll_lock_wait_private (__ret, __futex); \
		else \
		  __lll_lock_wait (__ret, __futex, (private)); \
	      } \
	    })
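
/* lll_lock tries to move the futex from 0 to 1 inside the gUSA region and
   leaves the old value in __ret.  If the futex was already taken, the
   out-of-line slow path (__lll_lock_wait / __lll_lock_wait_private) marks
   the futex as contended and blocks in FUTEX_WAIT until the lock can be
   acquired.  */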
#define lll_robust_lock(futex, id, private) \
  ({ int __ret, *__futex = &(futex); \
     __asm__ __volatile__ ("\
	.align 2\n\
	mova 1f,r0\n\
	nop\n\
	mov r15,r1\n\
	mov #-8,r15\n\
     0: mov.l @%2,%0\n\
	tst %0,%0\n\
	bf 1f\n\
	mov.l %1,@%2\n\
     1: mov r1,r15"\
	: "=&r" (__ret) : "r" (id), "r" (__futex) \
	: "r0", "r1", "t", "memory"); \
     if (__ret) \
       __ret = __lll_robust_lock_wait (__ret, __futex, private); \
     __ret; })
/* Special version of lll_mutex_lock which causes the unlock function to
   always wake up waiters.  */
#define lll_cond_lock(futex, private) \
  (void) ({ int __ret, *__futex = &(futex); \
	    __asm__ __volatile__ ("\
	.align 2\n\
	mova 1f,r0\n\
	nop\n\
	mov r15,r1\n\
	mov #-8,r15\n\
     0: mov.l @%2,%0\n\
	tst %0,%0\n\
	bf 1f\n\
	mov.l %1,@%2\n\
     1: mov r1,r15"\
	: "=&r" (__ret) : "r" (2), "r" (__futex) \
	: "r0", "r1", "t", "memory"); \
	    if (__ret) \
	      __lll_lock_wait (__ret, __futex, private); })
#define lll_robust_cond_lock(futex, id, private) \
  ({ int __ret, *__futex = &(futex); \
     __asm__ __volatile__ ("\
	.align 2\n\
	mova 1f,r0\n\
	nop\n\
	mov r15,r1\n\
	mov #-8,r15\n\
     0: mov.l @%2,%0\n\
	tst %0,%0\n\
	bf 1f\n\
	mov.l %1,@%2\n\
     1: mov r1,r15"\
	: "=&r" (__ret) : "r" (id | FUTEX_WAITERS), "r" (__futex) \
	: "r0", "r1", "t", "memory"); \
     if (__ret) \
       __ret = __lll_robust_lock_wait (__ret, __futex, private); \
     __ret; })
#define lll_timedlock(futex, timeout, private) \
  ({ int __ret, *__futex = &(futex); \
     __asm__ __volatile__ ("\
	.align 2\n\
	mova 1f,r0\n\
	nop\n\
	mov r15,r1\n\
	mov #-8,r15\n\
     0: mov.l @%2,%0\n\
	tst %0,%0\n\
	bf 1f\n\
	mov.l %1,@%2\n\
     1: mov r1,r15"\
	: "=&r" (__ret) : "r" (1), "r" (__futex) \
	: "r0", "r1", "t", "memory"); \
     if (__ret) \
       __ret = __lll_timedlock_wait (__ret, __futex, timeout, private); \
     __ret; })
#define lll_robust_timedlock(futex, timeout, id, private) \
  ({ int __ret, *__futex = &(futex); \
     __asm__ __volatile__ ("\
	.align 2\n\
	mova 1f,r0\n\
	nop\n\
	mov r15,r1\n\
	mov #-8,r15\n\
     0: mov.l @%2,%0\n\
	tst %0,%0\n\
	bf 1f\n\
	mov.l %1,@%2\n\
     1: mov r1,r15"\
	: "=&r" (__ret) : "r" (id), "r" (__futex) \
	: "r0", "r1", "t", "memory"); \
     if (__ret) \
       __ret = __lll_robust_timedlock_wait (__ret, __futex, \
                                            timeout, private); \
     __ret; })
#define lll_unlock(futex, private) \
  (void) ({ int __ret, *__futex = &(futex); \
	    __asm__ __volatile__ ("\
	.align 2\n\
	mova 1f,r0\n\
	mov r15,r1\n\
	mov #-6,r15\n\
     0: mov.l @%1,%0\n\
	add #-1,%0\n\
	mov.l %0,@%1\n\
     1: mov r1,r15"\
	: "=&r" (__ret) : "r" (__futex) \
	: "r0", "r1", "memory"); \
	    if (__ret) \
	      { \
		if (__builtin_constant_p (private) \
		    && (private) == LLL_PRIVATE) \
		  __lll_unlock_wake_private (__futex); \
		else \
		  __lll_unlock_wake (__futex, (private)); \
	      } \
	    })
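
/* lll_unlock decrements the futex; __ret holds the decremented value.  A
   nonzero result means the futex was marked contended, so the slow path in
   __lll_unlock_wake / __lll_unlock_wake_private resets it to 0 and issues a
   FUTEX_WAKE for one waiter.  */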
#define lll_robust_unlock(futex, private) \
  (void) ({ int __ret, *__futex = &(futex); \
	    __asm__ __volatile__ ("\
	.align 2\n\
	mova 1f,r0\n\
	mov r15,r1\n\
	mov #-6,r15\n\
     0: mov.l @%1,%0\n\
	and %2,%0\n\
	mov.l %0,@%1\n\
     1: mov r1,r15"\
	: "=&r" (__ret) : "r" (__futex), "r" (FUTEX_WAITERS) \
	: "r0", "r1", "memory"); \
	    if (__ret) \
	      __lll_unlock_wake (__futex, private); })
#define lll_robust_dead(futex, private) \
  (void) ({ int __ignore, *__futex = &(futex); \
	    __asm__ __volatile__ ("\
	.align 2\n\
	mova 1f,r0\n\
	mov r15,r1\n\
	mov #-6,r15\n\
     0: mov.l @%1,%0\n\
	or %2,%0\n\
	mov.l %0,@%1\n\
     1: mov r1,r15"\
	: "=&r" (__ignore) : "r" (__futex), "r" (FUTEX_OWNER_DIED) \
	: "r0", "r1", "memory"); \
	    lll_futex_wake (__futex, 1, private); })
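
/* The futex system call below is issued with the usual Linux/SH calling
   convention: syscall number in r3, arguments in r4-r7, result in r0,
   entered via "trapa".  When NEED_SYSCALL_INST_PAD is defined, the trap is
   padded with harmless "or r0,r0" instructions, a workaround required on
   some SH-4 parts.  */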
# ifdef NEED_SYSCALL_INST_PAD
#  define SYSCALL_WITH_INST_PAD "\
	trapa #0x14; or r0,r0; or r0,r0; or r0,r0; or r0,r0; or r0,r0"
# else
#  define SYSCALL_WITH_INST_PAD "\
	trapa #0x14"
# endif
#define lll_futex_wait(futex, val, private) \
  lll_futex_timed_wait (futex, val, NULL, private)

#define lll_futex_timed_wait(futex, val, timeout, private) \
  ({ \
    int __status; \
    register unsigned long __r3 __asm__ ("r3") = SYS_futex; \
    register unsigned long __r4 __asm__ ("r4") = (unsigned long) (futex); \
    register unsigned long __r5 __asm__ ("r5") \
      = __lll_private_flag (FUTEX_WAIT, private); \
    register unsigned long __r6 __asm__ ("r6") = (unsigned long) (val); \
    register unsigned long __r7 __asm__ ("r7") = (unsigned long) (timeout); \
    __asm__ __volatile__ (SYSCALL_WITH_INST_PAD \
			  : "=z" (__status) \
			  : "r" (__r3), "r" (__r4), "r" (__r5), \
			    "r" (__r6), "r" (__r7) \
			  : "memory", "t"); \
    __status; \
  })
#define lll_futex_wake(futex, nr, private) \
  do { \
    int __ignore; \
    register unsigned long __r3 __asm__ ("r3") = SYS_futex; \
    register unsigned long __r4 __asm__ ("r4") = (unsigned long) (futex); \
    register unsigned long __r5 __asm__ ("r5") \
      = __lll_private_flag (FUTEX_WAKE, private); \
    register unsigned long __r6 __asm__ ("r6") = (unsigned long) (nr); \
    register unsigned long __r7 __asm__ ("r7") = 0; \
    __asm__ __volatile__ (SYSCALL_WITH_INST_PAD \
			  : "=z" (__ignore) \
			  : "r" (__r3), "r" (__r4), "r" (__r5), \
			    "r" (__r6), "r" (__r7) \
			  : "memory", "t"); \
  } while (0)
#define lll_islocked(futex) \
  (futex != LLL_LOCK_INITIALIZER)
/* The kernel notifies a process which uses CLONE_CHILD_CLEARTID via futex
   wakeup when the clone terminates.  The memory location contains the
   thread ID while the clone is running and is reset to zero
   afterwards.  */
#define lll_wait_tid(tid) \
  do { \
    __typeof (tid) __tid; \
    while ((__tid = (tid)) != 0) \
      lll_futex_wait (&(tid), __tid, LLL_SHARED); \
  } while (0)
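
/* The loop re-reads the TID after every wakeup, so spurious or interrupted
   FUTEX_WAIT calls simply lead to another wait; it only exits once the
   kernel has actually cleared the TID word.  */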
extern int __lll_timedwait_tid (int *tid, const struct timespec *abstime)
     attribute_hidden;

#define lll_timedwait_tid(tid, abstime) \
  ({ \
    int __ret = 0; \
    if (tid != 0) \
      { \
	if (abstime->tv_nsec < 0 || abstime->tv_nsec >= 1000000000) \
	  __ret = EINVAL; \
	else \
	  __ret = __lll_timedwait_tid (&tid, abstime); \
      } \
    __ret; })
#endif  /* !__ASSEMBLER__ */

#endif  /* lowlevellock.h */