Get rid of lll_robust_dead.
[glibc.git] sysdeps/unix/sysv/linux/x86_64/lowlevellock.h
blob 55b4e16144bd16fb203d5a6569f1ffbe7a0e0be8

/* Copyright (C) 2002-2014 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Ulrich Drepper <drepper@redhat.com>, 2002.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#ifndef _LOWLEVELLOCK_H
#define _LOWLEVELLOCK_H 1

#include <stap-probe.h>

#ifndef __ASSEMBLER__
# include <time.h>
# include <sys/param.h>
# include <bits/pthreadtypes.h>
# include <kernel-features.h>
# include <tcb-offsets.h>

# ifndef LOCK_INSTR
#  ifdef UP
#   define LOCK_INSTR	/* nothing */
#  else
#   define LOCK_INSTR "lock;"
#  endif
# endif
#else
# ifndef LOCK
#  ifdef UP
#   define LOCK
#  else
#   define LOCK lock
#  endif
# endif
#endif

#define SYS_futex		__NR_futex
#define FUTEX_WAIT		0
#define FUTEX_WAKE		1
#define FUTEX_CMP_REQUEUE	4
#define FUTEX_WAKE_OP		5
#define FUTEX_LOCK_PI		6
#define FUTEX_UNLOCK_PI		7
#define FUTEX_TRYLOCK_PI	8
#define FUTEX_WAIT_BITSET	9
#define FUTEX_WAKE_BITSET	10
#define FUTEX_WAIT_REQUEUE_PI	11
#define FUTEX_CMP_REQUEUE_PI	12
#define FUTEX_PRIVATE_FLAG	128
#define FUTEX_CLOCK_REALTIME	256

#define FUTEX_BITSET_MATCH_ANY	0xffffffff

#define FUTEX_OP_CLEAR_WAKE_IF_GT_ONE	((4 << 24) | 1)

/* Values for 'private' parameter of locking macros.  Yes, the
   definition seems to be backwards.  But it is not.  The bit will be
   reversed before passing to the system call.  */
#define LLL_PRIVATE	0
#define LLL_SHARED	FUTEX_PRIVATE_FLAG

#ifndef __ASSEMBLER__

#if !defined NOT_IN_libc || defined IS_IN_rtld
/* In libc.so or ld.so all futexes are private.  */
# ifdef __ASSUME_PRIVATE_FUTEX
#  define __lll_private_flag(fl, private) \
  ((fl) | FUTEX_PRIVATE_FLAG)
# else
#  define __lll_private_flag(fl, private) \
  ((fl) | THREAD_GETMEM (THREAD_SELF, header.private_futex))
# endif
#else
# ifdef __ASSUME_PRIVATE_FUTEX
#  define __lll_private_flag(fl, private) \
  (((fl) | FUTEX_PRIVATE_FLAG) ^ (private))
# else
#  define __lll_private_flag(fl, private) \
  (__builtin_constant_p (private) \
   ? ((private) == 0 \
      ? ((fl) | THREAD_GETMEM (THREAD_SELF, header.private_futex)) \
      : (fl)) \
   : ({ unsigned int __fl = ((private) ^ FUTEX_PRIVATE_FLAG); \
	asm ("andl %%fs:%P1, %0" : "+r" (__fl) \
	     : "i" (offsetof (struct pthread, header.private_futex))); \
	__fl | (fl); }))
# endif
#endif
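
/* Illustrative note (editorial, not part of the original header): outside of
   libc.so/ld.so with __ASSUME_PRIVATE_FUTEX defined, the macro XORs the
   'private' argument into the constant flag, which is why LLL_SHARED is
   defined as FUTEX_PRIVATE_FLAG above.  Worked example:

     __lll_private_flag (FUTEX_WAIT, LLL_PRIVATE)
       => (FUTEX_WAIT | FUTEX_PRIVATE_FLAG) ^ 0
       == FUTEX_WAIT | FUTEX_PRIVATE_FLAG
     __lll_private_flag (FUTEX_WAIT, LLL_SHARED)
       => (FUTEX_WAIT | FUTEX_PRIVATE_FLAG) ^ FUTEX_PRIVATE_FLAG
       == FUTEX_WAIT

   i.e. the "backwards" LLL_* values cancel the private bit for shared
   futexes.  */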

/* Initializer for lock.  */
#define LLL_LOCK_INITIALIZER		(0)
#define LLL_LOCK_INITIALIZER_LOCKED	(1)
#define LLL_LOCK_INITIALIZER_WAITERS	(2)
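
/* Editorial note: these three values are the complete state encoding used by
   the lll_* macros below -- 0 means unlocked, 1 means locked with no known
   waiters (unlocking needs no futex wake), and 2 means locked with possible
   waiters (unlocking must issue FUTEX_WAKE).  */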

/* Delay in spinlock loop.  */
#define BUSY_WAIT_NOP	asm ("rep; nop")

#define lll_futex_wait(futex, val, private) \
  lll_futex_timed_wait(futex, val, NULL, private)

#define lll_futex_timed_wait(futex, val, timeout, private) \
  ({ \
    register const struct timespec *__to __asm ("r10") = timeout; \
    int __status; \
    register __typeof (val) _val __asm ("edx") = (val); \
    __asm __volatile ("syscall" \
		      : "=a" (__status) \
		      : "0" (SYS_futex), "D" (futex), \
			"S" (__lll_private_flag (FUTEX_WAIT, private)), \
			"d" (_val), "r" (__to) \
		      : "memory", "cc", "r11", "cx"); \
    __status; \
  })
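
/* Editorial sketch (not from the original source): the macro issues the
   futex system call directly, so it evaluates to the raw kernel result --
   0 after a wakeup, or a negative errno value such as -EWOULDBLOCK (the
   futex word no longer equals VAL) or -ETIMEDOUT.  A typical caller
   re-checks its condition in a loop, e.g.:  */
#if 0	/* illustration only; 'cond_flag' is a hypothetical int futex word */
  while (cond_flag == 0)
    lll_futex_wait (&cond_flag, 0, LLL_PRIVATE);
#endif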

#define lll_futex_wake(futex, nr, private) \
  ({ \
    int __status; \
    register __typeof (nr) _nr __asm ("edx") = (nr); \
    LIBC_PROBE (lll_futex_wake, 3, futex, nr, private); \
    __asm __volatile ("syscall" \
		      : "=a" (__status) \
		      : "0" (SYS_futex), "D" (futex), \
			"S" (__lll_private_flag (FUTEX_WAKE, private)), \
			"d" (_nr) \
		      : "memory", "cc", "r10", "r11", "cx"); \
    __status; \
  })

/* NB: in the lll_trylock macro we simply return the value in %eax
   after the cmpxchg instruction.  In case the operation succeeded this
   value is zero.  In case the operation failed, the cmpxchg instruction
   has loaded the current value of the memory word which is guaranteed
   to be nonzero.  */
#if defined NOT_IN_libc || defined UP
# define __lll_trylock_asm LOCK_INSTR "cmpxchgl %2, %1"
#else
# define __lll_trylock_asm "cmpl $0, __libc_multiple_threads(%%rip)\n\t" \
			   "je 0f\n\t" \
			   "lock; cmpxchgl %2, %1\n\t" \
			   "jmp 1f\n\t" \
			   "0:\tcmpxchgl %2, %1\n\t" \
			   "1:"
#endif

#define lll_trylock(futex) \
  ({ int ret; \
     __asm __volatile (__lll_trylock_asm \
		       : "=a" (ret), "=m" (futex) \
		       : "r" (LLL_LOCK_INITIALIZER_LOCKED), "m" (futex), \
			 "0" (LLL_LOCK_INITIALIZER) \
		       : "memory"); \
     ret; })
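
/* Editorial sketch (not from the original source): per the NB comment above,
   lll_trylock evaluates to 0 when the lock was acquired and to the nonzero
   value already stored in the futex word otherwise, so callers can test it
   directly:  */
#if 0	/* illustration only; 'lock' is a hypothetical int initialized
	   to LLL_LOCK_INITIALIZER */
  if (lll_trylock (lock) == 0)
    {
      /* ... critical section ... */
      lll_unlock (lock, LLL_PRIVATE);
    }
  else
    {
      /* Lock is busy; block on it instead.  */
      lll_lock (lock, LLL_PRIVATE);
    }
#endif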

#define lll_cond_trylock(futex) \
  ({ int ret; \
     __asm __volatile (LOCK_INSTR "cmpxchgl %2, %1" \
		       : "=a" (ret), "=m" (futex) \
		       : "r" (LLL_LOCK_INITIALIZER_WAITERS), \
			 "m" (futex), "0" (LLL_LOCK_INITIALIZER) \
		       : "memory"); \
     ret; })

#if defined NOT_IN_libc || defined UP
# define __lll_lock_asm_start LOCK_INSTR "cmpxchgl %4, %2\n\t" \
			      "jz 24f\n\t"
#else
# define __lll_lock_asm_start "cmpl $0, __libc_multiple_threads(%%rip)\n\t" \
			      "je 0f\n\t" \
			      "lock; cmpxchgl %4, %2\n\t" \
			      "jnz 1f\n\t" \
			      "jmp 24f\n" \
			      "0:\tcmpxchgl %4, %2\n\t" \
			      "jz 24f\n\t"
#endif
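
/* Editorial note on two details of the lock/unlock asm below.  First, in the
   multi-CPU libc build, __lll_lock_asm_start (and __lll_trylock_asm and
   __lll_unlock_asm_start) checks __libc_multiple_threads and skips the
   expensive 'lock' prefix while the process is still single-threaded.
   Second, the slow paths wrap the call to the __lll_*_wait/__lll_*_wake
   helpers in 'sub $128, %rsp' / 'add $128, %rsp': the x86-64 ABI lets the
   containing function use the 128-byte red zone below %rsp, and because
   these macros expand inside arbitrary functions, the stack pointer is moved
   past that zone before the call so the helper cannot clobber it.  */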

#define lll_lock(futex, private) \
  (void) \
    ({ int ignore1, ignore2, ignore3; \
       if (__builtin_constant_p (private) && (private) == LLL_PRIVATE) \
	 __asm __volatile (__lll_lock_asm_start \
			   "1:\tlea %2, %%" RDI_LP "\n" \
			   "2:\tsub $128, %%" RSP_LP "\n" \
			   ".cfi_adjust_cfa_offset 128\n" \
			   "3:\tcallq __lll_lock_wait_private\n" \
			   "4:\tadd $128, %%" RSP_LP "\n" \
			   ".cfi_adjust_cfa_offset -128\n" \
			   "24:" \
			   : "=S" (ignore1), "=&D" (ignore2), "=m" (futex), \
			     "=a" (ignore3) \
			   : "0" (1), "m" (futex), "3" (0) \
			   : "cx", "r11", "cc", "memory"); \
       else \
	 __asm __volatile (__lll_lock_asm_start \
			   "1:\tlea %2, %%" RDI_LP "\n" \
			   "2:\tsub $128, %%" RSP_LP "\n" \
			   ".cfi_adjust_cfa_offset 128\n" \
			   "3:\tcallq __lll_lock_wait\n" \
			   "4:\tadd $128, %%" RSP_LP "\n" \
			   ".cfi_adjust_cfa_offset -128\n" \
			   "24:" \
			   : "=S" (ignore1), "=D" (ignore2), "=m" (futex), \
			     "=a" (ignore3) \
			   : "1" (1), "m" (futex), "3" (0), "0" (private) \
			   : "cx", "r11", "cc", "memory"); \
    })

#define lll_robust_lock(futex, id, private) \
  ({ int result, ignore1, ignore2; \
     __asm __volatile (LOCK_INSTR "cmpxchgl %4, %2\n\t" \
		       "jz 24f\n" \
		       "1:\tlea %2, %%" RDI_LP "\n" \
		       "2:\tsub $128, %%" RSP_LP "\n" \
		       ".cfi_adjust_cfa_offset 128\n" \
		       "3:\tcallq __lll_robust_lock_wait\n" \
		       "4:\tadd $128, %%" RSP_LP "\n" \
		       ".cfi_adjust_cfa_offset -128\n" \
		       "24:" \
		       : "=S" (ignore1), "=D" (ignore2), "=m" (futex), \
			 "=a" (result) \
		       : "1" (id), "m" (futex), "3" (0), "0" (private) \
		       : "cx", "r11", "cc", "memory"); \
     result; })

#define lll_cond_lock(futex, private) \
  (void) \
    ({ int ignore1, ignore2, ignore3; \
       __asm __volatile (LOCK_INSTR "cmpxchgl %4, %2\n\t" \
			 "jz 24f\n" \
			 "1:\tlea %2, %%" RDI_LP "\n" \
			 "2:\tsub $128, %%" RSP_LP "\n" \
			 ".cfi_adjust_cfa_offset 128\n" \
			 "3:\tcallq __lll_lock_wait\n" \
			 "4:\tadd $128, %%" RSP_LP "\n" \
			 ".cfi_adjust_cfa_offset -128\n" \
			 "24:" \
			 : "=S" (ignore1), "=D" (ignore2), "=m" (futex), \
			   "=a" (ignore3) \
			 : "1" (2), "m" (futex), "3" (0), "0" (private) \
			 : "cx", "r11", "cc", "memory"); \
    })

#define lll_robust_cond_lock(futex, id, private) \
  ({ int result, ignore1, ignore2; \
     __asm __volatile (LOCK_INSTR "cmpxchgl %4, %2\n\t" \
		       "jz 24f\n" \
		       "1:\tlea %2, %%" RDI_LP "\n" \
		       "2:\tsub $128, %%" RSP_LP "\n" \
		       ".cfi_adjust_cfa_offset 128\n" \
		       "3:\tcallq __lll_robust_lock_wait\n" \
		       "4:\tadd $128, %%" RSP_LP "\n" \
		       ".cfi_adjust_cfa_offset -128\n" \
		       "24:" \
		       : "=S" (ignore1), "=D" (ignore2), "=m" (futex), \
			 "=a" (result) \
		       : "1" (id | FUTEX_WAITERS), "m" (futex), "3" (0), \
			 "0" (private) \
		       : "cx", "r11", "cc", "memory"); \
     result; })

#define lll_timedlock(futex, timeout, private) \
  ({ int result, ignore1, ignore2, ignore3; \
     __asm __volatile (LOCK_INSTR "cmpxchgl %1, %4\n\t" \
		       "jz 24f\n" \
		       "1:\tlea %4, %%" RDI_LP "\n" \
		       "0:\tmov %8, %%" RDX_LP "\n" \
		       "2:\tsub $128, %%" RSP_LP "\n" \
		       ".cfi_adjust_cfa_offset 128\n" \
		       "3:\tcallq __lll_timedlock_wait\n" \
		       "4:\tadd $128, %%" RSP_LP "\n" \
		       ".cfi_adjust_cfa_offset -128\n" \
		       "24:" \
		       : "=a" (result), "=D" (ignore1), "=S" (ignore2), \
			 "=&d" (ignore3), "=m" (futex) \
		       : "0" (0), "1" (1), "m" (futex), "m" (timeout), \
			 "2" (private) \
		       : "memory", "cx", "cc", "r10", "r11"); \
     result; })

extern int __lll_timedlock_elision (int *futex, short *adapt_count,
				    const struct timespec *timeout,
				    int private) attribute_hidden;

#define lll_timedlock_elision(futex, adapt_count, timeout, private) \
  __lll_timedlock_elision(&(futex), &(adapt_count), timeout, private)

#define lll_robust_timedlock(futex, timeout, id, private) \
  ({ int result, ignore1, ignore2, ignore3; \
     __asm __volatile (LOCK_INSTR "cmpxchgl %1, %4\n\t" \
		       "jz 24f\n\t" \
		       "1:\tlea %4, %%" RDI_LP "\n" \
		       "0:\tmov %8, %%" RDX_LP "\n" \
		       "2:\tsub $128, %%" RSP_LP "\n" \
		       ".cfi_adjust_cfa_offset 128\n" \
		       "3:\tcallq __lll_robust_timedlock_wait\n" \
		       "4:\tadd $128, %%" RSP_LP "\n" \
		       ".cfi_adjust_cfa_offset -128\n" \
		       "24:" \
		       : "=a" (result), "=D" (ignore1), "=S" (ignore2), \
			 "=&d" (ignore3), "=m" (futex) \
		       : "0" (0), "1" (id), "m" (futex), "m" (timeout), \
			 "2" (private) \
		       : "memory", "cx", "cc", "r10", "r11"); \
     result; })

#if defined NOT_IN_libc || defined UP
# define __lll_unlock_asm_start LOCK_INSTR "decl %0\n\t" \
				"je 24f\n\t"
#else
# define __lll_unlock_asm_start "cmpl $0, __libc_multiple_threads(%%rip)\n\t" \
				"je 0f\n\t" \
				"lock; decl %0\n\t" \
				"jne 1f\n\t" \
				"jmp 24f\n\t" \
				"0:\tdecl %0\n\t" \
				"je 24f\n\t"
#endif

#define lll_unlock(futex, private) \
  (void) \
    ({ int ignore; \
       if (__builtin_constant_p (private) && (private) == LLL_PRIVATE) \
	 __asm __volatile (__lll_unlock_asm_start \
			   "1:\tlea %0, %%" RDI_LP "\n" \
			   "2:\tsub $128, %%" RSP_LP "\n" \
			   ".cfi_adjust_cfa_offset 128\n" \
			   "3:\tcallq __lll_unlock_wake_private\n" \
			   "4:\tadd $128, %%" RSP_LP "\n" \
			   ".cfi_adjust_cfa_offset -128\n" \
			   "24:" \
			   : "=m" (futex), "=&D" (ignore) \
			   : "m" (futex) \
			   : "ax", "cx", "r11", "cc", "memory"); \
       else \
	 __asm __volatile (__lll_unlock_asm_start \
			   "1:\tlea %0, %%" RDI_LP "\n" \
			   "2:\tsub $128, %%" RSP_LP "\n" \
			   ".cfi_adjust_cfa_offset 128\n" \
			   "3:\tcallq __lll_unlock_wake\n" \
			   "4:\tadd $128, %%" RSP_LP "\n" \
			   ".cfi_adjust_cfa_offset -128\n" \
			   "24:" \
			   : "=m" (futex), "=&D" (ignore) \
			   : "m" (futex), "S" (private) \
			   : "ax", "cx", "r11", "cc", "memory"); \
    })

#define lll_robust_unlock(futex, private) \
  do \
    { \
      int ignore; \
      __asm __volatile (LOCK_INSTR "andl %2, %0\n\t" \
			"je 24f\n\t" \
			"1:\tlea %0, %%" RDI_LP "\n" \
			"2:\tsub $128, %%" RSP_LP "\n" \
			".cfi_adjust_cfa_offset 128\n" \
			"3:\tcallq __lll_unlock_wake\n" \
			"4:\tadd $128, %%" RSP_LP "\n" \
			".cfi_adjust_cfa_offset -128\n" \
			"24:" \
			: "=m" (futex), "=&D" (ignore) \
			: "i" (FUTEX_WAITERS), "m" (futex), \
			  "S" (private) \
			: "ax", "cx", "r11", "cc", "memory"); \
    } \
  while (0)

/* Returns non-zero if error happened, zero if success.  */
#define lll_futex_requeue(ftx, nr_wake, nr_move, mutex, val, private) \
  ({ int __res; \
     register int __nr_move __asm ("r10") = nr_move; \
     register void *__mutex __asm ("r8") = mutex; \
     register int __val __asm ("r9") = val; \
     __asm __volatile ("syscall" \
		       : "=a" (__res) \
		       : "0" (__NR_futex), "D" ((void *) ftx), \
			 "S" (__lll_private_flag (FUTEX_CMP_REQUEUE, \
						  private)), "d" (nr_wake), \
			 "r" (__nr_move), "r" (__mutex), "r" (__val) \
		       : "cx", "r11", "cc", "memory"); \
     __res < 0; })
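
/* Editorial sketch (not from the original source): the raw syscall result is
   negative on failure, so '__res < 0' yields the documented zero-on-success /
   non-zero-on-error convention.  A broadcast-style caller typically wakes one
   waiter and requeues the rest onto the mutex futex, falling back to a plain
   wake if the requeue fails:  */
#if 0	/* illustration only; 'cond_futex', 'cond_val' and 'mutex_futex'
	   are hypothetical */
  if (lll_futex_requeue (&cond_futex, 1, INT_MAX, &mutex_futex,
			 cond_val, LLL_PRIVATE) != 0)
    lll_futex_wake (&cond_futex, INT_MAX, LLL_PRIVATE);
#endif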

#define lll_islocked(futex) \
  (futex != LLL_LOCK_INITIALIZER)


/* The kernel notifies a process which uses CLONE_CHILD_CLEARTID via futex
   wakeup when the clone terminates.  The memory location contains the
   thread ID while the clone is running and is reset to zero
   afterwards.

   The macro parameter must not have any side effect.  */
#define lll_wait_tid(tid) \
  do { \
    int __ignore; \
    register __typeof (tid) _tid asm ("edx") = (tid); \
    if (_tid != 0) \
      __asm __volatile ("xorq %%r10, %%r10\n\t" \
			"1:\tmovq %2, %%rax\n\t" \
			"syscall\n\t" \
			"cmpl $0, (%%rdi)\n\t" \
			"jne 1b" \
			: "=&a" (__ignore) \
			: "S" (FUTEX_WAIT), "i" (SYS_futex), "D" (&tid), \
			  "d" (_tid) \
			: "memory", "cc", "r10", "r11", "cx"); \
  } while (0)
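
/* Editorial note: the asm loop above re-issues FUTEX_WAIT until the TID word
   is actually zero, because the wait can return without the kernel having
   cleared it yet (spurious wakeup, or -EWOULDBLOCK if the value changed in
   between).  This is how join-style code blocks until a thread created with
   CLONE_CHILD_CLEARTID has exited, e.g. (illustration only)
   'lll_wait_tid (pd->tid)' where 'pd' is the struct pthread of the thread
   being joined.  */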

extern int __lll_timedwait_tid (int *tid, const struct timespec *abstime)
     attribute_hidden;

#define lll_timedwait_tid(tid, abstime) \
  ({ \
    int __result = 0; \
    if (tid != 0) \
      { \
	if (abstime->tv_nsec < 0 || abstime->tv_nsec >= 1000000000) \
	  __result = EINVAL; \
	else \
	  __result = __lll_timedwait_tid (&tid, abstime); \
      } \
    __result; })

extern int __lll_lock_elision (int *futex, short *adapt_count, int private)
  attribute_hidden;

extern int __lll_unlock_elision (int *lock, int private)
  attribute_hidden;

extern int __lll_trylock_elision (int *lock, short *adapt_count)
  attribute_hidden;

#define lll_lock_elision(futex, adapt_count, private) \
  __lll_lock_elision (&(futex), &(adapt_count), private)
#define lll_unlock_elision(futex, private) \
  __lll_unlock_elision (&(futex), private)
#define lll_trylock_elision(futex, adapt_count) \
  __lll_trylock_elision (&(futex), &(adapt_count))

#endif	/* !__ASSEMBLER__ */

#endif	/* lowlevellock.h */