/* Definition for thread-local data handling.  nptl/x86_64 version.
   Copyright (C) 2002-2014 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */

#ifndef _TLS_H
#define _TLS_H	1

#ifndef __ASSEMBLER__
# include <asm/prctl.h>	/* For ARCH_SET_FS.  */
# include <stdbool.h>
# include <stddef.h>
# include <stdint.h>
# include <stdlib.h>
# include <libc-internal.h>
# include <kernel-features.h>

/* Replacement type for __m128 since this file is included by ld.so,
   which is compiled with -mno-sse.  It must not change the alignment
   of rtld_savespace_sse.  */
typedef struct
{
  int i[4];
} __128bits;


/* Type for the dtv.  */
typedef union dtv
{
  size_t counter;
  struct
  {
    void *val;
    bool is_static;
  } pointer;
} dtv_t;


typedef struct
{
  void *tcb;		/* Pointer to the TCB.  Not necessarily the
			   thread descriptor used by libpthread.  */
  dtv_t *dtv;
  void *self;		/* Pointer to the thread descriptor.  */
  int multiple_threads;
  int gscope_flag;
  uintptr_t sysinfo;
  uintptr_t stack_guard;
  uintptr_t pointer_guard;
  unsigned long int vgetcpu_cache[2];
# ifndef __ASSUME_PRIVATE_FUTEX
  int private_futex;
# else
  int __glibc_reserved1;
# endif
  int rtld_must_xmm_save;
  /* Reservation of some values for the TM ABI.  */
  void *__private_tm[4];
  /* GCC split stack support.  */
  void *__private_ss;
  long int __glibc_reserved2;
  /* Have space for the post-AVX register size.  */
  __128bits rtld_savespace_sse[8][4] __attribute__ ((aligned (32)));
} tcbhead_t;

#else /* __ASSEMBLER__ */
# include <tcb-offsets.h>
#endif


/* Alignment requirement for the stack.  */
#define STACK_ALIGN	16
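
/* Illustration (a sketch only; sp stands for any freshly computed stack
   top): a stack pointer is kept 16-byte aligned by rounding down, e.g.

     sp &= ~(uintptr_t) (STACK_ALIGN - 1);

   which matches the x86-64 psABI alignment requirement at call
   boundaries.  */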

#ifndef __ASSEMBLER__
/* Get system call information.  */
# include <sysdep.h>

# ifdef UP
#  define LOCK_PREFIX	/* nothing */
# else
#  define LOCK_PREFIX	"lock;"
# endif

/* This is the size of the initial TCB.  Can't be just sizeof (tcbhead_t),
   because NPTL getpid, __libc_alloca_cutoff etc. need (almost) the whole
   struct pthread even when not linked with -lpthread.  */
# define TLS_INIT_TCB_SIZE sizeof (struct pthread)

/* Alignment requirements for the initial TCB.  */
# define TLS_INIT_TCB_ALIGN __alignof__ (struct pthread)

/* This is the size of the TCB.  */
# define TLS_TCB_SIZE sizeof (struct pthread)

/* Alignment requirements for the TCB.  */
# define TLS_TCB_ALIGN __alignof__ (struct pthread)

/* The TCB can have any size and the memory following the address the
   thread pointer points to is unspecified.  Allocate the TCB there.  */
# define TLS_TCB_AT_TP	1
# define TLS_DTV_AT_TP	0
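
/* Layout sketch (illustrative, not from the original text): with
   TLS_TCB_AT_TP the thread pointer (the %fs base) points directly at the
   TCB, i.e. at struct pthread, and the static TLS blocks sit at negative
   offsets below it:

       [ ... static TLS blocks ... ][ struct pthread / tcbhead_t ]
                                    ^
                                    %fs base == THREAD_SELF

   The dtv is only reachable indirectly, through header.dtv.  */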

/* Get the thread descriptor definition.  */
# include <nptl/descr.h>

/* Install the dtv pointer.  The pointer passed is to the element with
   index -1 which contains the length.  */
# define INSTALL_DTV(descr, dtvp) \
  ((tcbhead_t *) (descr))->dtv = (dtvp) + 1

/* Install new dtv for current thread.  */
# define INSTALL_NEW_DTV(dtvp) \
  ({ struct pthread *__pd; \
     THREAD_SETMEM (__pd, header.dtv, (dtvp)); })

/* Return dtv of given thread descriptor.  */
# define GET_DTV(descr) \
  (((tcbhead_t *) (descr))->dtv)
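
/* How these fit together (a sketch, not part of the original macros):
   the allocator hands INSTALL_DTV a pointer to the element holding the
   array length, and the stored pointer is advanced by one, so

     dtv_t *dtv = GET_DTV (descr);
     size_t nelem = dtv[-1].counter;

   still reaches that length, while dtv[0].counter holds the generation
   number and dtv[1] onward hold the per-module TLS pointers.  */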

/* Code to initially set up the thread pointer.  This might need special
   attention since 'errno' is not yet available and if the operation can
   cause a failure 'errno' must not be touched.

   We have to make the syscall for both uses of the macro since the
   address might be (and probably is) different.  */
# define TLS_INIT_TP(thrdescr) \
  ({ void *_thrdescr = (thrdescr); \
     tcbhead_t *_head = _thrdescr; \
     int _result; \
     _head->tcb = _thrdescr; \
     /* For now the thread descriptor is at the same address.  */ \
     _head->self = _thrdescr; \
     /* It is a simple syscall to set the %fs value for the thread.  */ \
     asm volatile ("syscall" \
		   : "=a" (_result) \
		   : "0" ((unsigned long int) __NR_arch_prctl), \
		     "D" ((unsigned long int) ARCH_SET_FS), \
		     "S" (_thrdescr) \
		   : "memory", "cc", "r11", "cx"); \
     _result ? "cannot set %fs base address for thread-local storage" : 0; \
  })
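
/* The inline syscall above is just the raw form of
   arch_prctl (ARCH_SET_FS, _thrdescr), sketched here for illustration
   (the macro avoids any libc wrapper because errno must not be touched
   this early):

     syscall (__NR_arch_prctl, ARCH_SET_FS, _thrdescr);

   Once %fs points at the new TCB, the %fs-relative accessors below
   (THREAD_SELF, THREAD_GETMEM, ...) operate on this thread.  */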

/* Return the address of the dtv for the current thread.  */
# define THREAD_DTV() \
  ({ struct pthread *__pd; \
     THREAD_GETMEM (__pd, header.dtv); })

/* Return the thread descriptor for the current thread.

   The contained asm must *not* be marked volatile since otherwise
   assignments like
	pthread_descr self = thread_self();
   do not get optimized away.  */
# define THREAD_SELF \
  ({ struct pthread *__self; \
     asm ("mov %%fs:%c1,%0" : "=r" (__self) \
	  : "i" (offsetof (struct pthread, header.self))); \
     __self; })

/* Magic for libthread_db to know how to do THREAD_SELF.  */
# define DB_THREAD_SELF_INCLUDE <sys/reg.h> /* For the FS constant.  */
# define DB_THREAD_SELF CONST_THREAD_AREA (64, FS)

/* Read member of the thread descriptor directly.  */
# define THREAD_GETMEM(descr, member) \
  ({ __typeof (descr->member) __value; \
     if (sizeof (__value) == 1) \
       asm volatile ("movb %%fs:%P2,%b0" \
		     : "=q" (__value) \
		     : "0" (0), "i" (offsetof (struct pthread, member))); \
     else if (sizeof (__value) == 4) \
       asm volatile ("movl %%fs:%P1,%0" \
		     : "=r" (__value) \
		     : "i" (offsetof (struct pthread, member))); \
     else \
       { \
	 if (sizeof (__value) != 8) \
	   /* There should not be any value with a size other than 1, \
	      4 or 8.  */ \
	   abort (); \
	 asm volatile ("movq %%fs:%P1,%q0" \
		       : "=r" (__value) \
		       : "i" (offsetof (struct pthread, member))); \
       } \
     __value; })

/* Same as THREAD_GETMEM, but the member offset can be non-constant.  */
# define THREAD_GETMEM_NC(descr, member, idx) \
  ({ __typeof (descr->member[0]) __value; \
     if (sizeof (__value) == 1) \
       asm volatile ("movb %%fs:%P2(%q3),%b0" \
		     : "=q" (__value) \
		     : "0" (0), "i" (offsetof (struct pthread, member[0])), \
		       "r" (idx)); \
     else if (sizeof (__value) == 4) \
       asm volatile ("movl %%fs:%P1(,%q2,4),%0" \
		     : "=r" (__value) \
		     : "i" (offsetof (struct pthread, member[0])), "r" (idx)); \
     else \
       { \
	 if (sizeof (__value) != 8) \
	   /* There should not be any value with a size other than 1, \
	      4 or 8.  */ \
	   abort (); \
	 asm volatile ("movq %%fs:%P1(,%q2,8),%q0" \
		       : "=r" (__value) \
		       : "i" (offsetof (struct pthread, member[0])), \
			 "r" (idx)); \
       } \
     __value; })

/* Loading addresses of objects on x86-64 needs special treatment when
   generating PIC code.  */
#ifdef __pic__
# define IMM_MODE "nr"
#else
# define IMM_MODE "ir"
#endif

/* Set member of the thread descriptor directly.  */
# define THREAD_SETMEM(descr, member, value) \
  ({ if (sizeof (descr->member) == 1) \
       asm volatile ("movb %b0,%%fs:%P1" : \
		     : IMM_MODE (value), \
		       "i" (offsetof (struct pthread, member))); \
     else if (sizeof (descr->member) == 4) \
       asm volatile ("movl %0,%%fs:%P1" : \
		     : IMM_MODE (value), \
		       "i" (offsetof (struct pthread, member))); \
     else \
       { \
	 if (sizeof (descr->member) != 8) \
	   /* There should not be any value with a size other than 1, \
	      4 or 8.  */ \
	   abort (); \
	 asm volatile ("movq %q0,%%fs:%P1" : \
		       : IMM_MODE ((uint64_t) cast_to_integer (value)), \
			 "i" (offsetof (struct pthread, member))); \
       } \
  })

/* Same as THREAD_SETMEM, but the member offset can be non-constant.  */
# define THREAD_SETMEM_NC(descr, member, idx, value) \
  ({ if (sizeof (descr->member[0]) == 1) \
       asm volatile ("movb %b0,%%fs:%P1(%q2)" : \
		     : IMM_MODE (value), \
		       "i" (offsetof (struct pthread, member[0])), \
		       "r" (idx)); \
     else if (sizeof (descr->member[0]) == 4) \
       asm volatile ("movl %0,%%fs:%P1(,%q2,4)" : \
		     : IMM_MODE (value), \
		       "i" (offsetof (struct pthread, member[0])), \
		       "r" (idx)); \
     else \
       { \
	 if (sizeof (descr->member[0]) != 8) \
	   /* There should not be any value with a size other than 1, \
	      4 or 8.  */ \
	   abort (); \
	 asm volatile ("movq %q0,%%fs:%P1(,%q2,8)" : \
		       : IMM_MODE ((uint64_t) cast_to_integer (value)), \
			 "i" (offsetof (struct pthread, member[0])), \
			 "r" (idx)); \
       } \
  })

/* Atomic compare and exchange on TLS, returning old value.  */
# define THREAD_ATOMIC_CMPXCHG_VAL(descr, member, newval, oldval) \
  ({ __typeof (descr->member) __ret; \
     __typeof (oldval) __old = (oldval); \
     if (sizeof (descr->member) == 4) \
       asm volatile (LOCK_PREFIX "cmpxchgl %2, %%fs:%P3" \
		     : "=a" (__ret) \
		     : "0" (__old), "r" (newval), \
		       "i" (offsetof (struct pthread, member))); \
     else \
       /* Not necessary for other sizes at the moment.  */ \
       abort (); \
     __ret; })

/* Atomic logical and.  */
# define THREAD_ATOMIC_AND(descr, member, val) \
  (void) ({ if (sizeof ((descr)->member) == 4) \
	      asm volatile (LOCK_PREFIX "andl %1, %%fs:%P0" \
			    :: "i" (offsetof (struct pthread, member)), \
			       "ir" (val)); \
	    else \
	      /* Not necessary for other sizes at the moment.  */ \
	      abort (); })

/* Atomic set bit.  */
# define THREAD_ATOMIC_BIT_SET(descr, member, bit) \
  (void) ({ if (sizeof ((descr)->member) == 4) \
	      asm volatile (LOCK_PREFIX "orl %1, %%fs:%P0" \
			    :: "i" (offsetof (struct pthread, member)), \
			       "ir" (1 << (bit))); \
	    else \
	      /* Not necessary for other sizes at the moment.  */ \
	      abort (); })
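
/* Usage sketch (illustrative; cancelhandling and CANCELED_BIT are the
   names used in nptl/descr.h):

     THREAD_ATOMIC_BIT_SET (THREAD_SELF, cancelhandling, CANCELED_BIT);

   Note that descr is only inspected with sizeof; the operation always
   targets the current thread's descriptor through %fs.  */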

# define CALL_THREAD_FCT(descr) \
  ({ void *__res; \
     asm volatile ("movq %%fs:%P2, %%rdi\n\t" \
		   "callq *%%fs:%P1" \
		   : "=a" (__res) \
		   : "i" (offsetof (struct pthread, start_routine)), \
		     "i" (offsetof (struct pthread, arg)) \
		   : "di", "si", "cx", "dx", "r8", "r9", "r10", "r11", \
		     "memory", "cc"); \
     __res; })

/* Set the stack guard field in TCB head.  */
# define THREAD_SET_STACK_GUARD(value) \
  THREAD_SETMEM (THREAD_SELF, header.stack_guard, value)
# define THREAD_COPY_STACK_GUARD(descr) \
  ((descr)->header.stack_guard \
   = THREAD_GETMEM (THREAD_SELF, header.stack_guard))

/* Set the pointer guard field in the TCB head.  */
# define THREAD_SET_POINTER_GUARD(value) \
  THREAD_SETMEM (THREAD_SELF, header.pointer_guard, value)
# define THREAD_COPY_POINTER_GUARD(descr) \
  ((descr)->header.pointer_guard \
   = THREAD_GETMEM (THREAD_SELF, header.pointer_guard))
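
/* Usage sketch (illustrative only): when a new thread is set up, the
   creating thread seeds the new descriptor from its own TCB so that
   both threads share the same canaries, roughly

     THREAD_COPY_STACK_GUARD (pd);
     THREAD_COPY_POINTER_GUARD (pd);

   where pd is the struct pthread being prepared for the new thread.  */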

/* Get and set the global scope generation counter in the TCB head.  */
# define THREAD_GSCOPE_FLAG_UNUSED 0
# define THREAD_GSCOPE_FLAG_USED   1
# define THREAD_GSCOPE_FLAG_WAIT   2
# define THREAD_GSCOPE_RESET_FLAG() \
  do \
    { int __res; \
      asm volatile ("xchgl %0, %%fs:%P1" \
		    : "=r" (__res) \
		    : "i" (offsetof (struct pthread, header.gscope_flag)), \
		      "0" (THREAD_GSCOPE_FLAG_UNUSED)); \
      if (__res == THREAD_GSCOPE_FLAG_WAIT) \
	lll_futex_wake (&THREAD_SELF->header.gscope_flag, 1, LLL_PRIVATE); \
    } \
  while (0)
# define THREAD_GSCOPE_SET_FLAG() \
  THREAD_SETMEM (THREAD_SELF, header.gscope_flag, THREAD_GSCOPE_FLAG_USED)
# define THREAD_GSCOPE_WAIT() \
  GL(dl_wait_lookup_done) ()
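
/* How the flags are used (a sketch of the protocol, not original text):
   a thread doing a lazy symbol lookup brackets its walk of the global
   search scope with

     THREAD_GSCOPE_SET_FLAG ();
     ... resolve the symbol ...
     THREAD_GSCOPE_RESET_FLAG ();

   dlopen/dlclose call THREAD_GSCOPE_WAIT, which marks still-USED flags
   as WAIT and blocks; the xchg in RESET_FLAG then sees WAIT as the old
   value and issues the futex wake.  */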

/* Defined in dl-trampoline.S.  */
extern void _dl_x86_64_save_sse (void);
extern void _dl_x86_64_restore_sse (void);

# define RTLD_CHECK_FOREIGN_CALL \
  (THREAD_GETMEM (THREAD_SELF, header.rtld_must_xmm_save) != 0)

/* NB: Don't use the xchg operation because that would imply a lock
   prefix which is expensive and unnecessary.  The cache line is also
   not contended at all.  */
# define RTLD_ENABLE_FOREIGN_CALL \
  int old_rtld_must_xmm_save = THREAD_GETMEM (THREAD_SELF, \
					      header.rtld_must_xmm_save); \
  THREAD_SETMEM (THREAD_SELF, header.rtld_must_xmm_save, 1)

# define RTLD_PREPARE_FOREIGN_CALL \
  do if (THREAD_GETMEM (THREAD_SELF, header.rtld_must_xmm_save)) \
    { \
      _dl_x86_64_save_sse (); \
      THREAD_SETMEM (THREAD_SELF, header.rtld_must_xmm_save, 0); \
    } \
  while (0)

# define RTLD_FINALIZE_FOREIGN_CALL \
  do { \
    if (THREAD_GETMEM (THREAD_SELF, header.rtld_must_xmm_save) == 0) \
      _dl_x86_64_restore_sse (); \
    THREAD_SETMEM (THREAD_SELF, header.rtld_must_xmm_save, \
		   old_rtld_must_xmm_save); \
  } while (0)
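
/* Intended pairing (a sketch; foreign_function stands for any
   user-supplied callback, e.g. an audit hook, and is not a name from
   this header):

     RTLD_ENABLE_FOREIGN_CALL;
     ...
     RTLD_PREPARE_FOREIGN_CALL;
     (*foreign_function) (args);
     ...
     RTLD_FINALIZE_FOREIGN_CALL;

   The SSE registers are saved into rtld_savespace_sse before the first
   foreign call and restored afterwards only if a save happened.  */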

#endif /* __ASSEMBLER__ */

#endif	/* tls.h */