glibc.git: sysdeps/loongarch/tst-gnu2-tls2.h

/* Test TLSDESC relocation.  LoongArch64 version.
   Copyright (C) 2024 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <https://www.gnu.org/licenses/>.  */

#include <config.h>
#include <string.h>
#include <stdlib.h>
#include <sys/auxv.h>

/* The instructions between BEFORE_TLSDESC_CALL and _dl_tlsdesc_dynamic,
   and the instructions between _dl_tlsdesc_dynamic and AFTER_TLSDESC_CALL,
   may modify most of the general-purpose registers.  */
#define SAVE_REGISTER(src) \
  asm volatile ("st.d $r3, %0" :"=m"(src) :);
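
/* SAVE_REGISTER stores $r3 (the stack pointer); since the code around the
   TLSDESC call may clobber most other general-purpose registers, the stack
   pointer is the value compared before and after the call.  As an
   illustrative sketch only (not the literal generic test code), the generic
   tst-gnu2-tls2.h pulled in by the #include_next at the end of this file is
   expected to bracket a TLS access roughly as:

       BEFORE_TLSDESC_CALL ();
       access a TLS variable, resolving it through _dl_tlsdesc_dynamic;
       AFTER_TLSDESC_CALL ();   aborts if any checked register changed.  */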

#ifdef __loongarch_soft_float

#define BEFORE_TLSDESC_CALL() \
  uint64_t src; \
  SAVE_REGISTER (src);

#define AFTER_TLSDESC_CALL() \
  uint64_t restore; \
  SAVE_REGISTER (restore); \
  if (src != restore) \
    abort ();
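
/* With the soft-float ABI there are no floating-point, condition-flag, or
   vector registers to preserve, so only the stack pointer saved above is
   checked.  */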

#else /* hard float */

#define SAVE_REGISTER_FCC(src) \
  asm volatile ("movcf2gr $t0, $fcc0" ::: "$t0"); \
  asm volatile ("st.d $t0, %0" :"=m"(src[0]) :); \
  asm volatile ("movcf2gr $t0, $fcc1" ::: "$t0"); \
  asm volatile ("st.d $t0, %0" :"=m"(src[1]) :); \
  asm volatile ("movcf2gr $t0, $fcc2" ::: "$t0"); \
  asm volatile ("st.d $t0, %0" :"=m"(src[2]) :); \
  asm volatile ("movcf2gr $t0, $fcc3" ::: "$t0"); \
  asm volatile ("st.d $t0, %0" :"=m"(src[3]) :); \
  asm volatile ("movcf2gr $t0, $fcc4" ::: "$t0"); \
  asm volatile ("st.d $t0, %0" :"=m"(src[4]) :); \
  asm volatile ("movcf2gr $t0, $fcc5" ::: "$t0"); \
  asm volatile ("st.d $t0, %0" :"=m"(src[5]) :); \
  asm volatile ("movcf2gr $t0, $fcc6" ::: "$t0"); \
  asm volatile ("st.d $t0, %0" :"=m"(src[6]) :); \
  asm volatile ("movcf2gr $t0, $fcc7" ::: "$t0"); \
  asm volatile ("st.d $t0, %0" :"=m"(src[7]) :);
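
/* Each movcf2gr copies one condition-flag register ($fcc0..$fcc7) into
   $t0, which is then stored into the caller-supplied eight-element array;
   AFTER_TLSDESC_CALL compares the before/after arrays with memcmp.  */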

#define LOAD_REGISTER_FCSR() \
  asm volatile ("li.d $t0, 0x01010101" ::: "$t0"); \
  asm volatile ("movgr2fcsr $fcsr0, $t0" :::);

#define SAVE_REGISTER_FCSR() \
  asm volatile ("movfcsr2gr $t0, $fcsr0" ::: "$t0"); \
  asm volatile ("st.d $t0, %0" :"=m"(restore_fcsr) :);
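
/* 0x01010101 acts as a sentinel: LOAD_REGISTER_FCSR writes it into FCSR0
   before the TLSDESC call and SAVE_REGISTER_FCSR reads FCSR0 back into
   restore_fcsr afterwards, so a clobbered FCSR makes the src_fcsr
   comparison in AFTER_TLSDESC_CALL abort.  */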

# define INIT_TLSDESC_CALL() \
  unsigned long hwcap = getauxval (AT_HWCAP);
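
/* hwcap is read once via getauxval before the test body runs;
   BEFORE_TLSDESC_CALL and AFTER_TLSDESC_CALL below use it to exercise the
   widest register set the CPU supports: LASX, then LSX, then scalar FP.  */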

#define LOAD_REGISTER_FLOAT() \
  asm volatile ("fld.d $f0, %0" ::"m"(src_float[0]) :"$f0"); \
  asm volatile ("fld.d $f1, %0" ::"m"(src_float[1]) :"$f1"); \
  asm volatile ("fld.d $f2, %0" ::"m"(src_float[2]) :"$f2"); \
  asm volatile ("fld.d $f3, %0" ::"m"(src_float[3]) :"$f3"); \
  asm volatile ("fld.d $f4, %0" ::"m"(src_float[4]) :"$f4"); \
  asm volatile ("fld.d $f5, %0" ::"m"(src_float[5]) :"$f5"); \
  asm volatile ("fld.d $f6, %0" ::"m"(src_float[6]) :"$f6"); \
  asm volatile ("fld.d $f7, %0" ::"m"(src_float[7]) :"$f7"); \
  asm volatile ("fld.d $f8, %0" ::"m"(src_float[8]) :"$f8"); \
  asm volatile ("fld.d $f9, %0" ::"m"(src_float[9]) :"$f9"); \
  asm volatile ("fld.d $f10, %0" ::"m"(src_float[10]) :"$f10"); \
  asm volatile ("fld.d $f11, %0" ::"m"(src_float[11]) :"$f11"); \
  asm volatile ("fld.d $f12, %0" ::"m"(src_float[12]) :"$f12"); \
  asm volatile ("fld.d $f13, %0" ::"m"(src_float[13]) :"$f13"); \
  asm volatile ("fld.d $f14, %0" ::"m"(src_float[14]) :"$f14"); \
  asm volatile ("fld.d $f15, %0" ::"m"(src_float[15]) :"$f15"); \
  asm volatile ("fld.d $f16, %0" ::"m"(src_float[16]) :"$f16"); \
  asm volatile ("fld.d $f17, %0" ::"m"(src_float[17]) :"$f17"); \
  asm volatile ("fld.d $f18, %0" ::"m"(src_float[18]) :"$f18"); \
  asm volatile ("fld.d $f19, %0" ::"m"(src_float[19]) :"$f19"); \
  asm volatile ("fld.d $f20, %0" ::"m"(src_float[20]) :"$f20"); \
  asm volatile ("fld.d $f21, %0" ::"m"(src_float[21]) :"$f21"); \
  asm volatile ("fld.d $f22, %0" ::"m"(src_float[22]) :"$f22"); \
  asm volatile ("fld.d $f23, %0" ::"m"(src_float[23]) :"$f23"); \
  asm volatile ("fld.d $f24, %0" ::"m"(src_float[24]) :"$f24"); \
  asm volatile ("fld.d $f25, %0" ::"m"(src_float[25]) :"$f25"); \
  asm volatile ("fld.d $f26, %0" ::"m"(src_float[26]) :"$f26"); \
  asm volatile ("fld.d $f27, %0" ::"m"(src_float[27]) :"$f27"); \
  asm volatile ("fld.d $f28, %0" ::"m"(src_float[28]) :"$f28"); \
  asm volatile ("fld.d $f29, %0" ::"m"(src_float[29]) :"$f29"); \
  asm volatile ("fld.d $f30, %0" ::"m"(src_float[30]) :"$f30"); \
  asm volatile ("fld.d $f31, %0" ::"m"(src_float[31]) :"$f31");

#define SAVE_REGISTER_FLOAT() \
  asm volatile ("fst.d $f0, %0" :"=m"(restore_float[0]) :); \
  asm volatile ("fst.d $f1, %0" :"=m"(restore_float[1]) :); \
  asm volatile ("fst.d $f2, %0" :"=m"(restore_float[2]) :); \
  asm volatile ("fst.d $f3, %0" :"=m"(restore_float[3]) :); \
  asm volatile ("fst.d $f4, %0" :"=m"(restore_float[4]) :); \
  asm volatile ("fst.d $f5, %0" :"=m"(restore_float[5]) :); \
  asm volatile ("fst.d $f6, %0" :"=m"(restore_float[6]) :); \
  asm volatile ("fst.d $f7, %0" :"=m"(restore_float[7]) :); \
  asm volatile ("fst.d $f8, %0" :"=m"(restore_float[8]) :); \
  asm volatile ("fst.d $f9, %0" :"=m"(restore_float[9]) :); \
  asm volatile ("fst.d $f10, %0" :"=m"(restore_float[10]) :); \
  asm volatile ("fst.d $f11, %0" :"=m"(restore_float[11]) :); \
  asm volatile ("fst.d $f12, %0" :"=m"(restore_float[12]) :); \
  asm volatile ("fst.d $f13, %0" :"=m"(restore_float[13]) :); \
  asm volatile ("fst.d $f14, %0" :"=m"(restore_float[14]) :); \
  asm volatile ("fst.d $f15, %0" :"=m"(restore_float[15]) :); \
  asm volatile ("fst.d $f16, %0" :"=m"(restore_float[16]) :); \
  asm volatile ("fst.d $f17, %0" :"=m"(restore_float[17]) :); \
  asm volatile ("fst.d $f18, %0" :"=m"(restore_float[18]) :); \
  asm volatile ("fst.d $f19, %0" :"=m"(restore_float[19]) :); \
  asm volatile ("fst.d $f20, %0" :"=m"(restore_float[20]) :); \
  asm volatile ("fst.d $f21, %0" :"=m"(restore_float[21]) :); \
  asm volatile ("fst.d $f22, %0" :"=m"(restore_float[22]) :); \
  asm volatile ("fst.d $f23, %0" :"=m"(restore_float[23]) :); \
  asm volatile ("fst.d $f24, %0" :"=m"(restore_float[24]) :); \
  asm volatile ("fst.d $f25, %0" :"=m"(restore_float[25]) :); \
  asm volatile ("fst.d $f26, %0" :"=m"(restore_float[26]) :); \
  asm volatile ("fst.d $f27, %0" :"=m"(restore_float[27]) :); \
  asm volatile ("fst.d $f28, %0" :"=m"(restore_float[28]) :); \
  asm volatile ("fst.d $f29, %0" :"=m"(restore_float[29]) :); \
  asm volatile ("fst.d $f30, %0" :"=m"(restore_float[30]) :); \
  asm volatile ("fst.d $f31, %0" :"=m"(restore_float[31]) :);
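
/* BEFORE_TLSDESC_CALL initializes src_float[i] to i + 1 and loads those
   values into $f0..$f31; AFTER_TLSDESC_CALL stores the registers back into
   restore_float and aborts on any mismatch.  */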

#ifdef HAVE_LOONGARCH_VEC_COM
#define LOAD_REGISTER_LSX() \
  /* Every byte in $vr0 is 1.  */ \
  asm volatile ("vldi $vr0, 1" ::: "$vr0"); \
  asm volatile ("vldi $vr1, 2" ::: "$vr1"); \
  asm volatile ("vldi $vr2, 3" ::: "$vr2"); \
  asm volatile ("vldi $vr3, 4" ::: "$vr3"); \
  asm volatile ("vldi $vr4, 5" ::: "$vr4"); \
  asm volatile ("vldi $vr5, 6" ::: "$vr5"); \
  asm volatile ("vldi $vr6, 7" ::: "$vr6"); \
  asm volatile ("vldi $vr7, 8" ::: "$vr7"); \
  asm volatile ("vldi $vr8, 9" ::: "$vr8"); \
  asm volatile ("vldi $vr9, 10" ::: "$vr9"); \
  asm volatile ("vldi $vr10, 11" ::: "$vr10"); \
  asm volatile ("vldi $vr11, 12" ::: "$vr11"); \
  asm volatile ("vldi $vr12, 13" ::: "$vr12"); \
  asm volatile ("vldi $vr13, 14" ::: "$vr13"); \
  asm volatile ("vldi $vr14, 15" ::: "$vr14"); \
  asm volatile ("vldi $vr15, 16" ::: "$vr15"); \
  asm volatile ("vldi $vr16, 17" ::: "$vr16"); \
  asm volatile ("vldi $vr17, 18" ::: "$vr17"); \
  asm volatile ("vldi $vr18, 19" ::: "$vr18"); \
  asm volatile ("vldi $vr19, 20" ::: "$vr19"); \
  asm volatile ("vldi $vr20, 21" ::: "$vr20"); \
  asm volatile ("vldi $vr21, 22" ::: "$vr21"); \
  asm volatile ("vldi $vr22, 23" ::: "$vr22"); \
  asm volatile ("vldi $vr23, 24" ::: "$vr23"); \
  asm volatile ("vldi $vr24, 25" ::: "$vr24"); \
  asm volatile ("vldi $vr25, 26" ::: "$vr25"); \
  asm volatile ("vldi $vr26, 27" ::: "$vr26"); \
  asm volatile ("vldi $vr27, 28" ::: "$vr27"); \
  asm volatile ("vldi $vr28, 29" ::: "$vr28"); \
  asm volatile ("vldi $vr29, 30" ::: "$vr29"); \
  asm volatile ("vldi $vr30, 31" ::: "$vr30"); \
  asm volatile ("vldi $vr31, 32" ::: "$vr31");
#else
#define LOAD_REGISTER_LSX()
#endif
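
/* For the immediates used here, vldi replicates the low byte of the
   immediate into every byte of the 128-bit register, so $vrN holds the
   word pattern 0x01010101 * (N + 1), which is exactly what the check loop
   in SAVE_REGISTER_LSX expects.  */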

#ifdef HAVE_LOONGARCH_VEC_COM
#define SAVE_REGISTER_LSX() \
  int src_lsx[32][4]; \
  int restore_lsx[32][4]; \
  asm volatile ("vst $vr0, %0" :"=m"(restore_lsx[0]) :); \
  asm volatile ("vst $vr1, %0" :"=m"(restore_lsx[1]) :); \
  asm volatile ("vst $vr2, %0" :"=m"(restore_lsx[2]) :); \
  asm volatile ("vst $vr3, %0" :"=m"(restore_lsx[3]) :); \
  asm volatile ("vst $vr4, %0" :"=m"(restore_lsx[4]) :); \
  asm volatile ("vst $vr5, %0" :"=m"(restore_lsx[5]) :); \
  asm volatile ("vst $vr6, %0" :"=m"(restore_lsx[6]) :); \
  asm volatile ("vst $vr7, %0" :"=m"(restore_lsx[7]) :); \
  asm volatile ("vst $vr8, %0" :"=m"(restore_lsx[8]) :); \
  asm volatile ("vst $vr9, %0" :"=m"(restore_lsx[9]) :); \
  asm volatile ("vst $vr10, %0" :"=m"(restore_lsx[10]) :); \
  asm volatile ("vst $vr11, %0" :"=m"(restore_lsx[11]) :); \
  asm volatile ("vst $vr12, %0" :"=m"(restore_lsx[12]) :); \
  asm volatile ("vst $vr13, %0" :"=m"(restore_lsx[13]) :); \
  asm volatile ("vst $vr14, %0" :"=m"(restore_lsx[14]) :); \
  asm volatile ("vst $vr15, %0" :"=m"(restore_lsx[15]) :); \
  asm volatile ("vst $vr16, %0" :"=m"(restore_lsx[16]) :); \
  asm volatile ("vst $vr17, %0" :"=m"(restore_lsx[17]) :); \
  asm volatile ("vst $vr18, %0" :"=m"(restore_lsx[18]) :); \
  asm volatile ("vst $vr19, %0" :"=m"(restore_lsx[19]) :); \
  asm volatile ("vst $vr20, %0" :"=m"(restore_lsx[20]) :); \
  asm volatile ("vst $vr21, %0" :"=m"(restore_lsx[21]) :); \
  asm volatile ("vst $vr22, %0" :"=m"(restore_lsx[22]) :); \
  asm volatile ("vst $vr23, %0" :"=m"(restore_lsx[23]) :); \
  asm volatile ("vst $vr24, %0" :"=m"(restore_lsx[24]) :); \
  asm volatile ("vst $vr25, %0" :"=m"(restore_lsx[25]) :); \
  asm volatile ("vst $vr26, %0" :"=m"(restore_lsx[26]) :); \
  asm volatile ("vst $vr27, %0" :"=m"(restore_lsx[27]) :); \
  asm volatile ("vst $vr28, %0" :"=m"(restore_lsx[28]) :); \
  asm volatile ("vst $vr29, %0" :"=m"(restore_lsx[29]) :); \
  asm volatile ("vst $vr30, %0" :"=m"(restore_lsx[30]) :); \
  asm volatile ("vst $vr31, %0" :"=m"(restore_lsx[31]) :); \
  for (int i = 0; i < 32; i++) \
    for (int j = 0; j < 4; j++) \
      { \
        src_lsx[i][j] = 0x01010101 * (i + 1); \
        if (src_lsx[i][j] != restore_lsx[i][j]) \
          abort (); \
      }
#else
#define SAVE_REGISTER_LSX()
#endif

#ifdef HAVE_LOONGARCH_VEC_COM
#define LOAD_REGISTER_LASX() \
  /* Every byte in $xr0 is 1.  */ \
  asm volatile ("xvldi $xr0, 1" ::: "$xr0"); \
  asm volatile ("xvldi $xr1, 2" ::: "$xr1"); \
  asm volatile ("xvldi $xr2, 3" ::: "$xr2"); \
  asm volatile ("xvldi $xr3, 4" ::: "$xr3"); \
  asm volatile ("xvldi $xr4, 5" ::: "$xr4"); \
  asm volatile ("xvldi $xr5, 6" ::: "$xr5"); \
  asm volatile ("xvldi $xr6, 7" ::: "$xr6"); \
  asm volatile ("xvldi $xr7, 8" ::: "$xr7"); \
  asm volatile ("xvldi $xr8, 9" ::: "$xr8"); \
  asm volatile ("xvldi $xr9, 10" ::: "$xr9"); \
  asm volatile ("xvldi $xr10, 11" ::: "$xr10"); \
  asm volatile ("xvldi $xr11, 12" ::: "$xr11"); \
  asm volatile ("xvldi $xr12, 13" ::: "$xr12"); \
  asm volatile ("xvldi $xr13, 14" ::: "$xr13"); \
  asm volatile ("xvldi $xr14, 15" ::: "$xr14"); \
  asm volatile ("xvldi $xr15, 16" ::: "$xr15"); \
  asm volatile ("xvldi $xr16, 17" ::: "$xr16"); \
  asm volatile ("xvldi $xr17, 18" ::: "$xr17"); \
  asm volatile ("xvldi $xr18, 19" ::: "$xr18"); \
  asm volatile ("xvldi $xr19, 20" ::: "$xr19"); \
  asm volatile ("xvldi $xr20, 21" ::: "$xr20"); \
  asm volatile ("xvldi $xr21, 22" ::: "$xr21"); \
  asm volatile ("xvldi $xr22, 23" ::: "$xr22"); \
  asm volatile ("xvldi $xr23, 24" ::: "$xr23"); \
  asm volatile ("xvldi $xr24, 25" ::: "$xr24"); \
  asm volatile ("xvldi $xr25, 26" ::: "$xr25"); \
  asm volatile ("xvldi $xr26, 27" ::: "$xr26"); \
  asm volatile ("xvldi $xr27, 28" ::: "$xr27"); \
  asm volatile ("xvldi $xr28, 29" ::: "$xr28"); \
  asm volatile ("xvldi $xr29, 30" ::: "$xr29"); \
  asm volatile ("xvldi $xr30, 31" ::: "$xr30"); \
  asm volatile ("xvldi $xr31, 32" ::: "$xr31");
#else
#define LOAD_REGISTER_LASX()
#endif
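
/* xvldi mirrors the LSX setup for the 256-bit LASX registers: $xrN is
   filled with the same byte pattern N + 1 that the check loop in
   SAVE_REGISTER_LASX verifies.  */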

#ifdef HAVE_LOONGARCH_VEC_COM
#define SAVE_REGISTER_LASX() \
  int src_lasx[32][8]; \
  int restore_lasx[32][8]; \
  asm volatile ("xvst $xr0, %0" :"=m"(restore_lasx[0]) :); \
  asm volatile ("xvst $xr1, %0" :"=m"(restore_lasx[1]) :); \
  asm volatile ("xvst $xr2, %0" :"=m"(restore_lasx[2]) :); \
  asm volatile ("xvst $xr3, %0" :"=m"(restore_lasx[3]) :); \
  asm volatile ("xvst $xr4, %0" :"=m"(restore_lasx[4]) :); \
  asm volatile ("xvst $xr5, %0" :"=m"(restore_lasx[5]) :); \
  asm volatile ("xvst $xr6, %0" :"=m"(restore_lasx[6]) :); \
  asm volatile ("xvst $xr7, %0" :"=m"(restore_lasx[7]) :); \
  asm volatile ("xvst $xr8, %0" :"=m"(restore_lasx[8]) :); \
  asm volatile ("xvst $xr9, %0" :"=m"(restore_lasx[9]) :); \
  asm volatile ("xvst $xr10, %0" :"=m"(restore_lasx[10]) :); \
  asm volatile ("xvst $xr11, %0" :"=m"(restore_lasx[11]) :); \
  asm volatile ("xvst $xr12, %0" :"=m"(restore_lasx[12]) :); \
  asm volatile ("xvst $xr13, %0" :"=m"(restore_lasx[13]) :); \
  asm volatile ("xvst $xr14, %0" :"=m"(restore_lasx[14]) :); \
  asm volatile ("xvst $xr15, %0" :"=m"(restore_lasx[15]) :); \
  asm volatile ("xvst $xr16, %0" :"=m"(restore_lasx[16]) :); \
  asm volatile ("xvst $xr17, %0" :"=m"(restore_lasx[17]) :); \
  asm volatile ("xvst $xr18, %0" :"=m"(restore_lasx[18]) :); \
  asm volatile ("xvst $xr19, %0" :"=m"(restore_lasx[19]) :); \
  asm volatile ("xvst $xr20, %0" :"=m"(restore_lasx[20]) :); \
  asm volatile ("xvst $xr21, %0" :"=m"(restore_lasx[21]) :); \
  asm volatile ("xvst $xr22, %0" :"=m"(restore_lasx[22]) :); \
  asm volatile ("xvst $xr23, %0" :"=m"(restore_lasx[23]) :); \
  asm volatile ("xvst $xr24, %0" :"=m"(restore_lasx[24]) :); \
  asm volatile ("xvst $xr25, %0" :"=m"(restore_lasx[25]) :); \
  asm volatile ("xvst $xr26, %0" :"=m"(restore_lasx[26]) :); \
  asm volatile ("xvst $xr27, %0" :"=m"(restore_lasx[27]) :); \
  asm volatile ("xvst $xr28, %0" :"=m"(restore_lasx[28]) :); \
  asm volatile ("xvst $xr29, %0" :"=m"(restore_lasx[29]) :); \
  asm volatile ("xvst $xr30, %0" :"=m"(restore_lasx[30]) :); \
  asm volatile ("xvst $xr31, %0" :"=m"(restore_lasx[31]) :); \
  for (int i = 0; i < 32; i++) \
    for (int j = 0; j < 8; j++) \
      { \
        src_lasx[i][j] = 0x01010101 * (i + 1); \
        if (src_lasx[i][j] != restore_lasx[i][j]) \
          abort (); \
      }
#else
#define SAVE_REGISTER_LASX()
#endif

#define BEFORE_TLSDESC_CALL() \
  uint64_t src; \
  double src_float[32]; \
  uint64_t src_fcc[8]; \
  for (int i = 0; i < 32; i++) \
    src_float[i] = i + 1; \
  SAVE_REGISTER (src); \
  LOAD_REGISTER_FCSR (); \
  SAVE_REGISTER_FCC(src_fcc) \
  if (hwcap & HWCAP_LOONGARCH_LASX) \
    { \
      LOAD_REGISTER_LASX (); \
    } \
  else if (hwcap & HWCAP_LOONGARCH_LSX) \
    { \
      LOAD_REGISTER_LSX (); \
    } \
  else \
    { \
      LOAD_REGISTER_FLOAT (); \
    }

#define AFTER_TLSDESC_CALL() \
  uint64_t restore; \
  uint64_t src_fcsr = 0x01010101; \
  uint64_t restore_fcsr; \
  uint64_t restore_fcc[8]; \
  SAVE_REGISTER (restore); \
  SAVE_REGISTER_FCSR (); \
  SAVE_REGISTER_FCC(restore_fcc) \
  /* memcmp_lasx/strlen_lasx corrupt LSX/LASX registers,  */ \
  /* so compare the LSX/LASX registers first.  */ \
  if (hwcap & HWCAP_LOONGARCH_LASX) \
    { \
      SAVE_REGISTER_LASX (); \
    } \
  else if (hwcap & HWCAP_LOONGARCH_LSX) \
    { \
      SAVE_REGISTER_LSX (); \
    } \
  else \
    { \
      double restore_float[32]; \
      SAVE_REGISTER_FLOAT (); \
      for (int i = 0; i < 32; i++) \
        if (src_float[i] != restore_float[i]) \
          abort (); \
    } \
  if (src_fcsr != restore_fcsr) \
    abort (); \
  if (memcmp (src_fcc, restore_fcc, sizeof (src_fcc)) != 0) \
    abort (); \
  if (src != restore) \
    abort ();

#endif /* #ifdef __loongarch_soft_float */

#include_next <tst-gnu2-tls2.h>