Replace FSF snail mail address with URLs.
[glibc.git] / elf / dynamic-link.h
blobaa7122738b2fdd977f39ff4e4fea5f8f883e3a57
/* Inline functions for dynamic linking.
   Copyright (C) 1995-2005,2006,2008,2011 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, see
   <http://www.gnu.org/licenses/>.  */
/* This macro is used as a callback from elf_machine_rel{a,} when a
   static TLS reloc is about to be performed.  Since (in dl-load.c) we
   permit dynamic loading of objects that might use such relocs, we
   have to check whether each use is actually doable.  If the object
   whose TLS segment the reference resolves to was allocated space in
   the static TLS block at startup, then it's ok.  Otherwise, we make
   an attempt to allocate it in surplus space on the fly.  If that
   can't be done, we fall back to the error that DF_STATIC_TLS is
   intended to produce.  */
#define CHECK_STATIC_TLS(map, sym_map)					      \
    do {								      \
      if (__builtin_expect ((sym_map)->l_tls_offset == NO_TLS_OFFSET	      \
			    || ((sym_map)->l_tls_offset			      \
				== FORCED_DYNAMIC_TLS_OFFSET), 0))	      \
	_dl_allocate_static_tls (sym_map);				      \
    } while (0)
36 #define TRY_STATIC_TLS(map, sym_map) \
37 (__builtin_expect ((sym_map)->l_tls_offset \
38 != FORCED_DYNAMIC_TLS_OFFSET, 1) \
39 && (__builtin_expect ((sym_map)->l_tls_offset != NO_TLS_OFFSET, 1) \
40 || _dl_try_allocate_static_tls (sym_map) == 0))
42 int internal_function _dl_try_allocate_static_tls (struct link_map *map);
#include <elf.h>
#include <assert.h>

#ifdef RESOLVE_MAP

/* Prototypes for the machine-specific relocation workers defined by
   <dl-machine.h>; only wanted when this header is included with a
   RESOLVE_MAP definition (i.e. from the relocation engine itself).

   We pass reloc_addr as a pointer to void, as opposed to a pointer to
   ElfW(Addr), because not all architectures can assume that the
   relocated address is properly aligned, whereas the compiler is
   entitled to assume that a pointer to a type is properly aligned for
   the type.  Even if we cast the pointer back to some other type with
   less strict alignment requirements, the compiler might still
   remember that the pointer was originally more aligned, thereby
   optimizing away alignment tests or using word instructions for
   copying memory, breaking the very code written to handle the
   unaligned cases.  */

/* REL-style (implicit addend) relocations.  */
# if ! ELF_MACHINE_NO_REL
auto inline void __attribute__((always_inline))
elf_machine_rel (struct link_map *map, const ElfW(Rel) *reloc,
		 const ElfW(Sym) *sym, const struct r_found_version *version,
		 void *const reloc_addr, int skip_ifunc);
auto inline void __attribute__((always_inline))
elf_machine_rel_relative (ElfW(Addr) l_addr, const ElfW(Rel) *reloc,
			  void *const reloc_addr);
# endif

/* RELA-style (explicit addend) relocations.  */
# if ! ELF_MACHINE_NO_RELA
auto inline void __attribute__((always_inline))
elf_machine_rela (struct link_map *map, const ElfW(Rela) *reloc,
		  const ElfW(Sym) *sym, const struct r_found_version *version,
		  void *const reloc_addr, int skip_ifunc);
auto inline void __attribute__((always_inline))
elf_machine_rela_relative (ElfW(Addr) l_addr, const ElfW(Rela) *reloc,
			   void *const reloc_addr);
# endif

/* Lazy PLT relocation; the entry type follows the flavor the machine
   uses for its PLT (REL when RELA is unavailable or the PLT is
   explicitly REL, RELA otherwise).  */
# if ELF_MACHINE_NO_RELA || defined ELF_MACHINE_PLT_REL
auto inline void __attribute__((always_inline))
elf_machine_lazy_rel (struct link_map *map,
		      ElfW(Addr) l_addr, const ElfW(Rel) *reloc,
		      int skip_ifunc);
# else
auto inline void __attribute__((always_inline))
elf_machine_lazy_rel (struct link_map *map,
		      ElfW(Addr) l_addr, const ElfW(Rela) *reloc,
		      int skip_ifunc);
# endif

#endif
89 #include <dl-machine.h>
91 #ifndef VERSYMIDX
92 # define VERSYMIDX(sym) (DT_NUM + DT_THISPROCNUM + DT_VERSIONTAGIDX (sym))
93 #endif
96 /* Read the dynamic section at DYN and fill in INFO with indices DT_*. */
97 #ifndef RESOLVE_MAP
98 static
99 #else
100 auto
101 #endif
102 inline void __attribute__ ((unused, always_inline))
103 elf_get_dynamic_info (struct link_map *l, ElfW(Dyn) *temp)
105 ElfW(Dyn) *dyn = l->l_ld;
106 ElfW(Dyn) **info;
107 #if __ELF_NATIVE_CLASS == 32
108 typedef Elf32_Word d_tag_utype;
109 #elif __ELF_NATIVE_CLASS == 64
110 typedef Elf64_Xword d_tag_utype;
111 #endif
113 #ifndef RTLD_BOOTSTRAP
114 if (dyn == NULL)
115 return;
116 #endif
118 info = l->l_info;
120 while (dyn->d_tag != DT_NULL)
122 if ((d_tag_utype) dyn->d_tag < DT_NUM)
123 info[dyn->d_tag] = dyn;
124 else if (dyn->d_tag >= DT_LOPROC &&
125 dyn->d_tag < DT_LOPROC + DT_THISPROCNUM)
126 info[dyn->d_tag - DT_LOPROC + DT_NUM] = dyn;
127 else if ((d_tag_utype) DT_VERSIONTAGIDX (dyn->d_tag) < DT_VERSIONTAGNUM)
128 info[VERSYMIDX (dyn->d_tag)] = dyn;
129 else if ((d_tag_utype) DT_EXTRATAGIDX (dyn->d_tag) < DT_EXTRANUM)
130 info[DT_EXTRATAGIDX (dyn->d_tag) + DT_NUM + DT_THISPROCNUM
131 + DT_VERSIONTAGNUM] = dyn;
132 else if ((d_tag_utype) DT_VALTAGIDX (dyn->d_tag) < DT_VALNUM)
133 info[DT_VALTAGIDX (dyn->d_tag) + DT_NUM + DT_THISPROCNUM
134 + DT_VERSIONTAGNUM + DT_EXTRANUM] = dyn;
135 else if ((d_tag_utype) DT_ADDRTAGIDX (dyn->d_tag) < DT_ADDRNUM)
136 info[DT_ADDRTAGIDX (dyn->d_tag) + DT_NUM + DT_THISPROCNUM
137 + DT_VERSIONTAGNUM + DT_EXTRANUM + DT_VALNUM] = dyn;
138 ++dyn;
141 #define DL_RO_DYN_TEMP_CNT 8
143 #ifndef DL_RO_DYN_SECTION
144 /* Don't adjust .dynamic unnecessarily. */
145 if (l->l_addr != 0)
147 ElfW(Addr) l_addr = l->l_addr;
148 int cnt = 0;
150 # define ADJUST_DYN_INFO(tag) \
151 do \
152 if (info[tag] != NULL) \
154 if (temp) \
156 temp[cnt].d_tag = info[tag]->d_tag; \
157 temp[cnt].d_un.d_ptr = info[tag]->d_un.d_ptr + l_addr; \
158 info[tag] = temp + cnt++; \
160 else \
161 info[tag]->d_un.d_ptr += l_addr; \
163 while (0)
165 ADJUST_DYN_INFO (DT_HASH);
166 ADJUST_DYN_INFO (DT_PLTGOT);
167 ADJUST_DYN_INFO (DT_STRTAB);
168 ADJUST_DYN_INFO (DT_SYMTAB);
169 # if ! ELF_MACHINE_NO_RELA
170 ADJUST_DYN_INFO (DT_RELA);
171 # endif
172 # if ! ELF_MACHINE_NO_REL
173 ADJUST_DYN_INFO (DT_REL);
174 # endif
175 ADJUST_DYN_INFO (DT_JMPREL);
176 ADJUST_DYN_INFO (VERSYMIDX (DT_VERSYM));
177 ADJUST_DYN_INFO (DT_ADDRTAGIDX (DT_GNU_HASH) + DT_NUM + DT_THISPROCNUM
178 + DT_VERSIONTAGNUM + DT_EXTRANUM + DT_VALNUM);
179 # undef ADJUST_DYN_INFO
180 assert (cnt <= DL_RO_DYN_TEMP_CNT);
182 #endif
183 if (info[DT_PLTREL] != NULL)
185 #if ELF_MACHINE_NO_RELA
186 assert (info[DT_PLTREL]->d_un.d_val == DT_REL);
187 #elif ELF_MACHINE_NO_REL
188 assert (info[DT_PLTREL]->d_un.d_val == DT_RELA);
189 #else
190 assert (info[DT_PLTREL]->d_un.d_val == DT_REL
191 || info[DT_PLTREL]->d_un.d_val == DT_RELA);
192 #endif
194 #if ! ELF_MACHINE_NO_RELA
195 if (info[DT_RELA] != NULL)
196 assert (info[DT_RELAENT]->d_un.d_val == sizeof (ElfW(Rela)));
197 # endif
198 # if ! ELF_MACHINE_NO_REL
199 if (info[DT_REL] != NULL)
200 assert (info[DT_RELENT]->d_un.d_val == sizeof (ElfW(Rel)));
201 #endif
202 #ifdef RTLD_BOOTSTRAP
203 /* Only the bind now flags are allowed. */
204 assert (info[VERSYMIDX (DT_FLAGS_1)] == NULL
205 || (info[VERSYMIDX (DT_FLAGS_1)]->d_un.d_val & ~DF_1_NOW) == 0);
206 assert (info[DT_FLAGS] == NULL
207 || (info[DT_FLAGS]->d_un.d_val & ~DF_BIND_NOW) == 0);
208 /* Flags must not be set for ld.so. */
209 assert (info[DT_RUNPATH] == NULL);
210 assert (info[DT_RPATH] == NULL);
211 #else
212 if (info[DT_FLAGS] != NULL)
214 /* Flags are used. Translate to the old form where available.
215 Since these l_info entries are only tested for NULL pointers it
216 is ok if they point to the DT_FLAGS entry. */
217 l->l_flags = info[DT_FLAGS]->d_un.d_val;
219 if (l->l_flags & DF_SYMBOLIC)
220 info[DT_SYMBOLIC] = info[DT_FLAGS];
221 if (l->l_flags & DF_TEXTREL)
222 info[DT_TEXTREL] = info[DT_FLAGS];
223 if (l->l_flags & DF_BIND_NOW)
224 info[DT_BIND_NOW] = info[DT_FLAGS];
226 if (info[VERSYMIDX (DT_FLAGS_1)] != NULL)
228 l->l_flags_1 = info[VERSYMIDX (DT_FLAGS_1)]->d_un.d_val;
230 if (l->l_flags_1 & DF_1_NOW)
231 info[DT_BIND_NOW] = info[VERSYMIDX (DT_FLAGS_1)];
233 if (info[DT_RUNPATH] != NULL)
234 /* If both RUNPATH and RPATH are given, the latter is ignored. */
235 info[DT_RPATH] = NULL;
236 #endif
239 #ifdef RESOLVE_MAP
241 # ifdef RTLD_BOOTSTRAP
242 # define ELF_DURING_STARTUP (1)
243 # else
244 # define ELF_DURING_STARTUP (0)
245 # endif
247 /* Get the definitions of `elf_dynamic_do_rel' and `elf_dynamic_do_rela'.
248 These functions are almost identical, so we use cpp magic to avoid
249 duplicating their code. It cannot be done in a more general function
250 because we must be able to completely inline. */
252 /* On some machines, notably SPARC, DT_REL* includes DT_JMPREL in its
253 range. Note that according to the ELF spec, this is completely legal!
254 But conditionally define things so that on machines we know this will
255 not happen we do something more optimal. */
257 # ifdef ELF_MACHINE_PLTREL_OVERLAP
258 # define _ELF_DYNAMIC_DO_RELOC(RELOC, reloc, map, do_lazy, skip_ifunc, test_rel) \
259 do { \
260 struct { ElfW(Addr) start, size; \
261 __typeof (((ElfW(Dyn) *) 0)->d_un.d_val) nrelative; int lazy; } \
262 ranges[3]; \
263 int ranges_index; \
265 ranges[0].lazy = ranges[2].lazy = 0; \
266 ranges[1].lazy = 1; \
267 ranges[0].size = ranges[1].size = ranges[2].size = 0; \
268 ranges[0].nrelative = ranges[1].nrelative = ranges[2].nrelative = 0; \
270 if ((map)->l_info[DT_##RELOC]) \
272 ranges[0].start = D_PTR ((map), l_info[DT_##RELOC]); \
273 ranges[0].size = (map)->l_info[DT_##RELOC##SZ]->d_un.d_val; \
274 if (map->l_info[VERSYMIDX (DT_##RELOC##COUNT)] != NULL) \
275 ranges[0].nrelative \
276 = MIN (map->l_info[VERSYMIDX (DT_##RELOC##COUNT)]->d_un.d_val, \
277 ranges[0].size / sizeof (ElfW(reloc))); \
280 if ((do_lazy) \
281 && (map)->l_info[DT_PLTREL] \
282 && (!test_rel || (map)->l_info[DT_PLTREL]->d_un.d_val == DT_##RELOC)) \
284 ranges[1].start = D_PTR ((map), l_info[DT_JMPREL]); \
285 ranges[1].size = (map)->l_info[DT_PLTRELSZ]->d_un.d_val; \
286 ranges[2].start = ranges[1].start + ranges[1].size; \
287 ranges[2].size = ranges[0].start + ranges[0].size - ranges[2].start; \
288 ranges[0].size = ranges[1].start - ranges[0].start; \
291 for (ranges_index = 0; ranges_index < 3; ++ranges_index) \
292 elf_dynamic_do_##reloc ((map), \
293 ranges[ranges_index].start, \
294 ranges[ranges_index].size, \
295 ranges[ranges_index].nrelative, \
296 ranges[ranges_index].lazy, \
297 skip_ifunc); \
298 } while (0)
299 # else
300 # define _ELF_DYNAMIC_DO_RELOC(RELOC, reloc, map, do_lazy, skip_ifunc, test_rel) \
301 do { \
302 struct { ElfW(Addr) start, size; \
303 __typeof (((ElfW(Dyn) *) 0)->d_un.d_val) nrelative; int lazy; } \
304 ranges[2] = { { 0, 0, 0, 0 }, { 0, 0, 0, 0 } }; \
306 if ((map)->l_info[DT_##RELOC]) \
308 ranges[0].start = D_PTR ((map), l_info[DT_##RELOC]); \
309 ranges[0].size = (map)->l_info[DT_##RELOC##SZ]->d_un.d_val; \
310 if (map->l_info[VERSYMIDX (DT_##RELOC##COUNT)] != NULL) \
311 ranges[0].nrelative \
312 = MIN (map->l_info[VERSYMIDX (DT_##RELOC##COUNT)]->d_un.d_val, \
313 ranges[0].size / sizeof (ElfW(reloc))); \
315 if ((map)->l_info[DT_PLTREL] \
316 && (!test_rel || (map)->l_info[DT_PLTREL]->d_un.d_val == DT_##RELOC)) \
318 ElfW(Addr) start = D_PTR ((map), l_info[DT_JMPREL]); \
320 if (! ELF_DURING_STARTUP \
321 && ((do_lazy) \
322 /* This test does not only detect whether the relocation \
323 sections are in the right order, it also checks whether \
324 there is a DT_REL/DT_RELA section. */ \
325 || __builtin_expect (ranges[0].start + ranges[0].size \
326 != start, 0))) \
328 ranges[1].start = start; \
329 ranges[1].size = (map)->l_info[DT_PLTRELSZ]->d_un.d_val; \
330 ranges[1].lazy = (do_lazy); \
332 else \
334 /* Combine processing the sections. */ \
335 assert (ranges[0].start + ranges[0].size == start); \
336 ranges[0].size += (map)->l_info[DT_PLTRELSZ]->d_un.d_val; \
340 if (ELF_DURING_STARTUP) \
341 elf_dynamic_do_##reloc ((map), ranges[0].start, ranges[0].size, \
342 ranges[0].nrelative, 0, skip_ifunc); \
343 else \
345 int ranges_index; \
346 for (ranges_index = 0; ranges_index < 2; ++ranges_index) \
347 elf_dynamic_do_##reloc ((map), \
348 ranges[ranges_index].start, \
349 ranges[ranges_index].size, \
350 ranges[ranges_index].nrelative, \
351 ranges[ranges_index].lazy, \
352 skip_ifunc); \
354 } while (0)
355 # endif
357 # if ELF_MACHINE_NO_REL || ELF_MACHINE_NO_RELA
358 # define _ELF_CHECK_REL 0
359 # else
360 # define _ELF_CHECK_REL 1
361 # endif
363 # if ! ELF_MACHINE_NO_REL
364 # include "do-rel.h"
365 # define ELF_DYNAMIC_DO_REL(map, lazy, skip_ifunc) \
366 _ELF_DYNAMIC_DO_RELOC (REL, Rel, map, lazy, skip_ifunc, _ELF_CHECK_REL)
367 # else
368 # define ELF_DYNAMIC_DO_REL(map, lazy, skip_ifunc) /* Nothing to do. */
369 # endif
371 # if ! ELF_MACHINE_NO_RELA
372 # define DO_RELA
373 # include "do-rel.h"
374 # define ELF_DYNAMIC_DO_RELA(map, lazy, skip_ifunc) \
375 _ELF_DYNAMIC_DO_RELOC (RELA, Rela, map, lazy, skip_ifunc, _ELF_CHECK_REL)
376 # else
377 # define ELF_DYNAMIC_DO_RELA(map, lazy, skip_ifunc) /* Nothing to do. */
378 # endif
380 /* This can't just be an inline function because GCC is too dumb
381 to inline functions containing inlines themselves. */
382 # define ELF_DYNAMIC_RELOCATE(map, lazy, consider_profile, skip_ifunc) \
383 do { \
384 int edr_lazy = elf_machine_runtime_setup ((map), (lazy), \
385 (consider_profile)); \
386 ELF_DYNAMIC_DO_REL ((map), edr_lazy, skip_ifunc); \
387 ELF_DYNAMIC_DO_RELA ((map), edr_lazy, skip_ifunc); \
388 } while (0)
390 #endif