/* Malloc debug DSO.
   Copyright (C) 2021-2023 Free Software Foundation, Inc.
   Copyright The GNU Toolchain Authors.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public License as
   published by the Free Software Foundation; either version 2.1 of the
   License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; see the file COPYING.LIB.  If
   not, see <https://www.gnu.org/licenses/>.  */

#include <atomic.h>
#include <libc-symbols.h>
#include <shlib-compat.h>
#include <string.h>
#include <unistd.h>
#include <sys/param.h>

/* Support only the glibc allocators.  */
extern void *__libc_malloc (size_t);
extern void __libc_free (void *);
extern void *__libc_realloc (void *, size_t);
extern void *__libc_memalign (size_t, size_t);
extern void *__libc_valloc (size_t);
extern void *__libc_pvalloc (size_t);
extern void *__libc_calloc (size_t, size_t);
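
/* Declare a static __debug_* variant with the same prototype as the
   corresponding __libc_* allocator.  */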
#define DEBUG_FN(fn) \
  static __typeof (__libc_ ## fn) __debug_ ## fn

DEBUG_FN(malloc);
DEBUG_FN(free);
DEBUG_FN(realloc);
DEBUG_FN(memalign);
DEBUG_FN(valloc);
DEBUG_FN(pvalloc);
DEBUG_FN(calloc);
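
/* -1 until the first allocation call, 0 while generic_hook_ini is running,
   1 once initialization has completed.  */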
static int debug_initialized = -1;

enum malloc_debug_hooks
{
  MALLOC_NONE_HOOK = 0,
  MALLOC_MCHECK_HOOK = 1 << 0,  /* mcheck()  */
  MALLOC_MTRACE_HOOK = 1 << 1,  /* mtrace()  */
  MALLOC_CHECK_HOOK = 1 << 2,   /* MALLOC_CHECK_ or glibc.malloc.check.  */
};
static unsigned __malloc_debugging_hooks;

static __always_inline bool
__is_malloc_debug_enabled (enum malloc_debug_hooks flag)
{
  return __malloc_debugging_hooks & flag;
}

static __always_inline void
__malloc_debug_enable (enum malloc_debug_hooks flag)
{
  __malloc_debugging_hooks |= flag;
}

static __always_inline void
__malloc_debug_disable (enum malloc_debug_hooks flag)
{
  __malloc_debugging_hooks &= ~flag;
}
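
/* The mcheck, mtrace and MALLOC_CHECK_ implementations are compiled
   directly into this DSO.  */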
76 #include "mcheck.c"
77 #include "mtrace.c"
78 #include "malloc-check.c"
80 #if SHLIB_COMPAT (libc_malloc_debug, GLIBC_2_0, GLIBC_2_24)
81 extern void (*__malloc_initialize_hook) (void);
82 compat_symbol_reference (libc, __malloc_initialize_hook,
83 __malloc_initialize_hook, GLIBC_2_0);
84 #endif

static void *malloc_hook_ini (size_t, const void *) __THROW;
static void *realloc_hook_ini (void *, size_t, const void *) __THROW;
static void *memalign_hook_ini (size_t, size_t, const void *) __THROW;

void (*__free_hook) (void *, const void *) = NULL;
void *(*__malloc_hook) (size_t, const void *) = malloc_hook_ini;
void *(*__realloc_hook) (void *, size_t, const void *) = realloc_hook_ini;
void *(*__memalign_hook) (size_t, size_t, const void *) = memalign_hook_ini;

/* Hooks for debugging versions.  The initial hooks just call the
   initialization routine, then do the normal work.  */

/* These hooks will get executed only through the interposed allocator
   functions in libc_malloc_debug.so.  This means that the calls to malloc,
   realloc, etc. will lead back into the interposed functions, which is what
   we want.

   These initial hooks are assumed to be called in a single-threaded context,
   so it is safe to reset all hooks at once upon initialization.  */

static void
generic_hook_ini (void)
{
  debug_initialized = 0;
  __malloc_hook = NULL;
  __realloc_hook = NULL;
  __memalign_hook = NULL;

  /* malloc check does not quite co-exist with libc malloc, so initialize
     either one or the other.  */
  if (!initialize_malloc_check ())
    /* The compiler does not know that these functions are allocators, so it
       will not try to optimize the call away.  */
    __libc_free (__libc_malloc (0));

#if SHLIB_COMPAT (libc_malloc_debug, GLIBC_2_0, GLIBC_2_24)
  void (*hook) (void) = __malloc_initialize_hook;
  if (hook != NULL)
    (*hook)();
#endif

  debug_initialized = 1;
}

static void *
malloc_hook_ini (size_t sz, const void *caller)
{
  generic_hook_ini ();
  return __debug_malloc (sz);
}

static void *
realloc_hook_ini (void *ptr, size_t sz, const void *caller)
{
  generic_hook_ini ();
  return __debug_realloc (ptr, sz);
}

static void *
memalign_hook_ini (size_t alignment, size_t sz, const void *caller)
{
  generic_hook_ini ();
  return __debug_memalign (alignment, sz);
}
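
/* Page size as reported by sysconf (_SC_PAGESIZE), cached lazily by
   valloc and pvalloc.  */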
static size_t pagesize;

/* These variables are used for undumping support.  Chunks are marked
   as using mmap, but we leave them alone if they fall into this
   range.  NB: The chunk size for these chunks only includes the
   initial size field (of SIZE_SZ bytes), there is no trailing size
   field (unlike with regular mmapped chunks).  */
static mchunkptr dumped_main_arena_start; /* Inclusive.  */
static mchunkptr dumped_main_arena_end;   /* Exclusive.  */

/* True if the pointer falls into the dumped arena.  Use this after
   chunk_is_mmapped indicates a chunk is mmapped.  */
#define DUMPED_MAIN_ARENA_CHUNK(p) \
  ((p) >= dumped_main_arena_start && (p) < dumped_main_arena_end)

/* The allocator functions.  */
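
/* Each of the following wrappers follows the same pattern: honor a
   user-installed hook if one is set, run the mcheck pre-processing step,
   dispatch to either malloc-check or the libc allocator, and finally apply
   the mcheck and mtrace post-processing steps.  */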
static void *
__debug_malloc (size_t bytes)
{
  void *(*hook) (size_t, const void *) = atomic_forced_read (__malloc_hook);
  if (__builtin_expect (hook != NULL, 0))
    return (*hook)(bytes, RETURN_ADDRESS (0));

  void *victim = NULL;
  size_t orig_bytes = bytes;
  if ((!__is_malloc_debug_enabled (MALLOC_MCHECK_HOOK)
       || !malloc_mcheck_before (&bytes, &victim)))
    {
      victim = (__is_malloc_debug_enabled (MALLOC_CHECK_HOOK)
                ? malloc_check (bytes) : __libc_malloc (bytes));
    }
  if (__is_malloc_debug_enabled (MALLOC_MCHECK_HOOK) && victim != NULL)
    victim = malloc_mcheck_after (victim, orig_bytes);
  if (__is_malloc_debug_enabled (MALLOC_MTRACE_HOOK))
    malloc_mtrace_after (victim, orig_bytes, RETURN_ADDRESS (0));

  return victim;
}
strong_alias (__debug_malloc, malloc)

static void
__debug_free (void *mem)
{
  void (*hook) (void *, const void *) = atomic_forced_read (__free_hook);
  if (__builtin_expect (hook != NULL, 0))
    {
      (*hook)(mem, RETURN_ADDRESS (0));
      return;
    }

  if (__is_malloc_debug_enabled (MALLOC_MCHECK_HOOK))
    mem = free_mcheck (mem);

  if (DUMPED_MAIN_ARENA_CHUNK (mem2chunk (mem)))
    /* Do nothing.  */;
  else if (__is_malloc_debug_enabled (MALLOC_CHECK_HOOK))
    free_check (mem);
  else
    __libc_free (mem);
  if (__is_malloc_debug_enabled (MALLOC_MTRACE_HOOK))
    free_mtrace (mem, RETURN_ADDRESS (0));
}
strong_alias (__debug_free, free)

static void *
__debug_realloc (void *oldmem, size_t bytes)
{
  void *(*hook) (void *, size_t, const void *) =
    atomic_forced_read (__realloc_hook);
  if (__builtin_expect (hook != NULL, 0))
    return (*hook)(oldmem, bytes, RETURN_ADDRESS (0));

  size_t orig_bytes = bytes, oldsize = 0;
  void *victim = NULL;

  if ((!__is_malloc_debug_enabled (MALLOC_MCHECK_HOOK)
       || !realloc_mcheck_before (&oldmem, &bytes, &oldsize, &victim)))
    {
      mchunkptr oldp = mem2chunk (oldmem);

      /* If this is a faked mmapped chunk from the dumped main arena,
         always make a copy (and do not free the old chunk).  */
      if (DUMPED_MAIN_ARENA_CHUNK (oldp))
        {
          if (bytes == 0 && oldmem != NULL)
            victim = NULL;
          else
            {
              const INTERNAL_SIZE_T osize = chunksize (oldp);
              /* Must alloc, copy, free.  */
              victim = __debug_malloc (bytes);
              /* Copy as many bytes as are available from the old chunk
                 and fit into the new size.  NB: The overhead for faked
                 mmapped chunks is only SIZE_SZ, not CHUNK_HDR_SZ as for
                 regular mmapped chunks.  */
              if (victim != NULL)
                {
                  if (bytes > osize - SIZE_SZ)
                    bytes = osize - SIZE_SZ;
                  memcpy (victim, oldmem, bytes);
                }
            }
        }
      else if (__is_malloc_debug_enabled (MALLOC_CHECK_HOOK))
        victim = realloc_check (oldmem, bytes);
      else
        victim = __libc_realloc (oldmem, bytes);
    }
  if (__is_malloc_debug_enabled (MALLOC_MCHECK_HOOK) && victim != NULL)
    victim = realloc_mcheck_after (victim, oldmem, orig_bytes, oldsize);
  if (__is_malloc_debug_enabled (MALLOC_MTRACE_HOOK))
    realloc_mtrace_after (victim, oldmem, orig_bytes, RETURN_ADDRESS (0));

  return victim;
}
strong_alias (__debug_realloc, realloc)
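
/* Common worker for the aligned allocation entry points; ADDRESS is the
   caller's return address, which mtrace records.  */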
static void *
_debug_mid_memalign (size_t alignment, size_t bytes, const void *address)
{
  void *(*hook) (size_t, size_t, const void *) =
    atomic_forced_read (__memalign_hook);
  if (__builtin_expect (hook != NULL, 0))
    return (*hook)(alignment, bytes, address);

  void *victim = NULL;
  size_t orig_bytes = bytes;

  if ((!__is_malloc_debug_enabled (MALLOC_MCHECK_HOOK)
       || !memalign_mcheck_before (alignment, &bytes, &victim)))
    {
      victim = (__is_malloc_debug_enabled (MALLOC_CHECK_HOOK)
                ? memalign_check (alignment, bytes)
                : __libc_memalign (alignment, bytes));
    }
  if (__is_malloc_debug_enabled (MALLOC_MCHECK_HOOK) && victim != NULL)
    victim = memalign_mcheck_after (victim, alignment, orig_bytes);
  if (__is_malloc_debug_enabled (MALLOC_MTRACE_HOOK))
    memalign_mtrace_after (victim, orig_bytes, address);

  return victim;
}

static void *
__debug_memalign (size_t alignment, size_t bytes)
{
  return _debug_mid_memalign (alignment, bytes, RETURN_ADDRESS (0));
}
strong_alias (__debug_memalign, memalign)

static void *
__debug_aligned_alloc (size_t alignment, size_t bytes)
{
  if (!powerof2 (alignment) || alignment == 0)
    return NULL;
  return _debug_mid_memalign (alignment, bytes, RETURN_ADDRESS (0));
}
strong_alias (__debug_aligned_alloc, aligned_alloc)

static void *
__debug_pvalloc (size_t bytes)
{
  size_t rounded_bytes;

  if (!pagesize)
    pagesize = sysconf (_SC_PAGESIZE);

  /* ALIGN_UP with overflow check.  */
  if (__glibc_unlikely (__builtin_add_overflow (bytes,
                                                pagesize - 1,
                                                &rounded_bytes)))
    {
      errno = ENOMEM;
      return NULL;
    }
  rounded_bytes = rounded_bytes & -(pagesize - 1);

  return _debug_mid_memalign (pagesize, rounded_bytes, RETURN_ADDRESS (0));
}
strong_alias (__debug_pvalloc, pvalloc)

static void *
__debug_valloc (size_t bytes)
{
  if (!pagesize)
    pagesize = sysconf (_SC_PAGESIZE);

  return _debug_mid_memalign (pagesize, bytes, RETURN_ADDRESS (0));
}
strong_alias (__debug_valloc, valloc)

static int
__debug_posix_memalign (void **memptr, size_t alignment, size_t bytes)
{
  /* Test whether the ALIGNMENT argument is valid.  It must be a power of
     two multiple of sizeof (void *).  */
  if (alignment % sizeof (void *) != 0
      || !powerof2 (alignment / sizeof (void *))
      || alignment == 0)
    return EINVAL;

  *memptr = _debug_mid_memalign (alignment, bytes, RETURN_ADDRESS (0));

  if (*memptr == NULL)
    return ENOMEM;

  return 0;
}
strong_alias (__debug_posix_memalign, posix_memalign)

static void *
__debug_calloc (size_t nmemb, size_t size)
{
  size_t bytes;

  if (__glibc_unlikely (__builtin_mul_overflow (nmemb, size, &bytes)))
    {
      errno = ENOMEM;
      return NULL;
    }

  void *(*hook) (size_t, const void *) = atomic_forced_read (__malloc_hook);
  if (__builtin_expect (hook != NULL, 0))
    {
      void *mem = (*hook)(bytes, RETURN_ADDRESS (0));

      if (mem != NULL)
        memset (mem, 0, bytes);

      return mem;
    }

  size_t orig_bytes = bytes;
  void *victim = NULL;

  if ((!__is_malloc_debug_enabled (MALLOC_MCHECK_HOOK)
       || !malloc_mcheck_before (&bytes, &victim)))
    {
      victim = (__is_malloc_debug_enabled (MALLOC_CHECK_HOOK)
                ? malloc_check (bytes) : __libc_malloc (bytes));
    }
  if (victim != NULL)
    {
      if (__is_malloc_debug_enabled (MALLOC_MCHECK_HOOK))
        victim = malloc_mcheck_after (victim, orig_bytes);
      memset (victim, 0, orig_bytes);
    }
  if (__is_malloc_debug_enabled (MALLOC_MTRACE_HOOK))
    malloc_mtrace_after (victim, orig_bytes, RETURN_ADDRESS (0));

  return victim;
}
strong_alias (__debug_calloc, calloc)
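
/* The usable size depends on which debugging hook owns the block: mcheck
   and malloc-check add their own metadata, and dumped chunks carry only a
   SIZE_SZ prefix.  */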
size_t
malloc_usable_size (void *mem)
{
  if (mem == NULL)
    return 0;

  if (__is_malloc_debug_enabled (MALLOC_MCHECK_HOOK))
    return mcheck_usable_size (mem);
  if (__is_malloc_debug_enabled (MALLOC_CHECK_HOOK))
    return malloc_check_get_size (mem);

  mchunkptr p = mem2chunk (mem);
  if (DUMPED_MAIN_ARENA_CHUNK (p))
    return chunksize (p) - SIZE_SZ;

  return musable (mem);
}
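
/* LOAD_SYM looks up the libc implementation of SYM via dlsym (RTLD_NEXT)
   and caches the result in a function-local static handle.  */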
#define LIBC_SYMBOL(sym) libc_ ## sym
#define SYMHANDLE(sym) sym ## _handle

#define LOAD_SYM(sym) ({ \
  static void *SYMHANDLE (sym); \
  if (SYMHANDLE (sym) == NULL) \
    SYMHANDLE (sym) = dlsym (RTLD_NEXT, #sym); \
  SYMHANDLE (sym); \
})
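
/* The inspection interfaces below forward to the libc implementation found
   via LOAD_SYM, unless malloc checking is enabled, in which case the
   information must come from the allocator state maintained inside this
   DSO.  */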
int
malloc_info (int options, FILE *fp)
{
  if (__is_malloc_debug_enabled (MALLOC_CHECK_HOOK))
    return __malloc_info (options, fp);

  int (*LIBC_SYMBOL (malloc_info)) (int, FILE *) = LOAD_SYM (malloc_info);
  if (LIBC_SYMBOL (malloc_info) == NULL)
    return -1;

  return LIBC_SYMBOL (malloc_info) (options, fp);
}

int
mallopt (int param_number, int value)
{
  if (__is_malloc_debug_enabled (MALLOC_CHECK_HOOK))
    return __libc_mallopt (param_number, value);

  int (*LIBC_SYMBOL (mallopt)) (int, int) = LOAD_SYM (mallopt);
  if (LIBC_SYMBOL (mallopt) == NULL)
    return 0;

  return LIBC_SYMBOL (mallopt) (param_number, value);
}

void
malloc_stats (void)
{
  if (__is_malloc_debug_enabled (MALLOC_CHECK_HOOK))
    return __malloc_stats ();

  void (*LIBC_SYMBOL (malloc_stats)) (void) = LOAD_SYM (malloc_stats);
  if (LIBC_SYMBOL (malloc_stats) == NULL)
    return;

  LIBC_SYMBOL (malloc_stats) ();
}

struct mallinfo2
mallinfo2 (void)
{
  if (__is_malloc_debug_enabled (MALLOC_CHECK_HOOK))
    return __libc_mallinfo2 ();

  struct mallinfo2 (*LIBC_SYMBOL (mallinfo2)) (void) = LOAD_SYM (mallinfo2);
  if (LIBC_SYMBOL (mallinfo2) == NULL)
    {
      struct mallinfo2 ret = {0};
      return ret;
    }

  return LIBC_SYMBOL (mallinfo2) ();
}

struct mallinfo
mallinfo (void)
{
  if (__is_malloc_debug_enabled (MALLOC_CHECK_HOOK))
    return __libc_mallinfo ();

  struct mallinfo (*LIBC_SYMBOL (mallinfo)) (void) = LOAD_SYM (mallinfo);
  if (LIBC_SYMBOL (mallinfo) == NULL)
    {
      struct mallinfo ret = {0};
      return ret;
    }

  return LIBC_SYMBOL (mallinfo) ();
}

int
malloc_trim (size_t s)
{
  if (__is_malloc_debug_enabled (MALLOC_CHECK_HOOK))
    return __malloc_trim (s);

  int (*LIBC_SYMBOL (malloc_trim)) (size_t) = LOAD_SYM (malloc_trim);
  if (LIBC_SYMBOL (malloc_trim) == NULL)
    return 0;

  return LIBC_SYMBOL (malloc_trim) (s);
}

#if SHLIB_COMPAT (libc_malloc_debug, GLIBC_2_0, GLIBC_2_25)

/* Support for restoring dumped heaps contained in historic Emacs
   executables.  The heap saving feature (malloc_get_state) is no
   longer implemented in this version of glibc, but we have a heap
   rewriter in malloc_set_state which transforms the heap into a
   version compatible with current malloc.  */

#define MALLOC_STATE_MAGIC   0x444c4541l
#define MALLOC_STATE_VERSION (0 * 0x100l + 5l) /* major*0x100 + minor */
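
/* Layout of the heap state blob that the historic malloc_get_state
   produced; it is only read by malloc_set_state below.  */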
struct malloc_save_state
{
  long magic;
  long version;
  mbinptr av[NBINS * 2 + 2];
  char *sbrk_base;
  int sbrked_mem_bytes;
  unsigned long trim_threshold;
  unsigned long top_pad;
  unsigned int n_mmaps_max;
  unsigned long mmap_threshold;
  int check_action;
  unsigned long max_sbrked_mem;
  unsigned long max_total_mem;  /* Always 0, for backwards compatibility.  */
  unsigned int n_mmaps;
  unsigned int max_n_mmaps;
  unsigned long mmapped_mem;
  unsigned long max_mmapped_mem;
  int using_malloc_checking;
  unsigned long max_fast;
  unsigned long arena_test;
  unsigned long arena_max;
  unsigned long narenas;
};

/* Dummy implementation which always fails.  We need to provide this
   symbol so that existing Emacs binaries continue to work with
   BIND_NOW.  */
void *
malloc_get_state (void)
{
  __set_errno (ENOSYS);
  return NULL;
}
compat_symbol (libc_malloc_debug, malloc_get_state, malloc_get_state,
               GLIBC_2_0);

int
malloc_set_state (void *msptr)
{
  struct malloc_save_state *ms = (struct malloc_save_state *) msptr;

  if (ms->magic != MALLOC_STATE_MAGIC)
    return -1;

  /* Must fail if the major version is too high.  */
  if ((ms->version & ~0xffl) > (MALLOC_STATE_VERSION & ~0xffl))
    return -2;

  if (debug_initialized == 1)
    return -1;

  bool check_was_enabled = __is_malloc_debug_enabled (MALLOC_CHECK_HOOK);

  /* It's not too late, so disable MALLOC_CHECK_ and all of the hooks.  */
  __malloc_hook = NULL;
  __realloc_hook = NULL;
  __free_hook = NULL;
  __memalign_hook = NULL;
  __malloc_debug_disable (MALLOC_CHECK_HOOK);

  /* We do not need to perform locking here because malloc_set_state
     must be called before the first call into the malloc subsystem (usually
     via __malloc_initialize_hook).  pthread_create always calls calloc and
     thus must be called only afterwards, so there cannot be more than one
     thread when we reach this point.  Also handle initialization if either
     we ended up being called before the first malloc or through the hook
     when malloc-check was enabled.  */
  if (debug_initialized < 0)
    generic_hook_ini ();
  else if (check_was_enabled)
    __libc_free (__libc_malloc (0));

  /* Patch the dumped heap.  We no longer try to integrate into the
     existing heap.  Instead, we mark the existing chunks as mmapped.
     Together with the update to dumped_main_arena_start and
     dumped_main_arena_end, realloc and free will recognize these
     chunks as dumped fake mmapped chunks and never free them.  */

  /* Find the chunk with the lowest address within the heap.  */
  mchunkptr chunk = NULL;
  {
    size_t *candidate = (size_t *) ms->sbrk_base;
    size_t *end = (size_t *) (ms->sbrk_base + ms->sbrked_mem_bytes);
    while (candidate < end)
      if (*candidate != 0)
        {
          chunk = mem2chunk ((void *) (candidate + 1));
          break;
        }
      else
        ++candidate;
  }
  if (chunk == NULL)
    return 0;

  /* Iterate over the dumped heap and patch the chunks so that they
     are treated as fake mmapped chunks.  */
  mchunkptr top = ms->av[2];
  while (chunk < top)
    {
      if (inuse (chunk))
        {
          /* Mark chunk as mmapped, to trigger the fallback path.  */
          size_t size = chunksize (chunk);
          set_head (chunk, size | IS_MMAPPED);
        }
      chunk = next_chunk (chunk);
    }

  /* The dumped fake mmapped chunks all lie in this address range.  */
  dumped_main_arena_start = (mchunkptr) ms->sbrk_base;
  dumped_main_arena_end = top;

  return 0;
}
compat_symbol (libc_malloc_debug, malloc_set_state, malloc_set_state,
               GLIBC_2_0);
#endif

/* Do not allow linking against the library.  */
compat_symbol (libc_malloc_debug, aligned_alloc, aligned_alloc, GLIBC_2_16);
compat_symbol (libc_malloc_debug, calloc, calloc, GLIBC_2_0);
compat_symbol (libc_malloc_debug, free, free, GLIBC_2_0);
compat_symbol (libc_malloc_debug, mallinfo2, mallinfo2, GLIBC_2_33);
compat_symbol (libc_malloc_debug, mallinfo, mallinfo, GLIBC_2_0);
compat_symbol (libc_malloc_debug, malloc_info, malloc_info, GLIBC_2_10);
compat_symbol (libc_malloc_debug, malloc, malloc, GLIBC_2_0);
compat_symbol (libc_malloc_debug, malloc_stats, malloc_stats, GLIBC_2_0);
compat_symbol (libc_malloc_debug, malloc_trim, malloc_trim, GLIBC_2_0);
compat_symbol (libc_malloc_debug, malloc_usable_size, malloc_usable_size,
               GLIBC_2_0);
compat_symbol (libc_malloc_debug, mallopt, mallopt, GLIBC_2_0);
compat_symbol (libc_malloc_debug, mcheck_check_all, mcheck_check_all,
               GLIBC_2_2);
compat_symbol (libc_malloc_debug, mcheck, mcheck, GLIBC_2_0);
compat_symbol (libc_malloc_debug, mcheck_pedantic, mcheck_pedantic, GLIBC_2_2);
compat_symbol (libc_malloc_debug, memalign, memalign, GLIBC_2_0);
compat_symbol (libc_malloc_debug, mprobe, mprobe, GLIBC_2_0);
compat_symbol (libc_malloc_debug, mtrace, mtrace, GLIBC_2_0);
compat_symbol (libc_malloc_debug, muntrace, muntrace, GLIBC_2_0);
compat_symbol (libc_malloc_debug, posix_memalign, posix_memalign, GLIBC_2_2);
compat_symbol (libc_malloc_debug, pvalloc, pvalloc, GLIBC_2_0);
compat_symbol (libc_malloc_debug, realloc, realloc, GLIBC_2_0);
compat_symbol (libc_malloc_debug, valloc, valloc, GLIBC_2_0);
compat_symbol (libc_malloc_debug, __free_hook, __free_hook, GLIBC_2_0);
compat_symbol (libc_malloc_debug, __malloc_hook, __malloc_hook, GLIBC_2_0);
compat_symbol (libc_malloc_debug, __realloc_hook, __realloc_hook, GLIBC_2_0);
compat_symbol (libc_malloc_debug, __memalign_hook, __memalign_hook, GLIBC_2_0);