2 * Tiny C Memory and bounds checker
4 * Copyright (c) 2002 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
26 #if !defined(__FreeBSD__) \
27 && !defined(__FreeBSD_kernel__) \
28 && !defined(__DragonFly__) \
29 && !defined(__OpenBSD__) \
30 && !defined(__APPLE__) \
31 && !defined(__NetBSD__)
37 #include <sys/syscall.h>
40 #define BOUND_DEBUG (1)
41 #define BOUND_STATISTIC (1)
44 #define dprintf(a...) if (print_calls) fprintf(a)
50 /* an __attribute__ macro is defined in the system headers */
53 #define FASTCALL __attribute__((regparm(3)))
56 # define DLL_EXPORT __declspec(dllexport)
61 #if defined(__FreeBSD__) \
62 || defined(__FreeBSD_kernel__) \
63 || defined(__DragonFly__) \
64 || defined(__OpenBSD__) \
65 || defined(__NetBSD__) \
66 || defined(__dietlibc__)
74 #define HAVE_MEMALIGN (0)
75 #define MALLOC_REDIR (0)
76 #define HAVE_PTHREAD_CREATE (0)
77 #define HAVE_CTYPE (0)
78 #define HAVE_ERRNO (0)
79 #define HAVE_SIGNAL (0)
80 #define HAVE_SIGACTION (0)
82 #define HAVE_TLS_FUNC (0)
83 #define HAVE_TLS_VAR (0)
89 static CRITICAL_SECTION bounds_sem
;
90 #define INIT_SEM() InitializeCriticalSection(&bounds_sem)
91 #define EXIT_SEM() DeleteCriticalSection(&bounds_sem)
92 #define WAIT_SEM() EnterCriticalSection(&bounds_sem)
93 #define POST_SEM() LeaveCriticalSection(&bounds_sem)
94 #define TRY_SEM() TryEnterCriticalSection(&bounds_sem)
95 #define HAVE_MEMALIGN (0)
96 #define MALLOC_REDIR (0)
97 #define HAVE_PTHREAD_CREATE (0)
98 #define HAVE_CTYPE (0)
99 #define HAVE_ERRNO (0)
100 #define HAVE_SIGNAL (1)
101 #define HAVE_SIGACTION (0)
102 #define HAVE_FORK (0)
103 #define HAVE_TLS_FUNC (1)
104 #define HAVE_TLS_VAR (0)
108 #define __USE_GNU /* get RTLD_NEXT */
109 #include <sys/mman.h>
116 #include <dispatch/dispatch.h>
117 static dispatch_semaphore_t bounds_sem
;
118 #define INIT_SEM() bounds_sem = dispatch_semaphore_create(1)
119 #define EXIT_SEM() dispatch_release(*(dispatch_object_t*)&bounds_sem)
120 #define WAIT_SEM() if (use_sem) dispatch_semaphore_wait(bounds_sem, DISPATCH_TIME_FOREVER)
121 #define POST_SEM() if (use_sem) dispatch_semaphore_signal(bounds_sem)
122 #define TRY_SEM() if (use_sem) dispatch_semaphore_wait(bounds_sem, DISPATCH_TIME_NOW)
124 #include <semaphore.h>
125 static sem_t bounds_sem
;
126 #define INIT_SEM() sem_init (&bounds_sem, 0, 1)
127 #define EXIT_SEM() sem_destroy (&bounds_sem)
128 #define WAIT_SEM() if (use_sem) while (sem_wait (&bounds_sem) < 0 \
130 #define POST_SEM() if (use_sem) sem_post (&bounds_sem)
131 #define TRY_SEM() if (use_sem) while (sem_trywait (&bounds_sem) < 0 \
134 static pthread_mutex_t bounds_mtx
;
135 #define INIT_SEM() pthread_mutex_init (&bounds_mtx, NULL)
136 #define EXIT_SEM() pthread_mutex_destroy (&bounds_mtx)
137 #define WAIT_SEM() if (use_sem) pthread_mutex_lock (&bounds_mtx)
138 #define POST_SEM() if (use_sem) pthread_mutex_unlock (&bounds_mtx)
139 #define TRY_SEM() if (use_sem) pthread_mutex_trylock (&bounds_mtx)
141 static pthread_spinlock_t bounds_spin
;
142 /* about 25% faster then semaphore. */
143 #define INIT_SEM() pthread_spin_init (&bounds_spin, 0)
144 #define EXIT_SEM() pthread_spin_destroy (&bounds_spin)
145 #define WAIT_SEM() if (use_sem) pthread_spin_lock (&bounds_spin)
146 #define POST_SEM() if (use_sem) pthread_spin_unlock (&bounds_spin)
147 #define TRY_SEM() if (use_sem) pthread_spin_trylock (&bounds_spin)
149 #define HAVE_MEMALIGN (1)
150 #define MALLOC_REDIR (1)
151 #define HAVE_PTHREAD_CREATE (1)
152 #define HAVE_CTYPE (1)
153 #define HAVE_ERRNO (1)
154 #define HAVE_SIGNAL (1)
155 #define HAVE_SIGACTION (1)
156 #define HAVE_FORK (1)
157 #if !defined(__APPLE__) && defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))
158 #define HAVE_TLS_FUNC (0)
159 #define HAVE_TLS_VAR (1)
161 #define HAVE_TLS_FUNC (1)
162 #define HAVE_TLS_VAR (0)
170 static void *(*malloc_redir
) (size_t);
171 static void *(*calloc_redir
) (size_t, size_t);
172 static void (*free_redir
) (void *);
173 static void *(*realloc_redir
) (void *, size_t);
174 static unsigned int pool_index
;
175 static unsigned char __attribute__((aligned(16))) initial_pool
[256];
178 static void *(*memalign_redir
) (size_t, size_t);
180 #if HAVE_PTHREAD_CREATE
181 static int (*pthread_create_redir
) (pthread_t
*thread
,
182 const pthread_attr_t
*attr
,
183 void *(*start_routine
)(void *), void *arg
);
186 typedef void (*bound_sig
)(int);
187 static bound_sig (*signal_redir
) (int signum
, bound_sig handler
);
190 static int (*sigaction_redir
) (int signum
, const struct sigaction
*act
,
191 struct sigaction
*oldact
);
194 static int (*fork_redir
) (void);
197 #define TCC_TYPE_NONE (0)
198 #define TCC_TYPE_MALLOC (1)
199 #define TCC_TYPE_CALLOC (2)
200 #define TCC_TYPE_REALLOC (3)
201 #define TCC_TYPE_MEMALIGN (4)
202 #define TCC_TYPE_STRDUP (5)
204 /* this pointer is generated when bound check is incorrect */
205 #define INVALID_POINTER ((void *)(-2))
207 typedef struct tree_node Tree
;
209 Tree
* left
, * right
;
213 unsigned char is_invalid
; /* true if pointers outside region are invalid */
216 typedef struct alloca_list_struct
{
220 struct alloca_list_struct
*next
;
224 #define BOUND_TID_TYPE DWORD
225 #define BOUND_GET_TID GetCurrentThreadId()
226 #elif defined(__OpenBSD__)
227 #define BOUND_TID_TYPE pid_t
228 #define BOUND_GET_TID syscall (SYS_getthrid)
229 #elif defined(__FreeBSD__) || defined(__NetBSD__)
230 #define BOUND_TID_TYPE pid_t
231 #define BOUND_GET_TID 0
232 #elif defined(__i386__) || defined(__x86_64__) || defined(__arm__) || defined(__aarch64__) || defined(__riscv)
233 #define BOUND_TID_TYPE pid_t
234 #define BOUND_GET_TID syscall (SYS_gettid)
236 #define BOUND_TID_TYPE int
237 #define BOUND_GET_TID 0
240 typedef struct jmp_list_struct
{
245 struct jmp_list_struct
*next
;
248 #define BOUND_STATISTIC_SPLAY (0)
249 static Tree
* splay (size_t addr
, Tree
*t
);
250 static Tree
* splay_end (size_t addr
, Tree
*t
);
251 static Tree
* splay_insert(size_t addr
, size_t size
, Tree
* t
);
252 static Tree
* splay_delete(size_t addr
, Tree
*t
);
253 void splay_printtree(Tree
* t
, int d
);
255 /* external interface */
256 void __bounds_checking (int no_check
);
257 void __bound_never_fatal (int no_check
);
258 DLL_EXPORT
void * __bound_ptr_add(void *p
, size_t offset
);
259 DLL_EXPORT
void * __bound_ptr_indir1(void *p
, size_t offset
);
260 DLL_EXPORT
void * __bound_ptr_indir2(void *p
, size_t offset
);
261 DLL_EXPORT
void * __bound_ptr_indir4(void *p
, size_t offset
);
262 DLL_EXPORT
void * __bound_ptr_indir8(void *p
, size_t offset
);
263 DLL_EXPORT
void * __bound_ptr_indir12(void *p
, size_t offset
);
264 DLL_EXPORT
void * __bound_ptr_indir16(void *p
, size_t offset
);
265 DLL_EXPORT
void FASTCALL
__bound_local_new(void *p1
);
266 DLL_EXPORT
void FASTCALL
__bound_local_delete(void *p1
);
267 void __bound_init(size_t *, int);
268 void __bound_main_arg(int argc
, char **argv
, char **envp
);
269 void __bound_exit(void);
271 void *__bound_mmap (void *start
, size_t size
, int prot
, int flags
, int fd
,
273 int __bound_munmap (void *start
, size_t size
);
274 DLL_EXPORT
void __bound_siglongjmp(jmp_buf env
, int val
);
276 DLL_EXPORT
void __bound_new_region(void *p
, size_t size
);
277 DLL_EXPORT
void __bound_setjmp(jmp_buf env
);
278 DLL_EXPORT
void __bound_longjmp(jmp_buf env
, int val
);
279 DLL_EXPORT
void *__bound_memcpy(void *dst
, const void *src
, size_t size
);
280 DLL_EXPORT
int __bound_memcmp(const void *s1
, const void *s2
, size_t size
);
281 DLL_EXPORT
void *__bound_memmove(void *dst
, const void *src
, size_t size
);
282 DLL_EXPORT
void *__bound_memset(void *dst
, int c
, size_t size
);
283 DLL_EXPORT
int __bound_strlen(const char *s
);
284 DLL_EXPORT
char *__bound_strcpy(char *dst
, const char *src
);
285 DLL_EXPORT
char *__bound_strncpy(char *dst
, const char *src
, size_t n
);
286 DLL_EXPORT
int __bound_strcmp(const char *s1
, const char *s2
);
287 DLL_EXPORT
int __bound_strncmp(const char *s1
, const char *s2
, size_t n
);
288 DLL_EXPORT
char *__bound_strcat(char *dest
, const char *src
);
289 DLL_EXPORT
char *__bound_strchr(const char *string
, int ch
);
290 DLL_EXPORT
char *__bound_strdup(const char *s
);
292 #if defined(__arm__) && defined(__ARM_EABI__)
293 DLL_EXPORT
void *__bound___aeabi_memcpy(void *dst
, const void *src
, size_t size
);
294 DLL_EXPORT
void *__bound___aeabi_memmove(void *dst
, const void *src
, size_t size
);
295 DLL_EXPORT
void *__bound___aeabi_memmove4(void *dst
, const void *src
, size_t size
);
296 DLL_EXPORT
void *__bound___aeabi_memmove8(void *dst
, const void *src
, size_t size
);
297 DLL_EXPORT
void *__bound___aeabi_memset(void *dst
, int c
, size_t size
);
298 DLL_EXPORT
void *__aeabi_memcpy(void *dst
, const void *src
, size_t size
);
299 DLL_EXPORT
void *__aeabi_memmove(void *dst
, const void *src
, size_t size
);
300 DLL_EXPORT
void *__aeabi_memmove4(void *dst
, const void *src
, size_t size
);
301 DLL_EXPORT
void *__aeabi_memmove8(void *dst
, const void *src
, size_t size
);
302 DLL_EXPORT
void *__aeabi_memset(void *dst
, int c
, size_t size
);
306 #define BOUND_MALLOC(a) malloc_redir(a)
307 #define BOUND_MEMALIGN(a,b) memalign_redir(a,b)
308 #define BOUND_FREE(a) free_redir(a)
309 #define BOUND_REALLOC(a,b) realloc_redir(a,b)
310 #define BOUND_CALLOC(a,b) calloc_redir(a,b)
312 #define BOUND_MALLOC(a) malloc(a)
313 #define BOUND_MEMALIGN(a,b) memalign(a,b)
314 #define BOUND_FREE(a) free(a)
315 #define BOUND_REALLOC(a,b) realloc(a,b)
316 #define BOUND_CALLOC(a,b) calloc(a,b)
317 DLL_EXPORT
void *__bound_malloc(size_t size
, const void *caller
);
318 DLL_EXPORT
void *__bound_memalign(size_t size
, size_t align
, const void *caller
);
319 DLL_EXPORT
void __bound_free(void *ptr
, const void *caller
);
320 DLL_EXPORT
void *__bound_realloc(void *ptr
, size_t size
, const void *caller
);
321 DLL_EXPORT
void *__bound_calloc(size_t nmemb
, size_t size
);
324 #define FREE_REUSE_SIZE (100)
325 static unsigned int free_reuse_index
;
326 static void *free_reuse_list
[FREE_REUSE_SIZE
];
328 static Tree
*tree
= NULL
;
329 #define TREE_REUSE (1)
331 static Tree
*tree_free_list
;
333 static alloca_list_type
*alloca_list
;
334 static jmp_list_type
*jmp_list
;
336 static unsigned char inited
;
337 static unsigned char print_warn_ptr_add
;
338 static unsigned char print_calls
;
339 static unsigned char print_heap
;
340 static unsigned char print_statistic
;
341 static unsigned char no_strdup
;
342 static unsigned char use_sem
;
343 static int never_fatal
;
346 static int no_checking
= 0;
347 static DWORD no_checking_key
;
348 #define NO_CHECKING_CHECK() if (!p) { \
349 p = (int *) LocalAlloc(LPTR, sizeof(int)); \
350 if (!p) bound_alloc_error("tls malloc"); \
352 TlsSetValue(no_checking_key, p); \
354 #define NO_CHECKING_GET() ({ int *p = TlsGetValue(no_checking_key); \
355 NO_CHECKING_CHECK(); \
358 #define NO_CHECKING_SET(v) { int *p = TlsGetValue(no_checking_key); \
359 NO_CHECKING_CHECK(); \
363 static int no_checking
= 0;
364 static pthread_key_t no_checking_key
;
365 #define NO_CHECKING_CHECK() if (!p) { \
366 p = (int *) BOUND_MALLOC(sizeof(int)); \
367 if (!p) bound_alloc_error("tls malloc"); \
369 pthread_setspecific(no_checking_key, p); \
371 #define NO_CHECKING_GET() ({ int *p = pthread_getspecific(no_checking_key); \
372 NO_CHECKING_CHECK(); \
375 #define NO_CHECKING_SET(v) { int *p = pthread_getspecific(no_checking_key); \
376 NO_CHECKING_CHECK(); \
381 static __thread
int no_checking
= 0;
382 #define NO_CHECKING_GET() no_checking
383 #define NO_CHECKING_SET(v) no_checking = v
385 static int no_checking
= 0;
386 #define NO_CHECKING_GET() no_checking
387 #define NO_CHECKING_SET(v) no_checking = v
389 static char exec
[100];
392 static unsigned long long bound_ptr_add_count
;
393 static unsigned long long bound_ptr_indir1_count
;
394 static unsigned long long bound_ptr_indir2_count
;
395 static unsigned long long bound_ptr_indir4_count
;
396 static unsigned long long bound_ptr_indir8_count
;
397 static unsigned long long bound_ptr_indir12_count
;
398 static unsigned long long bound_ptr_indir16_count
;
399 static unsigned long long bound_local_new_count
;
400 static unsigned long long bound_local_delete_count
;
401 static unsigned long long bound_malloc_count
;
402 static unsigned long long bound_calloc_count
;
403 static unsigned long long bound_realloc_count
;
404 static unsigned long long bound_free_count
;
405 static unsigned long long bound_memalign_count
;
406 static unsigned long long bound_mmap_count
;
407 static unsigned long long bound_munmap_count
;
408 static unsigned long long bound_alloca_count
;
409 static unsigned long long bound_setjmp_count
;
410 static unsigned long long bound_longjmp_count
;
411 static unsigned long long bound_mempcy_count
;
412 static unsigned long long bound_memcmp_count
;
413 static unsigned long long bound_memmove_count
;
414 static unsigned long long bound_memset_count
;
415 static unsigned long long bound_strlen_count
;
416 static unsigned long long bound_strcpy_count
;
417 static unsigned long long bound_strncpy_count
;
418 static unsigned long long bound_strcmp_count
;
419 static unsigned long long bound_strncmp_count
;
420 static unsigned long long bound_strcat_count
;
421 static unsigned long long bound_strchr_count
;
422 static unsigned long long bound_strdup_count
;
423 static unsigned long long bound_not_found
;
424 #define INCR_COUNT(x) ++x
426 #define INCR_COUNT(x)
428 #if BOUND_STATISTIC_SPLAY
429 static unsigned long long bound_splay
;
430 static unsigned long long bound_splay_end
;
431 static unsigned long long bound_splay_insert
;
432 static unsigned long long bound_splay_delete
;
433 #define INCR_COUNT_SPLAY(x) ++x
435 #define INCR_COUNT_SPLAY(x)
438 int tcc_backtrace(const char *fmt
, ...);
440 /* print a bound error message */
441 #define bound_warning(...) \
442 tcc_backtrace("^bcheck.c^BCHECK: " __VA_ARGS__)
444 #define bound_error(...) \
446 bound_warning(__VA_ARGS__); \
447 if (never_fatal == 0) \
/* Report a fatal internal error of the bounds checker (typically an
   allocation failure during setup) and terminate the process. */
static void bound_alloc_error(const char *s)
{
    fprintf(stderr, "FATAL: %s\n", s);
    exit(1);
}
457 static void bound_not_found_warning(const char *file
, const char *function
,
460 dprintf(stderr
, "%s%s, %s(): Not found %p\n", exec
, file
, function
, ptr
);
/* Atomically add `value` to `*variable`.  Used for the global
   no_checking / never_fatal counters so that signal handlers and other
   threads can enable/disable checking without a lock. */
static void fetch_and_add(int* variable, int value)
{
#if defined __i386__ || defined __x86_64__
    __asm__ volatile("lock; addl %0, %1"
                     : "+r" (value), "+m" (*variable) // input+output
                     : // No input-only
                     : "memory"
                     );
#elif defined __arm__
    extern void fetch_and_add_arm(int* variable, int value);
    fetch_and_add_arm(variable, value);
#elif defined __aarch64__
    extern void fetch_and_add_arm64(int* variable, int value);
    fetch_and_add_arm64(variable, value);
#elif defined __riscv
    extern void fetch_and_add_riscv64(int* variable, int value);
    fetch_and_add_riscv64(variable, value);
#else
    /* fallback for other targets: plain (non-atomic) add */
    *variable += value;
#endif
}
485 /* enable/disable checking. This can be used in signal handlers. */
486 void __bounds_checking (int no_check
)
488 #if HAVE_TLS_FUNC || HAVE_TLS_VAR
489 NO_CHECKING_SET(NO_CHECKING_GET() + no_check
);
491 fetch_and_add (&no_checking
, no_check
);
495 /* enable/disable checking. This can be used in signal handlers. */
496 void __bound_never_fatal (int neverfatal
)
498 fetch_and_add (&never_fatal
, neverfatal
);
501 /* return '(p + offset)' for pointer arithmetic (a pointer can reach
502 the end of a region in this case */
503 void * __bound_ptr_add(void *p
, size_t offset
)
505 size_t addr
= (size_t)p
;
507 if (NO_CHECKING_GET())
510 dprintf(stderr
, "%s, %s(): %p 0x%lx\n",
511 __FILE__
, __FUNCTION__
, p
, (unsigned long)offset
);
514 INCR_COUNT(bound_ptr_add_count
);
517 if (addr
>= tree
->size
) {
519 tree
= splay (addr
, tree
);
522 if (addr
>= tree
->size
) {
524 tree
= splay_end (addr
, tree
);
527 if (addr
<= tree
->size
) {
528 if (tree
->is_invalid
|| addr
+ offset
> tree
->size
) {
530 if (print_warn_ptr_add
)
531 bound_warning("%p is outside of the region", p
+ offset
);
532 if (never_fatal
<= 0)
533 return INVALID_POINTER
; /* return an invalid pointer */
537 else if (p
) { /* Allow NULL + offset. offsetoff is using it. */
538 INCR_COUNT(bound_not_found
);
540 bound_not_found_warning (__FILE__
, __FUNCTION__
, p
);
548 /* return '(p + offset)' for pointer indirection (the resulting must
549 be strictly inside the region */
550 #define BOUND_PTR_INDIR(dsize) \
551 void * __bound_ptr_indir ## dsize (void *p, size_t offset) \
553 size_t addr = (size_t)p; \
555 if (NO_CHECKING_GET()) \
558 dprintf(stderr, "%s, %s(): %p 0x%lx\n", \
559 __FILE__, __FUNCTION__, p, (unsigned long)offset); \
561 INCR_COUNT(bound_ptr_indir ## dsize ## _count); \
563 addr -= tree->start; \
564 if (addr >= tree->size) { \
566 tree = splay (addr, tree); \
567 addr -= tree->start; \
569 if (addr >= tree->size) { \
571 tree = splay_end (addr, tree); \
572 addr -= tree->start; \
574 if (addr <= tree->size) { \
575 if (tree->is_invalid || addr + offset + dsize > tree->size) { \
577 bound_warning("%p is outside of the region", p + offset); \
578 if (never_fatal <= 0) \
579 return INVALID_POINTER; /* return an invalid pointer */ \
584 INCR_COUNT(bound_not_found); \
586 bound_not_found_warning (__FILE__, __FUNCTION__, p); \
601 #if defined(__GNUC__) && (__GNUC__ >= 6)
603 * At least gcc 6.2 complains when __builtin_frame_address is used with
606 #pragma GCC diagnostic push
607 #pragma GCC diagnostic ignored "-Wframe-address"
610 /* return the frame pointer of the caller */
611 #define GET_CALLER_FP(fp)\
613 fp = (size_t)__builtin_frame_address(1);\
616 /* called when entering a function to add all the local regions */
617 void FASTCALL
__bound_local_new(void *p1
)
619 size_t addr
, fp
, *p
= p1
;
621 if (NO_CHECKING_GET())
624 dprintf(stderr
, "%s, %s(): p1=%p fp=%p\n",
625 __FILE__
, __FUNCTION__
, p
, (void *)fp
);
627 while ((addr
= p
[0])) {
628 INCR_COUNT(bound_local_new_count
);
629 tree
= splay_insert(addr
+ fp
, p
[1], tree
);
636 while ((addr
= p
[0])) {
637 dprintf(stderr
, "%s, %s(): %p 0x%lx\n",
638 __FILE__
, __FUNCTION__
,
639 (void *) (addr
+ fp
), (unsigned long) p
[1]);
646 /* called when leaving a function to delete all the local regions */
647 void FASTCALL
__bound_local_delete(void *p1
)
649 size_t addr
, fp
, *p
= p1
;
651 if (NO_CHECKING_GET())
654 dprintf(stderr
, "%s, %s(): p1=%p fp=%p\n",
655 __FILE__
, __FUNCTION__
, p
, (void *)fp
);
657 while ((addr
= p
[0])) {
658 INCR_COUNT(bound_local_delete_count
);
659 tree
= splay_delete(addr
+ fp
, tree
);
663 alloca_list_type
*last
= NULL
;
664 alloca_list_type
*cur
= alloca_list
;
669 last
->next
= cur
->next
;
671 alloca_list
= cur
->next
;
672 tree
= splay_delete ((size_t) cur
->p
, tree
);
673 dprintf(stderr
, "%s, %s(): remove alloca/vla %p\n",
674 __FILE__
, __FUNCTION__
, cur
->p
);
676 cur
= last
? last
->next
: alloca_list
;
685 jmp_list_type
*last
= NULL
;
686 jmp_list_type
*cur
= jmp_list
;
691 last
->next
= cur
->next
;
693 jmp_list
= cur
->next
;
694 dprintf(stderr
, "%s, %s(): remove setjmp %p\n",
695 __FILE__
, __FUNCTION__
, cur
->penv
);
697 cur
= last
? last
->next
: jmp_list
;
710 while ((addr
= p
[0])) {
712 dprintf(stderr
, "%s, %s(): %p 0x%lx\n",
713 __FILE__
, __FUNCTION__
,
714 (void *) (addr
+ fp
), (unsigned long) p
[1]);
723 void __bound_new_region(void *p
, size_t size
)
726 alloca_list_type
*last
;
727 alloca_list_type
*cur
;
728 alloca_list_type
*new;
730 if (NO_CHECKING_GET())
733 dprintf(stderr
, "%s, %s(): %p, 0x%lx\n",
734 __FILE__
, __FUNCTION__
, p
, (unsigned long)size
);
736 new = BOUND_MALLOC (sizeof (alloca_list_type
));
738 INCR_COUNT(bound_alloca_count
);
742 #if defined(__i386__) || (defined(__arm__) && !defined(__ARM_EABI__))
744 #elif defined(__arm__)
749 void *cure
= (void *)((char *)cur
->p
+ ((cur
->size
+ align
) & -align
));
750 void *pe
= (void *)((char *)p
+ ((size
+ align
) & -align
));
751 if (cur
->fp
== fp
&& ((cur
->p
<= p
&& cure
> p
) ||
752 (p
<= cur
->p
&& pe
> cur
->p
))) {
754 last
->next
= cur
->next
;
756 alloca_list
= cur
->next
;
757 tree
= splay_delete((size_t)cur
->p
, tree
);
763 tree
= splay_insert((size_t)p
, size
, tree
);
768 new->next
= alloca_list
;
773 dprintf(stderr
, "%s, %s(): remove alloca/vla %p\n",
774 __FILE__
, __FUNCTION__
, cur
->p
);
779 void __bound_setjmp(jmp_buf env
)
782 void *e
= (void *) env
;
784 if (NO_CHECKING_GET() == 0) {
785 dprintf(stderr
, "%s, %s(): %p\n", __FILE__
, __FUNCTION__
, e
);
787 INCR_COUNT(bound_setjmp_count
);
795 jl
= BOUND_MALLOC (sizeof (jmp_list_type
));
807 jl
->end_fp
= (size_t)__builtin_frame_address(0);
808 jl
->tid
= BOUND_GET_TID
;
814 static void __bound_long_jump(jmp_buf env
, int val
, int sig
, const char *func
)
820 if (NO_CHECKING_GET() == 0) {
823 dprintf(stderr
, "%s, %s(): %p\n", __FILE__
, func
, e
);
825 INCR_COUNT(bound_longjmp_count
);
828 if (jl
->penv
== e
&& jl
->tid
== tid
) {
829 size_t start_fp
= (size_t)__builtin_frame_address(0);
830 size_t end_fp
= jl
->end_fp
;
831 jmp_list_type
*cur
= jmp_list
;
832 jmp_list_type
*last
= NULL
;
834 while (cur
->penv
!= e
|| cur
->tid
!= tid
) {
835 if (cur
->tid
== tid
) {
836 dprintf(stderr
, "%s, %s(): remove setjmp %p\n",
837 __FILE__
, func
, cur
->penv
);
839 last
->next
= cur
->next
;
841 jmp_list
= cur
->next
;
843 cur
= last
? last
->next
: jmp_list
;
852 alloca_list_type
*last
;
853 alloca_list_type
*cur
;
855 while (t
&& (t
->start
< start_fp
|| t
->start
> end_fp
))
856 if (t
->start
< start_fp
)
865 if ((size_t) cur
->p
== t
->start
) {
866 dprintf(stderr
, "%s, %s(): remove alloca/vla %p\n",
867 __FILE__
, func
, cur
->p
);
869 last
->next
= cur
->next
;
871 alloca_list
= cur
->next
;
878 dprintf(stderr
, "%s, %s(): delete %p\n",
879 __FILE__
, func
, (void *) t
->start
);
880 tree
= splay_delete(t
->start
, tree
);
889 sig
? siglongjmp(env
, val
) :
894 void __bound_longjmp(jmp_buf env
, int val
)
896 __bound_long_jump(env
,val
, 0, __FUNCTION__
);
900 void __bound_siglongjmp(jmp_buf env
, int val
)
902 __bound_long_jump(env
,val
, 1, __FUNCTION__
);
906 #if defined(__GNUC__) && (__GNUC__ >= 6)
907 #pragma GCC diagnostic pop
910 void __bound_init(size_t *p
, int mode
)
912 dprintf(stderr
, "%s, %s(): start %s\n", __FILE__
, __FUNCTION__
,
913 mode
< 0 ? "lazy" : mode
== 0 ? "normal use" : "for -run");
923 no_checking_key
= TlsAlloc();
924 TlsSetValue(no_checking_key
, &no_checking
);
926 pthread_key_create(&no_checking_key
, NULL
);
927 pthread_setspecific(no_checking_key
, &no_checking
);
932 print_warn_ptr_add
= getenv ("TCC_BOUNDS_WARN_POINTER_ADD") != NULL
;
933 print_calls
= getenv ("TCC_BOUNDS_PRINT_CALLS") != NULL
;
934 print_heap
= getenv ("TCC_BOUNDS_PRINT_HEAP") != NULL
;
935 print_statistic
= getenv ("TCC_BOUNDS_PRINT_STATISTIC") != NULL
;
936 never_fatal
= getenv ("TCC_BOUNDS_NEVER_FATAL") != NULL
;
942 void *addr
= mode
> 0 ? RTLD_DEFAULT
: RTLD_NEXT
;
944 /* tcc -run required RTLD_DEFAULT. Normal usage requires RTLD_NEXT,
945 but using RTLD_NEXT with -run segfaults on MacOS in dyld as the
946 generated code segment isn't registered with dyld and hence the
947 caller image of dlsym isn't known to it */
948 *(void **) (&malloc_redir
) = dlsym (addr
, "malloc");
949 if (malloc_redir
== NULL
) {
950 dprintf(stderr
, "%s, %s(): use RTLD_DEFAULT\n",
951 __FILE__
, __FUNCTION__
);
953 *(void **) (&malloc_redir
) = dlsym (addr
, "malloc");
955 *(void **) (&calloc_redir
) = dlsym (addr
, "calloc");
956 *(void **) (&free_redir
) = dlsym (addr
, "free");
957 *(void **) (&realloc_redir
) = dlsym (addr
, "realloc");
958 *(void **) (&memalign_redir
) = dlsym (addr
, "memalign");
959 dprintf(stderr
, "%s, %s(): malloc_redir %p\n",
960 __FILE__
, __FUNCTION__
, malloc_redir
);
961 dprintf(stderr
, "%s, %s(): free_redir %p\n",
962 __FILE__
, __FUNCTION__
, free_redir
);
963 dprintf(stderr
, "%s, %s(): realloc_redir %p\n",
964 __FILE__
, __FUNCTION__
, realloc_redir
);
965 dprintf(stderr
, "%s, %s(): memalign_redir %p\n",
966 __FILE__
, __FUNCTION__
, memalign_redir
);
967 if (malloc_redir
== NULL
|| free_redir
== NULL
)
968 bound_alloc_error ("Cannot redirect malloc/free");
969 #if HAVE_PTHREAD_CREATE
970 *(void **) (&pthread_create_redir
) = dlsym (addr
, "pthread_create");
971 dprintf(stderr
, "%s, %s(): pthread_create_redir %p\n",
972 __FILE__
, __FUNCTION__
, pthread_create_redir
);
973 if (pthread_create_redir
== NULL
)
974 bound_alloc_error ("Cannot redirect pthread_create");
977 *(void **) (&signal_redir
) = dlsym (addr
, "signal");
978 dprintf(stderr
, "%s, %s(): signal_redir %p\n",
979 __FILE__
, __FUNCTION__
, signal_redir
);
980 if (signal_redir
== NULL
)
981 bound_alloc_error ("Cannot redirect signal");
984 *(void **) (&sigaction_redir
) = dlsym (addr
, "sigaction");
985 dprintf(stderr
, "%s, %s(): sigaction_redir %p\n",
986 __FILE__
, __FUNCTION__
, sigaction_redir
);
987 if (sigaction_redir
== NULL
)
988 bound_alloc_error ("Cannot redirect sigaction");
991 *(void **) (&fork_redir
) = dlsym (addr
, "fork");
992 dprintf(stderr
, "%s, %s(): fork_redir %p\n",
993 __FILE__
, __FUNCTION__
, fork_redir
);
994 if (fork_redir
== NULL
)
995 bound_alloc_error ("Cannot redirect fork");
1003 unsigned char found
;
1004 unsigned long start
;
1007 (unsigned long) __builtin_return_address(0);
1010 /* Display exec name. Usefull when a lot of code is compiled with tcc */
1011 fp
= fopen ("/proc/self/comm", "r");
1013 memset (exec
, 0, sizeof(exec
));
1014 fread (exec
, 1, sizeof(exec
) - 2, fp
);
1015 if (strchr(exec
,'\n'))
1016 *strchr(exec
,'\n') = '\0';
1020 /* check if dlopen is used (is threre a better way?) */
1022 fp
= fopen ("/proc/self/maps", "r");
1024 while (fgets (line
, sizeof(line
), fp
)) {
1025 if (sscanf (line
, "%lx-%lx", &start
, &end
) == 2 &&
1026 ad
>= start
&& ad
< end
) {
1030 if (strstr (line
,"[heap]"))
1046 tree
= splay_insert((size_t) &_DefaultRuneLocale
,
1047 sizeof (_DefaultRuneLocale
), tree
);
1049 /* XXX: Does not work if locale is changed */
1050 tree
= splay_insert((size_t) __ctype_b_loc(),
1051 sizeof (unsigned short *), tree
);
1052 tree
= splay_insert((size_t) (*__ctype_b_loc() - 128),
1053 384 * sizeof (unsigned short), tree
);
1054 tree
= splay_insert((size_t) __ctype_tolower_loc(),
1055 sizeof (__int32_t
*), tree
);
1056 tree
= splay_insert((size_t) (*__ctype_tolower_loc() - 128),
1057 384 * sizeof (__int32_t
), tree
);
1058 tree
= splay_insert((size_t) __ctype_toupper_loc(),
1059 sizeof (__int32_t
*), tree
);
1060 tree
= splay_insert((size_t) (*__ctype_toupper_loc() - 128),
1061 384 * sizeof (__int32_t
), tree
);
1065 tree
= splay_insert((size_t) (&errno
), sizeof (int), tree
);
1072 /* add all static bound check values */
1074 tree
= splay_insert(p
[0], p
[1], tree
);
1077 dprintf(stderr
, "%s, %s(): static var %p 0x%lx\n",
1078 __FILE__
, __FUNCTION__
,
1079 (void *) p
[0], (unsigned long) p
[1]);
1088 dprintf(stderr
, "%s, %s(): end\n\n", __FILE__
, __FUNCTION__
);
1092 #if (defined(__GLIBC__) && (__GLIBC_MINOR__ >= 4)) || defined(_WIN32)
1093 __attribute__((constructor
))
1095 __bound_main_arg(int argc
, char **argv
, char **envp
)
1097 __bound_init (0, -1);
1102 for (i
= 0; i
< argc
; i
++)
1103 tree
= splay_insert((size_t) argv
[i
], strlen (argv
[i
]) + 1, tree
);
1104 tree
= splay_insert((size_t) argv
, (argc
+ 1) * sizeof(char *), tree
);
1108 for (i
= 0; i
< argc
; i
++)
1109 dprintf(stderr
, "%s, %s(): arg %p 0x%lx\n",
1110 __FILE__
, __FUNCTION__
,
1111 argv
[i
], (unsigned long)(strlen (argv
[i
]) + 1));
1112 dprintf(stderr
, "%s, %s(): argv %p %d\n",
1113 __FILE__
, __FUNCTION__
, argv
,
1114 (int)((argc
+ 1) * sizeof(char *)));
1119 if (envp
&& *envp
) {
1124 tree
= splay_insert((size_t) *p
, strlen (*p
) + 1, tree
);
1127 tree
= splay_insert((size_t) envp
, (++p
- envp
) * sizeof(char *), tree
);
1133 dprintf(stderr
, "%s, %s(): env %p 0x%lx\n",
1134 __FILE__
, __FUNCTION__
,
1135 *p
, (unsigned long)(strlen (*p
) + 1));
1138 dprintf(stderr
, "%s, %s(): environ %p %d\n",
1139 __FILE__
, __FUNCTION__
, envp
,
1140 (int)((++p
- envp
) * sizeof(char *)));
1146 void __attribute__((destructor
)) __bound_exit(void)
1149 static const char * const alloc_type
[] = {
1150 "", "malloc", "calloc", "realloc", "memalign", "strdup"
1153 dprintf(stderr
, "%s, %s():\n", __FILE__
, __FUNCTION__
);
1156 #if !defined(_WIN32) && !defined(__APPLE__) && !defined TCC_MUSL && \
1157 !defined(__OpenBSD__) && !defined(__FreeBSD__) && !defined(__NetBSD__)
1159 extern void __libc_freeres (void);
1167 while (alloca_list
) {
1168 alloca_list_type
*next
= alloca_list
->next
;
1170 tree
= splay_delete ((size_t) alloca_list
->p
, tree
);
1171 BOUND_FREE (alloca_list
);
1175 jmp_list_type
*next
= jmp_list
->next
;
1177 BOUND_FREE (jmp_list
);
1180 for (i
= 0; i
< FREE_REUSE_SIZE
; i
++) {
1181 if (free_reuse_list
[i
]) {
1182 tree
= splay_delete ((size_t) free_reuse_list
[i
], tree
);
1183 BOUND_FREE (free_reuse_list
[i
]);
1187 if (print_heap
&& tree
->type
!= 0)
1188 fprintf (stderr
, "%s, %s(): %s found size %lu\n",
1189 __FILE__
, __FUNCTION__
, alloc_type
[tree
->type
],
1190 (unsigned long) tree
->size
);
1191 tree
= splay_delete (tree
->start
, tree
);
1194 while (tree_free_list
) {
1195 Tree
*next
= tree_free_list
->left
;
1196 BOUND_FREE (tree_free_list
);
1197 tree_free_list
= next
;
1204 TlsFree(no_checking_key
);
1206 pthread_key_delete(no_checking_key
);
1210 if (print_statistic
) {
1212 fprintf (stderr
, "bound_ptr_add_count %llu\n", bound_ptr_add_count
);
1213 fprintf (stderr
, "bound_ptr_indir1_count %llu\n", bound_ptr_indir1_count
);
1214 fprintf (stderr
, "bound_ptr_indir2_count %llu\n", bound_ptr_indir2_count
);
1215 fprintf (stderr
, "bound_ptr_indir4_count %llu\n", bound_ptr_indir4_count
);
1216 fprintf (stderr
, "bound_ptr_indir8_count %llu\n", bound_ptr_indir8_count
);
1217 fprintf (stderr
, "bound_ptr_indir12_count %llu\n", bound_ptr_indir12_count
);
1218 fprintf (stderr
, "bound_ptr_indir16_count %llu\n", bound_ptr_indir16_count
);
1219 fprintf (stderr
, "bound_local_new_count %llu\n", bound_local_new_count
);
1220 fprintf (stderr
, "bound_local_delete_count %llu\n", bound_local_delete_count
);
1221 fprintf (stderr
, "bound_malloc_count %llu\n", bound_malloc_count
);
1222 fprintf (stderr
, "bound_calloc_count %llu\n", bound_calloc_count
);
1223 fprintf (stderr
, "bound_realloc_count %llu\n", bound_realloc_count
);
1224 fprintf (stderr
, "bound_free_count %llu\n", bound_free_count
);
1225 fprintf (stderr
, "bound_memalign_count %llu\n", bound_memalign_count
);
1226 fprintf (stderr
, "bound_mmap_count %llu\n", bound_mmap_count
);
1227 fprintf (stderr
, "bound_munmap_count %llu\n", bound_munmap_count
);
1228 fprintf (stderr
, "bound_alloca_count %llu\n", bound_alloca_count
);
1229 fprintf (stderr
, "bound_setjmp_count %llu\n", bound_setjmp_count
);
1230 fprintf (stderr
, "bound_longjmp_count %llu\n", bound_longjmp_count
);
1231 fprintf (stderr
, "bound_mempcy_count %llu\n", bound_mempcy_count
);
1232 fprintf (stderr
, "bound_memcmp_count %llu\n", bound_memcmp_count
);
1233 fprintf (stderr
, "bound_memmove_count %llu\n", bound_memmove_count
);
1234 fprintf (stderr
, "bound_memset_count %llu\n", bound_memset_count
);
1235 fprintf (stderr
, "bound_strlen_count %llu\n", bound_strlen_count
);
1236 fprintf (stderr
, "bound_strcpy_count %llu\n", bound_strcpy_count
);
1237 fprintf (stderr
, "bound_strncpy_count %llu\n", bound_strncpy_count
);
1238 fprintf (stderr
, "bound_strcmp_count %llu\n", bound_strcmp_count
);
1239 fprintf (stderr
, "bound_strncmp_count %llu\n", bound_strncmp_count
);
1240 fprintf (stderr
, "bound_strcat_count %llu\n", bound_strcat_count
);
1241 fprintf (stderr
, "bound_strchr_count %llu\n", bound_strchr_count
);
1242 fprintf (stderr
, "bound_strdup_count %llu\n", bound_strdup_count
);
1243 fprintf (stderr
, "bound_not_found %llu\n", bound_not_found
);
1245 #if BOUND_STATISTIC_SPLAY
1246 fprintf (stderr
, "bound_splay %llu\n", bound_splay
);
1247 fprintf (stderr
, "bound_splay_end %llu\n", bound_splay_end
);
1248 fprintf (stderr
, "bound_splay_insert %llu\n", bound_splay_insert
);
1249 fprintf (stderr
, "bound_splay_delete %llu\n", bound_splay_delete
);
1255 #if HAVE_PTHREAD_CREATE
1257 void *(*start_routine
) (void *);
1260 } bound_thread_create_type
;
1262 static void *bound_thread_create(void *bdata
)
1264 bound_thread_create_type
*data
= (bound_thread_create_type
*) bdata
;
1267 int *p
= (int *) BOUND_MALLOC(sizeof(int));
1269 if (!p
) bound_alloc_error("bound_thread_create malloc");
1271 pthread_setspecific(no_checking_key
, p
);
1273 pthread_sigmask(SIG_SETMASK
, &data
->old_mask
, NULL
);
1274 retval
= data
->start_routine(data
->arg
);
1276 pthread_setspecific(no_checking_key
, NULL
);
1283 int pthread_create(pthread_t
*thread
, const pthread_attr_t
*attr
,
1284 void *(*start_routine
) (void *), void *arg
)
1287 bound_thread_create_type
*data
;
1292 dprintf (stderr
, "%s, %s()\n", __FILE__
, __FUNCTION__
);
1294 pthread_sigmask(SIG_SETMASK
, &mask
, &old_mask
);
1295 data
= (bound_thread_create_type
*) BOUND_MALLOC(sizeof(bound_thread_create_type
));
1296 if (!data
) bound_alloc_error("bound_thread_create malloc");
1297 data
->start_routine
= start_routine
;
1299 data
->old_mask
= old_mask
;
1300 retval
= pthread_create_redir(thread
, attr
, bound_thread_create
, data
);
1301 pthread_sigmask(SIG_SETMASK
, &old_mask
, NULL
);
1306 #if HAVE_SIGNAL || HAVE_SIGACTION
1309 bound_sig signal_handler
;
1312 void (*sig_handler
)(int);
1313 void (*sig_sigaction
)(int, siginfo_t
*, void *);
1317 static unsigned char bound_sig_used
[NSIG
];
1318 static bound_sig_type bound_sig_data
[NSIG
];
1322 static void signal_handler(int sig
)
1324 __bounds_checking(1);
1325 bound_sig_data
[sig
].signal_handler(sig
);
1326 __bounds_checking(-1);
1329 bound_sig
signal(int signum
, bound_sig handler
)
1333 dprintf (stderr
, "%s, %s() %d %p\n", __FILE__
, __FUNCTION__
,
1335 retval
= signal_redir(signum
, handler
? signal_handler
: handler
);
1336 if (retval
!= SIG_ERR
) {
1337 if (bound_sig_used
[signum
])
1338 retval
= bound_sig_data
[signum
].signal_handler
;
1340 bound_sig_used
[signum
] = 1;
1341 bound_sig_data
[signum
].signal_handler
= handler
;
1349 static void sig_handler(int sig
)
1351 __bounds_checking(1);
1352 bound_sig_data
[sig
].sig_handler(sig
);
1353 __bounds_checking(-1);
1356 static void sig_sigaction(int sig
, siginfo_t
*info
, void *ucontext
)
1358 __bounds_checking(1);
1359 bound_sig_data
[sig
].sig_sigaction(sig
, info
, ucontext
);
1360 __bounds_checking(-1);
1363 int sigaction(int signum
, const struct sigaction
*act
, struct sigaction
*oldact
)
1366 struct sigaction nact
, oact
;
1368 dprintf (stderr
, "%s, %s() %d %p %p\n", __FILE__
, __FUNCTION__
,
1369 signum
, act
, oldact
);
1372 if (nact
.sa_flags
& SA_SIGINFO
)
1373 nact
.sa_sigaction
= sig_sigaction
;
1375 nact
.sa_handler
= sig_handler
;
1376 retval
= sigaction_redir(signum
, &nact
, &oact
);
1379 retval
= sigaction_redir(signum
, act
, &oact
);
1381 if (bound_sig_used
[signum
]) {
1382 if (oact
.sa_flags
& SA_SIGINFO
)
1383 oact
.sa_sigaction
= bound_sig_data
[signum
].sig_sigaction
;
1385 oact
.sa_handler
= bound_sig_data
[signum
].sig_handler
;
1391 bound_sig_used
[signum
] = 1;
1392 if (act
->sa_flags
& SA_SIGINFO
)
1393 bound_sig_data
[signum
].sig_sigaction
= act
->sa_sigaction
;
1395 bound_sig_data
[signum
].sig_handler
= act
->sa_handler
;
1408 retval
= (*fork_redir
)();
1418 void *malloc(size_t size
)
1420 void *__bound_malloc(size_t size
, const void *caller
)
1426 /* This will catch the first dlsym call from __bound_init */
1427 if (malloc_redir
== NULL
) {
1428 __bound_init (0, -1);
1429 if (malloc_redir
== NULL
) {
1430 ptr
= &initial_pool
[pool_index
];
1431 pool_index
= (pool_index
+ size
+ 15) & ~15;
1432 if (pool_index
>= sizeof (initial_pool
))
1433 bound_alloc_error ("initial memory pool too small");
1434 dprintf (stderr
, "%s, %s(): initial %p, 0x%lx\n",
1435 __FILE__
, __FUNCTION__
, ptr
, (unsigned long)size
);
1440 /* we allocate one more byte to ensure the regions will be
1441 separated by at least one byte. With the glibc malloc, it may
1442 be in fact not necessary */
1443 ptr
= BOUND_MALLOC (size
+ 1);
1444 dprintf(stderr
, "%s, %s(): %p, 0x%lx\n",
1445 __FILE__
, __FUNCTION__
, ptr
, (unsigned long)size
);
1447 if (NO_CHECKING_GET() == 0) {
1449 INCR_COUNT(bound_malloc_count
);
1452 tree
= splay_insert ((size_t) ptr
, size
? size
: size
+ 1, tree
);
1453 if (tree
&& tree
->start
== (size_t) ptr
)
1454 tree
->type
= TCC_TYPE_MALLOC
;
1462 void *memalign(size_t size
, size_t align
)
1464 void *__bound_memalign(size_t size
, size_t align
, const void *caller
)
1470 /* we allocate one more byte to ensure the regions will be
1471 separated by at least one byte. With the glibc malloc, it may
1472 be in fact not necessary */
1473 ptr
= BOUND_MEMALIGN(size
+ 1, align
);
1476 /* XXX: handle it ? */
1479 /* we suppose that malloc aligns to at least four bytes */
1480 ptr
= BOUND_MALLOC(size
+ 1);
1483 dprintf(stderr
, "%s, %s(): %p, 0x%lx\n",
1484 __FILE__
, __FUNCTION__
, ptr
, (unsigned long)size
);
1486 if (NO_CHECKING_GET() == 0) {
1488 INCR_COUNT(bound_memalign_count
);
1491 tree
= splay_insert((size_t) ptr
, size
? size
: size
+ 1, tree
);
1492 if (tree
&& tree
->start
== (size_t) ptr
)
1493 tree
->type
= TCC_TYPE_MEMALIGN
;
1501 void free(void *ptr
)
1503 void __bound_free(void *ptr
, const void *caller
)
1506 size_t addr
= (size_t) ptr
;
1509 if (ptr
== NULL
|| tree
== NULL
1511 || ((unsigned char *) ptr
>= &initial_pool
[0] &&
1512 (unsigned char *) ptr
< &initial_pool
[sizeof(initial_pool
)])
1517 dprintf(stderr
, "%s, %s(): %p\n", __FILE__
, __FUNCTION__
, ptr
);
1519 if (NO_CHECKING_GET() == 0) {
1521 INCR_COUNT(bound_free_count
);
1522 tree
= splay (addr
, tree
);
1523 if (tree
->start
== addr
) {
1524 if (tree
->is_invalid
) {
1526 bound_error("freeing invalid region");
1529 tree
->is_invalid
= 1;
1530 memset (ptr
, 0x5a, tree
->size
);
1531 p
= free_reuse_list
[free_reuse_index
];
1532 free_reuse_list
[free_reuse_index
] = ptr
;
1533 free_reuse_index
= (free_reuse_index
+ 1) % FREE_REUSE_SIZE
;
1535 tree
= splay_delete((size_t)p
, tree
);
1544 void *realloc(void *ptr
, size_t size
)
1546 void *__bound_realloc(void *ptr
, size_t size
, const void *caller
)
1555 __bound_free(ptr
, caller
);
1560 new_ptr
= BOUND_REALLOC (ptr
, size
+ 1);
1561 dprintf(stderr
, "%s, %s(): %p, 0x%lx\n",
1562 __FILE__
, __FUNCTION__
, new_ptr
, (unsigned long)size
);
1564 if (NO_CHECKING_GET() == 0) {
1566 INCR_COUNT(bound_realloc_count
);
1569 tree
= splay_delete ((size_t) ptr
, tree
);
1571 tree
= splay_insert ((size_t) new_ptr
, size
? size
: size
+ 1, tree
);
1572 if (tree
&& tree
->start
== (size_t) new_ptr
)
1573 tree
->type
= TCC_TYPE_REALLOC
;
1581 void *calloc(size_t nmemb
, size_t size
)
1583 void *__bound_calloc(size_t nmemb
, size_t size
)
1590 /* This will catch the first dlsym call from __bound_init */
1591 if (malloc_redir
== NULL
) {
1592 __bound_init (0, -1);
1593 if (malloc_redir
== NULL
) {
1594 ptr
= &initial_pool
[pool_index
];
1595 pool_index
= (pool_index
+ size
+ 15) & ~15;
1596 if (pool_index
>= sizeof (initial_pool
))
1597 bound_alloc_error ("initial memory pool too small");
1598 dprintf (stderr
, "%s, %s(): initial %p, 0x%lx\n",
1599 __FILE__
, __FUNCTION__
, ptr
, (unsigned long)size
);
1600 memset (ptr
, 0, size
);
1605 ptr
= BOUND_MALLOC(size
+ 1);
1606 dprintf (stderr
, "%s, %s(): %p, 0x%lx\n",
1607 __FILE__
, __FUNCTION__
, ptr
, (unsigned long)size
);
1610 memset (ptr
, 0, size
);
1611 if (NO_CHECKING_GET() == 0) {
1613 INCR_COUNT(bound_calloc_count
);
1614 tree
= splay_insert ((size_t) ptr
, size
? size
: size
+ 1, tree
);
1615 if (tree
&& tree
->start
== (size_t) ptr
)
1616 tree
->type
= TCC_TYPE_CALLOC
;
1623 #if !defined(_WIN32)
1624 void *__bound_mmap (void *start
, size_t size
, int prot
,
1625 int flags
, int fd
, off_t offset
)
1629 dprintf(stderr
, "%s, %s(): %p, 0x%lx\n",
1630 __FILE__
, __FUNCTION__
, start
, (unsigned long)size
);
1631 result
= mmap (start
, size
, prot
, flags
, fd
, offset
);
1632 if (result
&& NO_CHECKING_GET() == 0) {
1634 INCR_COUNT(bound_mmap_count
);
1635 tree
= splay_insert((size_t)result
, size
, tree
);
1641 int __bound_munmap (void *start
, size_t size
)
1645 dprintf(stderr
, "%s, %s(): %p, 0x%lx\n",
1646 __FILE__
, __FUNCTION__
, start
, (unsigned long)size
);
1647 if (start
&& NO_CHECKING_GET() == 0) {
1649 INCR_COUNT(bound_munmap_count
);
1650 tree
= splay_delete ((size_t) start
, tree
);
1653 result
= munmap (start
, size
);
1658 /* some useful checked functions */
1660 /* check that (p ... p + size - 1) lies inside 'p' region, if any */
1661 static void __bound_check(const void *p
, size_t size
, const char *function
)
1663 if (size
!= 0 && __bound_ptr_add((void *)p
, size
) == INVALID_POINTER
) {
1664 bound_error("invalid pointer %p, size 0x%lx in %s",
1665 p
, (unsigned long)size
, function
);
1669 static int check_overlap (const void *p1
, size_t n1
,
1670 const void *p2
, size_t n2
,
1671 const char *function
)
1673 const void *p1e
= (const void *) ((const char *) p1
+ n1
);
1674 const void *p2e
= (const void *) ((const char *) p2
+ n2
);
1676 if (NO_CHECKING_GET() == 0 && n1
!= 0 && n2
!=0 &&
1677 ((p1
<= p2
&& p1e
> p2
) || /* p1----p2====p1e----p2e */
1678 (p2
<= p1
&& p2e
> p1
))) { /* p2----p1====p2e----p1e */
1679 bound_error("overlapping regions %p(0x%lx), %p(0x%lx) in %s",
1680 p1
, (unsigned long)n1
, p2
, (unsigned long)n2
, function
);
1681 return never_fatal
< 0;
1686 void *__bound_memcpy(void *dest
, const void *src
, size_t n
)
1688 dprintf(stderr
, "%s, %s(): %p, %p, 0x%lx\n",
1689 __FILE__
, __FUNCTION__
, dest
, src
, (unsigned long)n
);
1690 INCR_COUNT(bound_mempcy_count
);
1691 __bound_check(dest
, n
, "memcpy dest");
1692 __bound_check(src
, n
, "memcpy src");
1693 if (check_overlap(dest
, n
, src
, n
, "memcpy"))
1695 return memcpy(dest
, src
, n
);
1698 int __bound_memcmp(const void *s1
, const void *s2
, size_t n
)
1700 const unsigned char *u1
= (const unsigned char *) s1
;
1701 const unsigned char *u2
= (const unsigned char *) s2
;
1704 dprintf(stderr
, "%s, %s(): %p, %p, 0x%lx\n",
1705 __FILE__
, __FUNCTION__
, s1
, s2
, (unsigned long)n
);
1706 INCR_COUNT(bound_memcmp_count
);
1708 if ((ssize_t
) --n
== -1)
1710 else if (*u1
!= *u2
) {
1711 retval
= *u1
++ - *u2
++;
1717 __bound_check(s1
, (const void *)u1
- s1
, "memcmp s1");
1718 __bound_check(s2
, (const void *)u2
- s2
, "memcmp s2");
1722 void *__bound_memmove(void *dest
, const void *src
, size_t n
)
1724 dprintf(stderr
, "%s, %s(): %p, %p, 0x%lx\n",
1725 __FILE__
, __FUNCTION__
, dest
, src
, (unsigned long)n
);
1726 INCR_COUNT(bound_memmove_count
);
1727 __bound_check(dest
, n
, "memmove dest");
1728 __bound_check(src
, n
, "memmove src");
1729 return memmove(dest
, src
, n
);
1732 void *__bound_memset(void *s
, int c
, size_t n
)
1734 dprintf(stderr
, "%s, %s(): %p, %d, 0x%lx\n",
1735 __FILE__
, __FUNCTION__
, s
, c
, (unsigned long)n
);
1736 INCR_COUNT(bound_memset_count
);
1737 __bound_check(s
, n
, "memset");
1738 return memset(s
, c
, n
);
#if defined(__arm__) && defined(__ARM_EABI__)
/* ARM EABI run-time helper wrappers: same checks as the plain mem* wrappers
   above, forwarding to the __aeabi_* helpers the compiler emits calls to.
   NOTE(review): braces, overlap early-returns and the section-closing #endif
   were lost in extraction; restored — verify against upstream tcc
   lib/bcheck.c. */
void *__bound___aeabi_memcpy(void *dest, const void *src, size_t n)
{
    dprintf(stderr, "%s, %s(): %p, %p, 0x%lx\n",
            __FILE__, __FUNCTION__, dest, src, (unsigned long)n);
    INCR_COUNT(bound_mempcy_count);
    __bound_check(dest, n, "memcpy dest");
    __bound_check(src, n, "memcpy src");
    if (check_overlap(dest, n, src, n, "memcpy"))
        return dest;
    return __aeabi_memcpy(dest, src, n);
}

void *__bound___aeabi_memmove(void *dest, const void *src, size_t n)
{
    dprintf(stderr, "%s, %s(): %p, %p, 0x%lx\n",
            __FILE__, __FUNCTION__, dest, src, (unsigned long)n);
    INCR_COUNT(bound_memmove_count);
    __bound_check(dest, n, "memmove dest");
    __bound_check(src, n, "memmove src");
    return __aeabi_memmove(dest, src, n);
}

/* 4-byte-aligned variant */
void *__bound___aeabi_memmove4(void *dest, const void *src, size_t n)
{
    dprintf(stderr, "%s, %s(): %p, %p, 0x%lx\n",
            __FILE__, __FUNCTION__, dest, src, (unsigned long)n);
    INCR_COUNT(bound_memmove_count);
    __bound_check(dest, n, "memmove dest");
    __bound_check(src, n, "memmove src");
    return __aeabi_memmove4(dest, src, n);
}

/* 8-byte-aligned variant */
void *__bound___aeabi_memmove8(void *dest, const void *src, size_t n)
{
    dprintf(stderr, "%s, %s(): %p, %p, 0x%lx\n",
            __FILE__, __FUNCTION__, dest, src, (unsigned long)n);
    INCR_COUNT(bound_memmove_count);
    __bound_check(dest, n, "memmove dest");
    __bound_check(src, n, "memmove src");
    return __aeabi_memmove8(dest, src, n);
}

void *__bound___aeabi_memset(void *s, int c, size_t n)
{
    dprintf(stderr, "%s, %s(): %p, %d, 0x%lx\n",
            __FILE__, __FUNCTION__, s, c, (unsigned long)n);
    INCR_COUNT(bound_memset_count);
    __bound_check(s, n, "memset");
    return __aeabi_memset(s, c, n);
}
#endif /* __arm__ && __ARM_EABI__ */
1794 int __bound_strlen(const char *s
)
1798 dprintf(stderr
, "%s, %s(): %p\n",
1799 __FILE__
, __FUNCTION__
, s
);
1800 INCR_COUNT(bound_strlen_count
);
1802 __bound_check(s
, p
- s
, "strlen");
1806 char *__bound_strcpy(char *dest
, const char *src
)
1809 const char *p
= src
;
1811 dprintf(stderr
, "%s, %s(): %p, %p\n",
1812 __FILE__
, __FUNCTION__
, dest
, src
);
1813 INCR_COUNT(bound_strcpy_count
);
1816 __bound_check(dest
, len
, "strcpy dest");
1817 __bound_check(src
, len
, "strcpy src");
1818 if (check_overlap(dest
, len
, src
, len
, "strcpy"))
1820 return strcpy (dest
, src
);
1823 char *__bound_strncpy(char *dest
, const char *src
, size_t n
)
1826 const char *p
= src
;
1828 dprintf(stderr
, "%s, %s(): %p, %p, 0x%lx\n",
1829 __FILE__
, __FUNCTION__
, dest
, src
, (unsigned long)n
);
1830 INCR_COUNT(bound_strncpy_count
);
1831 while (len
-- && *p
++);
1833 __bound_check(dest
, len
, "strncpy dest");
1834 __bound_check(src
, len
, "strncpy src");
1835 if (check_overlap(dest
, len
, src
, len
, "strncpy"))
1837 return strncpy(dest
, src
, n
);
1840 int __bound_strcmp(const char *s1
, const char *s2
)
1842 const unsigned char *u1
= (const unsigned char *) s1
;
1843 const unsigned char *u2
= (const unsigned char *) s2
;
1845 dprintf(stderr
, "%s, %s(): %p, %p\n",
1846 __FILE__
, __FUNCTION__
, s1
, s2
);
1847 INCR_COUNT(bound_strcmp_count
);
1848 while (*u1
&& *u1
== *u2
) {
1852 __bound_check(s1
, ((const char *)u1
- s1
) + 1, "strcmp s1");
1853 __bound_check(s2
, ((const char *)u2
- s2
) + 1, "strcmp s2");
1857 int __bound_strncmp(const char *s1
, const char *s2
, size_t n
)
1859 const unsigned char *u1
= (const unsigned char *) s1
;
1860 const unsigned char *u2
= (const unsigned char *) s2
;
1863 dprintf(stderr
, "%s, %s(): %p, %p, 0x%lx\n",
1864 __FILE__
, __FUNCTION__
, s1
, s2
, (unsigned long)n
);
1865 INCR_COUNT(bound_strncmp_count
);
1867 if ((ssize_t
) --n
== -1)
1869 else if (*u1
!= *u2
) {
1870 retval
= *u1
++ - *u2
++;
1875 __bound_check(s1
, (const char *)u1
- s1
, "strncmp s1");
1876 __bound_check(s2
, (const char *)u2
- s2
, "strncmp s2");
1880 char *__bound_strcat(char *dest
, const char *src
)
1883 const char *s
= src
;
1885 dprintf(stderr
, "%s, %s(): %p, %p\n",
1886 __FILE__
, __FUNCTION__
, dest
, src
);
1887 INCR_COUNT(bound_strcat_count
);
1890 __bound_check(r
, (dest
- r
) + (src
- s
) - 1, "strcat dest");
1891 __bound_check(s
, src
- s
, "strcat src");
1892 if (check_overlap(r
, (dest
- r
) + (src
- s
) - 1, s
, src
- s
, "strcat"))
1894 return strcat(r
, s
);
1897 char *__bound_strchr(const char *s
, int c
)
1899 const unsigned char *str
= (const unsigned char *) s
;
1900 unsigned char ch
= c
;
1902 dprintf(stderr
, "%s, %s(): %p, %d\n",
1903 __FILE__
, __FUNCTION__
, s
, ch
);
1904 INCR_COUNT(bound_strchr_count
);
1910 __bound_check(s
, ((const char *)str
- s
) + 1, "strchr");
1911 return *str
== ch
? (char *) str
: NULL
;
1914 char *__bound_strdup(const char *s
)
1919 INCR_COUNT(bound_strdup_count
);
1921 __bound_check(s
, p
- s
, "strdup");
1922 new = BOUND_MALLOC ((p
- s
) + 1);
1923 dprintf(stderr
, "%s, %s(): %p, 0x%lx\n",
1924 __FILE__
, __FUNCTION__
, new, (unsigned long)(p
-s
));
1926 if (NO_CHECKING_GET() == 0 && no_strdup
== 0) {
1928 tree
= splay_insert((size_t)new, p
- s
, tree
);
1929 if (tree
&& tree
->start
== (size_t) new)
1930 tree
->type
= TCC_TYPE_STRDUP
;
1933 memcpy (new, s
, p
- s
);
1939 An implementation of top-down splaying with sizes
1940 D. Sleator <sleator@cs.cmu.edu>, January 1994.
1942 This extends top-down-splay.c to maintain a size field in each node.
1943 This is the number of nodes in the subtree rooted there. This makes
1944 it possible to efficiently compute the rank of a key. (The rank is
1945 the number of nodes to the left of the given key.) It is also
1946 possible to quickly find the node of a given rank. Both of these
1947 operations are illustrated in the code below. The remainder of this
1948 introduction is taken from top-down-splay.c.
1950 "Splay trees", or "self-adjusting search trees" are a simple and
1951 efficient data structure for storing an ordered set. The data
1952 structure consists of a binary tree, with no additional fields. It
1953 allows searching, insertion, deletion, deletemin, deletemax,
1954 splitting, joining, and many other operations, all with amortized
1955 logarithmic performance. Since the trees adapt to the sequence of
1956 requests, their performance on real access patterns is typically even
1957 better. Splay trees are described in a number of texts and papers
1960 The code here is adapted from simple top-down splay, at the bottom of
1961 page 669 of [2]. It can be obtained via anonymous ftp from
1962 spade.pc.cs.cmu.edu in directory /usr/sleator/public.
1964 The chief modification here is that the splay operation works even if the
1965 item being splayed is not in the tree, and even if the tree root of the
1966 tree is NULL. So the line:
1970 causes it to search for item with key i in the tree rooted at t. If it's
1971 there, it is splayed to the root. If it isn't there, then the node put
1972 at the root is the last one before NULL that would have been reached in a
1973 normal binary search for i. (It's a neighbor of i in the tree.) This
1974 allows many other operations to be easily implemented, as shown below.
1976 [1] "Data Structures and Their Algorithms", Lewis and Denenberg,
1977 Harper Collins, 1991, pp 243-251.
1978 [2] "Self-adjusting Binary Search Trees" Sleator and Tarjan,
1979 JACM Volume 32, No 3, July 1985, pp 652-686.
1980 [3] "Data Structure and Algorithm Analysis", Mark Weiss,
1981 Benjamin Cummins, 1992, pp 119-130.
1982 [4] "Data Structures, Algorithms, and Performance", Derick Wood,
1983 Addison-Wesley, 1993, pp 367-375
1986 /* Code adapted for tcc */
1988 #define compare(start,tstart,tsize) (start < tstart ? -1 : \
1989 start >= tstart+tsize ? 1 : 0)
1991 static Tree
* splay (size_t addr
, Tree
*t
)
1992 /* Splay using the key start (which may or may not be in the tree.) */
1993 /* The starting root is t, and the tree used is defined by rat */
1998 INCR_COUNT_SPLAY(bound_splay
);
1999 if (t
== NULL
) return t
;
2000 N
.left
= N
.right
= NULL
;
2004 comp
= compare(addr
, t
->start
, t
->size
);
2007 if (y
== NULL
) break;
2008 if (compare(addr
, y
->start
, y
->size
) < 0) {
2009 t
->left
= y
->right
; /* rotate right */
2012 if (t
->left
== NULL
) break;
2014 r
->left
= t
; /* link right */
2017 } else if (comp
> 0) {
2019 if (y
== NULL
) break;
2020 if (compare(addr
, y
->start
, y
->size
) > 0) {
2021 t
->right
= y
->left
; /* rotate left */
2024 if (t
->right
== NULL
) break;
2026 l
->right
= t
; /* link left */
2033 l
->right
= t
->left
; /* assemble */
2041 #define compare_end(start,tend) (start < tend ? -1 : \
2042 start > tend ? 1 : 0)
2044 static Tree
* splay_end (size_t addr
, Tree
*t
)
2045 /* Splay using the key start (which may or may not be in the tree.) */
2046 /* The starting root is t, and the tree used is defined by rat */
2051 INCR_COUNT_SPLAY(bound_splay_end
);
2052 if (t
== NULL
) return t
;
2053 N
.left
= N
.right
= NULL
;
2057 comp
= compare_end(addr
, t
->start
+ t
->size
);
2060 if (y
== NULL
) break;
2061 if (compare_end(addr
, y
->start
+ y
->size
) < 0) {
2062 t
->left
= y
->right
; /* rotate right */
2065 if (t
->left
== NULL
) break;
2067 r
->left
= t
; /* link right */
2070 } else if (comp
> 0) {
2072 if (y
== NULL
) break;
2073 if (compare_end(addr
, y
->start
+ y
->size
) > 0) {
2074 t
->right
= y
->left
; /* rotate left */
2077 if (t
->right
== NULL
) break;
2079 l
->right
= t
; /* link left */
2086 l
->right
= t
->left
; /* assemble */
2094 static Tree
* splay_insert(size_t addr
, size_t size
, Tree
* t
)
2095 /* Insert key start into the tree t, if it is not already there. */
2096 /* Return a pointer to the resulting tree. */
2100 INCR_COUNT_SPLAY(bound_splay_insert
);
2103 if (compare(addr
, t
->start
, t
->size
)==0) {
2104 return t
; /* it's already there */
2108 if (tree_free_list
) {
2109 new = tree_free_list
;
2110 tree_free_list
= new->left
;
2115 new = (Tree
*) BOUND_MALLOC (sizeof (Tree
));
2118 bound_alloc_error("not enough memory for bound checking code");
2122 new->left
= new->right
= NULL
;
2123 } else if (compare(addr
, t
->start
, t
->size
) < 0) {
2124 new->left
= t
->left
;
2128 new->right
= t
->right
;
2134 new->type
= TCC_TYPE_NONE
;
2135 new->is_invalid
= 0;
2140 #define compare_destroy(start,tstart) (start < tstart ? -1 : \
2141 start > tstart ? 1 : 0)
2143 static Tree
* splay_delete(size_t addr
, Tree
*t
)
2144 /* Deletes addr from the tree if it's there. */
2145 /* Return a pointer to the resulting tree. */
2149 INCR_COUNT_SPLAY(bound_splay_delete
);
2150 if (t
==NULL
) return NULL
;
2152 if (compare_destroy(addr
, t
->start
) == 0) { /* found it */
2153 if (t
->left
== NULL
) {
2156 x
= splay(addr
, t
->left
);
2157 x
->right
= t
->right
;
2160 t
->left
= tree_free_list
;
2167 return t
; /* It wasn't there */
2171 void splay_printtree(Tree
* t
, int d
)
2174 if (t
== NULL
) return;
2175 splay_printtree(t
->right
, d
+1);
2176 for (i
=0; i
<d
; i
++) fprintf(stderr
," ");
2177 fprintf(stderr
,"%p(0x%lx:%u:%u)\n",
2178 (void *) t
->start
, (unsigned long) t
->size
,
2179 (unsigned)t
->type
, (unsigned)t
->is_invalid
);
2180 splay_printtree(t
->left
, d
+1);