/*
 *  Tiny C Memory and bounds checker
 *
 *  Copyright (c) 2002 Fabrice Bellard
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
#include <stdlib.h>
#include <stdio.h>
#include <stdarg.h>
#include <string.h>

#if !defined(__FreeBSD__) \
    && !defined(__FreeBSD_kernel__) \
    && !defined(__DragonFly__) \
    && !defined(__OpenBSD__) \
    && !defined(__NetBSD__)
#include <malloc.h>
#endif

#if !defined(_WIN32)
#include <unistd.h>
#endif

#define BOUND_DEBUG
#define BOUND_STATISTIC

#ifdef BOUND_DEBUG
 #define dprintf(a...) if (print_calls) fprintf(a)
#else
 #define dprintf(a...)
#endif
/* Check memalign */
#define HAVE_MEMALIGN

#if defined(__FreeBSD__) \
    || defined(__FreeBSD_kernel__) \
    || defined(__DragonFly__) \
    || defined(__OpenBSD__) \
    || defined(__NetBSD__) \
    || defined(__dietlibc__)
#undef HAVE_MEMALIGN
#define INIT_SEM()
#define EXIT_SEM()
#define WAIT_SEM()
#define POST_SEM()
#define HAS_ENVIRON 0
#define MALLOC_REDIR (0)
#elif defined(_WIN32)
#include <windows.h>
#undef HAVE_MEMALIGN
static CRITICAL_SECTION bounds_sem;
#define INIT_SEM() InitializeCriticalSection(&bounds_sem)
#define EXIT_SEM() DeleteCriticalSection(&bounds_sem)
#define WAIT_SEM() EnterCriticalSection(&bounds_sem)
#define POST_SEM() LeaveCriticalSection(&bounds_sem)
#define HAS_ENVIRON 0
#define MALLOC_REDIR (0)
#else
#include <sys/mman.h>
#include <errno.h>
#include <semaphore.h>
static sem_t bounds_sem;
#define INIT_SEM() sem_init (&bounds_sem, 0, 1)
#define EXIT_SEM() sem_destroy (&bounds_sem)
#define WAIT_SEM() while (sem_wait (&bounds_sem) < 0 && errno == EINTR);
#define POST_SEM() sem_post (&bounds_sem)
#define HAS_ENVIRON 0 /* Disabled for now */
#define __USE_GNU /* get RTLD_NEXT */
#include <dlfcn.h>
#define MALLOC_REDIR (1)
static void *(*malloc_redir) (size_t) = NULL;
static void *(*calloc_redir) (size_t, size_t) = NULL;
static void (*free_redir) (void *) = NULL;
static void *(*realloc_redir) (void *, size_t) = NULL;
static void *(*memalign_redir) (size_t, size_t) = NULL;
static int pool_index = 0;
static unsigned char initial_pool[256];
#endif

#define TCC_TYPE_NONE (0)
#define TCC_TYPE_MALLOC (1)
#define TCC_TYPE_CALLOC (2)
#define TCC_TYPE_REALLOC (3)
#define TCC_TYPE_MEMALIGN (4)
#define TCC_TYPE_STRDUP (5)

/* this pointer is generated when bound check is incorrect */
#define INVALID_POINTER ((void *)(-2))

typedef struct tree_node Tree;
struct tree_node {
    Tree * left, * right;
    size_t start;
    size_t size;
    size_t type;
    size_t is_invalid; /* true if pointers outside region are invalid */
};

typedef struct alloca_list_struct {
    size_t fp;
    void *p;
    struct alloca_list_struct *next;
} alloca_list_type;

static Tree * splay (size_t addr, Tree *t);
static Tree * splay_end (size_t addr, Tree *t);
static Tree * splay_insert(size_t addr, size_t size, Tree * t);
static Tree * splay_delete(size_t addr, Tree *t);
void splay_printtree(Tree * t, int d);

/* external interface */
void __bound_init(void);
void __bound_exit(void);

#ifdef __attribute__
/* an __attribute__ macro is defined in the system headers */
#undef __attribute__
#endif
#define FASTCALL __attribute__((regparm(3)))

#if !MALLOC_REDIR
void *__bound_malloc(size_t size, const void *caller);
void *__bound_memalign(size_t size, size_t align, const void *caller);
void __bound_free(void *ptr, const void *caller);
void *__bound_realloc(void *ptr, size_t size, const void *caller);
void *__bound_calloc(size_t nmemb, size_t size);
#endif

#define FREE_REUSE_SIZE (100)
static int free_reuse_index = 0;
static void *free_reuse_list[FREE_REUSE_SIZE];

/* error message, just for TCC */
const char *__bound_error_msg;

/* runtime error output */
extern void rt_error(size_t pc, const char *fmt, ...);

static Tree *tree = NULL;
#define TREE_REUSE (1)
#if TREE_REUSE
static Tree *tree_free_list = NULL;
#endif
static alloca_list_type *alloca_list = NULL;

static int inited = 0;
static int print_calls = 0;
static int print_heap = 0;
static int print_statistic = 0;
static int never_fatal = 0;
static int no_checking = 0;

#ifdef BOUND_STATISTIC
static unsigned long long bound_ptr_add_count;
static unsigned long long bound_ptr_indir1_count;
static unsigned long long bound_ptr_indir2_count;
static unsigned long long bound_ptr_indir4_count;
static unsigned long long bound_ptr_indir8_count;
static unsigned long long bound_ptr_indir12_count;
static unsigned long long bound_ptr_indir16_count;
static unsigned long long bound_local_new_count;
static unsigned long long bound_local_delete_count;
static unsigned long long bound_malloc_count;
static unsigned long long bound_calloc_count;
static unsigned long long bound_realloc_count;
static unsigned long long bound_free_count;
static unsigned long long bound_memalign_count;
static unsigned long long bound_mmap_count;
static unsigned long long bound_munmap_count;
static unsigned long long bound_alloca_count;
static unsigned long long bound_mempcy_count;
static unsigned long long bound_memcmp_count;
static unsigned long long bound_memmove_count;
static unsigned long long bound_memset_count;
static unsigned long long bound_strlen_count;
static unsigned long long bound_strcpy_count;
static unsigned long long bound_strncpy_count;
static unsigned long long bound_strcmp_count;
static unsigned long long bound_strncmp_count;
static unsigned long long bound_strcat_count;
static unsigned long long bound_strchr_count;
static unsigned long long bound_strdup_count;
#define INCR_COUNT(x) x++
#else
#define INCR_COUNT(x)
#endif

/* enable/disable checking. This can be used for signal handlers. */
void __bound_checking (int no_check)
{
    no_checking = no_check;
}

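/* Illustrative sketch (not compiled, not part of the checker): how a
   signal handler might use __bound_checking() to suspend the checks while
   it runs, as suggested by the comment above. The handler name and the
   choice of signal are hypothetical. */
#if 0
#include <signal.h>

static void example_sigint_handler(int sig)
{
    __bound_checking(1);    /* disable checks inside the handler */
    /* ... async-signal work ... */
    __bound_checking(0);    /* re-enable checks before returning */
    (void)sig;
}
#endif
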
#define no_FASTCALL
//#define no_checking 1

/* print a bound error message */
static void bound_error(const char *fmt, ...)
{
    __bound_error_msg = fmt;
    fprintf(stderr,"%s %s: %s\n", __FILE__, __FUNCTION__, fmt);
    if (never_fatal == 0)
        *(void **)0 = 0; /* force a runtime error */
}

static void bound_alloc_error(void)
{
    bound_error("not enough memory for bound checking code");
}

/* return '(p + offset)' for pointer arithmetic (a pointer can reach
   the end of a region in this case) */
void * no_FASTCALL __bound_ptr_add(void *p, size_t offset)
{
    size_t addr = (size_t)p;

    if (no_checking) {
        return p + offset;
    }

    dprintf(stderr, "%s %s : %p 0x%x\n",
            __FILE__, __FUNCTION__, p, (unsigned)offset);

    WAIT_SEM ();
    INCR_COUNT(bound_ptr_add_count);
    if (tree) {
        addr -= tree->start;
        if (addr >= tree->size) {
            addr = (size_t)p;
            tree = splay (addr, tree);
            addr -= tree->start;
        }
        if (addr >= tree->size) {
            addr = (size_t)p;
            tree = splay_end (addr, tree);
            addr -= tree->start;
        }
        if (addr <= tree->size) {
            addr += offset;
            if (tree->is_invalid || addr > tree->size) {
#if 0
                fprintf(stderr,"%s %s : %p is outside of the region\n",
                        __FILE__, __FUNCTION__, p + offset);
#endif
                if (never_fatal == 0) {
                    POST_SEM ();
                    return INVALID_POINTER; /* return an invalid pointer */
                }
            }
        }
    }
    POST_SEM ();
    return p + offset;
}

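/* Illustrative sketch (not compiled): when tcc compiles a program with
   bounds checking (-b), pointer arithmetic is, roughly, turned into calls
   like the ones below. The result may point at most one element past the
   end of the registered region. Names and sizes are hypothetical. */
#if 0
static void example_ptr_add_usage(void)
{
    int *tab = malloc(10 * sizeof(int));   /* region registered by the
                                              redirected malloc above */
    int *q;

    q = __bound_ptr_add(tab, 10 * sizeof(int)); /* one past the end: accepted */
    q = __bound_ptr_add(tab, 11 * sizeof(int)); /* out of range: INVALID_POINTER
                                                   is returned when never_fatal
                                                   is 0 */
    (void)q;
    free(tab);
}
#endif
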
/* return '(p + offset)' for pointer indirection (the resulting pointer
   must be strictly inside the region) */
#define BOUND_PTR_INDIR(dsize) \
void * no_FASTCALL __bound_ptr_indir ## dsize (void *p, size_t offset) \
{ \
    size_t addr = (size_t)p; \
 \
    if (no_checking) { \
        return p + offset; \
    } \
    dprintf(stderr, "%s %s : %p 0x%x start\n", \
            __FILE__, __FUNCTION__, p, (unsigned)offset); \
    WAIT_SEM (); \
    INCR_COUNT(bound_ptr_indir ## dsize ## _count); \
    if (tree) { \
        addr -= tree->start; \
        if (addr >= tree->size) { \
            addr = (size_t)p; \
            tree = splay (addr, tree); \
            addr -= tree->start; \
        } \
        if (addr >= tree->size) { \
            addr = (size_t)p; \
            tree = splay_end (addr, tree); \
            addr -= tree->start; \
        } \
        if (addr <= tree->size) { \
            addr += offset + dsize; \
            if (tree->is_invalid || addr > tree->size) { \
                fprintf(stderr,"%s %s : %p is outside of the region\n", \
                        __FILE__, __FUNCTION__, p + offset); \
                if (never_fatal == 0) { \
                    POST_SEM (); \
                    return INVALID_POINTER; /* return an invalid pointer */ \
                } \
            } \
        } \
    } \
    POST_SEM (); \
    return p + offset; \
}

BOUND_PTR_INDIR(1)
BOUND_PTR_INDIR(2)
BOUND_PTR_INDIR(4)
BOUND_PTR_INDIR(8)
BOUND_PTR_INDIR(12)
BOUND_PTR_INDIR(16)

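/* Illustrative sketch (not compiled): a dereference of size 'dsize' goes
   through the matching __bound_ptr_indirN helper, which requires the whole
   accessed object to end inside the region. The names below are
   hypothetical, and the rewrite shown is only approximate. */
#if 0
static int example_indir_usage(int *tab, size_t i)
{
    /* roughly what 'return tab[i];' becomes under -b: the 4-byte access
       is validated before the load; an out-of-range index yields
       INVALID_POINTER (when never_fatal is 0) and the load then faults */
    return *(int *)__bound_ptr_indir4(tab, i * sizeof(int));
}
#endif
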
#if defined(__GNUC__) && (__GNUC__ >= 6)
/*
 * At least gcc 6.2 complains when __builtin_frame_address is used with
 * nonzero argument.
 */
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wframe-address"
#endif

/* return the frame pointer of the caller */
#define GET_CALLER_FP(fp)\
{\
    fp = (size_t)__builtin_frame_address(1);\
}

/* called when entering a function to add all the local regions */
void FASTCALL __bound_local_new(void *p1)
{
    size_t addr, size, fp, *p = p1;

    if (no_checking)
        return;
    GET_CALLER_FP(fp);
    dprintf(stderr, "%s, %s local new p1=%p fp=%p\n",
            __FILE__, __FUNCTION__, p, (void *)fp);
    WAIT_SEM ();
    for(;;) {
        addr = p[0];
        if (addr == 0)
            break;
        INCR_COUNT(bound_local_new_count);
        if (addr == 1) {
            dprintf(stderr, "%s, %s() alloca/vla used\n",
                    __FILE__, __FUNCTION__);
        }
        else {
            addr += fp;
            size = p[1];
            dprintf(stderr, "%s, %s() (%p 0x%lx)\n",
                    __FILE__, __FUNCTION__, (void *) addr, (unsigned long) size);
            tree = splay_insert(addr, size, tree);
        }
        p += 2;
    }
    POST_SEM ();
}

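/* Illustrative sketch (not compiled): the layout of the table passed in
   'p1', as implied by the loop above -- (offset-from-frame-pointer, size)
   pairs terminated by a 0 entry, with an offset of 1 marking a function
   that uses alloca/VLAs. The concrete offsets and sizes are hypothetical. */
#if 0
static size_t example_local_bounds_table[] = {
    (size_t)-48, 40,    /* a 40-byte local region at fp - 48 */
    (size_t)-96, 16,    /* a 16-byte local region at fp - 96 */
    1, 0,               /* marker: this function uses alloca/vla */
    0                   /* end of table */
};
/* On function entry the generated code would call
   __bound_local_new(example_local_bounds_table); on exit,
   __bound_local_delete() with the same table. */
#endif
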
/* called when leaving a function to delete all the local regions */
void FASTCALL __bound_local_delete(void *p1)
{
    size_t addr, fp, *p = p1;

    if (no_checking)
        return;
    GET_CALLER_FP(fp);
    dprintf(stderr, "%s, %s local delete p1=%p fp=%p\n",
            __FILE__, __FUNCTION__, p, (void *)fp);
    WAIT_SEM ();
    for(;;) {
        addr = p[0];
        if (addr == 0)
            break;
        INCR_COUNT(bound_local_delete_count);
        if (addr == 1) {
            while (alloca_list && alloca_list->fp == fp) {
                alloca_list_type *next = alloca_list->next;

                dprintf(stderr, "%s, %s() remove alloca/vla %p\n",
                        __FILE__, __FUNCTION__, alloca_list->p);
                tree = splay_delete ((size_t) alloca_list->p, tree);
#if MALLOC_REDIR
                free_redir (alloca_list);
#else
                free (alloca_list);
#endif
                alloca_list = next;
            }
        }
        else {
            addr += fp;
            dprintf(stderr, "%s, %s() (%p 0x%lx)\n",
                    __FILE__, __FUNCTION__, (void *) addr, (unsigned long) p[1]);
            tree = splay_delete(addr, tree);
        }
        p += 2;
    }
    POST_SEM ();
}

void __bound_init(void)
{
    if (inited)
        return;

    inited = 1;

    print_calls = getenv ("TCC_BOUNDS_PRINT_CALLS") != NULL;
    print_heap = getenv ("TCC_BOUNDS_PRINT_HEAP") != NULL;
    print_statistic = getenv ("TCC_BOUNDS_PRINT_STATISTIC") != NULL;
    never_fatal = getenv ("TCC_BOUNDS_NEVER_FATAL") != NULL;

    dprintf(stderr, "%s, %s() start\n", __FILE__, __FUNCTION__);

    INIT_SEM ();

#if MALLOC_REDIR
    {
        void *addr = RTLD_NEXT;

        /* tcc -run required RTLD_DEFAULT. Normal usage requires RTLD_NEXT */
        *(void **) (&malloc_redir) = dlsym (addr, "malloc");
        if (malloc_redir == NULL) {
            dprintf(stderr, "%s, %s() use RTLD_DEFAULT\n",
                    __FILE__, __FUNCTION__);
            addr = RTLD_DEFAULT;
            *(void **) (&malloc_redir) = dlsym (addr, "malloc");
        }
        *(void **) (&calloc_redir) = dlsym (addr, "calloc");
        *(void **) (&free_redir) = dlsym (addr, "free");
        *(void **) (&realloc_redir) = dlsym (addr, "realloc");
        *(void **) (&memalign_redir) = dlsym (addr, "memalign");
        dprintf(stderr, "%s, %s() malloc_redir %p\n",
                __FILE__, __FUNCTION__, malloc_redir);
        dprintf(stderr, "%s, %s() free_redir %p\n",
                __FILE__, __FUNCTION__, free_redir);
        dprintf(stderr, "%s, %s() realloc_redir %p\n",
                __FILE__, __FUNCTION__, realloc_redir);
        dprintf(stderr, "%s, %s() memalign_redir %p\n",
                __FILE__, __FUNCTION__, memalign_redir);
    }
#endif

    tree = NULL;

    /* save malloc hooks and install bound check hooks */
    memset (free_reuse_list, 0, sizeof (free_reuse_list));

    dprintf(stderr, "%s, %s() end\n\n", __FILE__, __FUNCTION__);
}

void __bounds_add_static_var (size_t *p)
{
    dprintf(stderr, "%s, %s()\n", __FILE__, __FUNCTION__);
    /* add all static bound check values */
    WAIT_SEM ();
    while (p[0] != 0) {
        dprintf(stderr, "%s, %s() (%p 0x%lx)\n",
                __FILE__, __FUNCTION__, (void *) p[0], (unsigned long) p[1]);
        tree = splay_insert(p[0], p[1], tree);
        p += 2;
    }
    POST_SEM ();
}

void __bound_main_arg(char **p)
{
    char *start = (char *) p;

    WAIT_SEM ();
    while (*p) {
        dprintf(stderr, "%s, %s() (%p 0x%lx)\n",
                __FILE__, __FUNCTION__, *p, (unsigned long)(strlen (*p) + 1));
        tree = splay_insert((size_t) *p, strlen (*p) + 1, tree);
        p++;
    }
    dprintf(stderr, "%s, %s() argv (%p 0x%lx)\n",
            __FILE__, __FUNCTION__, start, (unsigned long)((char *) p - start));
    tree = splay_insert((size_t) start, (char *) p - start, tree);

#if HAS_ENVIRON
    {
        extern char **environ;

        p = environ;
        start = (char *) p;
        while (*p) {
            dprintf(stderr, "%s, %s() (%p 0x%lx)\n",
                    __FILE__, __FUNCTION__, *p, (unsigned long)(strlen (*p) + 1));
            tree = splay_insert((size_t) *p, strlen (*p) + 1, tree);
            p++;
        }
        dprintf(stderr, "%s, %s() environ(%p 0x%lx)\n",
                __FILE__, __FUNCTION__, start, (unsigned long)((char *) p - start));
        tree = splay_insert((size_t) start, (char *) p - start, tree);
    }
#endif
    POST_SEM ();
}

void __attribute__((destructor)) __bound_exit(void)
{
    int i;
    static const char * const alloc_type[] = {
        "", "malloc", "calloc", "realloc", "memalign", "strdup"
    };

    dprintf(stderr, "%s, %s()\n", __FILE__, __FUNCTION__);

    if (inited) {
#if !defined(_WIN32)
        if (print_heap) {
            extern void __libc_freeres ();
            __libc_freeres ();
        }
#endif

#if TREE_REUSE
        while (tree_free_list) {
            Tree *next = tree_free_list->left;
#if MALLOC_REDIR
            free_redir (tree_free_list);
#else
            free (tree_free_list);
#endif
            tree_free_list = next;
        }
#endif
        for (i = 0; i < FREE_REUSE_SIZE; i++) {
            if (free_reuse_list[i]) {
                tree = splay_delete ((size_t) free_reuse_list[i], tree);
#if MALLOC_REDIR
                free_redir (free_reuse_list[i]);
#else
                free (free_reuse_list[i]);
#endif
            }
        }
        while (tree) {
            if (print_heap && tree->type != 0) {
                fprintf (stderr, "%s, %s() %s found size %lu\n",
                         __FILE__, __FUNCTION__, alloc_type[tree->type],
                         (unsigned long) tree->size);
            }
            tree = splay_delete (tree->start, tree);
        }
        EXIT_SEM ();
        inited = 0;
#ifdef BOUND_STATISTIC
        if (print_statistic) {
            fprintf (stderr, "bound_ptr_add_count %llu\n", bound_ptr_add_count);
            fprintf (stderr, "bound_ptr_indir1_count %llu\n", bound_ptr_indir1_count);
            fprintf (stderr, "bound_ptr_indir2_count %llu\n", bound_ptr_indir2_count);
            fprintf (stderr, "bound_ptr_indir4_count %llu\n", bound_ptr_indir4_count);
            fprintf (stderr, "bound_ptr_indir8_count %llu\n", bound_ptr_indir8_count);
            fprintf (stderr, "bound_ptr_indir12_count %llu\n", bound_ptr_indir12_count);
            fprintf (stderr, "bound_ptr_indir16_count %llu\n", bound_ptr_indir16_count);
            fprintf (stderr, "bound_local_new_count %llu\n", bound_local_new_count);
            fprintf (stderr, "bound_local_delete_count %llu\n", bound_local_delete_count);
            fprintf (stderr, "bound_malloc_count %llu\n", bound_malloc_count);
            fprintf (stderr, "bound_calloc_count %llu\n", bound_calloc_count);
            fprintf (stderr, "bound_realloc_count %llu\n", bound_realloc_count);
            fprintf (stderr, "bound_free_count %llu\n", bound_free_count);
            fprintf (stderr, "bound_memalign_count %llu\n", bound_memalign_count);
            fprintf (stderr, "bound_mmap_count %llu\n", bound_mmap_count);
            fprintf (stderr, "bound_munmap_count %llu\n", bound_munmap_count);
            fprintf (stderr, "bound_alloca_count %llu\n", bound_alloca_count);
            fprintf (stderr, "bound_mempcy_count %llu\n", bound_mempcy_count);
            fprintf (stderr, "bound_memcmp_count %llu\n", bound_memcmp_count);
            fprintf (stderr, "bound_memmove_count %llu\n", bound_memmove_count);
            fprintf (stderr, "bound_memset_count %llu\n", bound_memset_count);
            fprintf (stderr, "bound_strlen_count %llu\n", bound_strlen_count);
            fprintf (stderr, "bound_strcpy_count %llu\n", bound_strcpy_count);
            fprintf (stderr, "bound_strncpy_count %llu\n", bound_strncpy_count);
            fprintf (stderr, "bound_strcmp_count %llu\n", bound_strcmp_count);
            fprintf (stderr, "bound_strncmp_count %llu\n", bound_strncmp_count);
            fprintf (stderr, "bound_strcat_count %llu\n", bound_strcat_count);
            fprintf (stderr, "bound_strchr_count %llu\n", bound_strchr_count);
            fprintf (stderr, "bound_strdup_count %llu\n", bound_strdup_count);
        }
#endif
    }
}

/* XXX: we should use a malloc which ensures that it is unlikely that
   two malloc'ed data have the same address if 'free' are made in
   between. */
#if MALLOC_REDIR
void *malloc(size_t size)
#else
void *__bound_malloc(size_t size, const void *caller)
#endif
{
    void *ptr;

#if MALLOC_REDIR
    /* This will catch the first dlsym call from __bound_init */
    if (malloc_redir == NULL) {
        ptr = &initial_pool[pool_index];
        /* keep the pool 8-byte aligned (round up, hence '& ~7') */
        pool_index = (pool_index + size + 7) & ~7;
        dprintf (stderr, "%s, %s initial (%p, 0x%x)\n",
                 __FILE__, __FUNCTION__, ptr, (unsigned)size);
        return ptr;
    }
#endif
    /* we allocate one more byte to ensure the regions will be
       separated by at least one byte. With the glibc malloc, it may
       be in fact not necessary */
    WAIT_SEM ();
    INCR_COUNT(bound_malloc_count);
#if MALLOC_REDIR
    ptr = malloc_redir (size);
#else
    ptr = malloc(size + 1);
#endif

    dprintf(stderr, "%s, %s (%p, 0x%x)\n",
            __FILE__, __FUNCTION__, ptr, (unsigned)size);

    if (ptr) {
        tree = splay_insert ((size_t) ptr, size, tree);
        if (tree && tree->start == (size_t) ptr) {
            tree->type = TCC_TYPE_MALLOC;
        }
    }
    POST_SEM ();
    return ptr;
}

#if MALLOC_REDIR
void *memalign(size_t size, size_t align)
#else
void *__bound_memalign(size_t size, size_t align, const void *caller)
#endif
{
    void *ptr;

    WAIT_SEM ();
    INCR_COUNT(bound_memalign_count);

#ifndef HAVE_MEMALIGN
    if (align > 4) {
        /* XXX: handle it ? */
        ptr = NULL;
    } else {
        /* we suppose that malloc aligns to at least four bytes */
#if MALLOC_REDIR
        ptr = malloc_redir(size + 1);
#else
        ptr = malloc(size + 1);
#endif
    }
#else
    /* we allocate one more byte to ensure the regions will be
       separated by at least one byte. With the glibc malloc, it may
       be in fact not necessary */
#if MALLOC_REDIR
    ptr = memalign_redir(size + 1, align);
#else
    ptr = memalign(size + 1, align);
#endif
#endif
    if (ptr) {
        dprintf(stderr, "%s, %s (%p, 0x%x)\n",
                __FILE__, __FUNCTION__, ptr, (unsigned)size);
        tree = splay_insert((size_t) ptr, size, tree);
        if (tree && tree->start == (size_t) ptr) {
            tree->type = TCC_TYPE_MEMALIGN;
        }
    }
    POST_SEM ();
    return ptr;
}

#if MALLOC_REDIR
void free(void *ptr)
#else
void __bound_free(void *ptr, const void *caller)
#endif
{
    size_t addr = (size_t) ptr;
    void *p;

    if (ptr == NULL || tree == NULL
#if MALLOC_REDIR
        || ((unsigned char *) ptr >= &initial_pool[0] &&
            (unsigned char *) ptr < &initial_pool[sizeof(initial_pool)])
#endif
       )
        return;

    dprintf(stderr, "%s, %s (%p)\n", __FILE__, __FUNCTION__, ptr);

    WAIT_SEM ();
    INCR_COUNT(bound_free_count);
    tree = splay (addr, tree);
    if (tree->start == addr) {
        if (tree->is_invalid) {
            bound_error("freeing invalid region");
            POST_SEM ();
            return;
        }
        tree->is_invalid = 1;
        memset (ptr, 0x5a, tree->size);
        p = free_reuse_list[free_reuse_index];
        free_reuse_list[free_reuse_index] = ptr;
        free_reuse_index = (free_reuse_index + 1) % FREE_REUSE_SIZE;
        if (p) {
            tree = splay_delete((size_t)p, tree);
        }
        ptr = p;
    }
#if MALLOC_REDIR
    free_redir (ptr);
#else
    free(ptr);
#endif
    POST_SEM ();
}

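/* Illustrative sketch (not compiled): what the delayed-free scheme above
   is meant to catch. A freed block stays registered (marked is_invalid and
   filled with 0x5a) until FREE_REUSE_SIZE further frees have cycled past
   it, so a use after free like the one below is still seen as touching an
   invalid region by the pointer checks -- assuming the program itself is
   compiled with tcc -b. Names are hypothetical. */
#if 0
static void example_use_after_free(void)
{
    char *s = malloc(16);

    free(s);        /* region stays in the tree, flagged is_invalid */
    s[0] = 'x';     /* checked store into an invalid region -> error */
}
#endif
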
#if MALLOC_REDIR
void *realloc(void *ptr, size_t size)
#else
void *__bound_realloc(void *ptr, size_t size, const void *caller)
#endif
{
    if (size == 0) {
#if MALLOC_REDIR
        free(ptr);
#else
        __bound_free(ptr, caller);
#endif
        return NULL;
    }
    else if (ptr == NULL) {
#if MALLOC_REDIR
        ptr = realloc_redir (ptr, size);
#else
        ptr = realloc (ptr, size);
#endif
        WAIT_SEM ();
        INCR_COUNT(bound_realloc_count);
        if (ptr) {
            tree = splay_insert ((size_t) ptr, size, tree);
            if (tree && tree->start == (size_t) ptr) {
                tree->type = TCC_TYPE_REALLOC;
            }
        }
        POST_SEM ();
        return ptr;
    }
    else {
        WAIT_SEM ();
        tree = splay_delete ((size_t) ptr, tree);
#if MALLOC_REDIR
        ptr = realloc_redir (ptr, size);
#else
        ptr = realloc (ptr, size);
#endif
        if (ptr) {
            tree = splay_insert ((size_t) ptr, size, tree);
            if (tree && tree->start == (size_t) ptr) {
                tree->type = TCC_TYPE_REALLOC;
            }
        }
        POST_SEM ();
        return ptr;
    }
}

#if MALLOC_REDIR
void *calloc(size_t nmemb, size_t size)
#else
void *__bound_calloc(size_t nmemb, size_t size)
#endif
{
    void *ptr;

    size *= nmemb;
#if MALLOC_REDIR
    /* This will catch the first dlsym call from __bound_init */
    if (malloc_redir == NULL) {
        ptr = &initial_pool[pool_index];
        /* keep the pool 8-byte aligned (round up, hence '& ~7') */
        pool_index = (pool_index + size + 7) & ~7;
        dprintf (stderr, "%s, %s initial (%p, 0x%x)\n",
                 __FILE__, __FUNCTION__, ptr, (unsigned)size);
        memset (ptr, 0, size);
        return ptr;
    }
#endif
#if MALLOC_REDIR
    ptr = malloc_redir(size + 1);
#else
    ptr = malloc(size + 1);
#endif
    if (ptr) {
        memset (ptr, 0, size);
        WAIT_SEM ();
        INCR_COUNT(bound_calloc_count);
        tree = splay_insert ((size_t) ptr, size, tree);
        if (tree && tree->start == (size_t) ptr) {
            tree->type = TCC_TYPE_CALLOC;
        }
        POST_SEM ();
    }
    return ptr;
}

#if !defined(_WIN32)
void *__bound_mmap (void *start, size_t size, int prot,
                    int flags, int fd, off_t offset)
{
    void *result;

    dprintf(stderr, "%s, %s (%p, 0x%x)\n",
            __FILE__, __FUNCTION__, start, (unsigned)size);
    result = mmap (start, size, prot, flags, fd, offset);
    if (result) {
        WAIT_SEM ();
        INCR_COUNT(bound_mmap_count);
        tree = splay_insert((size_t)result, size, tree);
        POST_SEM ();
    }
    return result;
}

int __bound_munmap (void *start, size_t size)
{
    int result;

    dprintf(stderr, "%s, %s (%p, 0x%x)\n",
            __FILE__, __FUNCTION__, start, (unsigned)size);
    WAIT_SEM ();
    INCR_COUNT(bound_munmap_count);
    tree = splay_delete ((size_t) start, tree);
    POST_SEM ();
    result = munmap (start, size);
    return result;
}
#endif

/* used by alloca */
void __bound_new_region(void *p, size_t size)
{
    size_t fp;
    alloca_list_type *last;
    alloca_list_type *cur;

    dprintf(stderr, "%s, %s (%p, 0x%x)\n",
            __FILE__, __FUNCTION__, p, (unsigned)size);
    WAIT_SEM ();
    INCR_COUNT(bound_alloca_count);
    GET_CALLER_FP (fp);
    last = NULL;
    cur = alloca_list;
    while (cur && cur->fp == fp) {
        if (cur->p == p) {
            dprintf(stderr, "%s, %s() remove alloca/vla %p\n",
                    __FILE__, __FUNCTION__, alloca_list->p);
            if (last) {
                last->next = cur->next;
            }
            else {
                alloca_list = cur->next;
            }
            tree = splay_delete((size_t)p, tree);
#if MALLOC_REDIR
            free_redir (cur);
#else
            free (cur);
#endif
            break;
        }
        last = cur;
        cur = cur->next;
    }
    tree = splay_insert((size_t)p, size, tree);
#if MALLOC_REDIR
    cur = malloc_redir (sizeof (alloca_list_type));
#else
    cur = malloc (sizeof (alloca_list_type));
#endif
    if (cur) {
        cur->fp = fp;
        cur->p = p;
        cur->next = alloca_list;
        alloca_list = cur;
    }
    POST_SEM ();
}

#if defined(__GNUC__) && (__GNUC__ >= 6)
#pragma GCC diagnostic pop
#endif

/* some useful checked functions */

/* check that (p ... p + size - 1) lies inside 'p' region, if any */
static void __bound_check(const void *p, size_t size, const char *function)
{
    if (no_checking)
        return;
    if (size == 0)
        return;
    p = __bound_ptr_add((void *)p, size);
    if (p == INVALID_POINTER)
        bound_error("invalid pointer");
}

void *__bound_memcpy(void *dst, const void *src, size_t size)
{
    void* p;

    INCR_COUNT(bound_mempcy_count);
    __bound_check(dst, size, "memcpy");
    __bound_check(src, size, "memcpy");
    /* check also region overlap */
    if (no_checking == 0 && src >= dst && src < dst + size)
        bound_error("overlapping regions in memcpy()");

    p = memcpy(dst, src, size);

    return p;
}

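/* Illustrative sketch (not compiled): besides checking both regions, the
   wrapper above rejects a forward-overlapping copy, i.e. a source that
   starts inside [dst, dst + size). Names and sizes are hypothetical. */
#if 0
static void example_overlapping_memcpy(void)
{
    char buf[32];

    /* src (buf + 4) lies inside the 16-byte destination window starting
       at buf, so this reports "overlapping regions in memcpy()" */
    __bound_memcpy(buf, buf + 4, 16);
}
#endif
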
int __bound_memcmp(const void *s1, const void *s2, size_t size)
{
    INCR_COUNT(bound_memcmp_count);
    __bound_check(s1, size, "memcmp");
    __bound_check(s2, size, "memcmp");
    return memcmp(s1, s2, size);
}

void *__bound_memmove(void *dst, const void *src, size_t size)
{
    INCR_COUNT(bound_memmove_count);
    __bound_check(dst, size, "memmove");
    __bound_check(src, size, "memmove");
    return memmove(dst, src, size);
}

void *__bound_memset(void *dst, int c, size_t size)
{
    INCR_COUNT(bound_memset_count);
    __bound_check(dst, size, "memset");
    return memset(dst, c, size);
}

int __bound_strlen(const char *s)
{
    const char *p = s;
    size_t len;

    INCR_COUNT(bound_strlen_count);
    while (*p++);
    len = (p - s) - 1;
    p = __bound_ptr_indir1((char *)s, len);
    if (p == INVALID_POINTER)
        bound_error("bad pointer in strlen()");
    return len;
}

char *__bound_strcpy(char *dst, const char *src)
{
    size_t len;
    void *p;

    INCR_COUNT(bound_strcpy_count);
    len = __bound_strlen(src);
    p = __bound_memcpy(dst, src, len + 1);
    return p;
}

char *__bound_strncpy(char *dst, const char *src, size_t n)
{
    size_t len = n;
    const char *p = src;

    while (len-- && *p++);
    len = p - src;
    INCR_COUNT(bound_strncpy_count);
    __bound_check(dst, len, "strncpy");
    __bound_check(src, len, "strncpy");
    return strncpy (dst, src, n);
}

int __bound_strcmp(const char *s1, const char *s2)
{
    const unsigned char *u1 = (const unsigned char *) s1;
    const unsigned char *u2 = (const unsigned char *) s2;

    INCR_COUNT(bound_strcmp_count);
    while (*u1 && *u1 == *u2) {
        u1++;
        u2++;
    }
    __bound_check(s1, ((const char *)u1 - s1) + 1, "strcmp");
    __bound_check(s2, ((const char *)u2 - s2) + 1, "strcmp");
    return (*u1 - *u2);
}

int __bound_strncmp(const char *s1, const char *s2, size_t n)
{
    const unsigned char *u1 = (const unsigned char *) s1;
    const unsigned char *u2 = (const unsigned char *) s2;
    int retval = 0;

    INCR_COUNT(bound_strncmp_count);
    do {
        if ((ssize_t) --n == -1)
            break;
        else if (*u1 != *u2) {
            retval = *u1 - *u2;
            break;
        }
        u2++;
    } while (*u1++);
    __bound_check(s1, ((const char *)u1 - s1) + 1, "strncmp");
    __bound_check(s2, ((const char *)u1 - s1) + 1, "strncmp");
    return retval;
}

char *__bound_strcat(char *dest, const char *src)
{
    char *r = dest;
    const char *s = src;

    INCR_COUNT(bound_strcat_count);
    while (*dest++);
    dest--;
    while ((*dest++ = *src++) != 0);
    __bound_check(r, dest - r, "strcat");
    __bound_check(s, src - s, "strcat");
    return r;
}

char *__bound_strchr(const char *string, int ch)
{
    const unsigned char *s = (const unsigned char *) string;
    unsigned char c = ch;

    INCR_COUNT(bound_strchr_count);
    while (*s) {
        if (*s == c) {
            break;
        }
        s++;
    }
    __bound_check(string, ((const char *)s - string) + 1, "strchr");
    return *s == c ? (char *) s : NULL;
}

char *__bound_strdup(const char *s)
{
    const char *p = s;
    char *new;

    INCR_COUNT(bound_strdup_count);
    while (*p++);
    __bound_check(s, p - s, "strdup");
#if MALLOC_REDIR
    new = malloc_redir (p - s);
#else
    new = malloc (p - s);
#endif
    if (new) {
        WAIT_SEM ();
        tree = splay_insert((size_t)new, p - s, tree);
        if (tree && tree->start == (size_t) new) {
            tree->type = TCC_TYPE_STRDUP;
        }
        memcpy (new, s, p - s);
        POST_SEM ();
    }
    return new;
}

/*
   An implementation of top-down splaying with sizes
     D. Sleator <sleator@cs.cmu.edu>, January 1994.

   This extends top-down-splay.c to maintain a size field in each node.
   This is the number of nodes in the subtree rooted there. This makes
   it possible to efficiently compute the rank of a key. (The rank is
   the number of nodes to the left of the given key.) It is also
   possible to quickly find the node of a given rank. Both of these
   operations are illustrated in the code below. The remainder of this
   introduction is taken from top-down-splay.c.

   "Splay trees", or "self-adjusting search trees" are a simple and
   efficient data structure for storing an ordered set. The data
   structure consists of a binary tree, with no additional fields. It
   allows searching, insertion, deletion, deletemin, deletemax,
   splitting, joining, and many other operations, all with amortized
   logarithmic performance. Since the trees adapt to the sequence of
   requests, their performance on real access patterns is typically even
   better. Splay trees are described in a number of texts and papers
   [1,2,3,4].

   The code here is adapted from simple top-down splay, at the bottom of
   page 669 of [2]. It can be obtained via anonymous ftp from
   spade.pc.cs.cmu.edu in directory /usr/sleator/public.

   The chief modification here is that the splay operation works even if the
   item being splayed is not in the tree, and even if the tree root of the
   tree is NULL. So the line:

                              t = splay(i, t);

   causes it to search for item with key i in the tree rooted at t. If it's
   there, it is splayed to the root. If it isn't there, then the node put
   at the root is the last one before NULL that would have been reached in a
   normal binary search for i. (It's a neighbor of i in the tree.) This
   allows many other operations to be easily implemented, as shown below.

   [1] "Data Structures and Their Algorithms", Lewis and Denenberg,
       Harper Collins, 1991, pp 243-251.
   [2] "Self-adjusting Binary Search Trees" Sleator and Tarjan,
       JACM Volume 32, No 3, July 1985, pp 652-686.
   [3] "Data Structure and Algorithm Analysis", Mark Weiss,
       Benjamin Cummins, 1992, pp 119-130.
   [4] "Data Structures, Algorithms, and Performance", Derick Wood,
       Addison-Wesley, 1993, pp 367-375
*/

/* Code adapted for tcc */

#define compare(start,tstart,tsize) (start < tstart ? -1 : \
                                     start >= tstart+tsize ? 1 : 0)

/* This is the comparison. */
/* Returns <0 if i<j, =0 if i=j, and >0 if i>j */

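/* Illustrative sketch (not compiled): how the three-way 'compare' macro
   classifies an address against a region [tstart, tstart + tsize).
   The addresses and sizes are hypothetical. */
#if 0
static void example_compare_usage(void)
{
    /* region starting at 0x1000, 0x100 bytes long */
    int below  = compare(0x0fffu, 0x1000u, 0x100u);  /* -1: before the region  */
    int first  = compare(0x1000u, 0x1000u, 0x100u);  /*  0: first byte inside  */
    int last   = compare(0x10ffu, 0x1000u, 0x100u);  /*  0: last byte inside   */
    int beyond = compare(0x1100u, 0x1000u, 0x100u);  /*  1: at or past the end */
    (void)below; (void)first; (void)last; (void)beyond;
}
#endif
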
static Tree * splay (size_t addr, Tree *t)
/* Splay using the key start (which may or may not be in the tree.) */
/* The starting root is t, and the tree used is defined by rat */
{
    Tree N, *l, *r, *y;
    int comp;

    if (t == NULL) return t;
    N.left = N.right = NULL;
    l = r = &N;

    for (;;) {
        comp = compare(addr, t->start, t->size);
        if (comp < 0) {
            y = t->left;
            if (y == NULL) break;
            if (compare(addr, y->start, y->size) < 0) {
                t->left = y->right;                    /* rotate right */
                y->right = t;
                t = y;
                if (t->left == NULL) break;
            }
            r->left = t;                               /* link right */
            r = t;
            t = t->left;
        } else if (comp > 0) {
            y = t->right;
            if (y == NULL) break;
            if (compare(addr, y->start, y->size) > 0) {
                t->right = y->left;                    /* rotate left */
                y->left = t;
                t = y;
                if (t->right == NULL) break;
            }
            l->right = t;                              /* link left */
            l = t;
            t = t->right;
        } else {
            break;
        }
    }
    l->right = t->left;                                /* assemble */
    r->left = t->right;
    t->left = N.right;
    t->right = N.left;

    return t;
}

#define compare_end(start,tend) (start < tend ? -1 : \
                                 start > tend ? 1 : 0)

static Tree * splay_end (size_t addr, Tree *t)
/* Splay using the key start (which may or may not be in the tree.) */
/* The starting root is t, and the tree used is defined by rat */
{
    Tree N, *l, *r, *y;
    int comp;

    if (t == NULL) return t;
    N.left = N.right = NULL;
    l = r = &N;

    for (;;) {
        comp = compare_end(addr, t->start + t->size);
        if (comp < 0) {
            y = t->left;
            if (y == NULL) break;
            if (compare_end(addr, y->start + y->size) < 0) {
                t->left = y->right;                    /* rotate right */
                y->right = t;
                t = y;
                if (t->left == NULL) break;
            }
            r->left = t;                               /* link right */
            r = t;
            t = t->left;
        } else if (comp > 0) {
            y = t->right;
            if (y == NULL) break;
            if (compare_end(addr, y->start + y->size) > 0) {
                t->right = y->left;                    /* rotate left */
                y->left = t;
                t = y;
                if (t->right == NULL) break;
            }
            l->right = t;                              /* link left */
            l = t;
            t = t->right;
        } else {
            break;
        }
    }
    l->right = t->left;                                /* assemble */
    r->left = t->right;
    t->left = N.right;
    t->right = N.left;

    return t;
}

static Tree * splay_insert(size_t addr, size_t size, Tree * t)
/* Insert key start into the tree t, if it is not already there. */
/* Return a pointer to the resulting tree. */
{
    Tree * new;

    if (t != NULL) {
        t = splay(addr,t);
        if (compare(addr, t->start, t->size)==0) {
            return t;  /* it's already there */
        }
    }
#if TREE_REUSE
    if (tree_free_list) {
        new = tree_free_list;
        tree_free_list = new->left;
    }
    else
#endif
    {
#if MALLOC_REDIR
        new = (Tree *) malloc_redir (sizeof (Tree));
#else
        new = (Tree *) malloc (sizeof (Tree));
#endif
    }
    if (new == NULL) {
        bound_alloc_error();
    }
    else {
        if (t == NULL) {
            new->left = new->right = NULL;
        } else if (compare(addr, t->start, t->size) < 0) {
            new->left = t->left;
            new->right = t;
            t->left = NULL;
        } else {
            new->right = t->right;
            new->left = t;
            t->right = NULL;
        }
        new->start = addr;
        new->size = size;
        new->type = TCC_TYPE_NONE;
        new->is_invalid = 0;
    }
    return new;
}

#define compare_destroy(start,tstart) (start < tstart ? -1 : \
                                       start > tstart ? 1 : 0)

static Tree * splay_delete(size_t addr, Tree *t)
/* Deletes addr from the tree if it's there. */
/* Return a pointer to the resulting tree. */
{
    Tree * x;

    if (t==NULL) return NULL;
    t = splay(addr,t);
    if (compare_destroy(addr, t->start) == 0) {   /* found it */
        if (t->left == NULL) {
            x = t->right;
        } else {
            x = splay(addr, t->left);
            x->right = t->right;
        }
#if TREE_REUSE
        t->left = tree_free_list;
        tree_free_list = t;
#else
#if MALLOC_REDIR
        free_redir(t);
#else
        free(t);
#endif
#endif
        return x;
    } else {
        return t;                                 /* It wasn't there */
    }
}

void splay_printtree(Tree * t, int d)
{
    int i;
    if (t == NULL) return;
    splay_printtree(t->right, d+1);
    for (i=0; i<d; i++) fprintf(stderr," ");
    fprintf(stderr,"%p(0x%lx:%u)\n",
            (void *) t->start, (unsigned long) t->size, (unsigned)t->is_invalid);
    splay_printtree(t->left, d+1);
}