2 * Tiny C Memory and bounds checker
4 * Copyright (c) 2002 Fabrice Bellard
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with this library; if not, write to the Free Software
18 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
25 #if !defined(__FreeBSD__) \
26 && !defined(__FreeBSD_kernel__) \
27 && !defined(__DragonFly__) \
28 && !defined(__OpenBSD__) \
29 && !defined(__NetBSD__)
38 #define BOUND_STATISTIC
/* Debug trace: forwards its arguments to fprintf only when the
   TCC_BOUNDS_PRINT_CALLS environment flag set print_calls.
   Wrapped in do/while(0) so the macro behaves as a single statement:
   the original bare 'if' form swallowed a following 'else' and broke
   when used as the body of an unbraced if/else. */
#define dprintf(a...) do { if (print_calls) fprintf(a); } while (0)
49 #if defined(__FreeBSD__) \
50 || defined(__FreeBSD_kernel__) \
51 || defined(__DragonFly__) \
52 || defined(__OpenBSD__) \
53 || defined(__NetBSD__) \
54 || defined(__dietlibc__)
61 #define MALLOC_REDIR (0)
65 static CRITICAL_SECTION bounds_sem
;
66 #define INIT_SEM() InitializeCriticalSection(&bounds_sem)
67 #define EXIT_SEM() DeleteCriticalSection(&bounds_sem)
68 #define WAIT_SEM() EnterCriticalSection(&bounds_sem)
69 #define POST_SEM() LeaveCriticalSection(&bounds_sem)
71 #define MALLOC_REDIR (0)
75 #include <semaphore.h>
76 static sem_t bounds_sem
;
77 #define INIT_SEM() sem_init (&bounds_sem, 0, 1)
78 #define EXIT_SEM() sem_destroy (&bounds_sem)
79 #define WAIT_SEM() while (sem_wait (&bounds_sem) < 0 && errno == EINTR);
80 #define POST_SEM() sem_post (&bounds_sem)
81 #define HAS_ENVIRON 0 /* Disabled for now */
82 #define __USE_GNU /* get RTLD_NEXT */
84 #define MALLOC_REDIR (1)
85 static void *(*malloc_redir
) (size_t) = NULL
;
86 static void *(*calloc_redir
) (size_t, size_t) = NULL
;
87 static void (*free_redir
) (void *) = NULL
;
88 static void *(*realloc_redir
) (void *, size_t) = NULL
;
89 static void *(*memalign_redir
) (size_t, size_t) = NULL
;
90 static int pool_index
= 0;
91 static unsigned char initial_pool
[256];
94 #define TCC_TYPE_NONE (0)
95 #define TCC_TYPE_MALLOC (1)
96 #define TCC_TYPE_CALLOC (2)
97 #define TCC_TYPE_REALLOC (3)
98 #define TCC_TYPE_MEMALIGN (4)
99 #define TCC_TYPE_STRDUP (5)
101 /* this pointer is generated when bound check is incorrect */
102 #define INVALID_POINTER ((void *)(-2))
104 typedef struct tree_node Tree
;
106 Tree
* left
, * right
;
110 size_t is_invalid
; /* true if pointers outside region are invalid */
113 typedef struct alloca_list_struct
{
116 struct alloca_list_struct
*next
;
119 static Tree
* splay (size_t addr
, Tree
*t
);
120 static Tree
* splay_end (size_t addr
, Tree
*t
);
121 static Tree
* splay_insert(size_t addr
, size_t size
, Tree
* t
);
122 static Tree
* splay_delete(size_t addr
, Tree
*t
);
123 void splay_printtree(Tree
* t
, int d
);
125 /* external interface */
126 void __bound_init(void);
127 void __bound_exit(void);
130 /* an __attribute__ macro is defined in the system headers */
133 #define FASTCALL __attribute__((regparm(3)))
136 void *__bound_malloc(size_t size
, const void *caller
);
137 void *__bound_memalign(size_t size
, size_t align
, const void *caller
);
138 void __bound_free(void *ptr
, const void *caller
);
139 void *__bound_realloc(void *ptr
, size_t size
, const void *caller
);
140 void *__bound_calloc(size_t nmemb
, size_t size
);
143 #define FREE_REUSE_SIZE (100)
144 static int free_reuse_index
= 0;
145 static void *free_reuse_list
[FREE_REUSE_SIZE
];
147 /* error message, just for TCC */
148 const char *__bound_error_msg
;
150 /* runtime error output */
151 extern void rt_error(size_t pc
, const char *fmt
, ...);
153 static Tree
*tree
= NULL
;
154 #define TREE_REUSE (1)
156 static Tree
*tree_free_list
= NULL
;
158 static alloca_list_type
*alloca_list
= NULL
;
160 static int inited
= 0;
161 static int print_calls
= 0;
162 static int print_heap
= 0;
163 static int print_statistic
= 0;
164 static int never_fatal
= 0;
165 static int no_checking
= 0;
167 #ifdef BOUND_STATISTIC
168 static unsigned long long bound_ptr_add_count
;
169 static unsigned long long bound_ptr_indir1_count
;
170 static unsigned long long bound_ptr_indir2_count
;
171 static unsigned long long bound_ptr_indir4_count
;
172 static unsigned long long bound_ptr_indir8_count
;
173 static unsigned long long bound_ptr_indir12_count
;
174 static unsigned long long bound_ptr_indir16_count
;
175 static unsigned long long bound_local_new_count
;
176 static unsigned long long bound_local_delete_count
;
177 static unsigned long long bound_malloc_count
;
178 static unsigned long long bound_calloc_count
;
179 static unsigned long long bound_realloc_count
;
180 static unsigned long long bound_free_count
;
181 static unsigned long long bound_memalign_count
;
182 static unsigned long long bound_mmap_count
;
183 static unsigned long long bound_munmap_count
;
184 static unsigned long long bound_alloca_count
;
185 static unsigned long long bound_mempcy_count
;
186 static unsigned long long bound_memcmp_count
;
187 static unsigned long long bound_memmove_count
;
188 static unsigned long long bound_memset_count
;
189 static unsigned long long bound_strlen_count
;
190 static unsigned long long bound_strcpy_count
;
191 static unsigned long long bound_strncpy_count
;
192 static unsigned long long bound_strcmp_count
;
193 static unsigned long long bound_strncmp_count
;
194 static unsigned long long bound_strcat_count
;
195 static unsigned long long bound_strchr_count
;
196 static unsigned long long bound_strdup_count
;
197 #define INCR_COUNT(x) x++
199 #define INCR_COUNT(x)
202 /* enable/disable checking. This can be used for signal handlers. */
203 void __bound_checking (int no_check
)
205 no_checking
= no_check
;
209 //#define no_checking 1
211 /* print a bound error message */
212 static void bound_error(const char *fmt
, ...)
214 __bound_error_msg
= fmt
;
215 fprintf(stderr
,"%s %s: %s\n", __FILE__
, __FUNCTION__
, fmt
);
216 if (never_fatal
== 0)
217 *(void **)0 = 0; /* force a runtime error */
/* Report that the bounds-checker itself could not allocate memory. */
static void bound_alloc_error(void)
{
    bound_error("not enough memory for bound checking code");
}
225 /* return '(p + offset)' for pointer arithmetic (a pointer can reach
226 the end of a region in this case */
227 void * no_FASTCALL
__bound_ptr_add(void *p
, size_t offset
)
229 size_t addr
= (size_t)p
;
235 dprintf(stderr
, "%s %s : %p 0x%x\n",
236 __FILE__
, __FUNCTION__
, p
, (unsigned)offset
);
239 INCR_COUNT(bound_ptr_add_count
);
242 if (addr
>= tree
->size
) {
244 tree
= splay (addr
, tree
);
247 if (addr
>= tree
->size
) {
249 tree
= splay_end (addr
, tree
);
252 if (addr
<= tree
->size
) {
254 if (tree
->is_invalid
|| addr
> tree
->size
) {
256 fprintf(stderr
,"%s %s : %p is outside of the region\n",
257 __FILE__
, __FUNCTION__
, p
+ offset
);
259 if (never_fatal
== 0) {
261 return INVALID_POINTER
; /* return an invalid pointer */
270 /* return '(p + offset)' for pointer indirection (the resulting must
271 be strictly inside the region */
272 #define BOUND_PTR_INDIR(dsize) \
273 void * no_FASTCALL __bound_ptr_indir ## dsize (void *p, size_t offset) \
275 size_t addr = (size_t)p; \
280 dprintf(stderr, "%s %s : %p 0x%x start\n", \
281 __FILE__, __FUNCTION__, p, (unsigned)offset); \
283 INCR_COUNT(bound_ptr_indir ## dsize ## _count); \
285 addr -= tree->start; \
286 if (addr >= tree->size) { \
288 tree = splay (addr, tree); \
289 addr -= tree->start; \
291 if (addr >= tree->size) { \
293 tree = splay_end (addr, tree); \
294 addr -= tree->start; \
296 if (addr <= tree->size) { \
297 addr += offset + dsize; \
298 if (tree->is_invalid || addr > tree->size) { \
299 fprintf(stderr,"%s %s : %p is outside of the region\n", \
300 __FILE__, __FUNCTION__, p + offset); \
301 if (never_fatal == 0) { \
303 return INVALID_POINTER; /* return an invalid pointer */ \
319 #if defined(__GNUC__) && (__GNUC__ >= 6)
321 * At least gcc 6.2 complains when __builtin_frame_address is used with
324 #pragma GCC diagnostic push
325 #pragma GCC diagnostic ignored "-Wframe-address"
328 /* return the frame pointer of the caller */
329 #define GET_CALLER_FP(fp)\
331 fp = (size_t)__builtin_frame_address(1);\
334 /* called when entering a function to add all the local regions */
335 void FASTCALL
__bound_local_new(void *p1
)
337 size_t addr
, size
, fp
, *p
= p1
;
342 dprintf(stderr
, "%s, %s local new p1=%p fp=%p\n",
343 __FILE__
, __FUNCTION__
, p
, (void *)fp
);
349 INCR_COUNT(bound_local_new_count
);
351 dprintf(stderr
, "%s, %s() alloca/vla used\n",
352 __FILE__
, __FUNCTION__
);
357 dprintf(stderr
, "%s, %s() (%p 0x%lx)\n",
358 __FILE__
, __FUNCTION__
, (void *) addr
, (unsigned long) size
);
359 tree
= splay_insert(addr
, size
, tree
);
366 /* called when leaving a function to delete all the local regions */
367 void FASTCALL
__bound_local_delete(void *p1
)
369 size_t addr
, fp
, *p
= p1
;
374 dprintf(stderr
, "%s, %s local delete p1=%p fp=%p\n",
375 __FILE__
, __FUNCTION__
, p
, (void *)fp
);
381 INCR_COUNT(bound_local_delete_count
);
383 while (alloca_list
&& alloca_list
->fp
== fp
) {
384 alloca_list_type
*next
= alloca_list
->next
;
386 dprintf(stderr
, "%s, %s() remove alloca/vla %p\n",
387 __FILE__
, __FUNCTION__
, alloca_list
->p
);
388 tree
= splay_delete ((size_t) alloca_list
->p
, tree
);
390 free_redir (alloca_list
);
399 dprintf(stderr
, "%s, %s() (%p 0x%lx)\n",
400 __FILE__
, __FUNCTION__
, (void *) addr
, (unsigned long) p
[1]);
401 tree
= splay_delete(addr
, tree
);
408 void __bound_init(void)
415 print_calls
= getenv ("TCC_BOUNDS_PRINT_CALLS") != NULL
;
416 print_heap
= getenv ("TCC_BOUNDS_PRINT_HEAP") != NULL
;
417 print_statistic
= getenv ("TCC_BOUNDS_PRINT_STATISTIC") != NULL
;
418 never_fatal
= getenv ("TCC_BOUNDS_NEVER_FATAL") != NULL
;
420 dprintf(stderr
, "%s, %s() start\n", __FILE__
, __FUNCTION__
);
426 void *addr
= RTLD_NEXT
;
428 /* tcc -run required RTLD_DEFAULT. Normal usage requires RTLD_NEXT */
429 *(void **) (&malloc_redir
) = dlsym (addr
, "malloc");
430 if (malloc_redir
== NULL
) {
431 dprintf(stderr
, "%s, %s() use RTLD_DEFAULT\n",
432 __FILE__
, __FUNCTION__
);
434 *(void **) (&malloc_redir
) = dlsym (addr
, "malloc");
436 *(void **) (&calloc_redir
) = dlsym (addr
, "calloc");
437 *(void **) (&free_redir
) = dlsym (addr
, "free");
438 *(void **) (&realloc_redir
) = dlsym (addr
, "realloc");
439 *(void **) (&memalign_redir
) = dlsym (addr
, "memalign");
440 dprintf(stderr
, "%s, %s() malloc_redir %p\n",
441 __FILE__
, __FUNCTION__
, malloc_redir
);
442 dprintf(stderr
, "%s, %s() free_redir %p\n",
443 __FILE__
, __FUNCTION__
, free_redir
);
444 dprintf(stderr
, "%s, %s() realloc_redir %p\n",
445 __FILE__
, __FUNCTION__
, realloc_redir
);
446 dprintf(stderr
, "%s, %s() memalign_redir %p\n",
447 __FILE__
, __FUNCTION__
, memalign_redir
);
453 /* save malloc hooks and install bound check hooks */
454 memset (free_reuse_list
, 0, sizeof (free_reuse_list
));
456 dprintf(stderr
, "%s, %s() end\n\n", __FILE__
, __FUNCTION__
);
459 void __bounds_add_static_var (size_t *p
)
461 dprintf(stderr
, "%s, %s()\n", __FILE__
, __FUNCTION__
);
462 /* add all static bound check values */
465 dprintf(stderr
, "%s, %s() (%p 0x%lx)\n",
466 __FILE__
, __FUNCTION__
, (void *) p
[0], (unsigned long) p
[1]);
467 tree
= splay_insert(p
[0], p
[1], tree
);
473 void __bound_main_arg(char **p
)
475 char *start
= (char *) p
;
479 dprintf(stderr
, "%s, %s() (%p 0x%lx)\n",
480 __FILE__
, __FUNCTION__
, *p
, (unsigned long)(strlen (*p
) + 1));
481 tree
= splay_insert((size_t) *p
, strlen (*p
) + 1, tree
);
484 dprintf(stderr
, "%s, %s() argv (%p 0x%lx)\n",
485 __FILE__
, __FUNCTION__
, start
, (unsigned long)((char *) p
- start
));
486 tree
= splay_insert((size_t) start
, (char *) p
- start
, tree
);
490 extern char **environ
;
495 dprintf(stderr
, "%s, %s() (%p 0x%lx)\n",
496 __FILE__
, __FUNCTION__
, *p
, (unsigned long)(strlen (*p
) + 1));
497 tree
= splay_insert((size_t) *p
, strlen (*p
) + 1, tree
);
500 dprintf(stderr
, "%s, %s() environ(%p 0x%lx)\n",
501 __FILE__
, __FUNCTION__
, start
, (unsigned long)((char *) p
- start
));
502 tree
= splay_insert((size_t) start
, (char *) p
- start
, tree
);
508 void __attribute__((destructor
)) __bound_exit(void)
511 static const char * const alloc_type
[] = {
512 "", "malloc", "calloc", "realloc", "memalign", "strdup"
515 dprintf(stderr
, "%s, %s()\n", __FILE__
, __FUNCTION__
);
520 extern void __libc_freeres ();
526 while (tree_free_list
) {
527 Tree
*next
= tree_free_list
->left
;
529 free_redir (tree_free_list
);
531 free (tree_free_list
);
533 tree_free_list
= next
;
536 for (i
= 0; i
< FREE_REUSE_SIZE
; i
++) {
537 if (free_reuse_list
[i
]) {
538 tree
= splay_delete ((size_t) free_reuse_list
[i
], tree
);
540 free_redir (free_reuse_list
[i
]);
542 free (free_reuse_list
[i
]);
547 if (print_heap
&& tree
->type
!= 0) {
548 fprintf (stderr
, "%s, %s() %s found size %lu\n",
549 __FILE__
, __FUNCTION__
, alloc_type
[tree
->type
],
550 (unsigned long) tree
->size
);
552 tree
= splay_delete (tree
->start
, tree
);
556 #ifdef BOUND_STATISTIC
557 if (print_statistic
) {
558 fprintf (stderr
, "bound_ptr_add_count %llu\n", bound_ptr_add_count
);
559 fprintf (stderr
, "bound_ptr_indir1_count %llu\n", bound_ptr_indir1_count
);
560 fprintf (stderr
, "bound_ptr_indir2_count %llu\n", bound_ptr_indir2_count
);
561 fprintf (stderr
, "bound_ptr_indir4_count %llu\n", bound_ptr_indir4_count
);
562 fprintf (stderr
, "bound_ptr_indir8_count %llu\n", bound_ptr_indir8_count
);
563 fprintf (stderr
, "bound_ptr_indir12_count %llu\n", bound_ptr_indir12_count
);
564 fprintf (stderr
, "bound_ptr_indir16_count %llu\n", bound_ptr_indir16_count
);
565 fprintf (stderr
, "bound_local_new_count %llu\n", bound_local_new_count
);
566 fprintf (stderr
, "bound_local_delete_count %llu\n", bound_local_delete_count
);
567 fprintf (stderr
, "bound_malloc_count %llu\n", bound_malloc_count
);
568 fprintf (stderr
, "bound_calloc_count %llu\n", bound_calloc_count
);
569 fprintf (stderr
, "bound_realloc_count %llu\n", bound_realloc_count
);
570 fprintf (stderr
, "bound_free_count %llu\n", bound_free_count
);
571 fprintf (stderr
, "bound_memalign_count %llu\n", bound_memalign_count
);
572 fprintf (stderr
, "bound_mmap_count %llu\n", bound_mmap_count
);
573 fprintf (stderr
, "bound_munmap_count %llu\n", bound_munmap_count
);
574 fprintf (stderr
, "bound_alloca_count %llu\n", bound_alloca_count
);
575 fprintf (stderr
, "bound_mempcy_count %llu\n", bound_mempcy_count
);
576 fprintf (stderr
, "bound_memcmp_count %llu\n", bound_memcmp_count
);
577 fprintf (stderr
, "bound_memmove_count %llu\n", bound_memmove_count
);
578 fprintf (stderr
, "bound_memset_count %llu\n", bound_memset_count
);
579 fprintf (stderr
, "bound_strlen_count %llu\n", bound_strlen_count
);
580 fprintf (stderr
, "bound_strcpy_count %llu\n", bound_strcpy_count
);
581 fprintf (stderr
, "bound_strncpy_count %llu\n", bound_strncpy_count
);
582 fprintf (stderr
, "bound_strcmp_count %llu\n", bound_strcmp_count
);
583 fprintf (stderr
, "bound_strncmp_count %llu\n", bound_strncmp_count
);
584 fprintf (stderr
, "bound_strcat_count %llu\n", bound_strcat_count
);
585 fprintf (stderr
, "bound_strchr_count %llu\n", bound_strchr_count
);
586 fprintf (stderr
, "bound_strdup_count %llu\n", bound_strdup_count
);
592 /* XXX: we should use a malloc which ensure that it is unlikely that
593 two malloc'ed data have the same address if 'free' are made in
596 void *malloc(size_t size
)
598 void *__bound_malloc(size_t size
, const void *caller
)
604 /* This will catch the first dlsym call from __bound_init */
605 if (malloc_redir
== NULL
) {
606 ptr
= &initial_pool
[pool_index
];
607 pool_index
= (pool_index
+ size
+ 7) & ~8;
608 dprintf (stderr
, "%s, %s initial (%p, 0x%x)\n",
609 __FILE__
, __FUNCTION__
, ptr
, (unsigned)size
);
613 /* we allocate one more byte to ensure the regions will be
614 separated by at least one byte. With the glibc malloc, it may
615 be in fact not necessary */
617 INCR_COUNT(bound_malloc_count
);
619 ptr
= malloc_redir (size
);
621 ptr
= malloc(size
+ 1);
624 dprintf(stderr
, "%s, %s (%p, 0x%x)\n",
625 __FILE__
, __FUNCTION__
, ptr
, (unsigned)size
);
628 tree
= splay_insert ((size_t) ptr
, size
, tree
);
629 if (tree
&& tree
->start
== (size_t) ptr
) {
630 tree
->type
= TCC_TYPE_MALLOC
;
638 void *memalign(size_t size
, size_t align
)
640 void *__bound_memalign(size_t size
, size_t align
, const void *caller
)
646 INCR_COUNT(bound_memalign_count
);
648 #ifndef HAVE_MEMALIGN
650 /* XXX: handle it ? */
653 /* we suppose that malloc aligns to at least four bytes */
655 ptr
= malloc_redir(size
+ 1);
657 ptr
= malloc(size
+ 1);
661 /* we allocate one more byte to ensure the regions will be
662 separated by at least one byte. With the glibc malloc, it may
663 be in fact not necessary */
665 ptr
= memalign_redir(size
+ 1, align
);
667 ptr
= memalign(size
+ 1, align
);
671 dprintf(stderr
, "%s, %s (%p, 0x%x)\n",
672 __FILE__
, __FUNCTION__
, ptr
, (unsigned)size
);
673 tree
= splay_insert((size_t) ptr
, size
, tree
);
674 if (tree
&& tree
->start
== (size_t) ptr
) {
675 tree
->type
= TCC_TYPE_MEMALIGN
;
685 void __bound_free(void *ptr
, const void *caller
)
688 size_t addr
= (size_t) ptr
;
691 if (ptr
== NULL
|| tree
== NULL
693 || ((unsigned char *) ptr
>= &initial_pool
[0] &&
694 (unsigned char *) ptr
< &initial_pool
[sizeof(initial_pool
)])
699 dprintf(stderr
, "%s, %s (%p)\n", __FILE__
, __FUNCTION__
, ptr
);
702 INCR_COUNT(bound_free_count
);
703 tree
= splay (addr
, tree
);
704 if (tree
->start
== addr
) {
705 if (tree
->is_invalid
) {
706 bound_error("freeing invalid region");
710 tree
->is_invalid
= 1;
711 memset (ptr
, 0x5a, tree
->size
);
712 p
= free_reuse_list
[free_reuse_index
];
713 free_reuse_list
[free_reuse_index
] = ptr
;
714 free_reuse_index
= (free_reuse_index
+ 1) % FREE_REUSE_SIZE
;
716 tree
= splay_delete((size_t)p
, tree
);
729 void *realloc(void *ptr
, size_t size
)
731 void *__bound_realloc(void *ptr
, size_t size
, const void *caller
)
738 __bound_free(ptr
, caller
);
742 else if (ptr
== NULL
) {
744 ptr
= realloc_redir (ptr
, size
);
746 ptr
= realloc (ptr
, size
);
749 INCR_COUNT(bound_realloc_count
);
751 tree
= splay_insert ((size_t) ptr
, size
, tree
);
752 if (tree
&& tree
->start
== (size_t) ptr
) {
753 tree
->type
= TCC_TYPE_REALLOC
;
761 tree
= splay_delete ((size_t) ptr
, tree
);
763 ptr
= realloc_redir (ptr
, size
);
765 ptr
= realloc (ptr
, size
);
768 tree
= splay_insert ((size_t) ptr
, size
, tree
);
769 if (tree
&& tree
->start
== (size_t) ptr
) {
770 tree
->type
= TCC_TYPE_REALLOC
;
779 void *calloc(size_t nmemb
, size_t size
)
781 void *__bound_calloc(size_t nmemb
, size_t size
)
788 /* This will catch the first dlsym call from __bound_init */
789 if (malloc_redir
== NULL
) {
790 ptr
= &initial_pool
[pool_index
];
791 pool_index
= (pool_index
+ size
+ 7) & ~8;
792 dprintf (stderr
, "%s, %s initial (%p, 0x%x)\n",
793 __FILE__
, __FUNCTION__
, ptr
, (unsigned)size
);
794 memset (ptr
, 0, size
);
799 ptr
= malloc_redir(size
+ 1);
801 ptr
= malloc(size
+ 1);
804 memset (ptr
, 0, size
);
806 INCR_COUNT(bound_calloc_count
);
807 tree
= splay_insert ((size_t) ptr
, size
, tree
);
808 if (tree
&& tree
->start
== (size_t) ptr
) {
809 tree
->type
= TCC_TYPE_CALLOC
;
817 void *__bound_mmap (void *start
, size_t size
, int prot
,
818 int flags
, int fd
, off_t offset
)
822 dprintf(stderr
, "%s, %s (%p, 0x%x)\n",
823 __FILE__
, __FUNCTION__
, start
, (unsigned)size
);
824 result
= mmap (start
, size
, prot
, flags
, fd
, offset
);
827 INCR_COUNT(bound_mmap_count
);
828 tree
= splay_insert((size_t)result
, size
, tree
);
834 int __bound_munmap (void *start
, size_t size
)
838 dprintf(stderr
, "%s, %s (%p, 0x%x)\n",
839 __FILE__
, __FUNCTION__
, start
, (unsigned)size
);
841 INCR_COUNT(bound_munmap_count
);
842 tree
= splay_delete ((size_t) start
, tree
);
844 result
= munmap (start
, size
);
850 void __bound_new_region(void *p
, size_t size
)
853 alloca_list_type
*last
;
854 alloca_list_type
*cur
;
856 dprintf(stderr
, "%s, %s (%p, 0x%x)\n",
857 __FILE__
, __FUNCTION__
, p
, (unsigned)size
);
859 INCR_COUNT(bound_alloca_count
);
863 while (cur
&& cur
->fp
== fp
) {
865 dprintf(stderr
, "%s, %s() remove alloca/vla %p\n",
866 __FILE__
, __FUNCTION__
, alloca_list
->p
);
868 last
->next
= cur
->next
;
871 alloca_list
= cur
->next
;
873 tree
= splay_delete((size_t)p
, tree
);
884 tree
= splay_insert((size_t)p
, size
, tree
);
886 cur
= malloc_redir (sizeof (alloca_list_type
));
888 cur
= malloc (sizeof (alloca_list_type
));
893 cur
->next
= alloca_list
;
899 #if defined(__GNUC__) && (__GNUC__ >= 6)
900 #pragma GCC diagnostic pop
904 /* some useful checked functions */
906 /* check that (p ... p + size - 1) lies inside 'p' region, if any */
907 static void __bound_check(const void *p
, size_t size
, const char *function
)
913 p
= __bound_ptr_add((void *)p
, size
);
914 if (p
== INVALID_POINTER
)
915 bound_error("invalid pointer");
918 void *__bound_memcpy(void *dst
, const void *src
, size_t size
)
922 INCR_COUNT(bound_mempcy_count
);
923 __bound_check(dst
, size
, "memcpy");
924 __bound_check(src
, size
, "memcpy");
925 /* check also region overlap */
926 if (no_checking
== 0 && src
>= dst
&& src
< dst
+ size
)
927 bound_error("overlapping regions in memcpy()");
929 p
= memcpy(dst
, src
, size
);
934 int __bound_memcmp(const void *s1
, const void *s2
, size_t size
)
936 INCR_COUNT(bound_memcmp_count
);
937 __bound_check(s1
, size
, "memcmp");
938 __bound_check(s2
, size
, "memcmp");
939 return memcmp(s1
, s2
, size
);
942 void *__bound_memmove(void *dst
, const void *src
, size_t size
)
944 INCR_COUNT(bound_memmove_count
);
945 __bound_check(dst
, size
, "memmove");
946 __bound_check(src
, size
, "memmove");
947 return memmove(dst
, src
, size
);
950 void *__bound_memset(void *dst
, int c
, size_t size
)
952 INCR_COUNT(bound_memset_count
);
953 __bound_check(dst
, size
, "memset");
954 return memset(dst
, c
, size
);
957 int __bound_strlen(const char *s
)
962 INCR_COUNT(bound_strlen_count
);
965 p
= __bound_ptr_indir1((char *)s
, len
);
966 if (p
== INVALID_POINTER
)
967 bound_error("bad pointer in strlen()");
971 char *__bound_strcpy(char *dst
, const char *src
)
976 INCR_COUNT(bound_strcpy_count
);
977 len
= __bound_strlen(src
);
978 p
= __bound_memcpy(dst
, src
, len
+ 1);
982 char *__bound_strncpy(char *dst
, const char *src
, size_t n
)
987 while (len
-- && *p
++);
989 INCR_COUNT(bound_strncpy_count
);
990 __bound_check(dst
, len
, "strncpy");
991 __bound_check(src
, len
, "strncpy");
992 return strncpy (dst
, src
, n
);
995 int __bound_strcmp(const char *s1
, const char *s2
)
997 const unsigned char *u1
= (const unsigned char *) s1
;
998 const unsigned char *u2
= (const unsigned char *) s2
;
1000 INCR_COUNT(bound_strcmp_count
);
1001 while (*u1
&& *u1
== *u2
) {
1005 __bound_check(s1
, ((const char *)u1
- s1
) + 1, "strcmp");
1006 __bound_check(s2
, ((const char *)u2
- s2
) + 1, "strcmp");
1010 int __bound_strncmp(const char *s1
, const char *s2
, size_t n
)
1012 const unsigned char *u1
= (const unsigned char *) s1
;
1013 const unsigned char *u2
= (const unsigned char *) s2
;
1016 INCR_COUNT(bound_strncmp_count
);
1018 if ((ssize_t
) --n
== -1)
1020 else if (*u1
!= *u2
) {
1026 __bound_check(s1
, ((const char *)u1
- s1
) + 1, "strncmp");
1027 __bound_check(s2
, ((const char *)u1
- s1
) + 1, "strncmp");
1031 char *__bound_strcat(char *dest
, const char *src
)
1034 const char *s
= src
;
1036 INCR_COUNT(bound_strcat_count
);
1039 while ((*dest
++ = *src
++) != 0);
1040 __bound_check(r
, dest
- r
, "strcat");
1041 __bound_check(s
, src
- s
, "strcat");
1045 char *__bound_strchr(const char *string
, int ch
)
1047 const unsigned char *s
= (const unsigned char *) string
;
1048 unsigned char c
= ch
;
1050 INCR_COUNT(bound_strchr_count
);
1057 __bound_check(string
, ((const char *)s
- string
) + 1, "strchr");
1058 return *s
== c
? (char *) s
: NULL
;
1061 char *__bound_strdup(const char *s
)
1066 INCR_COUNT(bound_strdup_count
);
1068 __bound_check(s
, p
- s
, "strdup");
1070 new = malloc_redir (p
- s
);
1072 new = malloc (p
- s
);
1076 tree
= splay_insert((size_t)new, p
- s
, tree
);
1077 if (tree
&& tree
->start
== (size_t) new) {
1078 tree
->type
= TCC_TYPE_STRDUP
;
1080 memcpy (new, s
, p
- s
);
1087 An implementation of top-down splaying with sizes
1088 D. Sleator <sleator@cs.cmu.edu>, January 1994.
1090 This extends top-down-splay.c to maintain a size field in each node.
1091 This is the number of nodes in the subtree rooted there. This makes
1092 it possible to efficiently compute the rank of a key. (The rank is
1093 the number of nodes to the left of the given key.) It it also
1094 possible to quickly find the node of a given rank. Both of these
1095 operations are illustrated in the code below. The remainder of this
1096 introduction is taken from top-down-splay.c.
1098 "Splay trees", or "self-adjusting search trees" are a simple and
1099 efficient data structure for storing an ordered set. The data
1100 structure consists of a binary tree, with no additional fields. It
1101 allows searching, insertion, deletion, deletemin, deletemax,
1102 splitting, joining, and many other operations, all with amortized
1103 logarithmic performance. Since the trees adapt to the sequence of
1104 requests, their performance on real access patterns is typically even
1105 better. Splay trees are described in a number of texts and papers
1108 The code here is adapted from simple top-down splay, at the bottom of
1109 page 669 of [2]. It can be obtained via anonymous ftp from
1110 spade.pc.cs.cmu.edu in directory /usr/sleator/public.
1112 The chief modification here is that the splay operation works even if the
1113 item being splayed is not in the tree, and even if the tree root of the
1114 tree is NULL. So the line:
1118 causes it to search for item with key i in the tree rooted at t. If it's
1119 there, it is splayed to the root. If it isn't there, then the node put
1120 at the root is the last one before NULL that would have been reached in a
1121 normal binary search for i. (It's a neighbor of i in the tree.) This
1122 allows many other operations to be easily implemented, as shown below.
1124 [1] "Data Structures and Their Algorithms", Lewis and Denenberg,
1125 Harper Collins, 1991, pp 243-251.
1126 [2] "Self-adjusting Binary Search Trees" Sleator and Tarjan,
1127 JACM Volume 32, No 3, July 1985, pp 652-686.
1128 [3] "Data Structure and Algorithm Analysis", Mark Weiss,
1129 Benjamin Cummins, 1992, pp 119-130.
1130 [4] "Data Structures, Algorithms, and Performance", Derick Wood,
1131 Addison-Wesley, 1993, pp 367-375
1134 /* Code adapted for tcc */
1136 #define compare(start,tstart,tsize) (start < tstart ? -1 : \
1137 start >= tstart+tsize ? 1 : 0)
1139 /* This is the comparison. */
1140 /* Returns <0 if i<j, =0 if i=j, and >0 if i>j */
1142 static Tree
* splay (size_t addr
, Tree
*t
)
1143 /* Splay using the key start (which may or may not be in the tree.) */
1144 /* The starting root is t, and the tree used is defined by rat */
1149 if (t
== NULL
) return t
;
1150 N
.left
= N
.right
= NULL
;
1154 comp
= compare(addr
, t
->start
, t
->size
);
1157 if (y
== NULL
) break;
1158 if (compare(addr
, y
->start
, y
->size
) < 0) {
1159 t
->left
= y
->right
; /* rotate right */
1162 if (t
->left
== NULL
) break;
1164 r
->left
= t
; /* link right */
1167 } else if (comp
> 0) {
1169 if (y
== NULL
) break;
1170 if (compare(addr
, y
->start
, y
->size
) > 0) {
1171 t
->right
= y
->left
; /* rotate left */
1174 if (t
->right
== NULL
) break;
1176 l
->right
= t
; /* link left */
1183 l
->right
= t
->left
; /* assemble */
1191 #define compare_end(start,tend) (start < tend ? -1 : \
1192 start > tend ? 1 : 0)
1194 static Tree
* splay_end (size_t addr
, Tree
*t
)
1195 /* Splay using the key start (which may or may not be in the tree.) */
1196 /* The starting root is t, and the tree used is defined by rat */
1201 if (t
== NULL
) return t
;
1202 N
.left
= N
.right
= NULL
;
1206 comp
= compare_end(addr
, t
->start
+ t
->size
);
1209 if (y
== NULL
) break;
1210 if (compare_end(addr
, y
->start
+ y
->size
) < 0) {
1211 t
->left
= y
->right
; /* rotate right */
1214 if (t
->left
== NULL
) break;
1216 r
->left
= t
; /* link right */
1219 } else if (comp
> 0) {
1221 if (y
== NULL
) break;
1222 if (compare_end(addr
, y
->start
+ y
->size
) > 0) {
1223 t
->right
= y
->left
; /* rotate left */
1226 if (t
->right
== NULL
) break;
1228 l
->right
= t
; /* link left */
1235 l
->right
= t
->left
; /* assemble */
1243 static Tree
* splay_insert(size_t addr
, size_t size
, Tree
* t
)
1244 /* Insert key start into the tree t, if it is not already there. */
1245 /* Return a pointer to the resulting tree. */
1251 if (compare(addr
, t
->start
, t
->size
)==0) {
1252 return t
; /* it's already there */
1256 if (tree_free_list
) {
1257 new = tree_free_list
;
1258 tree_free_list
= new->left
;
1264 new = (Tree
*) malloc_redir (sizeof (Tree
));
1266 new = (Tree
*) malloc (sizeof (Tree
));
1270 bound_alloc_error();
1274 new->left
= new->right
= NULL
;
1275 } else if (compare(addr
, t
->start
, t
->size
) < 0) {
1276 new->left
= t
->left
;
1280 new->right
= t
->right
;
1286 new->type
= TCC_TYPE_NONE
;
1287 new->is_invalid
= 0;
1292 #define compare_destroy(start,tstart) (start < tstart ? -1 : \
1293 start > tstart ? 1 : 0)
1295 static Tree
* splay_delete(size_t addr
, Tree
*t
)
1296 /* Deletes addr from the tree if it's there. */
1297 /* Return a pointer to the resulting tree. */
1301 if (t
==NULL
) return NULL
;
1303 if (compare_destroy(addr
, t
->start
) == 0) { /* found it */
1304 if (t
->left
== NULL
) {
1307 x
= splay(addr
, t
->left
);
1308 x
->right
= t
->right
;
1311 t
->left
= tree_free_list
;
1322 return t
; /* It wasn't there */
1326 void splay_printtree(Tree
* t
, int d
)
1329 if (t
== NULL
) return;
1330 splay_printtree(t
->right
, d
+1);
1331 for (i
=0; i
<d
; i
++) fprintf(stderr
," ");
1332 fprintf(stderr
,"%p(0x%lx:%u)\n",
1333 (void *) t
->start
, (unsigned long) t
->size
, (unsigned)t
->is_invalid
);
1334 splay_printtree(t
->left
, d
+1);