Import boehm-gc snapshot, taken from
[official-gcc.git] / boehm-gc / blacklst.c
blob: 77ad6758eacb6c0c75c7a84496a96f3406bc8a2d
1 /*
2 * Copyright 1988, 1989 Hans-J. Boehm, Alan J. Demers
3 * Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved.
5 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
6 * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
8 * Permission is hereby granted to use or copy this program
9 * for any purpose, provided the above notices are retained on all copies.
10 * Permission to modify the code and to distribute modified code is granted,
11 * provided the above notices are retained, and a notice that the code was
12 * modified is included with the above copyright notice.
15 #include "private/gc_priv.h"
18 * We maintain several hash tables of hblks that have had false hits.
19 * Each contains one bit per hash bucket; If any page in the bucket
20 * has had a false hit, we assume that all of them have.
21 * See the definition of page_hash_table in gc_private.h.
22 * False hits from the stack(s) are much more dangerous than false hits
23 * from elsewhere, since the former can pin a large object that spans the
24 * block, even though it does not start on the dangerous block.
28 * Externally callable routines are:
30 * GC_add_to_black_list_normal
31 * GC_add_to_black_list_stack
32 * GC_promote_black_lists
33 * GC_is_black_listed
35 * All require that the allocator lock is held.
38 /* Pointers to individual tables. We replace one table by another by */
39 /* switching these pointers. */
40 STATIC word * GC_old_normal_bl = NULL;
41 /* Nonstack false references seen at last full */
42 /* collection. */
43 STATIC word * GC_incomplete_normal_bl = NULL;
44 /* Nonstack false references seen since last */
45 /* full collection. */
46 STATIC word * GC_old_stack_bl = NULL;
47 STATIC word * GC_incomplete_stack_bl = NULL;
49 STATIC word GC_total_stack_black_listed = 0;
50 /* Number of bytes on stack blacklist. */
52 GC_INNER word GC_black_list_spacing = MINHINCR * HBLKSIZE;
53 /* Initial rough guess. */
55 STATIC void GC_clear_bl(word *);
57 GC_INNER void GC_default_print_heap_obj_proc(ptr_t p)
59 ptr_t base = GC_base(p);
60 int kind = HDR(base)->hb_obj_kind;
62 GC_err_printf("object at %p of appr. %lu bytes (%s)\n",
63 base, (unsigned long)GC_size(base),
64 kind == PTRFREE ? "atomic" :
65 IS_UNCOLLECTABLE(kind) ? "uncollectable" : "composite");
68 GC_INNER void (*GC_print_heap_obj)(ptr_t p) = GC_default_print_heap_obj_proc;
#ifdef PRINT_BLACK_LIST
  /* Report a newly blacklisted false pointer p, together with the     */
  /* location (source) it was found at.  kind_str is "normal" or       */
  /* "stack", matching the blacklist being updated.                    */
  STATIC void GC_print_blacklisted_ptr(word p, ptr_t source,
                                       const char *kind_str)
  {
    ptr_t base = GC_base(source);

    if (0 == base) {
        /* source is not inside a heap object: it came from a root     */
        /* segment or (if source itself is NULL) from a register.      */
        GC_err_printf("Black listing (%s) %p referenced from %p in %s\n",
                      kind_str, (ptr_t)p, source,
                      NULL != source ? "root set" : "register");
    } else {
        /* FIXME: We can't call the debug version of GC_print_heap_obj */
        /* (with PRINT_CALL_CHAIN) here because the lock is held and   */
        /* the world is stopped.                                       */
        GC_err_printf("Black listing (%s) %p referenced from %p in"
                      " object at %p of appr. %lu bytes\n",
                      kind_str, (ptr_t)p, source,
                      base, (unsigned long)GC_size(base));
    }
  }
#endif /* PRINT_BLACK_LIST */
92 GC_INNER void GC_bl_init_no_interiors(void)
94 if (GC_incomplete_normal_bl == 0) {
95 GC_old_normal_bl = (word *)GC_scratch_alloc(sizeof(page_hash_table));
96 GC_incomplete_normal_bl = (word *)GC_scratch_alloc(
97 sizeof(page_hash_table));
98 if (GC_old_normal_bl == 0 || GC_incomplete_normal_bl == 0) {
99 GC_err_printf("Insufficient memory for black list\n");
100 EXIT();
102 GC_clear_bl(GC_old_normal_bl);
103 GC_clear_bl(GC_incomplete_normal_bl);
107 GC_INNER void GC_bl_init(void)
109 if (!GC_all_interior_pointers) {
110 GC_bl_init_no_interiors();
112 GC_old_stack_bl = (word *)GC_scratch_alloc(sizeof(page_hash_table));
113 GC_incomplete_stack_bl = (word *)GC_scratch_alloc(sizeof(page_hash_table));
114 if (GC_old_stack_bl == 0 || GC_incomplete_stack_bl == 0) {
115 GC_err_printf("Insufficient memory for black list\n");
116 EXIT();
118 GC_clear_bl(GC_old_stack_bl);
119 GC_clear_bl(GC_incomplete_stack_bl);
122 STATIC void GC_clear_bl(word *doomed)
124 BZERO(doomed, sizeof(page_hash_table));
127 STATIC void GC_copy_bl(word *old, word *new)
129 BCOPY(old, new, sizeof(page_hash_table));
132 static word total_stack_black_listed(void);
134 /* Signal the completion of a collection. Turn the incomplete black */
135 /* lists into new black lists, etc. */
136 GC_INNER void GC_promote_black_lists(void)
138 word * very_old_normal_bl = GC_old_normal_bl;
139 word * very_old_stack_bl = GC_old_stack_bl;
141 GC_old_normal_bl = GC_incomplete_normal_bl;
142 GC_old_stack_bl = GC_incomplete_stack_bl;
143 if (!GC_all_interior_pointers) {
144 GC_clear_bl(very_old_normal_bl);
146 GC_clear_bl(very_old_stack_bl);
147 GC_incomplete_normal_bl = very_old_normal_bl;
148 GC_incomplete_stack_bl = very_old_stack_bl;
149 GC_total_stack_black_listed = total_stack_black_listed();
150 GC_VERBOSE_LOG_PRINTF(
151 "%lu bytes in heap blacklisted for interior pointers\n",
152 (unsigned long)GC_total_stack_black_listed);
153 if (GC_total_stack_black_listed != 0) {
154 GC_black_list_spacing =
155 HBLKSIZE*(GC_heapsize/GC_total_stack_black_listed);
157 if (GC_black_list_spacing < 3 * HBLKSIZE) {
158 GC_black_list_spacing = 3 * HBLKSIZE;
160 if (GC_black_list_spacing > MAXHINCR * HBLKSIZE) {
161 GC_black_list_spacing = MAXHINCR * HBLKSIZE;
162 /* Makes it easier to allocate really huge blocks, which otherwise */
163 /* may have problems with nonuniform blacklist distributions. */
164 /* This way we should always succeed immediately after growing the */
165 /* heap. */
169 GC_INNER void GC_unpromote_black_lists(void)
171 if (!GC_all_interior_pointers) {
172 GC_copy_bl(GC_old_normal_bl, GC_incomplete_normal_bl);
174 GC_copy_bl(GC_old_stack_bl, GC_incomplete_stack_bl);
177 /* P is not a valid pointer reference, but it falls inside */
178 /* the plausible heap bounds. */
179 /* Add it to the normal incomplete black list if appropriate. */
180 #ifdef PRINT_BLACK_LIST
181 GC_INNER void GC_add_to_black_list_normal(word p, ptr_t source)
182 #else
183 GC_INNER void GC_add_to_black_list_normal(word p)
184 #endif
186 if (GC_modws_valid_offsets[p & (sizeof(word)-1)]) {
187 word index = PHT_HASH((word)p);
189 if (HDR(p) == 0 || get_pht_entry_from_index(GC_old_normal_bl, index)) {
190 # ifdef PRINT_BLACK_LIST
191 if (!get_pht_entry_from_index(GC_incomplete_normal_bl, index)) {
192 GC_print_blacklisted_ptr(p, source, "normal");
194 # endif
195 set_pht_entry_from_index(GC_incomplete_normal_bl, index);
196 } /* else this is probably just an interior pointer to an allocated */
197 /* object, and isn't worth black listing. */
201 /* And the same for false pointers from the stack. */
202 #ifdef PRINT_BLACK_LIST
203 GC_INNER void GC_add_to_black_list_stack(word p, ptr_t source)
204 #else
205 GC_INNER void GC_add_to_black_list_stack(word p)
206 #endif
208 word index = PHT_HASH((word)p);
210 if (HDR(p) == 0 || get_pht_entry_from_index(GC_old_stack_bl, index)) {
211 # ifdef PRINT_BLACK_LIST
212 if (!get_pht_entry_from_index(GC_incomplete_stack_bl, index)) {
213 GC_print_blacklisted_ptr(p, source, "stack");
215 # endif
216 set_pht_entry_from_index(GC_incomplete_stack_bl, index);
221 * Is the block starting at h of size len bytes black listed? If so,
222 * return the address of the next plausible r such that (r, len) might not
223 * be black listed. (R may not actually be in the heap. We guarantee only
224 * that every smaller value of r after h is also black listed.)
225 * If (h,len) is not black listed, return 0.
226 * Knows about the structure of the black list hash tables.
228 struct hblk * GC_is_black_listed(struct hblk *h, word len)
230 word index = PHT_HASH((word)h);
231 word i;
232 word nblocks;
234 if (!GC_all_interior_pointers
235 && (get_pht_entry_from_index(GC_old_normal_bl, index)
236 || get_pht_entry_from_index(GC_incomplete_normal_bl, index))) {
237 return (h+1);
240 nblocks = divHBLKSZ(len);
241 for (i = 0;;) {
242 if (GC_old_stack_bl[divWORDSZ(index)] == 0
243 && GC_incomplete_stack_bl[divWORDSZ(index)] == 0) {
244 /* An easy case */
245 i += WORDSZ - modWORDSZ(index);
246 } else {
247 if (get_pht_entry_from_index(GC_old_stack_bl, index)
248 || get_pht_entry_from_index(GC_incomplete_stack_bl, index)) {
249 return(h+i+1);
251 i++;
253 if (i >= nblocks) break;
254 index = PHT_HASH((word)(h+i));
256 return(0);
259 /* Return the number of blacklisted blocks in a given range. */
260 /* Used only for statistical purposes. */
261 /* Looks only at the GC_incomplete_stack_bl. */
262 STATIC word GC_number_stack_black_listed(struct hblk *start,
263 struct hblk *endp1)
265 register struct hblk * h;
266 word result = 0;
268 for (h = start; (word)h < (word)endp1; h++) {
269 word index = PHT_HASH((word)h);
271 if (get_pht_entry_from_index(GC_old_stack_bl, index)) result++;
273 return(result);
276 /* Return the total number of (stack) black-listed bytes. */
277 static word total_stack_black_listed(void)
279 register unsigned i;
280 word total = 0;
282 for (i = 0; i < GC_n_heap_sects; i++) {
283 struct hblk * start = (struct hblk *) GC_heap_sects[i].hs_start;
284 struct hblk * endp1 = start + GC_heap_sects[i].hs_bytes/HBLKSIZE;
286 total += GC_number_stack_black_listed(start, endp1);
288 return(total * HBLKSIZE);