/* boehm-gc/mark_rts.c */
/*
 * Copyright 1988, 1989 Hans-J. Boehm, Alan J. Demers
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED.  ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 */
/* Boehm, October 9, 1995 1:06 pm PDT */
# include <stdio.h>
# include "gc_priv.h"
/* MAX_ROOT_SETS is the maximum number of ranges that can be    */
/* registered as static roots.                                  */
# ifdef LARGE_CONFIG
#   define MAX_ROOT_SETS 4096
# else
#   ifdef PCR
#     define MAX_ROOT_SETS 1024
#   else
#     ifdef MSWIN32
#       define MAX_ROOT_SETS 512
          /* Under NT, we add only written pages, which can result     */
          /* in many small root sets.                                  */
#     else
#       define MAX_ROOT_SETS 64
#     endif
#   endif
# endif

# define MAX_EXCLUSIONS (MAX_ROOT_SETS/4)
/* Maximum number of segments that can be excluded from root sets.     */
/* Data structure for list of root sets.                                */
/* We keep a hash table, so that we can filter out duplicate additions. */
/* Under Win32, we need to do a better job of filtering overlaps, so    */
/* we resort to sequential search, and pay the price.                   */
struct roots {
    ptr_t r_start;
    ptr_t r_end;
#   ifndef MSWIN32
      struct roots * r_next;
#   endif
    GC_bool r_tmp;
        /* Delete before registering new dynamic libraries.            */
};

static struct roots static_roots[MAX_ROOT_SETS];

static int n_root_sets = 0;

    /* static_roots[0..n_root_sets) contains the valid root sets.      */
# if !defined(NO_DEBUGGING)
/* For debugging: print the registered root ranges and their total size. */
void GC_print_static_roots()
{
    register int i;
    size_t total = 0;

    for (i = 0; i < n_root_sets; i++) {
        GC_printf2("From 0x%lx to 0x%lx ",
                   (unsigned long) static_roots[i].r_start,
                   (unsigned long) static_roots[i].r_end);
        if (static_roots[i].r_tmp) {
            GC_printf0(" (temporary)\n");
        } else {
            GC_printf0("\n");
        }
        total += static_roots[i].r_end - static_roots[i].r_start;
    }
    GC_printf1("Total size: %ld\n", (unsigned long) total);
    if (GC_root_size != total) {
        GC_printf1("GC_root_size incorrect: %ld!!\n",
                   (unsigned long) GC_root_size);
    }
}
# endif /* NO_DEBUGGING */
/* Primarily for debugging support:                     */
/* Is the address p in one of the registered static     */
/* root sections?                                       */
GC_bool GC_is_static_root(p)
ptr_t p;
{
    static int last_root_set = 0;
    register int i;

    /* Check the most recently matched root set first. */
    if (p >= static_roots[last_root_set].r_start
        && p < static_roots[last_root_set].r_end) return(TRUE);
    for (i = 0; i < n_root_sets; i++) {
        if (p >= static_roots[i].r_start
            && p < static_roots[i].r_end) {
            last_root_set = i;
            return(TRUE);
        }
    }
    return(FALSE);
}
#ifndef MSWIN32
#   define LOG_RT_SIZE 6
#   define RT_SIZE (1 << LOG_RT_SIZE) /* Power of 2, may be != MAX_ROOT_SETS */

    static struct roots * root_index[RT_SIZE];
        /* Hash table header.  Used only to check whether a range is   */
        /* already present.                                            */

/* Hash a range's start address down to LOG_RT_SIZE bits by XOR-folding. */
static int rt_hash(addr)
char * addr;
{
    word result = (word) addr;
#   if CPP_WORDSZ > 8*LOG_RT_SIZE
        result ^= result >> 8*LOG_RT_SIZE;
#   endif
#   if CPP_WORDSZ > 4*LOG_RT_SIZE
        result ^= result >> 4*LOG_RT_SIZE;
#   endif
    result ^= result >> 2*LOG_RT_SIZE;
    result ^= result >> LOG_RT_SIZE;
    result &= (RT_SIZE-1);
    return(result);
}
/* Is a range starting at b already in the table? If so return a       */
/* pointer to it, else NIL.                                             */
struct roots * GC_roots_present(b)
char *b;
{
    register int h = rt_hash(b);
    register struct roots *p = root_index[h];

    while (p != 0) {
        if (p -> r_start == (ptr_t)b) return(p);
        p = p -> r_next;
    }
    return(FALSE);
}
/* Add the given root structure to the index. */
static void add_roots_to_index(p)
struct roots *p;
{
    register int h = rt_hash(p -> r_start);

    p -> r_next = root_index[h];
    root_index[h] = p;
}

# else /* MSWIN32 */

#   define add_roots_to_index(p)

# endif

word GC_root_size = 0;
/* Externally visible entry point: add [b,e) to the root set,   */
/* with the allocator lock held and signals disabled.           */
void GC_add_roots(b, e)
char * b; char * e;
{
    DCL_LOCK_STATE;

    DISABLE_SIGNALS();
    LOCK();
    GC_add_roots_inner(b, e, FALSE);
    UNLOCK();
    ENABLE_SIGNALS();
}
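
/* Illustrative usage sketch (not part of this file): a client program  */
/* with a statically allocated table that the collector would not       */
/* otherwise scan might register it as a root.  my_table is a           */
/* hypothetical name introduced only for this example.                  */
/*
 *   static void * my_table[256];
 *
 *   GC_add_roots((char *)my_table, (char *)(my_table + 256));
 */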
/* Add [b,e) to the root set.  Adding the same interval a second time  */
/* is a moderately fast noop, and hence benign.  We do not handle      */
/* different but overlapping intervals efficiently.  (We do handle     */
/* them correctly; see the behavior sketch after this function.)       */
/* Tmp specifies that the interval may be deleted before               */
/* reregistering dynamic libraries.                                    */
void GC_add_roots_inner(b, e, tmp)
char * b; char * e;
GC_bool tmp;
{
    struct roots * old;

#   ifdef MSWIN32
      /* Spend the time to ensure that there are no overlapping        */
      /* or adjacent intervals.                                         */
      /* This could be done faster with e.g. a                          */
      /* balanced tree.  But the execution time here is                 */
      /* virtually guaranteed to be dominated by the time it            */
      /* takes to scan the roots.                                       */
      {
        register int i;

        for (i = 0; i < n_root_sets; i++) {
            old = static_roots + i;
            if ((ptr_t)b <= old -> r_end && (ptr_t)e >= old -> r_start) {
                if ((ptr_t)b < old -> r_start) {
                    /* Account for the growth before moving the boundary. */
                    GC_root_size += (old -> r_start - (ptr_t)b);
                    old -> r_start = (ptr_t)b;
                }
                if ((ptr_t)e > old -> r_end) {
                    GC_root_size += ((ptr_t)e - old -> r_end);
                    old -> r_end = (ptr_t)e;
                }
                old -> r_tmp &= tmp;
                break;
            }
        }
        if (i < n_root_sets) {
          /* Merge other overlapping intervals. */
            struct roots *other;

            for (i++; i < n_root_sets; i++) {
              other = static_roots + i;
              b = (char *)(other -> r_start);
              e = (char *)(other -> r_end);
              if ((ptr_t)b <= old -> r_end && (ptr_t)e >= old -> r_start) {
                if ((ptr_t)b < old -> r_start) {
                    GC_root_size += (old -> r_start - (ptr_t)b);
                    old -> r_start = (ptr_t)b;
                }
                if ((ptr_t)e > old -> r_end) {
                    GC_root_size += ((ptr_t)e - old -> r_end);
                    old -> r_end = (ptr_t)e;
                }
                old -> r_tmp &= other -> r_tmp;
                /* Delete this entry by overwriting it with the last one. */
                GC_root_size -= (other -> r_end - other -> r_start);
                other -> r_start = static_roots[n_root_sets-1].r_start;
                other -> r_end = static_roots[n_root_sets-1].r_end;
                n_root_sets--;
              }
            }
          return;
        }
      }
#   else
      old = GC_roots_present(b);
      if (old != 0) {
        if ((ptr_t)e <= old -> r_end) /* already there */ return;
        /* else extend */
        GC_root_size += (ptr_t)e - old -> r_end;
        old -> r_end = (ptr_t)e;
        return;
      }
#   endif
    if (n_root_sets == MAX_ROOT_SETS) {
        ABORT("Too many root sets\n");
    }
    static_roots[n_root_sets].r_start = (ptr_t)b;
    static_roots[n_root_sets].r_end = (ptr_t)e;
    static_roots[n_root_sets].r_tmp = tmp;
#   ifndef MSWIN32
      static_roots[n_root_sets].r_next = 0;
#   endif
    add_roots_to_index(static_roots + n_root_sets);
    GC_root_size += (ptr_t)e - (ptr_t)b;
    n_root_sets++;
}
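
/* Behavior sketch (hypothetical ranges): adding [0x1000,0x2000) twice  */
/* leaves a single entry.  Adding [0x1800,0x3000) afterwards keeps two  */
/* overlapping entries on most platforms (the region [0x1800,0x2000) is */
/* simply scanned twice), while the MSWIN32 path above merges them into */
/* one entry [0x1000,0x3000).                                            */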
void GC_clear_roots GC_PROTO((void))
{
    DCL_LOCK_STATE;

    DISABLE_SIGNALS();
    LOCK();
    n_root_sets = 0;
    GC_root_size = 0;
#   ifndef MSWIN32
    {
        register int i;

        for (i = 0; i < RT_SIZE; i++) root_index[i] = 0;
    }
#   endif
    UNLOCK();
    ENABLE_SIGNALS();
}
/* Internal use only; lock held.                                        */
/* Drop all root sets registered as temporary, then rebuild the index.  */
void GC_remove_tmp_roots()
{
    register int i;

    for (i = 0; i < n_root_sets; ) {
        if (static_roots[i].r_tmp) {
            GC_root_size -= (static_roots[i].r_end - static_roots[i].r_start);
            static_roots[i].r_start = static_roots[n_root_sets-1].r_start;
            static_roots[i].r_end = static_roots[n_root_sets-1].r_end;
            static_roots[i].r_tmp = static_roots[n_root_sets-1].r_tmp;
            n_root_sets--;
        } else {
            i++;
        }
    }
#   ifndef MSWIN32
    {
        register int i;

        for (i = 0; i < RT_SIZE; i++) root_index[i] = 0;
        for (i = 0; i < n_root_sets; i++) add_roots_to_index(static_roots + i);
    }
#   endif
}
/* Return an approximation to the current stack pointer by taking the  */
/* address of a local variable.                                         */
ptr_t GC_approx_sp()
{
    word dummy;

    return((ptr_t)(&dummy));
}
/*
 * Data structure for excluded static roots.
 */
struct exclusion {
    ptr_t e_start;
    ptr_t e_end;
};

struct exclusion excl_table[MAX_EXCLUSIONS];
                                        /* Array of exclusions, ascending */
                                        /* address order.                 */
size_t excl_table_entries = 0;          /* Number of entries in use.      */
/* Return the first exclusion range that includes an address >= start_addr. */
/* Assumes the exclusion table contains at least one entry (namely the      */
/* GC data structures).                                                      */
struct exclusion * GC_next_exclusion(start_addr)
ptr_t start_addr;
{
    size_t low = 0;
    size_t high = excl_table_entries - 1;
    size_t mid;

    /* Binary search for the leftmost entry whose end lies beyond start_addr. */
    while (high > low) {
        mid = (low + high) >> 1;
        /* low <= mid < high */
        if ((word) excl_table[mid].e_end <= (word) start_addr) {
            low = mid + 1;
        } else {
            high = mid;
        }
    }
    if ((word) excl_table[low].e_end <= (word) start_addr) return 0;
    return excl_table + low;
}
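
/* Worked example (hypothetical addresses): with excl_table holding    */
/* [0x1000,0x2000) and [0x4000,0x5000), GC_next_exclusion(0x2800)       */
/* returns the second entry, since its end is the first one above       */
/* 0x2800; GC_next_exclusion(0x5800) returns 0 because no entry ends    */
/* beyond that address.                                                  */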
void GC_exclude_static_roots(start, finish)
GC_PTR start;
GC_PTR finish;
{
    struct exclusion * next;
    size_t next_index, i;

    if (0 == excl_table_entries) {
        next = 0;
    } else {
        next = GC_next_exclusion(start);
    }
    if (0 != next) {
      if ((word)(next -> e_start) < (word) finish) {
        /* Incomplete error check. */
        ABORT("exclusion ranges overlap");
      }
      if ((word)(next -> e_start) == (word) finish) {
        /* Extend old range backwards. */
        next -> e_start = (ptr_t)start;
        return;
      }
      /* Shift later entries up to keep the table sorted by address. */
      next_index = next - excl_table;
      for (i = excl_table_entries; i > next_index; --i) {
        excl_table[i] = excl_table[i-1];
      }
    } else {
      next_index = excl_table_entries;
    }
    if (excl_table_entries == MAX_EXCLUSIONS) ABORT("Too many exclusions");
    excl_table[next_index].e_start = (ptr_t)start;
    excl_table[next_index].e_end = (ptr_t)finish;
    ++excl_table_entries;
}
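
/* Illustrative usage sketch (hypothetical names): a client might keep  */
/* a pointer-free lookup table inside an otherwise-registered data      */
/* segment out of the root scan.                                        */
/*
 *   static char my_crc_table[64*1024];   -- contains no pointers
 *
 *   GC_exclude_static_roots((GC_PTR)my_crc_table,
 *                           (GC_PTR)(my_crc_table + sizeof(my_crc_table)));
 */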
/* Invoke push_conditional on ranges that are not excluded. */
void GC_push_conditional_with_exclusions(bottom, top, all)
ptr_t bottom;
ptr_t top;
int all;
{
    struct exclusion * next;
    ptr_t excl_start;

    while (bottom < top) {
        next = GC_next_exclusion(bottom);
        if (0 == next || (excl_start = next -> e_start) >= top) {
            GC_push_conditional(bottom, top, all);
            return;
        }
        if (excl_start > bottom) GC_push_conditional(bottom, excl_start, all);
        bottom = next -> e_end;
    }
}
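
/* Walk-through (hypothetical addresses): pushing [0x1000,0x6000) with  */
/* a single exclusion [0x2000,0x3000) results in two calls:             */
/* GC_push_conditional(0x1000, 0x2000, all) and                          */
/* GC_push_conditional(0x3000, 0x6000, all).                             */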
/*
 * Call the mark routines (GC_tl_push for a single pointer, GC_push_conditional
 * on groups of pointers) on every top level accessible pointer.
 * If all is FALSE, arrange to push only possibly altered values.
 */

void GC_push_roots(all)
GC_bool all;
{
    register int i;

    /*
     * Push registers - i.e., call GC_push_one(r) for each
     * register contents r.
     */
    GC_push_regs(); /* usually defined in machine_dep.c */

    /*
     * Next push static data.  This must happen early on, since it's
     * not robust against mark stack overflow.
     */
    /* Reregister dynamic libraries, in case one got added. */
#   if (defined(DYNAMIC_LOADING) || defined(MSWIN32) || defined(PCR)) \
       && !defined(SRC_M3)
        GC_remove_tmp_roots();
        GC_register_dynamic_libraries();
#   endif
    /* Mark everything in static data areas. */
    for (i = 0; i < n_root_sets; i++) {
        GC_push_conditional_with_exclusions(
            static_roots[i].r_start,
            static_roots[i].r_end, all);
    }

    /*
     * Now traverse stacks.
     */
#   ifndef THREADS
        /* Mark everything on the stack. */
#       ifdef STACK_GROWS_DOWN
          GC_push_all_stack( GC_approx_sp(), GC_stackbottom );
#       else
          GC_push_all_stack( GC_stackbottom, GC_approx_sp() );
#       endif
#   endif
    if (GC_push_other_roots != 0) (*GC_push_other_roots)();
        /* In the threads case, this also pushes thread stacks. */
}