/* Simple garbage collection for the GNU compiler.
   Copyright (C) 1998, 1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "varray.h"
#include "ggc.h"

#ifndef offsetof
#define offsetof(TYPE, MEMBER) ((size_t) &((TYPE *)0)->MEMBER)
#endif

/* Debugging flags.  */

/* Zap memory before freeing to catch dangling pointers.  */
#define GGC_POISON

/* Collect statistics on how bushy the search tree is.  */
#undef GGC_BALANCE

/* Perform collection every time ggc_collect is invoked.  Otherwise,
   collection is performed only when a significant amount of memory
   has been allocated since the last collection.  */
#undef GGC_ALWAYS_COLLECT

/* Always verify that the to-be-marked memory is collectable.  */
#undef GGC_ALWAYS_VERIFY

#ifdef ENABLE_GC_CHECKING
#define GGC_POISON
#define GGC_ALWAYS_VERIFY
#endif
#ifdef ENABLE_GC_ALWAYS_COLLECT
#define GGC_ALWAYS_COLLECT
#endif

/* Constants for general use.  */

char *empty_string;
extern int gc_time;

#ifndef HOST_BITS_PER_PTR
#define HOST_BITS_PER_PTR  HOST_BITS_PER_LONG
#endif

/* We'd like a balanced tree, but we don't really want to pay for the
   cost of keeping the tree balanced.  We'll settle for the next best
   thing -- nearly balanced.

   In this context, the most natural key is the node pointer itself,
   but due to the way memory managers work, we'd be virtually certain
   to wind up with a completely degenerate straight line.  What's needed
   is to make something more variable, and yet predictable, be more
   significant in the comparison.

   The handiest source of variability is the low bits of the pointer
   value itself.  Any sort of bit/byte swap would do, but such machine
   specific operations are not handy, and we don't want to put that much
   effort into it.  */

#define PTR_KEY(p)	((size_t)p << (HOST_BITS_PER_PTR - 8)		     \
			 | ((size_t)p & 0xff00) << (HOST_BITS_PER_PTR - 24) \
			 | (size_t)p >> 16)
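
/* For illustration (an added note, not part of the original sources):
   assuming 32-bit pointers, PTR_KEY (p) expands to

	(p << 24) | ((p & 0xff00) << 8) | (p >> 16)

   so the low byte of the pointer becomes the most significant byte of
   the key, bits 8-15 land just below it, and the remaining high bits
   fill out the low end.  Pointers that differ only in their low bits
   therefore produce very different keys, which is what keeps the tree
   from degenerating into a straight line.  */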
/* GC'able memory; a node in a binary search tree.  */

struct ggc_mem
{
  /* A combination of the standard left/right nodes, indexable by `<'.  */
  struct ggc_mem *sub[2];

  unsigned int mark : 1;
  unsigned int context : 7;
  unsigned int size : 24;

  /* Make sure the data is reasonably aligned.  */
  union {
    HOST_WIDEST_INT i;
#ifdef HAVE_LONG_DOUBLE
    long double d;
#else
    double d;
#endif
  } u;
};
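
/* Illustrative note (an addition to the original comments): user data is
   stored directly in the `u' member, so the pointer handed back by
   ggc_alloc_obj is &x->u, offsetof (struct ggc_mem, u) bytes past the
   start of the node.  The mark routines below recover the node from a
   user pointer by subtracting that same offset.  */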
static struct globals
{
  /* Root of the object tree.  */
  struct ggc_mem *root;

  /* Data bytes currently allocated.  */
  size_t allocated;

  /* Data objects currently allocated.  */
  size_t objects;

  /* Data bytes allocated at time of last GC.  */
  size_t allocated_last_gc;

  /* Current context level.  */
  int context;
} G;

/* Skip garbage collection if the current allocation is not at least
   this factor times the allocation at the end of the last collection.
   In other words, total allocation must expand by (this factor minus
   one) before collection is performed.  */
#define GGC_MIN_EXPAND_FOR_GC (1.3)

/* Bound `allocated_last_gc' to 4MB, to prevent the memory expansion
   test from triggering too often when the heap is small.  */
#define GGC_MIN_LAST_ALLOCATED (4 * 1024 * 1024)
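
/* Worked example (an added note, not in the original sources): with the
   defaults above, a fresh compilation starts with allocated_last_gc at
   the 4MB floor, so the first call to ggc_collect does real work only
   once more than 1.3 * 4MB = 5.2MB of data has been allocated.  If,
   say, 3MB survive that collection, allocated_last_gc is bumped back
   up to the 4MB floor and the next collection again waits for 5.2MB.  */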
/* Local function prototypes.  */

static void tree_insert PARAMS ((struct ggc_mem *));
static int tree_lookup PARAMS ((struct ggc_mem *));
static void clear_marks PARAMS ((struct ggc_mem *));
static void sweep_objs PARAMS ((struct ggc_mem **));
static void ggc_pop_context_1 PARAMS ((struct ggc_mem *, int));

#ifdef GGC_BALANCE
extern void debug_ggc_balance PARAMS ((void));
static void tally_leaves PARAMS ((struct ggc_mem *, int, size_t *, size_t *));
#endif

/* Insert V into the search tree.  */

static inline void
tree_insert (v)
     struct ggc_mem *v;
{
  size_t v_key = PTR_KEY (v);
  struct ggc_mem *p, **pp;

  for (pp = &G.root, p = *pp; p ; p = *pp)
    {
      size_t p_key = PTR_KEY (p);
      pp = &p->sub[v_key < p_key];
    }
  *pp = v;
}

/* Return true if V is in the tree.  */

static inline int
tree_lookup (v)
     struct ggc_mem *v;
{
  size_t v_key = PTR_KEY (v);
  struct ggc_mem *p = G.root;

  while (p)
    {
      size_t p_key = PTR_KEY (p);
      if (p == v)
	return 1;
      p = p->sub[v_key < p_key];
    }

  return 0;
}
/* Alloc SIZE bytes of GC'able memory.  If ZERO, clear the memory.  */

void *
ggc_alloc_obj (size, zero)
     size_t size;
     int zero;
{
  struct ggc_mem *x;

  x = (struct ggc_mem *) xmalloc (offsetof (struct ggc_mem, u) + size);
  x->sub[0] = NULL;
  x->sub[1] = NULL;
  x->mark = 0;
  x->context = G.context;
  x->size = size;

  if (zero)
    memset (&x->u, 0, size);
#ifdef GGC_POISON
  else
    memset (&x->u, 0xaf, size);
#endif

  tree_insert (x);
  G.allocated += size;
  G.objects += 1;

  return &x->u;
}
/* Mark a node.  Returns nonzero if the node was already marked.  */

int
ggc_set_mark (p)
     const void *p;
{
  struct ggc_mem *x;

  x = (struct ggc_mem *) ((const char *)p - offsetof (struct ggc_mem, u));
#ifdef GGC_ALWAYS_VERIFY
  if (! tree_lookup (x))
    abort ();
#endif

  if (x->mark)
    return 1;

  x->mark = 1;
  G.allocated += x->size;
  G.objects += 1;

  return 0;
}
/* Mark a node, but check first to see that it's really gc-able memory.  */

void
ggc_mark_if_gcable (p)
     const void *p;
{
  struct ggc_mem *x;

  if (p == NULL)
    return;

  x = (struct ggc_mem *) ((const char *)p - offsetof (struct ggc_mem, u));
  if (! tree_lookup (x))
    return;

  if (x->mark)
    return;

  x->mark = 1;
  G.allocated += x->size;
  G.objects += 1;
}

/* Return the size of the gc-able object P.  */

size_t
ggc_get_size (p)
     const void *p;
{
  struct ggc_mem *x
    = (struct ggc_mem *) ((const char *)p - offsetof (struct ggc_mem, u));
  return x->size;
}

/* Unmark all objects.  */

static void
clear_marks (x)
     struct ggc_mem *x;
{
  x->mark = 0;
  if (x->sub[0])
    clear_marks (x->sub[0]);
  if (x->sub[1])
    clear_marks (x->sub[1]);
}
/* Free all objects in the current context that are not marked.  */

static void
sweep_objs (root)
     struct ggc_mem **root;
{
  struct ggc_mem *x = *root;
  if (!x)
    return;

  sweep_objs (&x->sub[0]);
  sweep_objs (&x->sub[1]);

  if (! x->mark && x->context >= G.context)
    {
      struct ggc_mem *l, *r;

      /* Unlink X from the tree, splicing its subtrees together in the
	 cheapest way that preserves the key ordering.  */
      l = x->sub[0];
      r = x->sub[1];
      if (!l)
	*root = r;
      else if (!r)
	*root = l;
      else if (!l->sub[1])
	{
	  *root = l;
	  l->sub[1] = r;
	}
      else if (!r->sub[0])
	{
	  *root = r;
	  r->sub[0] = l;
	}
      else
	{
	  /* Both subtrees are nonempty; hang R off the rightmost node of L.  */
	  *root = l;
	  do {
	    root = &l->sub[1];
	  } while ((l = *root) != NULL);
	  *root = r;
	}

#ifdef GGC_POISON
      memset (&x->u, 0xA5, x->size);
#endif

      free (x);
    }
}
/* The top level mark-and-sweep routine.  */

void
ggc_collect ()
{
  int time;

#ifndef GGC_ALWAYS_COLLECT
  if (G.allocated < GGC_MIN_EXPAND_FOR_GC * G.allocated_last_gc)
    return;
#endif

#ifdef GGC_BALANCE
  debug_ggc_balance ();
#endif

  time = get_run_time ();
  if (!quiet_flag)
    fprintf (stderr, " {GC %luk -> ", (unsigned long)G.allocated / 1024);

  G.allocated = 0;
  G.objects = 0;

  clear_marks (G.root);
  ggc_mark_roots ();
  sweep_objs (&G.root);

  G.allocated_last_gc = G.allocated;
  if (G.allocated_last_gc < GGC_MIN_LAST_ALLOCATED)
    G.allocated_last_gc = GGC_MIN_LAST_ALLOCATED;

  time = get_run_time () - time;
  gc_time += time;

  if (!quiet_flag)
    {
      fprintf (stderr, "%luk in %.3f}",
	       (unsigned long) G.allocated / 1024, time * 1e-6);
    }
#ifdef GGC_BALANCE
  debug_ggc_balance ();
#endif
}
/* Called once to initialize the garbage collector.  */

void
init_ggc ()
{
  G.allocated_last_gc = GGC_MIN_LAST_ALLOCATED;

  empty_string = ggc_alloc_string ("", 0);
  ggc_add_string_root (&empty_string, 1);
}

/* Start a new GGC context.  Memory allocated in previous contexts
   will not be collected while the new context is active.  */

void
ggc_push_context ()
{
  G.context++;

  /* We only allocated 7 bits in the node for the context.  This
     should be more than enough.  */
  if (G.context >= 128)
    abort ();
}

/* Finish a GC context.  Any uncollected memory in the new context
   will be merged with the old context.  */

void
ggc_pop_context ()
{
  G.context--;
  if (G.root)
    ggc_pop_context_1 (G.root, G.context);
}

static void
ggc_pop_context_1 (x, c)
     struct ggc_mem *x;
     int c;
{
  if (x->context > c)
    x->context = c;
  if (x->sub[0])
    ggc_pop_context_1 (x->sub[0], c);
  if (x->sub[1])
    ggc_pop_context_1 (x->sub[1], c);
}
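
/* Usage sketch (illustrative, not part of the original sources):

	ggc_push_context ();
	  ... allocate temporary objects ...
	  ggc_collect ();
	ggc_pop_context ();

   While the new context is active, sweep_objs only frees unmarked nodes
   whose context is >= G.context, so objects allocated before the push
   survive the inner collection; the pop then merges any survivors back
   into the enclosing context.  */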
/* Dump a tree.  */

void
debug_ggc_tree (p, indent)
     struct ggc_mem *p;
     int indent;
{
  int i;

  if (!p)
    {
      fputs ("(nil)\n", stderr);
      return;
    }

  if (p->sub[0])
    debug_ggc_tree (p->sub[0], indent + 1);

  for (i = 0; i < indent; ++i)
    putc (' ', stderr);
  fprintf (stderr, "%lx %p\n", PTR_KEY (p), p);

  if (p->sub[1])
    debug_ggc_tree (p->sub[1], indent + 1);
}

#ifdef GGC_BALANCE
/* Collect tree balance metrics.  */

#include <math.h>

void
debug_ggc_balance ()
{
  size_t nleaf, sumdepth;

  nleaf = sumdepth = 0;
  tally_leaves (G.root, 0, &nleaf, &sumdepth);

  fprintf (stderr, " {B %.2f,%.1f,%.1f}",
	   /* In a balanced tree, leaf/node should approach 1/2.  */
	   (float)nleaf / (float)G.objects,
	   /* In a balanced tree, average leaf depth should approach lg(n).  */
	   (float)sumdepth / (float)nleaf,
	   log ((double) G.objects) / M_LN2);
}

static void
tally_leaves (x, depth, nleaf, sumdepth)
     struct ggc_mem *x;
     int depth;
     size_t *nleaf;
     size_t *sumdepth;
{
  if (! x->sub[0] && !x->sub[1])
    {
      *nleaf += 1;
      *sumdepth += depth;
    }
  else
    {
      if (x->sub[0])
	tally_leaves (x->sub[0], depth + 1, nleaf, sumdepth);
      if (x->sub[1])
	tally_leaves (x->sub[1], depth + 1, nleaf, sumdepth);
    }
}
#endif