* cp-tree.h (DECL_LOCAL_FUCNTION_P): New macro.
[official-gcc.git] / gcc / ggc-simple.c
blob53d6579dbac0afe9d57a8d9593324782aef694b5
1 /* Simple garbage collection for the GNU compiler.
2 Copyright (C) 1998, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "rtl.h"
24 #include "tree.h"
25 #include "tm_p.h"
26 #include "flags.h"
27 #include "varray.h"
28 #include "hash.h"
29 #include "ggc.h"
/* Provide offsetof for compilers whose <stddef.h> lacks it.  */
#ifndef offsetof
#define offsetof(TYPE, MEMBER) ((size_t) &((TYPE *)0)->MEMBER)
#endif

/* Debugging flags.  */

/* Zap memory before freeing to catch dangling pointers.  */
#define GGC_POISON

/* Collect statistics on how bushy the search tree is.  */
#undef GGC_BALANCE

/* Perform collection every time ggc_collect is invoked.  Otherwise,
   collection is performed only when a significant amount of memory
   has been allocated since the last collection.  */
#undef GGC_ALWAYS_COLLECT

/* Always verify that the to-be-marked memory is collectable.  */
#undef GGC_ALWAYS_VERIFY

/* When checking is enabled, turn on all of the debugging aids.
   (Redefining GGC_POISON to the same empty expansion is harmless.)  */
#ifdef ENABLE_CHECKING
#define GGC_POISON
#define GGC_ALWAYS_COLLECT
#define GGC_ALWAYS_VERIFY
#endif

/* Constants for general use.  */

/* The canonical shared empty string; allocated and registered as a GC
   root in init_ggc below.  */
char *empty_string;

/* Accumulated collector run time; defined in another file.  */
extern int gc_time;

#ifndef HOST_BITS_PER_PTR
#define HOST_BITS_PER_PTR  HOST_BITS_PER_LONG
#endif
/* We'd like a balanced tree, but we don't really want to pay for the
   cost of keeping the tree balanced.  We'll settle for the next best
   thing -- nearly balanced.

   In this context, the most natural key is the node pointer itself,
   but due to the way memory managers work, we'd be virtually certain
   to wind up with a completely degenerate straight line.  What's needed
   is to make something more variable, and yet predictable, be more
   significant in the comparison.

   The handiest source of variability is the low bits of the pointer
   value itself.  Any sort of bit/byte swap would do, but such machine
   specific operations are not handy, and we don't want to put that much
   effort into it.

   The macro argument is fully parenthesized so that an expression
   argument (e.g. PTR_KEY (p + 1)) is cast as a whole, not cast and
   then operated on.  The grouping is unchanged: `<<' and `>>' bind
   tighter than `|'.  */

#define PTR_KEY(p)  (((size_t)(p) << (HOST_BITS_PER_PTR - 8)) \
		     | (((size_t)(p) & 0xff00) << (HOST_BITS_PER_PTR - 24)) \
		     | ((size_t)(p) >> 16))
85 /* GC'able memory; a node in a binary search tree. */
87 struct ggc_mem
89 /* A combination of the standard left/right nodes, indexable by `<'. */
90 struct ggc_mem *sub[2];
92 unsigned int mark : 1;
93 unsigned int context : 7;
94 unsigned int size : 24;
96 /* Make sure the data is reasonably aligned. */
97 union {
98 HOST_WIDEST_INT i;
99 #ifdef HAVE_LONG_DOUBLE
100 long double d;
101 #else
102 double d;
103 #endif
104 } u;
/* All collector state, gathered into a single static struct.  */
static struct globals
{
  /* Root of the object tree.  */
  struct ggc_mem *root;

  /* Data bytes currently allocated.  */
  size_t allocated;

  /* Data objects currently allocated.  */
  size_t objects;

  /* Data bytes allocated at time of last GC.  */
  size_t allocated_last_gc;

  /* Current context level.  */
  int context;
} G;
/* Skip garbage collection if the current allocation is not at least
   this factor times the allocation at the end of the last collection.
   In other words, total allocation must expand by (this factor minus
   one) before collection is performed.  */
#define GGC_MIN_EXPAND_FOR_GC (1.3)

/* Bound `allocated_last_gc' to 4MB, to prevent the memory expansion
   test from triggering too often when the heap is small.  */
#define GGC_MIN_LAST_ALLOCATED (4 * 1024 * 1024)
135 /* Local function prototypes. */
137 static void tree_insert PROTO ((struct ggc_mem *));
138 static int tree_lookup PROTO ((struct ggc_mem *));
139 static void clear_marks PROTO ((struct ggc_mem *));
140 static void sweep_objs PROTO ((struct ggc_mem **));
141 static void ggc_pop_context_1 PROTO ((struct ggc_mem *, int));
143 #ifdef GGC_BALANCE
144 extern void debug_ggc_balance PROTO ((void));
145 static void tally_leaves PROTO ((struct ggc_mem *, int, size_t *, size_t *));
146 #endif
148 /* Insert V into the search tree. */
150 static inline void
151 tree_insert (v)
152 struct ggc_mem *v;
154 size_t v_key = PTR_KEY (v);
155 struct ggc_mem *p, **pp;
157 for (pp = &G.root, p = *pp; p ; p = *pp)
159 size_t p_key = PTR_KEY (p);
160 pp = &p->sub[v_key < p_key];
162 *pp = v;
165 /* Return true if V is in the tree. */
167 static inline int
168 tree_lookup (v)
169 struct ggc_mem *v;
171 size_t v_key = PTR_KEY (v);
172 struct ggc_mem *p = G.root;
174 while (p)
176 size_t p_key = PTR_KEY (p);
177 if (p == v)
178 return 1;
179 p = p->sub[v_key < p_key];
182 return 0;
185 /* Alloc SIZE bytes of GC'able memory. If ZERO, clear the memory. */
187 void *
188 ggc_alloc_obj (size, zero)
189 size_t size;
190 int zero;
192 struct ggc_mem *x;
194 x = (struct ggc_mem *) xmalloc (offsetof (struct ggc_mem, u) + size);
195 x->sub[0] = NULL;
196 x->sub[1] = NULL;
197 x->mark = 0;
198 x->context = G.context;
199 x->size = size;
201 if (zero)
202 memset (&x->u, 0, size);
203 #ifdef GGC_POISON
204 else
205 memset (&x->u, 0xaf, size);
206 #endif
208 tree_insert (x);
209 G.allocated += size;
210 G.objects += 1;
212 return &x->u;
215 /* Mark a node. */
218 ggc_set_mark (p)
219 void *p;
221 struct ggc_mem *x;
223 x = (struct ggc_mem *) ((char *)p - offsetof (struct ggc_mem, u));
224 #ifdef GGC_ALWAYS_VERIFY
225 if (! tree_lookup (x))
226 abort ();
227 #endif
229 if (x->mark)
230 return 1;
232 x->mark = 1;
233 G.allocated += x->size;
234 G.objects += 1;
236 return 0;
239 /* Mark a node, but check first to see that it's really gc-able memory. */
241 void
242 ggc_mark_if_gcable (p)
243 void *p;
245 struct ggc_mem *x;
247 if (p == NULL)
248 return;
250 x = (struct ggc_mem *) ((char *)p - offsetof (struct ggc_mem, u));
251 if (! tree_lookup (x))
252 return;
254 if (x->mark)
255 return;
257 x->mark = 1;
258 G.allocated += x->size;
259 G.objects += 1;
262 /* Return the size of the gc-able object P. */
264 size_t
265 ggc_get_size (p)
266 void *p;
268 struct ggc_mem *x
269 = (struct ggc_mem *) ((char *)p - offsetof (struct ggc_mem, u));
270 return x->size;
273 /* Unmark all objects. */
275 static void
276 clear_marks (x)
277 struct ggc_mem *x;
279 x->mark = 0;
280 if (x->sub[0])
281 clear_marks (x->sub[0]);
282 if (x->sub[1])
283 clear_marks (x->sub[1]);
286 /* Free all objects in the current context that are not marked. */
288 static void
289 sweep_objs (root)
290 struct ggc_mem **root;
292 struct ggc_mem *x = *root;
293 if (!x)
294 return;
296 sweep_objs (&x->sub[0]);
297 sweep_objs (&x->sub[1]);
299 if (! x->mark && x->context >= G.context)
301 struct ggc_mem *l, *r;
303 l = x->sub[0];
304 r = x->sub[1];
305 if (!l)
306 *root = r;
307 else if (!r)
308 *root = l;
309 else if (!l->sub[1])
311 *root = l;
312 l->sub[1] = r;
314 else if (!r->sub[0])
316 *root = r;
317 r->sub[0] = l;
319 else
321 *root = l;
322 do {
323 root = &l->sub[1];
324 } while ((l = *root) != NULL);
325 *root = r;
328 #ifdef GGC_POISON
329 memset (&x->u, 0xA5, x->size);
330 #endif
332 free (x);
336 /* The top level mark-and-sweep routine. */
338 void
339 ggc_collect ()
341 int time;
343 #ifndef GGC_ALWAYS_COLLECT
344 if (G.allocated < GGC_MIN_EXPAND_FOR_GC * G.allocated_last_gc)
345 return;
346 #endif
348 #ifdef GGC_BALANCE
349 debug_ggc_balance ();
350 #endif
352 time = get_run_time ();
353 if (!quiet_flag)
354 fprintf (stderr, " {GC %luk -> ", (unsigned long)G.allocated / 1024);
356 G.allocated = 0;
357 G.objects = 0;
359 clear_marks (G.root);
360 ggc_mark_roots ();
361 sweep_objs (&G.root);
363 G.allocated_last_gc = G.allocated;
364 if (G.allocated_last_gc < GGC_MIN_LAST_ALLOCATED)
365 G.allocated_last_gc = GGC_MIN_LAST_ALLOCATED;
367 time = get_run_time () - time;
368 gc_time += time;
370 if (!quiet_flag)
372 fprintf (stderr, "%luk in %.3f}",
373 (unsigned long) G.allocated / 1024, time * 1e-6);
376 #ifdef GGC_BALANCE
377 debug_ggc_balance ();
378 #endif
381 /* Called once to initialize the garbage collector. */
383 void
384 init_ggc ()
386 G.allocated_last_gc = GGC_MIN_LAST_ALLOCATED;
388 empty_string = ggc_alloc_string ("", 0);
389 ggc_add_string_root (&empty_string, 1);
392 /* Start a new GGC context. Memory allocated in previous contexts
393 will not be collected while the new context is active. */
395 void
396 ggc_push_context ()
398 G.context++;
400 /* We only allocated 7 bits in the node for the context. This
401 should be more than enough. */
402 if (G.context >= 128)
403 abort ();
406 /* Finish a GC context. Any uncollected memory in the new context
407 will be merged with the old context. */
409 void
410 ggc_pop_context ()
412 G.context--;
413 if (G.root)
414 ggc_pop_context_1 (G.root, G.context);
417 static void
418 ggc_pop_context_1 (x, c)
419 struct ggc_mem *x;
420 int c;
422 if (x->context > c)
423 x->context = c;
424 if (x->sub[0])
425 ggc_pop_context_1 (x->sub[0], c);
426 if (x->sub[1])
427 ggc_pop_context_1 (x->sub[1], c);
430 /* Dump a tree. */
432 void
433 debug_ggc_tree (p, indent)
434 struct ggc_mem *p;
435 int indent;
437 int i;
439 if (!p)
441 fputs ("(nil)\n", stderr);
442 return;
445 if (p->sub[0])
446 debug_ggc_tree (p->sub[0], indent + 1);
448 for (i = 0; i < indent; ++i)
449 putc (' ', stderr);
450 fprintf (stderr, "%lx %p\n", PTR_KEY (p), p);
452 if (p->sub[1])
453 debug_ggc_tree (p->sub[1], indent + 1);
#ifdef GGC_BALANCE
/* Collect tree balance metrics.  */

#include <math.h>

/* Print leaf fraction, mean leaf depth, and the ideal depth lg(n),
   so degeneracy of the search tree can be spotted at a glance.  */

void
debug_ggc_balance ()
{
  size_t nleaf, sumdepth;

  nleaf = sumdepth = 0;
  tally_leaves (G.root, 0, &nleaf, &sumdepth);

  fprintf (stderr, " {B %.2f,%.1f,%.1f}",
	   /* In a balanced tree, leaf/node should approach 1/2.  */
	   (float)nleaf / (float)G.objects,
	   /* In a balanced tree, average leaf depth should approach lg(n).  */
	   (float)sumdepth / (float)nleaf,
	   log ((double) G.objects) / M_LN2);
}

/* Accumulate into *NLEAF the number of leaves below X and into
   *SUMDEPTH the total depth of those leaves; DEPTH is X's own depth.
   X must not be NULL.  */

static void
tally_leaves (x, depth, nleaf, sumdepth)
     struct ggc_mem *x;
     int depth;
     size_t *nleaf;
     size_t *sumdepth;
{
  if (! x->sub[0] && !x->sub[1])
    {
      *nleaf += 1;
      *sumdepth += depth;
    }
  else
    {
      if (x->sub[0])
	tally_leaves (x->sub[0], depth + 1, nleaf, sumdepth);
      if (x->sub[1])
	tally_leaves (x->sub[1], depth + 1, nleaf, sumdepth);
    }
}
#endif