/* Simple garbage collection for the GNU compiler.
   Copyright (C) 1998, 1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
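
/* Assumed include set (the exact list in the GCC tree may differ):
   config.h and system.h for the host glue and xmalloc, flags.h for
   quiet_flag, and ggc.h for the public GC interface (ggc_mark_roots,
   ggc_alloc_string, ggc_add_string_root).  A full tree likely pulls in
   more, e.g. rtl.h, tree.h and varray.h.  */
#include "config.h"
#include "system.h"
#include "flags.h"
#include "ggc.h"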
#ifndef offsetof
#define offsetof(TYPE, MEMBER) ((size_t) &((TYPE *)0)->MEMBER)
#endif
/* Debugging flags.  */

/* Zap memory before freeing to catch dangling pointers.  */
#undef GGC_POISON

/* Collect statistics on how bushy the search tree is.  */
#undef GGC_BALANCE

/* Perform collection every time ggc_collect is invoked.  Otherwise,
   collection is performed only when a significant amount of memory
   has been allocated since the last collection.  */
#undef GGC_ALWAYS_COLLECT

/* Always verify that the to-be-marked memory is collectable.  */
#undef GGC_ALWAYS_VERIFY

#ifdef ENABLE_GC_CHECKING
#define GGC_POISON
#define GGC_ALWAYS_VERIFY
#endif

#ifdef ENABLE_GC_ALWAYS_COLLECT
#define GGC_ALWAYS_COLLECT
#endif
/* Constants for general use.  */

#ifndef HOST_BITS_PER_PTR
#define HOST_BITS_PER_PTR  HOST_BITS_PER_LONG
#endif
/* We'd like a balanced tree, but we don't really want to pay for the
   cost of keeping the tree balanced.  We'll settle for the next best
   thing -- nearly balanced.

   In this context, the most natural key is the node pointer itself,
   but due to the way memory managers work, we'd be virtually certain
   to wind up with a completely degenerate straight line.  What's needed
   is to make something more variable, and yet predictable, be more
   significant in the comparison.

   The handiest source of variability is the low bits of the pointer
   value itself.  Any sort of bit/byte swap would do, but such machine
   specific operations are not handy, and we don't want to put that much
   effort into it -- a rough byte rotation done with shifts and masks
   is good enough.  */

#define PTR_KEY(p) ((size_t)p << (HOST_BITS_PER_PTR - 8)               \
                    | ((size_t)p & 0xff00) << (HOST_BITS_PER_PTR - 24) \
                    | (size_t)p >> 16)
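
/* Worked example (added for exposition, assuming a 64-bit host so that
   HOST_BITS_PER_PTR is 64) for a typical heap pointer

     p                    = 0x00007f1234567890

   The three terms of PTR_KEY are

     p << 56              = 0x9000000000000000   (low byte on top)
     (p & 0xff00) << 40   = 0x0078000000000000   (next byte below it)
     p >> 16              = 0x000000007f123456   (stable high bits last)

   giving PTR_KEY (p) = 0x907800007f123456.  Consecutive allocations
   differ mostly in their low bytes, so their keys differ in the high
   bits; inserting them in address order therefore no longer builds a
   degenerate, list-like tree.  */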
/* GC'able memory; a node in a binary search tree.  */

struct ggc_mem
{
  /* A combination of the standard left/right nodes, indexable by `<'.  */
  struct ggc_mem *sub[2];

  unsigned int mark : 1;
  unsigned int context : 7;
  unsigned int size : 24;

  /* Make sure the data is reasonably aligned.  */
  union {
    HOST_WIDEST_INT i;
#ifdef HAVE_LONG_DOUBLE
    long double d;
#else
    double d;
#endif
  } u;
};
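
/* Layout sketch (added for exposition; not part of the original
   comments): every allocation is one xmalloc'd block consisting of this
   header followed immediately by the user's data, which starts at the
   `u' union:

     x                                        &x->u  -- what the
     |                                        |         allocator returns
     [ sub[0] sub[1] mark/context/size bits | SIZE bytes of user data ]

   Given a user pointer P, the header is recovered with
   (struct ggc_mem *) ((const char *) P - offsetof (struct ggc_mem, u)),
   which is exactly what the marking routines below do.  */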
static struct globals
{
  /* Root of the object tree.  */
  struct ggc_mem *root;

  /* Data bytes currently allocated.  */
  size_t allocated;

  /* Data objects currently allocated.  */
  size_t objects;

  /* Data bytes allocated at time of last GC.  */
  size_t allocated_last_gc;

  /* Current context level.  */
  int context;
} G;
/* Skip garbage collection if the current allocation is not at least
   this factor times the allocation at the end of the last collection.
   In other words, total allocation must expand by (this factor minus
   one) before collection is performed.  */
#define GGC_MIN_EXPAND_FOR_GC (1.3)

/* Bound `allocated_last_gc' to 4MB, to prevent the memory expansion
   test from triggering too often when the heap is small.  */
#define GGC_MIN_LAST_ALLOCATED (4 * 1024 * 1024)
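
/* Worked example (added for exposition): with the 4MB floor in place,
   a fresh compilation does not collect until roughly
   1.3 * 4MB = 5.2MB of GC'able data has been allocated.  If a
   collection then leaves 10MB of live data, the next one is deferred
   until allocation reaches 1.3 * 10MB = 13MB.  */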
/* Local function prototypes.  */

static void tree_insert PARAMS ((struct ggc_mem *));
static int tree_lookup PARAMS ((struct ggc_mem *));
static void clear_marks PARAMS ((struct ggc_mem *));
static void sweep_objs PARAMS ((struct ggc_mem **));
static void ggc_pop_context_1 PARAMS ((struct ggc_mem *, int));

#ifdef GGC_BALANCE
extern void debug_ggc_balance PARAMS ((void));
static void tally_leaves PARAMS ((struct ggc_mem *, int, size_t *, size_t *));
#endif
/* Insert V into the search tree.  */

static void
tree_insert (v)
     struct ggc_mem *v;
{
  size_t v_key = PTR_KEY (v);
  struct ggc_mem *p, **pp;

  for (pp = &G.root, p = *pp; p ; p = *pp)
    {
      size_t p_key = PTR_KEY (p);
      pp = &p->sub[v_key < p_key];
    }
  *pp = v;
}
/* Return true if V is in the tree.  */

static int
tree_lookup (v)
     struct ggc_mem *v;
{
  size_t v_key = PTR_KEY (v);
  struct ggc_mem *p = G.root;

  while (p)
    {
      size_t p_key = PTR_KEY (p);
      if (p == v)
        return 1;

      p = p->sub[v_key < p_key];
    }

  return 0;
}
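
/* Note on the descent idiom above (added for exposition): `sub' is
   "indexable by <", so `v_key < p_key' yields 1 (take sub[1]) when the
   probe key sorts below the current node and 0 (take sub[0]) otherwise.
   For example, descending with v_key == 5 through a node whose
   p_key == 9 follows sub[5 < 9] == sub[1]; through a node whose
   p_key == 3 it follows sub[5 < 3] == sub[0].  Both tree_insert and
   tree_lookup use the same convention, so the tree stays consistent.  */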
/* Alloc SIZE bytes of GC'able memory.  If ZERO, clear the memory.  */

void *
ggc_alloc_obj (size, zero)
     size_t size;
     int zero;
{
  struct ggc_mem *x;

  x = (struct ggc_mem *) xmalloc (offsetof (struct ggc_mem, u) + size);
  x->sub[0] = NULL;
  x->sub[1] = NULL;
  x->mark = 0;
  x->context = G.context;
  x->size = size;

  if (zero)
    memset (&x->u, 0, size);
#ifdef GGC_POISON
  else
    memset (&x->u, 0xaf, size);
#endif

  tree_insert (x);
  G.allocated += size;
  G.objects += 1;

  return &x->u;
}
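
/* Usage sketch (hypothetical caller, not part of this file): a client
   asks for cleared GC'able memory and keeps a registered root pointing
   at it so the mark phase can reach it.  `struct my_node',
   `my_node_root' and `mark_my_node' are made-up names; ggc_add_root is
   the real registration hook declared in ggc.h.

     static struct my_node *my_node_root;

     my_node_root
       = (struct my_node *) ggc_alloc_obj (sizeof (struct my_node), 1);
     ggc_add_root (&my_node_root, 1, sizeof (my_node_root), mark_my_node);

   The returned pointer is &x->u inside a fresh ggc_mem node, so any
   pointer later handed to the marking routines can be mapped back to
   its header with the offsetof arithmetic shown above.  */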
/* Mark a node.  Returns nonzero if the node was previously marked.  */

int
ggc_set_mark (p)
     const void *p;
{
  struct ggc_mem *x;

  x = (struct ggc_mem *) ((const char *)p - offsetof (struct ggc_mem, u));
#ifdef GGC_ALWAYS_VERIFY
  if (! tree_lookup (x))
    abort ();
#endif

  if (x->mark)
    return 1;

  x->mark = 1;
  G.allocated += x->size;

  return 0;
}
/* Mark a node, but check first to see that it's really gc-able memory.  */

void
ggc_mark_if_gcable (p)
     const void *p;
{
  struct ggc_mem *x;

  if (p == NULL)
    return;

  x = (struct ggc_mem *) ((const char *)p - offsetof (struct ggc_mem, u));
  if (! tree_lookup (x))
    return;

  if (x->mark)
    return;

  x->mark = 1;
  G.allocated += x->size;
}
/* Return the size of the gc-able object P.  */

size_t
ggc_get_size (p)
     const void *p;
{
  struct ggc_mem *x
    = (struct ggc_mem *) ((const char *)p - offsetof (struct ggc_mem, u));
  return x->size;
}
/* Unmark all objects.  */

static void
clear_marks (x)
     struct ggc_mem *x;
{
  x->mark = 0;
  if (x->sub[0])
    clear_marks (x->sub[0]);
  if (x->sub[1])
    clear_marks (x->sub[1]);
}
/* Free all objects in the current context that are not marked.  */

static void
sweep_objs (root)
     struct ggc_mem **root;
{
  struct ggc_mem *x = *root;
  if (!x)
    return;

  sweep_objs (&x->sub[0]);
  sweep_objs (&x->sub[1]);

  if (! x->mark && x->context >= G.context)
    {
      struct ggc_mem *l, *r;

      /* Unlink X: promote one child, then graft the other onto the
         end of the promoted subtree's `<' spine.  */
      l = x->sub[0];
      r = x->sub[1];
      if (!l)
        *root = r;
      else if (!r)
        *root = l;
      else
        {
          *root = l;
          do {
            root = &l->sub[1];
          } while ((l = *root) != NULL);
          *root = r;
        }

#ifdef GGC_POISON
      memset (&x->u, 0xA5, x->size);
#endif

      G.objects -= 1;
      free (x);
    }
}
/* The top level mark-and-sweep routine.  */

void
ggc_collect ()
{
  int time;

#ifndef GGC_ALWAYS_COLLECT
  if (G.allocated < GGC_MIN_EXPAND_FOR_GC * G.allocated_last_gc)
    return;
#endif

#ifdef GGC_BALANCE
  debug_ggc_balance ();
#endif

  time = get_run_time ();
  if (!quiet_flag)
    fprintf (stderr, " {GC %luk -> ", (unsigned long)G.allocated / 1024);

  /* Bytes are re-counted while marking; only live data survives.  */
  G.allocated = 0;

  clear_marks (G.root);
  ggc_mark_roots ();
  sweep_objs (&G.root);

  G.allocated_last_gc = G.allocated;
  if (G.allocated_last_gc < GGC_MIN_LAST_ALLOCATED)
    G.allocated_last_gc = GGC_MIN_LAST_ALLOCATED;

  time = get_run_time () - time;

  if (!quiet_flag)
    fprintf (stderr, "%luk in %.3f}",
             (unsigned long) G.allocated / 1024, time * 1e-6);

#ifdef GGC_BALANCE
  debug_ggc_balance ();
#endif
}
/* Called once to initialize the garbage collector.  */

void
init_ggc ()
{
  G.allocated_last_gc = GGC_MIN_LAST_ALLOCATED;

  empty_string = ggc_alloc_string ("", 0);
  ggc_add_string_root (&empty_string, 1);
}
/* Start a new GGC context.  Memory allocated in previous contexts
   will not be collected while the new context is active.  */

void
ggc_push_context ()
{
  ++G.context;

  /* We only allocated 7 bits in the node for the context.  This
     should be more than enough.  */
  if (G.context >= 128)
    abort ();
}
/* Finish a GC context.  Any uncollected memory in the new context
   will be merged with the old context.  */

void
ggc_pop_context ()
{
  --G.context;
  ggc_pop_context_1 (G.root, G.context);
}

static void
ggc_pop_context_1 (x, c)
     struct ggc_mem *x;
     int c;
{
  if (x->context > c)
    x->context = c;

  if (x->sub[0])
    ggc_pop_context_1 (x->sub[0], c);
  if (x->sub[1])
    ggc_pop_context_1 (x->sub[1], c);
}
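
/* Usage sketch (hypothetical, not from this file): contexts let a caller
   protect memory it has already allocated -- perhaps not yet wired into
   any registered root -- from collections triggered during nested work.
   After ggc_push_context, only objects allocated at the new, higher
   level are eligible for sweeping; ggc_pop_context relabels the
   survivors with the outer level.

     ggc_push_context ();
     nested_work ();           -- may allocate and call ggc_collect
     ggc_pop_context ();       -- survivors merge into the outer context

   `nested_work' is a made-up placeholder.  */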
/* Dump the object tree to stderr, indenting each node by its depth.  */

void
debug_ggc_tree (p, indent)
     struct ggc_mem *p;
     int indent;
{
  int i;

  if (p == NULL)
    {
      fputs ("(nil)\n", stderr);
      return;
    }

  if (p->sub[0])
    debug_ggc_tree (p->sub[0], indent + 1);

  for (i = 0; i < indent; ++i)
    putc (' ', stderr);
  fprintf (stderr, "%lx %p\n", PTR_KEY (p), p);

  if (p->sub[1])
    debug_ggc_tree (p->sub[1], indent + 1);
}
#ifdef GGC_BALANCE
/* Collect tree balance metrics.  */

#include <math.h>

void
debug_ggc_balance ()
{
  size_t nleaf, sumdepth;

  nleaf = sumdepth = 0;
  tally_leaves (G.root, 0, &nleaf, &sumdepth);

  fprintf (stderr, " {B %.2f,%.1f,%.1f}",
           /* In a balanced tree, leaf/node should approach 1/2.  */
           (float)nleaf / (float)G.objects,
           /* In a balanced tree, average leaf depth should approach lg(n).  */
           (float)sumdepth / (float)nleaf,
           log ((double) G.objects) / M_LN2);
}

static void
tally_leaves (x, depth, nleaf, sumdepth)
     struct ggc_mem *x;
     int depth;
     size_t *nleaf;
     size_t *sumdepth;
{
  if (! x->sub[0] && ! x->sub[1])
    {
      *nleaf += 1;
      *sumdepth += depth;
    }
  else
    {
      if (x->sub[0])
        tally_leaves (x->sub[0], depth + 1, nleaf, sumdepth);
      if (x->sub[1])
        tally_leaves (x->sub[1], depth + 1, nleaf, sumdepth);
    }
}
#endif /* GGC_BALANCE */
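
/* Worked example (added for exposition): for a perfectly balanced tree
   of 1023 nodes there are 512 leaves, all at depth 9, so the three
   figures printed above would be

     leaf/node ratio      512 / 1023           ~= 0.50
     average leaf depth   (512 * 9) / 512       = 9.0
     lg(n)                log (1023) / M_LN2   ~= 10.0

   A degenerate, list-like tree instead has a single leaf at depth
   n - 1, so the ratio collapses toward zero and the average leaf depth
   grows linearly with n; that is exactly the shape these statistics are
   meant to expose.  */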