1 /* Simple garbage collection for the GNU compiler.
2 Copyright (C) 1998, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
/* Fallback definition of offsetof for hosts whose headers do not
   provide it.  Guarded so it does not clash with the standard
   version from <stddef.h>; the null-pointer-cast trick is the
   traditional pre-ANSI implementation.  */
#ifndef offsetof
#define offsetof(TYPE, MEMBER) ((size_t) &((TYPE *)0)->MEMBER)
#endif
/* NOTE(review): extraction-damaged fragment.  The embedded original
   line numbers (35, 37, 40, ...) jump, showing lines were lost; in
   particular the #endif matching the #ifdef ENABLE_CHECKING below,
   and the #endif matching #ifndef HOST_BITS_PER_PTR, are missing.
   Restore from the original source before compiling.  */
35 /* Debugging flags. */
37 /* Zap memory before freeing to catch dangling pointers. */
40 /* Collect statistics on how bushy the search tree is. */
43 /* Perform collection every time ggc_collect is invoked. Otherwise,
44 collection is performed only when a significant amount of memory
45 has been allocated since the last collection. */
46 #undef GGC_ALWAYS_COLLECT
48 /* Always verify that the to-be-marked memory is collectable. */
49 #undef GGC_ALWAYS_VERIFY
/* A checking-enabled build forces both debug behaviors on; these are
   tested below in ggc_collect and in the mark verification paths.  */
51 #ifdef ENABLE_CHECKING
53 #define GGC_ALWAYS_COLLECT
54 #define GGC_ALWAYS_VERIFY
57 /* Constants for general use. */
/* Fall back to "a pointer is as wide as a long" when the host
   configuration did not define HOST_BITS_PER_PTR.  */
62 #ifndef HOST_BITS_PER_PTR
63 #define HOST_BITS_PER_PTR HOST_BITS_PER_LONG
/* NOTE(review): extraction-damaged fragment.  The comment opened on
   the next line never closes in what survives (its terminating lines
   are missing), and the PTR_KEY macro below is cut off mid-definition
   (both surviving lines end in a backslash continuation; the final
   term of the expression is gone).  The intent visible here: derive a
   search-tree key from a pointer by rotating its low (most variable)
   bits into the most significant positions of the key, so the tree
   does not degenerate into a straight line.  */
66 /* We'd like a balanced tree, but we don't really want to pay for the
67 cost of keeping the tree balanced. We'll settle for the next best
68 thing -- nearly balanced.
70 In this context, the most natural key is the node pointer itself,
71 but due to the way memory managers work, we'd be virtually certain
72 to wind up with a completely degenerate straight line. What's needed
73 is to make something more variable, and yet predictable, be more
74 significant in the comparison.
76 The handiest source of variability is the low bits of the pointer
77 value itself. Any sort of bit/byte swap would do, but such machine
78 specific operations are not handy, and we don't want to put that much
81 #define PTR_KEY(p) ((size_t)p << (HOST_BITS_PER_PTR - 8) \
82 | ((size_t)p & 0xff00) << (HOST_BITS_PER_PTR - 24) \
/* NOTE(review): extraction-damaged fragment of the ggc_mem node
   declaration.  The opening "struct ggc_mem {" line, the trailing
   union holding the user data (referenced below as x->u), and the
   closing brace are among the missing lines.  */
85 /* GC'able memory; a node in a binary search tree. */
89 /* A combination of the standard left/right nodes, indexable by `<'. */
90 struct ggc_mem
*sub
[2];
/* Set during the mark phase, cleared by clear_marks, tested by
   sweep_objs to decide whether the object survives.  */
92 unsigned int mark
: 1;
/* Allocation context level; 7 bits, hence the >= 128 check in
   ggc_push_context below.  */
93 unsigned int context
: 7;
/* Object size in bytes; accumulated into G.allocated when marked.  */
94 unsigned int size
: 24;
96 /* Make sure the data is reasonably aligned. */
99 #ifdef HAVE_LONG_DOUBLE
/* NOTE(review): extraction-damaged fragment of the collector's global
   state.  The struct's opening brace, the `allocated', `objects' and
   `context' member declarations (their comments survive below), and
   the closing "} G;" are among the missing lines -- the identifier G
   used throughout the rest of the file is this object.  */
107 static struct globals
109 /* Root of the object tree. */
110 struct ggc_mem
*root
;
112 /* Data bytes currently allocated. */
115 /* Data objects currently allocated. */
118 /* Data bytes allocated at time of last GC. */
119 size_t allocated_last_gc
;
121 /* Current context level. */
125 /* Skip garbage collection if the current allocation is not at least
126 this factor times the allocation at the end of the last collection.
127 In other words, total allocation must expand by (this factor minus
128 one) before collection is performed. */
129 #define GGC_MIN_EXPAND_FOR_GC (1.3)
131 /* Bound `allocated_last_gc' to 4MB, to prevent the memory expansion
132 test from triggering too often when the heap is small. */
133 #define GGC_MIN_LAST_ALLOCATED (4 * 1024 * 1024)
/* Forward declarations for the file-local tree walkers.  PROTO is
   presumably the usual GCC compatibility macro that expands to an
   ANSI prototype or to () for pre-ANSI compilers -- its definition is
   not visible in this fragment; confirm against the project headers.  */
135 /* Local function prototypes. */
137 static void tree_insert
PROTO ((struct ggc_mem
*));
138 static int tree_lookup
PROTO ((struct ggc_mem
*));
139 static void clear_marks
PROTO ((struct ggc_mem
*));
140 static void sweep_objs
PROTO ((struct ggc_mem
**));
141 static void ggc_pop_context_1
PROTO ((struct ggc_mem
*, int));
/* debug_ggc_balance is extern so it can be called from a debugger.  */
144 extern void debug_ggc_balance
PROTO ((void));
145 static void tally_leaves
PROTO ((struct ggc_mem
*, int, size_t *, size_t *));
/* NOTE(review): extraction-damaged fragment of tree_insert (static
   void, per the prototype above).  The K&R signature line, the
   function braces, and the final store of V into *pp are among the
   missing lines.  What survives: descend from G.root, choosing the
   left/right child by comparing PTR_KEY values, until an empty slot
   (*pp == NULL) is found.  */
148 /* Insert V into the search tree. */
154 size_t v_key
= PTR_KEY (v
);
155 struct ggc_mem
*p
, **pp
;
/* Walk down keeping pp pointed at the link we followed, so the new
   node can be attached there when p becomes NULL.  */
157 for (pp
= &G
.root
, p
= *pp
; p
; p
= *pp
)
159 size_t p_key
= PTR_KEY (p
);
160 pp
= &p
->sub
[v_key
< p_key
];
/* NOTE(review): extraction-damaged fragment of tree_lookup (static
   int, per the prototype above).  The signature, the loop header, the
   "found" early return, and the final return are among the missing
   lines.  What survives: the same PTR_KEY-ordered descent used by
   tree_insert, here searching for an exact node match.  */
165 /* Return true if V is in the tree. */
171 size_t v_key
= PTR_KEY (v
);
172 struct ggc_mem
*p
= G
.root
;
176 size_t p_key
= PTR_KEY (p
);
179 p
= p
->sub
[v_key
< p_key
];
/* NOTE(review): extraction-damaged fragment of ggc_alloc_obj.  The
   return type, K&R parameter declarations, the mark/size/sub field
   initialization, the tree_insert call, the accounting updates, and
   the return of &x->u are among the missing lines.  What survives:
   allocate a ggc_mem header plus SIZE data bytes with xmalloc, stamp
   the current context level, and (if ZERO) clear the data area.  */
185 /* Alloc SIZE bytes of GC'able memory. If ZERO, clear the memory. */
188 ggc_alloc_obj (size
, zero
)
/* Header + data in one xmalloc; x->u is the user-visible payload.  */
194 x
= (struct ggc_mem
*) xmalloc (offsetof (struct ggc_mem
, u
) + size
);
198 x
->context
= G
.context
;
202 memset (&x
->u
, 0, size
);
/* NOTE(review): extraction-damaged fragment; the line carrying the
   function name is missing.  From the visible body -- recover the
   ggc_mem header from a user pointer P, optionally verify it is a
   known object, and add its size to the live-byte count -- this is
   presumably the mark-setting entry point (ggc_set_mark); confirm
   against the original source.  */
/* Map the user data pointer back to its enclosing node header.  */
219 x
= (struct ggc_mem
*) ((char *)p
- offsetof (struct ggc_mem
, u
));
/* Under GGC_ALWAYS_VERIFY, refuse pointers we never allocated; the
   abort on lookup failure is among the missing lines.  */
220 #ifdef GGC_ALWAYS_VERIFY
221 if (! tree_lookup (x
))
229 G
.allocated
+= x
->size
;
/* NOTE(review): extraction-damaged fragment of ggc_mark_if_gcable.
   The return type, parameter declaration, braces, the early-return
   paths, and the setting of x->mark are among the missing lines.
   What survives: recover the node header from P, and -- unlike the
   fragment above -- treat lookup failure as "not our object" (the
   tree_lookup test is unconditional here, not under ALWAYS_VERIFY)
   before counting the object's bytes as live.  */
236 ggc_mark_if_gcable (p
)
244 x
= (struct ggc_mem
*) ((char *)p
- offsetof (struct ggc_mem
, u
));
245 if (! tree_lookup (x
))
252 G
.allocated
+= x
->size
;
/* NOTE(review): extraction-damaged fragment of clear_marks (static
   void, per the prototype above).  The signature, the NULL checks on
   the children, and the clearing of x->mark itself are among the
   missing lines.  What survives: the recursive pre-sweep walk over
   both subtrees.  */
262 clear_marks (x
->sub
[0]);
264 clear_marks (x
->sub
[1]);
/* NOTE(review): extraction-damaged fragment of sweep_objs (static
   void, taking struct ggc_mem ** per the prototype above).  The
   function header, the NULL check on *root, and -- critically -- the
   tree-unlink logic between the l/r declaration and the do/while
   terminator are among the missing lines.  What survives: post-order
   recursion into both children, then reclamation of any node that is
   unmarked AND belongs to the current or a deeper context
   (x->context >= G.context); older contexts are left alone.  */
269 struct ggc_mem
**root
;
271 struct ggc_mem
*x
= *root
;
275 sweep_objs (&x
->sub
[0]);
276 sweep_objs (&x
->sub
[1]);
278 if (! x
->mark
&& x
->context
>= G
.context
)
280 struct ggc_mem
*l
, *r
;
303 } while ((l
= *root
) != NULL
);
/* Debug poison: fill the freed object's data with 0xA5 so dangling
   pointers into it are caught quickly (the GGC_POISON zap mentioned
   in the flag comments near the top of the file).  */
308 memset (&x
->u
, 0xA5, x
->size
);
/* NOTE(review): extraction-damaged fragment of ggc_collect.  The
   function header, the early return when the expansion threshold is
   not met, the resetting of G.allocated before marking, the root
   marking call, and the quiet/statistics conditionals around the
   fprintf calls are among the missing lines.  What survives is the
   overall shape: threshold test, timing, clear marks, sweep, then
   clamp allocated_last_gc to the 4MB floor and report.  */
315 /* The top level mark-and-sweep routine. */
/* Skip the collection entirely unless allocation has grown by the
   configured factor since the last GC (disabled when
   GGC_ALWAYS_COLLECT is defined, e.g. under ENABLE_CHECKING).  */
322 #ifndef GGC_ALWAYS_COLLECT
323 if (G
.allocated
< GGC_MIN_EXPAND_FOR_GC
* G
.allocated_last_gc
)
328 debug_ggc_balance ();
/* get_run_time presumably reports microseconds -- the 1e-6 scaling
   below assumes so; confirm against the library definition.  */
331 time
= get_run_time ();
333 fprintf (stderr
, " {GC %luk -> ", (unsigned long)G
.allocated
/ 1024);
338 clear_marks (G
.root
);
340 sweep_objs (&G
.root
);
342 G
.allocated_last_gc
= G
.allocated
;
343 if (G
.allocated_last_gc
< GGC_MIN_LAST_ALLOCATED
)
344 G
.allocated_last_gc
= GGC_MIN_LAST_ALLOCATED
;
346 time
= get_run_time () - time
;
351 fprintf (stderr
, "%luk in %.3f}",
352 (unsigned long) G
.allocated
/ 1024, time
* 1e-6);
356 debug_ggc_balance ();
/* NOTE(review): extraction-damaged fragment of the collector's
   one-time initialization.  The function header and braces are among
   the missing lines.  What survives: seed allocated_last_gc with the
   4MB floor so the expansion test does not fire on a tiny heap, then
   allocate the shared empty string and register it as a GC root
   (ggc_alloc_string / ggc_add_string_root are external GC API --
   declarations not visible in this fragment).  */
360 /* Called once to initialize the garbage collector. */
365 G
.allocated_last_gc
= GGC_MIN_LAST_ALLOCATED
;
367 empty_string
= ggc_alloc_string ("", 0);
368 ggc_add_string_root (&empty_string
, 1);
/* NOTE(review): extraction-damaged fragment of ggc_push_context.  The
   function header, the increment of G.context, and the abort taken
   when the limit is exceeded are among the missing lines.  The 128
   bound matches the 7-bit `context' field in struct ggc_mem.  */
371 /* Start a new GGC context. Memory allocated in previous contexts
372 will not be collected while the new context is active. */
379 /* We only allocated 7 bits in the node for the context. This
380 should be more than enough. */
381 if (G
.context
>= 128)
/* NOTE(review): extraction-damaged fragment of ggc_pop_context.  The
   function header and the decrement of G.context are among the
   missing lines.  What survives: delegate the per-node relabeling of
   the whole tree to the recursive helper below, passing the (new,
   lower) current context level.  */
385 /* Finish a GC context. Any uncollected memory in the new context
386 will be merged with the old context. */
393 ggc_pop_context_1 (G
.root
, G
.context
);
/* NOTE(review): extraction-damaged fragment of ggc_pop_context_1
   (static void, taking a node and an int context per the prototype
   above).  The return type, K&R parameter declarations, the NULL
   check, and the clamping of x->context down to C are among the
   missing lines.  What survives: the recursive walk over both
   subtrees.  */
397 ggc_pop_context_1 (x
, c
)
404 ggc_pop_context_1 (x
->sub
[0], c
);
406 ggc_pop_context_1 (x
->sub
[1], c
);
/* NOTE(review): extraction-damaged fragment of debug_ggc_tree, an
   in-order dump of the search tree for debugger use.  The return
   type, parameter declarations, the NULL test guarding the "(nil)"
   case, and the per-level indentation output inside the for loop are
   among the missing lines.  What survives: left subtree, then this
   node's key and address, then right subtree, each child one indent
   level deeper.  */
412 debug_ggc_tree (p
, indent
)
420 fputs ("(nil)\n", stderr
);
425 debug_ggc_tree (p
->sub
[0], indent
+ 1);
427 for (i
= 0; i
< indent
; ++i
)
429 fprintf (stderr
, "%lx %p\n", PTR_KEY (p
), p
);
432 debug_ggc_tree (p
->sub
[1], indent
+ 1);
/* NOTE(review): extraction-damaged fragment of debug_ggc_balance.
   The function header and braces are among the missing lines.  What
   survives: count leaves and total leaf depth via tally_leaves, then
   print three balance metrics -- leaf/node ratio, mean leaf depth,
   and the ideal depth log2(objects) for comparison.  Requires
   <math.h> (log, M_LN2); note M_LN2 is POSIX, not ISO C.  */
436 /* Collect tree balance metrics */
443 size_t nleaf
, sumdepth
;
445 nleaf
= sumdepth
= 0;
446 tally_leaves (G
.root
, 0, &nleaf
, &sumdepth
);
448 fprintf (stderr
, " {B %.2f,%.1f,%.1f}",
449 /* In a balanced tree, leaf/node should approach 1/2. */
450 (float)nleaf
/ (float)G
.objects
,
451 /* In a balanced tree, average leaf depth should approach lg(n). */
452 (float)sumdepth
/ (float)nleaf
,
453 log ((double) G
.objects
) / M_LN2
);
/* NOTE(review): extraction-damaged fragment of tally_leaves (static
   void, per the prototype above).  The return type, K&R parameter
   declarations, and the leaf-case body (incrementing *nleaf and
   adding depth to *sumdepth) are among the missing lines.  What
   survives: the leaf test (no children on either side) and the
   recursion into both subtrees with depth + 1.  */
457 tally_leaves (x
, depth
, nleaf
, sumdepth
)
463 if (! x
->sub
[0] && !x
->sub
[1])
471 tally_leaves (x
->sub
[0], depth
+ 1, nleaf
, sumdepth
);
473 tally_leaves (x
->sub
[1], depth
+ 1, nleaf
, sumdepth
);