/* Simple garbage collection for the GNU compiler.
   Copyright (C) 1998, 1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "flags.h"
#include "ggc.h"
#include "timevar.h"
#include "params.h"
/* Debugging flags.  */

/* Zap memory before freeing to catch dangling pointers.  */
#undef GGC_POISON

/* Collect statistics on how bushy the search tree is.  */
#undef GGC_BALANCE

/* Always verify that the to-be-marked memory is collectable.  */
#undef GGC_ALWAYS_VERIFY

#ifdef ENABLE_GC_CHECKING
#define GGC_POISON
#define GGC_ALWAYS_VERIFY
#endif

#ifndef HOST_BITS_PER_PTR
#define HOST_BITS_PER_PTR  HOST_BITS_PER_LONG
#endif
/* We'd like a balanced tree, but we don't really want to pay for the
   cost of keeping the tree balanced.  We'll settle for the next best
   thing -- nearly balanced.

   In this context, the most natural key is the node pointer itself,
   but due to the way memory managers work, we'd be virtually certain
   to wind up with a completely degenerate straight line.  What's needed
   is to make something more variable, and yet predictable, be more
   significant in the comparison.

   The handiest source of variability is the low bits of the pointer
   value itself.  Any sort of bit/byte swap would do, but such machine
   specific operations are not handy, and we don't want to put that much
   thought into it.  */

#define PTR_KEY(p)  ((size_t)p << (HOST_BITS_PER_PTR - 8)            \
                     | ((size_t)p & 0xff00) << (HOST_BITS_PER_PTR - 24) \
                     | (size_t)p >> 16)
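
/* For example, with 32-bit pointers a (hypothetical) node address
   0x08049abc yields the key 0xbc9a0804: the low-order bytes, which
   differ most between neighboring allocations, become the most
   significant bits of the comparison.  */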
/* GC'able memory; a node in a binary search tree.  */

struct ggc_mem
{
  /* A combination of the standard left/right nodes, indexable by `<'.  */
  struct ggc_mem *sub[2];

  unsigned int mark : 1;
  unsigned int context : 7;
  unsigned int size : 24;

  /* Make sure the data is reasonably aligned.  */
  union {
    HOST_WIDEST_INT i;
#ifdef HAVE_LONG_DOUBLE
    long double d;
#else
    double d;
#endif
  } u;
};
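
/* The object header is the offsetof (struct ggc_mem, u) bytes before the
   union; ggc_alloc returns &x->u and the mark routines recover the header
   by subtracting the same offset.  The 7-bit context field limits nesting
   to 128 context levels and the 24-bit size field caps a single object at
   just under 16MB.  */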
static struct globals
{
  /* Root of the object tree.  */
  struct ggc_mem *root;
  /* Data bytes currently allocated.  */
  size_t allocated;
  /* Data objects currently allocated.  */
  size_t objects;
  /* Data bytes allocated at time of last GC.  */
  size_t allocated_last_gc;
  /* Current context level.  */
  int context;
} G;
/* Local function prototypes.  */

static void tree_insert PARAMS ((struct ggc_mem *));
static int tree_lookup PARAMS ((struct ggc_mem *));
static void clear_marks PARAMS ((struct ggc_mem *));
static void sweep_objs PARAMS ((struct ggc_mem **));
static void ggc_pop_context_1 PARAMS ((struct ggc_mem *, int));

/* For use from debugger.  */
extern void debug_ggc_tree PARAMS ((struct ggc_mem *, int));

#ifdef GGC_BALANCE
extern void debug_ggc_balance PARAMS ((void));
#endif
static void tally_leaves PARAMS ((struct ggc_mem *, int, size_t *, size_t *));
/* Insert V into the search tree.  */

static inline void
tree_insert (v)
     struct ggc_mem *v;
{
  size_t v_key = PTR_KEY (v);
  struct ggc_mem *p, **pp;

  for (pp = &G.root, p = *pp; p ; p = *pp)
    {
      size_t p_key = PTR_KEY (p);
      pp = &p->sub[v_key < p_key];
    }
  *pp = v;
}
/* Return true if V is in the tree.  */

static inline int
tree_lookup (v)
     struct ggc_mem *v;
{
  size_t v_key = PTR_KEY (v);
  struct ggc_mem *p = G.root;

  while (p)
    {
      size_t p_key = PTR_KEY (p);
      if (p == v)
        return 1;
      p = p->sub[v_key < p_key];
    }

  return 0;
}
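
/* Both routines descend with sub[v_key < p_key]: index 1 when the key in
   hand sorts below the current node's key, index 0 otherwise.  tree_lookup
   is only used for the GGC_ALWAYS_VERIFY sanity checks in ggc_set_mark and
   ggc_marked_p.  */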
/* Alloc SIZE bytes of GC'able memory.  If ZERO, clear the memory.  */

void *
ggc_alloc (size)
     size_t size;
{
  struct ggc_mem *x;

  x = (struct ggc_mem *) xmalloc (offsetof (struct ggc_mem, u) + size);
  x->sub[0] = NULL;
  x->sub[1] = NULL;
  x->mark = 0;
  x->context = G.context;
  x->size = size;

#ifdef GGC_POISON
  memset (&x->u, 0xaf, size);
#endif

  tree_insert (x);
  G.allocated += size;
  G.objects += 1;

  return &x->u;
}
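
/* With GGC_POISON defined, freshly allocated objects are filled with 0xaf
   above and freed objects are filled with 0xa5 in sweep_objs, so stray
   reads of uninitialized or collected GC memory show up as recognizable
   garbage.  */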
/* Mark the object pointed to by P.  Return nonzero if it was already
   marked.  */

int
ggc_set_mark (p)
     const void *p;
{
  struct ggc_mem *x;

  x = (struct ggc_mem *) ((const char *)p - offsetof (struct ggc_mem, u));
#ifdef GGC_ALWAYS_VERIFY
  if (! tree_lookup (x))
    abort ();
#endif

  if (x->mark)
    return 1;

  x->mark = 1;
  G.allocated += x->size;
  G.objects += 1;

  return 0;
}
/* Return 1 if P has been marked, zero otherwise.  */

int
ggc_marked_p (p)
     const void *p;
{
  struct ggc_mem *x;

  x = (struct ggc_mem *) ((const char *)p - offsetof (struct ggc_mem, u));
#ifdef GGC_ALWAYS_VERIFY
  if (! tree_lookup (x))
    abort ();
#endif

  return x->mark;
}
/* Return the size of the gc-able object P.  */

size_t
ggc_get_size (p)
     const void *p;
{
  struct ggc_mem *x
    = (struct ggc_mem *) ((const char *)p - offsetof (struct ggc_mem, u));
  return x->size;
}
/* Unmark all objects.  */

static void
clear_marks (x)
     struct ggc_mem *x;
{
  x->mark = 0;
  if (x->sub[0])
    clear_marks (x->sub[0]);
  if (x->sub[1])
    clear_marks (x->sub[1]);
}
/* Free all objects in the current context that are not marked.  */

static void
sweep_objs (root)
     struct ggc_mem **root;
{
  struct ggc_mem *x = *root;
  if (!x)
    return;

  sweep_objs (&x->sub[0]);
  sweep_objs (&x->sub[1]);

  if (! x->mark && x->context >= G.context)
    {
      struct ggc_mem *l, *r;

      l = x->sub[0];
      r = x->sub[1];
      if (!l)
        *root = r;
      else if (!r)
        *root = l;
      else if (!l->sub[1])
        {
          *root = l;
          l->sub[1] = r;
        }
      else if (!r->sub[0])
        {
          *root = r;
          r->sub[0] = l;
        }
      else
        {
          *root = l;
          do {
            root = &l->sub[1];
          } while ((l = *root) != NULL);
          *root = r;
        }

#ifdef GGC_POISON
      memset (&x->u, 0xA5, x->size);
#endif

      free (x);
    }
}
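
/* Removing an unmarked node splices its children back into the tree: if
   either subtree is empty the other takes the node's place, and in the
   general case the right subtree is hung off the rightmost node of the
   left subtree (the do/while walk above).  No rebalancing is done; the
   key scrambling in PTR_KEY is relied on to keep the tree bushy.  */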
/* The top level mark-and-sweep routine.  */

void
ggc_collect ()
{
  /* Avoid frequent unnecessary work by skipping collection if the
     total allocations haven't expanded much since the last
     collection.  */
  size_t allocated_last_gc =
    MAX (G.allocated_last_gc, (size_t)PARAM_VALUE (GGC_MIN_HEAPSIZE) * 1024);

  size_t min_expand = allocated_last_gc * PARAM_VALUE (GGC_MIN_EXPAND) / 100;
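
  /* For example, with GGC_MIN_HEAPSIZE at 4096 (kilobytes) and
     GGC_MIN_EXPAND at 30 (percent), an empty heap is not collected until
     roughly 4MB * 1.3 = 5.2MB has been allocated; after that, each
     collection is deferred until allocation exceeds the data left live by
     the previous collection (or 4MB, whichever is larger) by 30%.
     (Illustrative parameter values only.)  */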
  if (G.allocated < allocated_last_gc + min_expand)
    return;

#ifdef GGC_BALANCE
  debug_ggc_balance ();
#endif

  timevar_push (TV_GC);
  if (!quiet_flag)
    fprintf (stderr, " {GC %luk -> ", (unsigned long)G.allocated / 1024);

  G.allocated = 0;
  G.objects = 0;

  clear_marks (G.root);
  ggc_mark_roots ();
  sweep_objs (&G.root);

  G.allocated_last_gc = G.allocated;

  timevar_pop (TV_GC);

  if (!quiet_flag)
    fprintf (stderr, "%luk}", (unsigned long) G.allocated / 1024);

#ifdef GGC_BALANCE
  debug_ggc_balance ();
#endif
}
/* Called once to initialize the garbage collector.  */

void
init_ggc ()
{
}

/* Start a new GGC context.  Memory allocated in previous contexts
   will not be collected while the new context is active.  */

void
ggc_push_context ()
{
  G.context++;

  /* We only allocated 7 bits in the node for the context.  This
     should be more than enough.  */
  if (G.context >= 128)
    abort ();
}

/* Finish a GC context.  Any uncollected memory in the new context
   will be merged with the old context.  */

void
ggc_pop_context ()
{
  G.context--;
  if (G.root)
    ggc_pop_context_1 (G.root, G.context);
}

static void
ggc_pop_context_1 (x, c)
     struct ggc_mem *x;
     int c;
{
  if (x->context > c)
    x->context = c;
  if (x->sub[0])
    ggc_pop_context_1 (x->sub[0], c);
  if (x->sub[1])
    ggc_pop_context_1 (x->sub[1], c);
}
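
/* Popping a context does not free anything by itself: ggc_pop_context_1
   merely clamps each node's context number down to the new level, so
   objects from the abandoned context are adopted by the enclosing one and
   become eligible for the x->context >= G.context test in sweep_objs at
   the next collection.  */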
/* Dump a tree.  */

void
debug_ggc_tree (p, indent)
     struct ggc_mem *p;
     int indent;
{
  int i;

  if (p == NULL)
    {
      fputs ("(nil)\n", stderr);
      return;
    }

  if (p->sub[0])
    debug_ggc_tree (p->sub[0], indent + 1);

  for (i = 0; i < indent; ++i)
    putc (' ', stderr);
  fprintf (stderr, "%lx %p\n", (unsigned long)PTR_KEY (p), p);

  if (p->sub[1])
    debug_ggc_tree (p->sub[1], indent + 1);
}
#ifdef GGC_BALANCE
/* Collect tree balance metrics.  */

#include <math.h>

void
debug_ggc_balance ()
{
  size_t nleaf, sumdepth;

  nleaf = sumdepth = 0;
  tally_leaves (G.root, 0, &nleaf, &sumdepth);

  fprintf (stderr, " {B %.2f,%.1f,%.1f}",
           /* In a balanced tree, leaf/node should approach 1/2.  */
           (float)nleaf / (float)G.objects,
           /* In a balanced tree, average leaf depth should approach lg(n).  */
           (float)sumdepth / (float)nleaf,
           log ((double) G.objects) / M_LN2);
}
#endif
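
/* A report like " {B 0.50,11.2,10.9}" (illustrative numbers) would mean
   half of the nodes are leaves, the average leaf sits 11.2 levels deep,
   and lg(objects) -- the depth a perfectly balanced tree would approach --
   is about 10.9.  */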
/* Used by debug_ggc_balance, and also by ggc_print_statistics.  */
static void
tally_leaves (x, depth, nleaf, sumdepth)
     struct ggc_mem *x;
     int depth;
     size_t *nleaf;
     size_t *sumdepth;
{
  if (! x->sub[0] && !x->sub[1])
    {
      *nleaf += 1;
      *sumdepth += depth;
    }
  else
    {
      if (x->sub[0])
        tally_leaves (x->sub[0], depth + 1, nleaf, sumdepth);
      if (x->sub[1])
        tally_leaves (x->sub[1], depth + 1, nleaf, sumdepth);
    }
}
#define SCALE(x) ((unsigned long) ((x) < 1024*10 \
                  ? (x) \
                  : ((x) < 1024*1024*10 \
                     ? (x) / 1024 \
                     : (x) / (1024*1024))))
#define LABEL(x) ((x) < 1024*10 ? ' ' : ((x) < 1024*1024*10 ? 'k' : 'M'))
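
/* E.g. SCALE/LABEL print 5000 bytes as "5000 ", 2000000 bytes as "1953k"
   and 50000000 bytes as "47M".  */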
/* Report on GC memory usage.  */
void
ggc_print_statistics ()
{
  struct ggc_statistics stats;
  size_t nleaf = 0, sumdepth = 0;

  /* Clear the statistics.  */
  memset (&stats, 0, sizeof (stats));

  /* Make sure collection will really occur.  */
  G.allocated_last_gc = 0;

  /* Collect and print the statistics common across collectors.  */
  ggc_print_common_statistics (stderr, &stats);

  /* Report on tree balancing.  */
  tally_leaves (G.root, 0, &nleaf, &sumdepth);

  fprintf (stderr, "\n\
Total internal data (bytes)\t%ld%c\n\
Number of leaves in tree\t%d\n\
Average leaf depth\t\t%.1f\n",
           SCALE(G.objects * offsetof (struct ggc_mem, u)),
           LABEL(G.objects * offsetof (struct ggc_mem, u)),
           nleaf, (double)sumdepth / (double)nleaf);

  /* Report overall memory usage.  */
  fprintf (stderr, "\n\
Total objects allocated\t\t%d\n\
Total memory in GC arena\t%ld%c\n",
           G.objects,
           SCALE(G.allocated), LABEL(G.allocated));
}