/* Simple garbage collection for the GNU compiler.
   Copyright (C) 1998, 1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "ggc.h"
#include "timevar.h"

/* Debugging flags.  */

/* Zap memory before freeing to catch dangling pointers.  */
#undef GGC_POISON

/* Collect statistics on how bushy the search tree is.  */
#undef GGC_BALANCE

/* Perform collection every time ggc_collect is invoked.  Otherwise,
   collection is performed only when a significant amount of memory
   has been allocated since the last collection.  */
#undef GGC_ALWAYS_COLLECT

/* Always verify that the to-be-marked memory is collectable.  */
#undef GGC_ALWAYS_VERIFY

#ifdef ENABLE_GC_CHECKING
#define GGC_POISON
#define GGC_ALWAYS_VERIFY
#endif
#ifdef ENABLE_GC_ALWAYS_COLLECT
#define GGC_ALWAYS_COLLECT
#endif

#ifndef HOST_BITS_PER_PTR
#define HOST_BITS_PER_PTR  HOST_BITS_PER_LONG
#endif
/* We'd like a balanced tree, but we don't really want to pay for the
   cost of keeping the tree balanced.  We'll settle for the next best
   thing -- nearly balanced.

   In this context, the most natural key is the node pointer itself,
   but due to the way memory managers work, we'd be virtually certain
   to wind up with a completely degenerate straight line.  What's needed
   is to make something more variable, and yet predictable, be more
   significant in the comparison.

   The handiest source of variability is the low bits of the pointer
   value itself.  Any sort of bit/byte swap would do, but such machine
   specific operations are not handy, and we don't want to put that much
   effort into it.  */

#define PTR_KEY(p)  ((size_t)p << (HOST_BITS_PER_PTR - 8)               \
                     | ((size_t)p & 0xff00) << (HOST_BITS_PER_PTR - 24) \
                     | (size_t)p >> 16)
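
/* Illustration only -- a sketch, not used by the collector: how PTR_KEY
   rearranges a pointer so that its most variable low bits dominate the
   comparison.  The sample address is hypothetical and the quoted output
   assumes HOST_BITS_PER_PTR == 32.  */
#if 0
static void
example_show_ptr_key ()
{
  void *p = (void *) 0x0804a3f0;   /* hypothetical allocation address */

  /* Prints "804a3f0 -> f0a30804" on a 32-bit host: the low byte 0xf0 now
     leads the key, so consecutive allocations no longer form a sorted
     chain in the search tree.  */
  fprintf (stderr, "%lx -> %lx\n",
           (unsigned long) (size_t) p, (unsigned long) PTR_KEY (p));
}
#endif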
/* GC'able memory; a node in a binary search tree.  */

struct ggc_mem
{
  /* A combination of the standard left/right nodes, indexable by `<'.  */
  struct ggc_mem *sub[2];

  unsigned int mark : 1;
  unsigned int context : 7;
  unsigned int size : 24;

  /* Make sure the data is reasonably aligned.  */
  union {
    HOST_WIDEST_INT i;
#ifdef HAVE_LONG_DOUBLE
    long double d;
#else
    double d;
#endif
  } u;
};
static struct globals
{
  /* Root of the object tree.  */
  struct ggc_mem *root;

  /* Data bytes currently allocated.  */
  size_t allocated;

  /* Data objects currently allocated.  */
  size_t objects;

  /* Data bytes allocated at time of last GC.  */
  size_t allocated_last_gc;

  /* Current context level.  */
  int context;
} G;
/* Skip garbage collection if the current allocation is not at least
   this factor times the allocation at the end of the last collection.
   In other words, total allocation must expand by (this factor minus
   one) before collection is performed.  */
#define GGC_MIN_EXPAND_FOR_GC (1.3)

/* Bound `allocated_last_gc' to 4MB, to prevent the memory expansion
   test from triggering too often when the heap is small.  */
#define GGC_MIN_LAST_ALLOCATED (4 * 1024 * 1024)
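
/* Illustration only -- a sketch, not part of the collector: the combined
   effect of the two thresholds above.  Right after startup,
   allocated_last_gc is clamped to 4MB, so no collection happens until at
   least about 5.2MB (1.3 * 4MB) of GC'able data is live.  */
#if 0
static int
example_would_collect (allocated, allocated_last_gc)
     size_t allocated, allocated_last_gc;
{
  /* Mirrors the test at the top of ggc_collect.  */
  return ! (allocated < GGC_MIN_EXPAND_FOR_GC * allocated_last_gc);
}
#endif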
/* Local function prototypes.  */

static void tree_insert PARAMS ((struct ggc_mem *));
static int tree_lookup PARAMS ((struct ggc_mem *));
static void clear_marks PARAMS ((struct ggc_mem *));
static void sweep_objs PARAMS ((struct ggc_mem **));
static void ggc_pop_context_1 PARAMS ((struct ggc_mem *, int));

/* For use from debugger.  */
extern void debug_ggc_tree PARAMS ((struct ggc_mem *, int));

#ifdef GGC_BALANCE
extern void debug_ggc_balance PARAMS ((void));
#endif
static void tally_leaves PARAMS ((struct ggc_mem *, int, size_t *, size_t *));
/* Insert V into the search tree.  */

static void
tree_insert (v)
     struct ggc_mem *v;
{
  size_t v_key = PTR_KEY (v);
  struct ggc_mem *p, **pp;

  /* Walk down the tree, comparing scrambled keys, until we find the
     empty link where V belongs.  */
  for (pp = &G.root, p = *pp; p ; p = *pp)
    {
      size_t p_key = PTR_KEY (p);
      pp = &p->sub[v_key < p_key];
    }
  *pp = v;
}
/* Return true if V is in the tree.  */

static int
tree_lookup (v)
     struct ggc_mem *v;
{
  size_t v_key = PTR_KEY (v);
  struct ggc_mem *p = G.root;

  while (p)
    {
      size_t p_key = PTR_KEY (p);
      if (p == v)
        return 1;
      p = p->sub[v_key < p_key];
    }

  return 0;
}
/* Alloc SIZE bytes of GC'able memory.  If ZERO, clear the memory.  */

void *
ggc_alloc_obj (size, zero)
     size_t size;
     int zero;
{
  struct ggc_mem *x;

  x = (struct ggc_mem *) xmalloc (offsetof (struct ggc_mem, u) + size);
  x->sub[0] = NULL;
  x->sub[1] = NULL;
  x->mark = 0;
  x->context = G.context;
  x->size = size;

  if (zero)
    memset (&x->u, 0, size);
#ifdef GGC_POISON
  else
    memset (&x->u, 0xaf, size);
#endif

  tree_insert (x);
  G.allocated += size;
  G.objects += 1;

  return &x->u;
}
/* Mark a node.  Return nonzero if it was already marked.  */

int
ggc_set_mark (p)
     const void *p;
{
  struct ggc_mem *x;

  /* Step back from the data pointer to the enclosing tree node.  */
  x = (struct ggc_mem *) ((const char *)p - offsetof (struct ggc_mem, u));
#ifdef GGC_ALWAYS_VERIFY
  if (! tree_lookup (x))
    abort ();
#endif

  if (x->mark)
    return 1;

  x->mark = 1;
  G.allocated += x->size;

  return 0;
}
/* Mark a node, but check first to see that it's really gc-able memory.  */

void
ggc_mark_if_gcable (p)
     const void *p;
{
  struct ggc_mem *x;

  if (p == NULL)
    return;

  x = (struct ggc_mem *) ((const char *)p - offsetof (struct ggc_mem, u));
  if (! tree_lookup (x))
    return;

  if (x->mark)
    return;

  x->mark = 1;
  G.allocated += x->size;
}
/* Return the size of the gc-able object P.  */

size_t
ggc_get_size (p)
     const void *p;
{
  struct ggc_mem *x
    = (struct ggc_mem *) ((const char *)p - offsetof (struct ggc_mem, u));
  return x->size;
}
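
/* Illustration only -- a sketch, not part of the collector: allocation hands
   out &x->u, so the routines above recover the enclosing tree node by
   stepping back over the header with offsetof (struct ggc_mem, u).  */
#if 0
static struct ggc_mem *
example_node_from_data (p)
     const void *p;
{
  /* Inverse of returning &x->u from the allocator.  */
  return (struct ggc_mem *) ((const char *) p - offsetof (struct ggc_mem, u));
}
#endif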
/* Unmark all objects.  */

static void
clear_marks (x)
     struct ggc_mem *x;
{
  x->mark = 0;
  if (x->sub[0])
    clear_marks (x->sub[0]);
  if (x->sub[1])
    clear_marks (x->sub[1]);
}
/* Free all objects in the current context that are not marked.  */

static void
sweep_objs (root)
     struct ggc_mem **root;
{
  struct ggc_mem *x = *root;
  if (!x)
    return;

  sweep_objs (&x->sub[0]);
  sweep_objs (&x->sub[1]);

  if (! x->mark && x->context >= G.context)
    {
      struct ggc_mem *l, *r;

      /* Unlink X from the tree, splicing its two subtrees together.
         Remember that sub[0] holds the larger keys and sub[1] the
         smaller ones.  */
      l = x->sub[0];
      r = x->sub[1];
      if (!l)
        *root = r;
      else if (!r)
        *root = l;
      else
        {
          /* Hang R off the smallest-keyed node in L's subtree.  */
          *root = l;
          do {
            root = &l->sub[1];
          } while ((l = *root) != NULL);
          *root = r;
        }

#ifdef GGC_POISON
      memset (&x->u, 0xA5, x->size);
#endif

      free (x);
      G.objects -= 1;
    }
}
/* The top level mark-and-sweep routine.  */

void
ggc_collect ()
{
#ifndef GGC_ALWAYS_COLLECT
  if (G.allocated < GGC_MIN_EXPAND_FOR_GC * G.allocated_last_gc)
    return;
#endif

#ifdef GGC_BALANCE
  debug_ggc_balance ();
#endif

  timevar_push (TV_GC);
  fprintf (stderr, " {GC %luk -> ", (unsigned long) G.allocated / 1024);

  /* The marking phase recomputes G.allocated as the total size of the
     surviving objects.  */
  G.allocated = 0;
  clear_marks (G.root);
  ggc_mark_roots ();
  sweep_objs (&G.root);

  G.allocated_last_gc = G.allocated;
  if (G.allocated_last_gc < GGC_MIN_LAST_ALLOCATED)
    G.allocated_last_gc = GGC_MIN_LAST_ALLOCATED;

  timevar_pop (TV_GC);
  fprintf (stderr, "%luk}", (unsigned long) G.allocated / 1024);

#ifdef GGC_BALANCE
  debug_ggc_balance ();
#endif
}
/* Called once to initialize the garbage collector.  */

void
init_ggc ()
{
  G.allocated_last_gc = GGC_MIN_LAST_ALLOCATED;
}
/* Start a new GGC context.  Memory allocated in previous contexts
   will not be collected while the new context is active.  */

void
ggc_push_context ()
{
  ++G.context;

  /* We only allocated 7 bits in the node for the context.  This
     should be more than enough.  */
  if (G.context >= 128)
    abort ();
}
/* Finish a GC context.  Any uncollected memory in the new context
   will be merged with the old context.  */

void
ggc_pop_context ()
{
  G.context--;
  if (G.root)
    ggc_pop_context_1 (G.root, G.context);
}

/* Lower the context number of X and its subtrees to C, so that
   survivors are owned by the enclosing context.  */

static void
ggc_pop_context_1 (x, c)
     struct ggc_mem *x;
     int c;
{
  if (x->context > c)
    x->context = c;
  if (x->sub[0])
    ggc_pop_context_1 (x->sub[0], c);
  if (x->sub[1])
    ggc_pop_context_1 (x->sub[1], c);
}
/* Dump the search tree to stderr, one node per line, indented by depth.
   For use from the debugger.  */

void
debug_ggc_tree (p, indent)
     struct ggc_mem *p;
     int indent;
{
  int i;

  if (p == NULL)
    {
      fputs ("(nil)\n", stderr);
      return;
    }

  if (p->sub[0])
    debug_ggc_tree (p->sub[0], indent + 1);

  for (i = 0; i < indent; ++i)
    putc (' ', stderr);
  fprintf (stderr, "%lx %p\n", (unsigned long) PTR_KEY (p), p);

  if (p->sub[1])
    debug_ggc_tree (p->sub[1], indent + 1);
}
#ifdef GGC_BALANCE
/* Collect tree balance metrics.  */

#include <math.h>

void
debug_ggc_balance ()
{
  size_t nleaf, sumdepth;

  nleaf = sumdepth = 0;
  tally_leaves (G.root, 0, &nleaf, &sumdepth);

  fprintf (stderr, " {B %.2f,%.1f,%.1f}",
           /* In a balanced tree, leaf/node should approach 1/2.  */
           (float) nleaf / (float) G.objects,
           /* In a balanced tree, average leaf depth should approach lg(n).  */
           (float) sumdepth / (float) nleaf,
           log ((double) G.objects) / M_LN2);
}
#endif
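
/* Illustration only: a numeric feel for the balance report.  With one
   million live objects, lg(n) is about 19.9, so a tree whose average leaf
   depth is near that value is behaving as intended.  */
#if 0
static void
example_expected_depth ()
{
  /* Prints roughly 19.9 for n == 1000000.  */
  fprintf (stderr, "%.1f\n", log (1000000.0) / M_LN2);
}
#endif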
/* Used by debug_ggc_balance, and also by ggc_print_statistics.  */

static void
tally_leaves (x, depth, nleaf, sumdepth)
     struct ggc_mem *x;
     int depth;
     size_t *nleaf;
     size_t *sumdepth;
{
  if (! x->sub[0] && ! x->sub[1])
    {
      *nleaf += 1;
      *sumdepth += depth;
    }
  else
    {
      if (x->sub[0])
        tally_leaves (x->sub[0], depth + 1, nleaf, sumdepth);
      if (x->sub[1])
        tally_leaves (x->sub[1], depth + 1, nleaf, sumdepth);
    }
}
/* Scale a byte count down to the unit chosen by LABEL below.  */
#define SCALE(x) ((unsigned long) ((x) < 1024*10      \
                  ? (x)                               \
                  : ((x) < 1024*1024*10               \
                     ? (x) / 1024                     \
                     : (x) / (1024*1024))))
#define LABEL(x) ((x) < 1024*10 ? ' ' : ((x) < 1024*1024*10 ? 'k' : 'M'))
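
/* Illustration only: how SCALE and LABEL pair up.  2048 bytes prints as
   "2048 ", 500000 bytes as "488k", and 50000000 bytes as "47M".  */
#if 0
static void
example_show_scaled (n)
     size_t n;
{
  fprintf (stderr, "%lu%c\n", SCALE (n), LABEL (n));
}
#endif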
/* Report on GC memory usage.  */

void
ggc_print_statistics ()
{
  struct ggc_statistics stats;
  size_t nleaf = 0, sumdepth = 0;

  /* Clear the statistics.  */
  memset (&stats, 0, sizeof (stats));

  /* Make sure collection will really occur.  */
  G.allocated_last_gc = 0;

  /* Collect and print the statistics common across collectors.  */
  ggc_print_common_statistics (stderr, &stats);

  /* Report on tree balancing.  */
  tally_leaves (G.root, 0, &nleaf, &sumdepth);

  fprintf (stderr, "\n\
Total internal data (bytes)\t%ld%c\n\
Number of leaves in tree\t%d\n\
Average leaf depth\t\t%.1f\n",
           SCALE (G.objects * offsetof (struct ggc_mem, u)),
           LABEL (G.objects * offsetof (struct ggc_mem, u)),
           nleaf, (double) sumdepth / (double) nleaf);

  /* Report overall memory usage.  */
  fprintf (stderr, "\n\
Total objects allocated\t\t%d\n\
Total memory in GC arena\t%ld%c\n",
           G.objects,
           SCALE (G.allocated), LABEL (G.allocated));
}