1 /* Simple garbage collection for the GNU compiler.
2 Copyright (C) 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4 This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
21 /* Generic garbage collection (GC) functions and data, not specific to
22 any particular GC implementation. */
#include "langhooks.h"

#ifdef HAVE_SYS_RESOURCE_H
# include <sys/resource.h>
#endif

#ifdef ENABLE_VALGRIND_CHECKING
# include <valgrind/memcheck.h>
#else
/* Avoid #ifdef:s when we can help it.  */
#define VALGRIND_DISCARD(x)
#endif
44 /* Statistics about the allocation. */
45 static ggc_statistics
*ggc_stats
;
47 static int ggc_htab_delete
PARAMS ((void **, void *));
48 static double ggc_rlimit_bound
PARAMS ((double));
50 /* Maintain global roots that are preserved during GC. */
52 /* Global roots that are preserved during calls to gc. */
56 struct ggc_root
*next
;
60 void (*cb
) PARAMS ((void *));
63 static struct ggc_root
*roots
;
65 /* Add BASE as a new garbage collection root. It is an array of
66 length NELT with each element SIZE bytes long. CB is a
67 function that will be called with a pointer to each element
68 of the array; it is the intention that CB call the appropriate
69 routine to mark gc-able memory for that element. */
72 ggc_add_root (base
, nelt
, size
, cb
)
75 void (*cb
) PARAMS ((void *));
77 struct ggc_root
*x
= (struct ggc_root
*) xmalloc (sizeof (*x
));
88 /* Process a slot of an htab by deleting it if it has not been marked. */
91 ggc_htab_delete (slot
, info
)
95 const struct ggc_cache_tab
*r
= (const struct ggc_cache_tab
*) info
;
97 if (! (*r
->marked_p
) (*slot
))
98 htab_clear_slot (*r
->base
, slot
);
105 /* Iterate through all registered roots and mark each element. */
111 const struct ggc_root_tab
*const *rt
;
112 const struct ggc_root_tab
*rti
;
113 const struct ggc_cache_tab
*const *ct
;
114 const struct ggc_cache_tab
*cti
;
117 for (rt
= gt_ggc_deletable_rtab
; *rt
; rt
++)
118 for (rti
= *rt
; rti
->base
!= NULL
; rti
++)
119 memset (rti
->base
, 0, rti
->stride
);
121 for (rt
= gt_ggc_rtab
; *rt
; rt
++)
122 for (rti
= *rt
; rti
->base
!= NULL
; rti
++)
123 for (i
= 0; i
< rti
->nelt
; i
++)
124 (*rti
->cb
)(*(void **)((char *)rti
->base
+ rti
->stride
* i
));
126 for (x
= roots
; x
!= NULL
; x
= x
->next
)
129 int s
= x
->size
, n
= x
->nelt
;
130 void (*cb
) PARAMS ((void *)) = x
->cb
;
133 for (i
= 0; i
< n
; ++i
, elt
+= s
)
137 /* Now scan all hash tables that have objects which are to be deleted if
138 they are not already marked. */
139 for (ct
= gt_ggc_cache_rtab
; *ct
; ct
++)
140 for (cti
= *ct
; cti
->base
!= NULL
; cti
++)
142 htab_traverse (*cti
->base
, ggc_htab_delete
, (PTR
) cti
);
/* Allocate a block of memory of SIZE bytes, then clear it.  */

void *
ggc_alloc_cleared (size)
     size_t size;
{
  void *buf = ggc_alloc (size);
  memset (buf, 0, size);
  return buf;
}
/* Resize a block of memory, possibly re-allocating it.  X may be NULL,
   in which case this behaves like ggc_alloc.  Returns the (possibly
   moved) block; the old block must not be used afterwards.  */

void *
ggc_realloc (x, size)
     void *x;
     size_t size;
{
  void *r;
  size_t old_size;

  if (x == NULL)
    return ggc_alloc (size);

  old_size = ggc_get_size (x);
  if (size <= old_size)
    {
      /* Mark the unwanted memory as unaccessible.  We also need to make
	 the "new" size accessible, since ggc_get_size returns the size of
	 the pool, not the size of the individually allocated object, the
	 size which was previously made accessible.  Unfortunately, we
	 don't know that previously allocated size.  Without that
	 knowledge we have to lose some initialization-tracking for the
	 old parts of the object.  An alternative is to mark the whole
	 old_size as reachable, but that would lose tracking of writes
	 after the end of the object (by small offsets).  Discard the
	 handle to avoid handle leak.  */
      VALGRIND_DISCARD (VALGRIND_MAKE_NOACCESS ((char *) x + size,
						old_size - size));
      VALGRIND_DISCARD (VALGRIND_MAKE_READABLE (x, size));
      return x;
    }

  r = ggc_alloc (size);

  /* Since ggc_get_size returns the size of the pool, not the size of the
     individually allocated object, we'd access parts of the old object
     that were marked invalid with the memcpy below.  We lose a bit of the
     initialization-tracking since some of it may be uninitialized.  */
  VALGRIND_DISCARD (VALGRIND_MAKE_READABLE (x, old_size));

  memcpy (r, x, old_size);

  /* The old object is not supposed to be used anymore.  */
  VALGRIND_DISCARD (VALGRIND_MAKE_NOACCESS (x, old_size));

  return r;
}
/* Like ggc_alloc_cleared, but performs a multiplication.  Allocates a
   cleared block big enough for S1 elements of S2 bytes each.  */

void *
ggc_calloc (s1, s2)
     size_t s1, s2;
{
  return ggc_alloc_cleared (s1 * s2);
}
/* Print statistics that are independent of the collector in use.  */

/* Scale a byte count down to a printable magnitude: bytes below 10K,
   kilobytes below 10M, megabytes above.  LABEL yields the matching
   unit suffix (' ', 'k' or 'M').  */
#define SCALE(x) ((unsigned long) ((x) < 1024*10 \
		  ? (x) \
		  : ((x) < 1024*1024*10 \
		     ? (x) / 1024 \
		     : (x) / (1024*1024))))
#define LABEL(x) ((x) < 1024*10 ? ' ' : ((x) < 1024*1024*10 ? 'k' : 'M'))
219 ggc_print_common_statistics (stream
, stats
)
221 ggc_statistics
*stats
;
225 /* Set the pointer so that during collection we will actually gather
229 /* Then do one collection to fill in the statistics. */
232 /* Total the statistics. */
233 for (code
= 0; code
< MAX_TREE_CODES
; ++code
)
235 stats
->total_num_trees
+= stats
->num_trees
[code
];
236 stats
->total_size_trees
+= stats
->size_trees
[code
];
238 for (code
= 0; code
< NUM_RTX_CODE
; ++code
)
240 stats
->total_num_rtxs
+= stats
->num_rtxs
[code
];
241 stats
->total_size_rtxs
+= stats
->size_rtxs
[code
];
244 /* Print the statistics for trees. */
245 fprintf (stream
, "\n%-17s%10s %16s %10s\n", "Tree",
246 "Number", "Bytes", "% Total");
247 for (code
= 0; code
< MAX_TREE_CODES
; ++code
)
248 if (ggc_stats
->num_trees
[code
])
250 fprintf (stream
, "%-17s%10u%16ld%c %10.3f\n",
251 tree_code_name
[code
],
252 ggc_stats
->num_trees
[code
],
253 SCALE (ggc_stats
->size_trees
[code
]),
254 LABEL (ggc_stats
->size_trees
[code
]),
255 (100 * ((double) ggc_stats
->size_trees
[code
])
256 / ggc_stats
->total_size_trees
));
259 "%-17s%10u%16ld%c\n", "Total",
260 ggc_stats
->total_num_trees
,
261 SCALE (ggc_stats
->total_size_trees
),
262 LABEL (ggc_stats
->total_size_trees
));
264 /* Print the statistics for RTL. */
265 fprintf (stream
, "\n%-17s%10s %16s %10s\n", "RTX",
266 "Number", "Bytes", "% Total");
267 for (code
= 0; code
< NUM_RTX_CODE
; ++code
)
268 if (ggc_stats
->num_rtxs
[code
])
270 fprintf (stream
, "%-17s%10u%16ld%c %10.3f\n",
272 ggc_stats
->num_rtxs
[code
],
273 SCALE (ggc_stats
->size_rtxs
[code
]),
274 LABEL (ggc_stats
->size_rtxs
[code
]),
275 (100 * ((double) ggc_stats
->size_rtxs
[code
])
276 / ggc_stats
->total_size_rtxs
));
279 "%-17s%10u%16ld%c\n", "Total",
280 ggc_stats
->total_num_rtxs
,
281 SCALE (ggc_stats
->total_size_rtxs
),
282 LABEL (ggc_stats
->total_size_rtxs
));
284 /* Don't gather statistics any more. */
/* Modify the bound based on rlimits.  Keep the smallest number found.
   Returns LIMIT clamped to the strictest applicable resource limit;
   returns LIMIT unchanged where getrlimit is unavailable.  */

static double
ggc_rlimit_bound (limit)
     double limit;
{
#if defined(HAVE_GETRLIMIT)
  struct rlimit rlim;
# ifdef RLIMIT_RSS
  if (getrlimit (RLIMIT_RSS, &rlim) == 0
      && rlim.rlim_cur != (rlim_t) RLIM_INFINITY
      && rlim.rlim_cur < limit)
    limit = rlim.rlim_cur;
# endif
# ifdef RLIMIT_DATA
  if (getrlimit (RLIMIT_DATA, &rlim) == 0
      && rlim.rlim_cur != (rlim_t) RLIM_INFINITY
      && rlim.rlim_cur < limit)
    limit = rlim.rlim_cur;
# endif
# ifdef RLIMIT_AS
  if (getrlimit (RLIMIT_AS, &rlim) == 0
      && rlim.rlim_cur != (rlim_t) RLIM_INFINITY
      && rlim.rlim_cur < limit)
    limit = rlim.rlim_cur;
# endif
#endif /* HAVE_GETRLIMIT */

  return limit;
}
/* Heuristic to set a default for GGC_MIN_EXPAND.  Returns a percentage
   derived from the amount of physical memory, bounded by rlimits.  */

int
ggc_min_expand_heuristic()
{
  double min_expand = physmem_total();

  /* Adjust for rlimits.  */
  min_expand = ggc_rlimit_bound (min_expand);

  /* The heuristic is a percentage equal to 30% + 70%*(RAM/1GB), yielding
     a lower bound of 30% and an upper bound of 100% (when RAM >= 1GB).  */
  min_expand /= 1024*1024*1024;
  min_expand *= 70;
  min_expand = MIN (min_expand, 70);
  min_expand += 30;

  return min_expand;
}
/* Heuristic to set a default for GGC_MIN_HEAPSIZE.  Returns a size in
   kilobytes derived from physical memory, bounded by rlimits.  */

int
ggc_min_heapsize_heuristic()
{
  double min_heap_kbytes = physmem_total();

  /* Adjust for rlimits.  */
  min_heap_kbytes = ggc_rlimit_bound (min_heap_kbytes);

  min_heap_kbytes /= 1024; /* convert to Kbytes.  */

  /* The heuristic is RAM/8, with a lower bound of 4M and an upper
     bound of 128M (when RAM >= 1GB).  */
  min_heap_kbytes /= 8;
  min_heap_kbytes = MAX (min_heap_kbytes, 4 * 1024);
  min_heap_kbytes = MIN (min_heap_kbytes, 128 * 1024);

  return min_heap_kbytes;
}
/* Install the GC-tuning heuristics as the default parameter values,
   unless the compiler was configured to collect at every opportunity.  */

void
init_ggc_heuristics ()
{
#ifndef ENABLE_GC_ALWAYS_COLLECT
  set_param_value ("ggc-min-expand", ggc_min_expand_heuristic());
  set_param_value ("ggc-min-heapsize", ggc_min_heapsize_heuristic());
#endif
}