[official-gcc.git] / boehm-gc / malloc.c
/*
 * Copyright 1988, 1989 Hans-J. Boehm, Alan J. Demers
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED.  ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 */
/* Boehm, February 7, 1996 4:32 pm PST */
#include <stdio.h>
#include "gc_priv.h"

extern ptr_t GC_clear_stack();  /* in misc.c, behaves like identity */
void GC_extend_size_map();      /* in misc.c */
/* Allocate reclaim list for kind.  */
/* Return TRUE on success.          */
GC_bool GC_alloc_reclaim_list(kind)
register struct obj_kind * kind;
{
    struct hblk ** result = (struct hblk **)
                GC_scratch_alloc((MAXOBJSZ+1) * sizeof(struct hblk *));

    if (result == 0) return(FALSE);
    BZERO(result, (MAXOBJSZ+1)*sizeof(struct hblk *));
    kind -> ok_reclaim_list = result;
    return(TRUE);
}
/* Allocate lb bytes for an object of kind k.   */
/* Should not be used directly to allocate      */
/* objects such as STUBBORN objects that        */
/* require special handling on allocation.      */
/* First a version that assumes we already      */
/* hold the lock:                               */
ptr_t GC_generic_malloc_inner(lb, k)
register word lb;
register int k;
{
register word lw;
register ptr_t op;
register ptr_t *opp;
    if( SMALL_OBJ(lb) ) {
        register struct obj_kind * kind = GC_obj_kinds + k;
#       ifdef MERGE_SIZES
          lw = GC_size_map[lb];
#       else
          lw = ALIGNED_WORDS(lb);
          if (lw == 0) lw = 1;
#       endif
        opp = &(kind -> ok_freelist[lw]);
        if( (op = *opp) == 0 ) {
#           ifdef MERGE_SIZES
              if (GC_size_map[lb] == 0) {
                  if (!GC_is_initialized) GC_init_inner();
                  if (GC_size_map[lb] == 0) GC_extend_size_map(lb);
                  return(GC_generic_malloc_inner(lb, k));
              }
#           else
              if (!GC_is_initialized) {
                  GC_init_inner();
                  return(GC_generic_malloc_inner(lb, k));
              }
#           endif
            if (kind -> ok_reclaim_list == 0) {
                if (!GC_alloc_reclaim_list(kind)) goto out;
            }
            op = GC_allocobj(lw, k);
            if (op == 0) goto out;
        }
        /* Here everything is in a consistent state.    */
        /* We assume the following assignment is        */
        /* atomic.  If we get aborted                   */
        /* after the assignment, we lose an object,     */
        /* but that's benign.                           */
        /* Volatile declarations may need to be added   */
        /* to prevent the compiler from breaking things.*/
        *opp = obj_link(op);
        obj_link(op) = 0;
    } else {
        register struct hblk * h;
        register word n_blocks = divHBLKSZ(ADD_SLOP(lb)
                                           + HDR_BYTES + HBLKSIZE-1);

        if (!GC_is_initialized) GC_init_inner();
        /* Do our share of marking work */
        if (GC_incremental && !GC_dont_gc)
            GC_collect_a_little_inner((int)n_blocks);
        lw = ROUNDED_UP_WORDS(lb);
        while ((h = GC_allochblk(lw, k, 0)) == 0
               && GC_collect_or_expand(n_blocks, FALSE));
        if (h == 0) {
            op = 0;
        } else {
            op = (ptr_t) (h -> hb_body);
            GC_words_wasted += BYTES_TO_WORDS(n_blocks * HBLKSIZE) - lw;
        }
    }
    GC_words_allocd += lw;

out:
    return((ptr_t)op);
}
ptr_t GC_generic_malloc(lb, k)
register word lb;
register int k;
{
    ptr_t result;
    DCL_LOCK_STATE;

    GC_INVOKE_FINALIZERS();
    DISABLE_SIGNALS();
    LOCK();
    result = GC_generic_malloc_inner(lb, k);
    UNLOCK();
    ENABLE_SIGNALS();
    if (0 == result) {
        return((*GC_oom_fn)(lb));
    } else {
        return(result);
    }
}
#define GENERAL_MALLOC(lb,k) \
    (GC_PTR)GC_clear_stack(GC_generic_malloc((word)lb, k))
/* We make the GC_clear_stack call a tail call, hoping to get more of  */
/* the stack.                                                          */
/* Allocate lb bytes of atomic (pointerfree) data */
# ifdef __STDC__
    GC_PTR GC_malloc_atomic(size_t lb)
# else
    GC_PTR GC_malloc_atomic(lb)
    size_t lb;
# endif
{
register ptr_t op;
register ptr_t * opp;
register word lw;
DCL_LOCK_STATE;
    if( SMALL_OBJ(lb) ) {
#       ifdef MERGE_SIZES
          lw = GC_size_map[lb];
#       else
          lw = ALIGNED_WORDS(lb);
#       endif
        opp = &(GC_aobjfreelist[lw]);
        FASTLOCK();
        if( !FASTLOCK_SUCCEEDED() || (op = *opp) == 0 ) {
            FASTUNLOCK();
            return(GENERAL_MALLOC((word)lb, PTRFREE));
        }
        /* See above comment on signals. */
        *opp = obj_link(op);
        GC_words_allocd += lw;
        FASTUNLOCK();
        return((GC_PTR) op);
    } else {
        return(GENERAL_MALLOC((word)lb, PTRFREE));
    }
}
/* Allocate lb bytes of composite (pointerful) data */
# ifdef __STDC__
    GC_PTR GC_malloc(size_t lb)
# else
    GC_PTR GC_malloc(lb)
    size_t lb;
# endif
{
register ptr_t op;
register ptr_t *opp;
register word lw;
DCL_LOCK_STATE;
    if( SMALL_OBJ(lb) ) {
#       ifdef MERGE_SIZES
          lw = GC_size_map[lb];
#       else
          lw = ALIGNED_WORDS(lb);
#       endif
        opp = &(GC_objfreelist[lw]);
        FASTLOCK();
        if( !FASTLOCK_SUCCEEDED() || (op = *opp) == 0 ) {
            FASTUNLOCK();
            return(GENERAL_MALLOC((word)lb, NORMAL));
        }
        /* See above comment on signals. */
        *opp = obj_link(op);
        obj_link(op) = 0;
        GC_words_allocd += lw;
        FASTUNLOCK();
        return((GC_PTR) op);
    } else {
        return(GENERAL_MALLOC((word)lb, NORMAL));
    }
}
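
#if 0
/* Illustrative usage sketch, not part of the original source: a       */
/* client calls GC_malloc for data that may contain pointers and       */
/* GC_malloc_atomic for pointer-free data, which the collector never   */
/* scans for pointers.  The "node" type and example_alloc are          */
/* hypothetical names used only for this example.                      */
#include "gc.h"

struct node { struct node * next; int val; };

static struct node * example_alloc(void)
{
    struct node * n = (struct node *)GC_malloc(sizeof(struct node));
    char * buf = (char *)GC_malloc_atomic(4096);  /* never scanned */

    n -> next = 0;
    n -> val = (buf != 0);
    return(n);      /* no explicit free needed; the collector reclaims it */
}
#endif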
# ifdef REDIRECT_MALLOC
# ifdef __STDC__
    GC_PTR malloc(size_t lb)
# else
    GC_PTR malloc(lb)
    size_t lb;
# endif
{
    /* It might help to manually inline the GC_malloc call here.       */
    /* But any decent compiler should reduce the extra procedure call  */
    /* to at most a jump instruction in this case.                     */
#   if defined(I386) && defined(SOLARIS_THREADS)
      /*
       * Thread initialisation can call malloc before
       * we're ready for it.
       */
      if (!GC_is_initialized) return sbrk(lb);
#   endif /* I386 && SOLARIS_THREADS */
    return(REDIRECT_MALLOC(lb));
}
# ifdef __STDC__
    GC_PTR calloc(size_t n, size_t lb)
# else
    GC_PTR calloc(n, lb)
    size_t n, lb;
# endif
{
    return(REDIRECT_MALLOC(n*lb));
}
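
/* Note (editorial): the n*lb product above is not checked for         */
/* arithmetic overflow in this historical version, so a caller passing */
/* huge n and lb could receive a smaller object than requested.  Later */
/* collector releases add an explicit overflow check here.             */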
# endif /* REDIRECT_MALLOC */
GC_PTR GC_generic_or_special_malloc(lb,knd)
word lb;
int knd;
{
    switch(knd) {
#       ifdef STUBBORN_ALLOC
          case STUBBORN:
            return(GC_malloc_stubborn((size_t)lb));
#       endif
        case PTRFREE:
            return(GC_malloc_atomic((size_t)lb));
        case NORMAL:
            return(GC_malloc((size_t)lb));
        case UNCOLLECTABLE:
            return(GC_malloc_uncollectable((size_t)lb));
#       ifdef ATOMIC_UNCOLLECTABLE
          case AUNCOLLECTABLE:
            return(GC_malloc_atomic_uncollectable((size_t)lb));
#       endif /* ATOMIC_UNCOLLECTABLE */
        default:
            return(GC_generic_malloc(lb,knd));
    }
}
/* Change the size of the block pointed to by p to contain at least    */
/* lb bytes.  The object may be (and quite likely will be) moved.      */
/* The kind (e.g. atomic) is the same as that of the old object.       */
/* Shrinking of large blocks is not implemented well.                  */
# ifdef __STDC__
    GC_PTR GC_realloc(GC_PTR p, size_t lb)
# else
    GC_PTR GC_realloc(p,lb)
    GC_PTR p;
    size_t lb;
# endif
{
register struct hblk * h;
register hdr * hhdr;
register word sz;       /* Current size in bytes        */
register word orig_sz;  /* Original sz in bytes         */
int obj_kind;
    if (p == 0) return(GC_malloc(lb));  /* Required by ANSI */
    h = HBLKPTR(p);
    hhdr = HDR(h);
    sz = hhdr -> hb_sz;
    obj_kind = hhdr -> hb_obj_kind;
    sz = WORDS_TO_BYTES(sz);
    orig_sz = sz;

    if (sz > WORDS_TO_BYTES(MAXOBJSZ)) {
        /* Round it up to the next whole heap block */
        register word descr;

        sz = (sz+HDR_BYTES+HBLKSIZE-1)
             & (~HBLKMASK);
        sz -= HDR_BYTES;
        hhdr -> hb_sz = BYTES_TO_WORDS(sz);
        descr = GC_obj_kinds[obj_kind].ok_descriptor;
        if (GC_obj_kinds[obj_kind].ok_relocate_descr) descr += sz;
        hhdr -> hb_descr = descr;
        if (IS_UNCOLLECTABLE(obj_kind)) GC_non_gc_bytes += (sz - orig_sz);
        /* Extra area is already cleared by allochblk. */
    }
    if (ADD_SLOP(lb) <= sz) {
        if (lb >= (sz >> 1)) {
#           ifdef STUBBORN_ALLOC
              if (obj_kind == STUBBORN) GC_change_stubborn(p);
#           endif
            if (orig_sz > lb) {
                /* Clear unneeded part of object to avoid bogus pointer */
                /* tracing.                                             */
                /* Safe for stubborn objects.                           */
                BZERO(((ptr_t)p) + lb, orig_sz - lb);
            }
            return(p);
        } else {
            /* shrink */
            GC_PTR result =
                GC_generic_or_special_malloc((word)lb, obj_kind);

            if (result == 0) return(0);
            /* Could also return original object.  But this    */
            /* gives the client warning of imminent disaster.  */
            BCOPY(p, result, lb);
#           ifndef IGNORE_FREE
              GC_free(p);
#           endif
            return(result);
        }
    } else {
        /* grow */
        GC_PTR result =
            GC_generic_or_special_malloc((word)lb, obj_kind);

        if (result == 0) return(0);
        BCOPY(p, result, sz);
#       ifndef IGNORE_FREE
          GC_free(p);
#       endif
        return(result);
    }
}
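
#if 0
/* Illustrative usage sketch, not part of the original source:         */
/* GC_realloc follows ANSI realloc conventions, so GC_realloc(0, lb)   */
/* behaves like GC_malloc(lb), and a successful call may return a      */
/* pointer to a different (moved) object.  example_grow is a           */
/* hypothetical name used only for this example.                       */
#include <stddef.h>
#include "gc.h"

static char * example_grow(char * p, size_t new_size)
{
    char * q = (char *)GC_realloc((GC_PTR)p, new_size);

    if (q == 0) return(p);  /* allocation failed; old object still valid */
    return(q);              /* old object may have been freed and copied */
}
#endif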
# ifdef REDIRECT_MALLOC
# ifdef __STDC__
    GC_PTR realloc(GC_PTR p, size_t lb)
# else
    GC_PTR realloc(p,lb)
    GC_PTR p;
    size_t lb;
# endif
{
    return(GC_realloc(p, lb));
}
# endif /* REDIRECT_MALLOC */
/* Explicitly deallocate an object p. */
# ifdef __STDC__
    void GC_free(GC_PTR p)
# else
    void GC_free(p)
    GC_PTR p;
# endif
{
register struct hblk *h;
register hdr *hhdr;
register signed_word sz;
register ptr_t * flh;
register int knd;
register struct obj_kind * ok;
DCL_LOCK_STATE;

    if (p == 0) return;
        /* Required by ANSI.  It's not my fault ... */
    h = HBLKPTR(p);
    hhdr = HDR(h);
    knd = hhdr -> hb_obj_kind;
    sz = hhdr -> hb_sz;
    ok = &GC_obj_kinds[knd];
    if (sz <= MAXOBJSZ) {
#       ifdef THREADS
            DISABLE_SIGNALS();
            LOCK();
#       endif
        GC_mem_freed += sz;
        /* A signal here can make GC_mem_freed and GC_non_gc_bytes     */
        /* inconsistent.  We claim this is benign.                     */
        if (IS_UNCOLLECTABLE(knd)) GC_non_gc_bytes -= WORDS_TO_BYTES(sz);
        /* It's unnecessary to clear the mark bit.  If the             */
        /* object is reallocated, it doesn't matter.  Otherwise the    */
        /* collector will do it, since it's on a free list.            */
        if (ok -> ok_init) {
            BZERO((word *)p + 1, WORDS_TO_BYTES(sz-1));
        }
        flh = &(ok -> ok_freelist[sz]);
        obj_link(p) = *flh;
        *flh = (ptr_t)p;
#       ifdef THREADS
            UNLOCK();
            ENABLE_SIGNALS();
#       endif
    } else {
        DISABLE_SIGNALS();
        LOCK();
        GC_mem_freed += sz;
        if (IS_UNCOLLECTABLE(knd)) GC_non_gc_bytes -= WORDS_TO_BYTES(sz);
        GC_freehblk(h);
        UNLOCK();
        ENABLE_SIGNALS();
    }
}
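
#if 0
/* Illustrative usage sketch, not part of the original source:         */
/* explicit deallocation is optional under the collector, but GC_free  */
/* recycles an object immediately instead of waiting for the next      */
/* collection.  The caller must be sure no other pointers to the       */
/* object remain.  example_free is a hypothetical name.                */
#include "gc.h"

static void example_free(void)
{
    char * tmp = (char *)GC_malloc_atomic(256);

    /* ... use tmp as short-lived scratch space ... */
    GC_free((GC_PTR)tmp);   /* returns the object to its free list */
}
#endif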
# ifdef REDIRECT_MALLOC
# ifdef __STDC__
    void free(GC_PTR p)
# else
    void free(p)
    GC_PTR p;
# endif
{
#   ifndef IGNORE_FREE
      GC_free(p);
#   endif
}
# endif /* REDIRECT_MALLOC */