/* boehm-gc/typd_mlc.c (official-gcc.git) */
/*
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED.  ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 */
/* Boehm, July 31, 1995 5:02 pm PDT */
/*
 * Some simple primitives for allocation with explicit type information.
 * Simple objects are allocated such that they contain a GC_descr at the
 * end (in the last allocated word).  This descriptor may be a procedure
 * which then examines an extended descriptor passed as its environment.
 *
 * Arrays are treated as simple objects if they have sufficiently simple
 * structure.  Otherwise they are allocated from an array kind that supplies
 * a special mark procedure.  These arrays contain a pointer to a
 * complex_descriptor as their last word.
 * This is done because the environment field is too small, and the collector
 * must trace the complex_descriptor.
 *
 * Note that descriptors inside objects may appear cleared, if we encounter a
 * false reference to an object on a free list.  In the GC_descr case, this
 * is OK, since a 0 descriptor corresponds to examining no fields.
 * In the complex_descriptor case, we explicitly check for that case.
 *
 * MAJOR PARTS OF THIS CODE HAVE NOT BEEN TESTED AT ALL and are not testable,
 * since they are not accessible through the current interface.
 */
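/*
 * Illustrative usage sketch (not part of the collector and kept out of
 * the build): a client describes which words of its type hold pointers,
 * turns that bitmap into a GC_descr with GC_make_descriptor(), and then
 * allocates through GC_malloc_explicitly_typed().  The example structure,
 * helper function, and bit arithmetic below are assumptions made only for
 * illustration; the entry points themselves are the ones defined in this
 * file and declared in gc_typed.h.
 */
#if 0
struct example_pair {
    struct example_pair * next;     /* word 0 holds a pointer   */
    GC_word value;                  /* word 1 is not a pointer  */
};

static struct example_pair * new_example_pair(struct example_pair * next,
                                              GC_word value)
{
    static GC_descr pair_descr = 0;
    struct example_pair * p;

    if (pair_descr == 0) {
        GC_word bm[1];

        bm[0] = (GC_word)1;     /* set bit 0: only the first word is a pointer */
        pair_descr = GC_make_descriptor(bm,
                         sizeof(struct example_pair)/sizeof(GC_word));
    }
    p = (struct example_pair *)
            GC_malloc_explicitly_typed(sizeof(struct example_pair), pair_descr);
    if (p != 0) {
        p -> next = next;
        p -> value = value;
    }
    return(p);
}
#endif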
#include "gc_priv.h"
#include "gc_mark.h"
#include "gc_typed.h"

# ifdef ADD_BYTE_AT_END
#   define EXTRA_BYTES (sizeof(word) - 1)
# else
#   define EXTRA_BYTES (sizeof(word))
# endif
GC_bool GC_explicit_typing_initialized = FALSE;

int GC_explicit_kind;   /* Object kind for objects with indirect    */
                        /* (possibly extended) descriptors.         */

int GC_array_kind;      /* Object kind for objects with complex     */
                        /* descriptors and GC_array_mark_proc.      */

/* Extended descriptors.  GC_typed_mark_proc understands these. */
/* These are used for simple objects that are larger than what  */
/* can be described by a BITMAP_BITS sized bitmap.              */
typedef struct {
    word ed_bitmap;             /* lsb corresponds to first word.  */
    GC_bool ed_continued;       /* next entry is continuation.     */
} ext_descr;
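/* For example (illustrative, assuming 32-bit words): a 70-word object  */
/* needs 3 consecutive ext_descr entries.  The first two hold 32 bitmap */
/* bits each with ed_continued set; the third holds the remaining 6     */
/* bits with ed_continued clear.  GC_typed_mark_proc processes one      */
/* entry per invocation, using its env argument as the index.           */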
/* Array descriptors.  GC_array_mark_proc understands these.    */
/* We may eventually need to add provisions for headers and     */
/* trailers.  Hence we provide for tree structured descriptors, */
/* though we don't really use them currently.                   */
typedef union ComplexDescriptor {
    struct LeafDescriptor {     /* Describes simple array */
        word ld_tag;
#       define LEAF_TAG 1
        word ld_size;           /* bytes per element        */
                                /* multiple of ALIGNMENT    */
        word ld_nelements;      /* Number of elements.      */
        GC_descr ld_descriptor; /* A simple length, bitmap, */
                                /* or procedure descriptor. */
    } ld;
    struct ComplexArrayDescriptor {
        word ad_tag;
#       define ARRAY_TAG 2
        word ad_nelements;
        union ComplexDescriptor * ad_element_descr;
    } ad;
    struct SequenceDescriptor {
        word sd_tag;
#       define SEQUENCE_TAG 3
        union ComplexDescriptor * sd_first;
        union ComplexDescriptor * sd_second;
    } sd;
} complex_descriptor;
#define TAG ld.ld_tag

ext_descr * GC_ext_descriptors; /* Points to array of extended */
                                /* descriptors.                */

word GC_ed_size = 0;            /* Current size of above arrays. */
# define ED_INITIAL_SIZE 100
word GC_avail_descr = 0;        /* Next available slot. */

int GC_typed_mark_proc_index;   /* Indices of my mark   */
int GC_array_mark_proc_index;   /* procedures.          */
/* Add a multiword bitmap to GC_ext_descriptors arrays.  Return */
/* starting index.                                              */
/* Returns -1 on failure.                                       */
/* Caller does not hold allocation lock.                        */
signed_word GC_add_ext_descriptor(bm, nbits)
GC_bitmap bm;
word nbits;
{
    register size_t nwords = divWORDSZ(nbits + WORDSZ-1);
    register signed_word result;
    register word i;
    register word last_part;
    register int extra_bits;
    DCL_LOCK_STATE;

    DISABLE_SIGNALS();
    LOCK();
    while (GC_avail_descr + nwords >= GC_ed_size) {
        ext_descr * new;
        size_t new_size;
        word ed_size = GC_ed_size;

        UNLOCK();
        ENABLE_SIGNALS();
        if (ed_size == 0) {
            new_size = ED_INITIAL_SIZE;
        } else {
            new_size = 2 * ed_size;
            if (new_size > MAX_ENV) return(-1);
        }
        new = (ext_descr *) GC_malloc_atomic(new_size * sizeof(ext_descr));
        if (new == 0) return(-1);
        DISABLE_SIGNALS();
        LOCK();
        if (ed_size == GC_ed_size) {
            if (GC_avail_descr != 0) {
                BCOPY(GC_ext_descriptors, new,
                      GC_avail_descr * sizeof(ext_descr));
            }
            GC_ed_size = new_size;
            GC_ext_descriptors = new;
        }  /* else another thread already resized it in the meantime */
    }
    result = GC_avail_descr;
    for (i = 0; i < nwords-1; i++) {
        GC_ext_descriptors[result + i].ed_bitmap = bm[i];
        GC_ext_descriptors[result + i].ed_continued = TRUE;
    }
    last_part = bm[i];
    /* Clear irrelevant bits. */
    extra_bits = nwords * WORDSZ - nbits;
    last_part <<= extra_bits;
    last_part >>= extra_bits;
    GC_ext_descriptors[result + i].ed_bitmap = last_part;
    GC_ext_descriptors[result + i].ed_continued = FALSE;
    GC_avail_descr += nwords;
    UNLOCK();
    ENABLE_SIGNALS();
    return(result);
}
/* Table of bitmap descriptors for n word long all pointer objects. */
GC_descr GC_bm_table[WORDSZ/2];

/* Return a descriptor for the concatenation of 2 nwords long objects, */
/* each of which is described by descriptor.                           */
/* The result is known to be short enough to fit into a bitmap         */
/* descriptor.                                                         */
/* Descriptor is a DS_LENGTH or DS_BITMAP descriptor.                  */
GC_descr GC_double_descr(descriptor, nwords)
register GC_descr descriptor;
register word nwords;
{
    if ((descriptor & DS_TAGS) == DS_LENGTH) {
        descriptor = GC_bm_table[BYTES_TO_WORDS((word)descriptor)];
    }
    descriptor |= (descriptor & ~DS_TAGS) >> nwords;
    return(descriptor);
}
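/* Worked example (illustrative, assuming 32-bit words): doubling a     */
/* 4-word element whose first 3 words are pointers.  The DS_LENGTH      */
/* descriptor is first replaced by the bitmap in GC_bm_table[3], i.e.   */
/* 1110 followed by zeros (msb = first word).  Or-ing in a copy shifted */
/* right by nwords = 4 yields 1110 1110 followed by zeros, marking      */
/* words 0-2 and 4-6 of the 8-word pair as pointers.  Masking with      */
/* ~DS_TAGS before the shift keeps the DS_BITMAP tag bits out of the    */
/* shifted copy.                                                        */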
complex_descriptor * GC_make_sequence_descriptor();

/* Build a descriptor for an array with nelements elements,    */
/* each of which can be described by a simple descriptor.      */
/* We try to optimize some common cases.                       */
/* If the result is COMPLEX, then a complex_descr* is returned */
/* in *complex_d.                                              */
/* If the result is LEAF, then we built a LeafDescriptor in    */
/* the structure pointed to by leaf.                           */
/* The tag in the leaf structure is not set.                   */
/* If the result is SIMPLE, then a GC_descr                    */
/* is returned in *simple_d.                                   */
/* If the result is NO_MEM, then                               */
/* we failed to allocate the descriptor.                       */
/* The implementation knows that DS_LENGTH is 0.               */
/* *leaf, *complex_d, and *simple_d may be used as temporaries */
/* during the construction.                                    */
# define COMPLEX 2
# define LEAF 1
# define SIMPLE 0
# define NO_MEM (-1)
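/* For instance (illustrative): an array of 100 all-pointer elements    */
/* collapses to a SIMPLE length descriptor covering the whole array,    */
/* while an array of 100 four-word elements with a single pointer word  */
/* each typically comes back as a LEAF, which GC_array_mark_proc        */
/* expands one element at a time during marking.                        */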
int GC_make_array_descriptor(nelements, size, descriptor,
                             simple_d, complex_d, leaf)
word size;
word nelements;
GC_descr descriptor;
GC_descr *simple_d;
complex_descriptor **complex_d;
struct LeafDescriptor * leaf;
{
#   define OPT_THRESHOLD 50
        /* For larger arrays, we try to combine descriptors of adjacent */
        /* elements to speed up marking, and to reduce the amount       */
        /* of space needed on the mark stack.                           */
    if ((descriptor & DS_TAGS) == DS_LENGTH) {
      if ((word)descriptor == size) {
        *simple_d = nelements * descriptor;
        return(SIMPLE);
      } else if ((word)descriptor == 0) {
        *simple_d = (GC_descr)0;
        return(SIMPLE);
      }
    }
    if (nelements <= OPT_THRESHOLD) {
      if (nelements <= 1) {
        if (nelements == 1) {
            *simple_d = descriptor;
            return(SIMPLE);
        } else {
            *simple_d = (GC_descr)0;
            return(SIMPLE);
        }
      }
    } else if (size <= BITMAP_BITS/2
               && (descriptor & DS_TAGS) != DS_PROC
               && (size & (sizeof(word)-1)) == 0) {
      int result =
          GC_make_array_descriptor(nelements/2, 2*size,
                                   GC_double_descr(descriptor,
                                                   BYTES_TO_WORDS(size)),
                                   simple_d, complex_d, leaf);
      if ((nelements & 1) == 0) {
          return(result);
      } else {
          struct LeafDescriptor * one_element =
              (struct LeafDescriptor *)
                GC_malloc_atomic(sizeof(struct LeafDescriptor));

          if (result == NO_MEM || one_element == 0) return(NO_MEM);
          one_element -> ld_tag = LEAF_TAG;
          one_element -> ld_size = size;
          one_element -> ld_nelements = 1;
          one_element -> ld_descriptor = descriptor;
          switch(result) {
            case SIMPLE:
            {
              struct LeafDescriptor * beginning =
                (struct LeafDescriptor *)
                  GC_malloc_atomic(sizeof(struct LeafDescriptor));
              if (beginning == 0) return(NO_MEM);
              beginning -> ld_tag = LEAF_TAG;
              beginning -> ld_size = size;
              beginning -> ld_nelements = 1;
              beginning -> ld_descriptor = *simple_d;
              *complex_d = GC_make_sequence_descriptor(
                                (complex_descriptor *)beginning,
                                (complex_descriptor *)one_element);
              break;
            }
            case LEAF:
            {
              struct LeafDescriptor * beginning =
                (struct LeafDescriptor *)
                  GC_malloc_atomic(sizeof(struct LeafDescriptor));
              if (beginning == 0) return(NO_MEM);
              beginning -> ld_tag = LEAF_TAG;
              beginning -> ld_size = leaf -> ld_size;
              beginning -> ld_nelements = leaf -> ld_nelements;
              beginning -> ld_descriptor = leaf -> ld_descriptor;
              *complex_d = GC_make_sequence_descriptor(
                                (complex_descriptor *)beginning,
                                (complex_descriptor *)one_element);
              break;
            }
            case COMPLEX:
              *complex_d = GC_make_sequence_descriptor(
                                *complex_d,
                                (complex_descriptor *)one_element);
              break;
          }
          return(COMPLEX);
      }
    }

    leaf -> ld_size = size;
    leaf -> ld_nelements = nelements;
    leaf -> ld_descriptor = descriptor;
    return(LEAF);
}
complex_descriptor * GC_make_sequence_descriptor(first, second)
complex_descriptor * first;
complex_descriptor * second;
{
    struct SequenceDescriptor * result =
        (struct SequenceDescriptor *)
          GC_malloc(sizeof(struct SequenceDescriptor));
    /* Can't result in overly conservative marking, since tags are */
    /* very small integers.  Probably faster than maintaining type */
    /* info.                                                       */
    if (result != 0) {
        result -> sd_tag = SEQUENCE_TAG;
        result -> sd_first = first;
        result -> sd_second = second;
    }
    return((complex_descriptor *)result);
}
#ifdef UNDEFINED
complex_descriptor * GC_make_complex_array_descriptor(nelements, descr)
word nelements;
complex_descriptor * descr;
{
    struct ComplexArrayDescriptor * result =
        (struct ComplexArrayDescriptor *)
          GC_malloc(sizeof(struct ComplexArrayDescriptor));

    if (result != 0) {
        result -> ad_tag = ARRAY_TAG;
        result -> ad_nelements = nelements;
        result -> ad_element_descr = descr;
    }
    return((complex_descriptor *)result);
}
#endif
ptr_t * GC_eobjfreelist;

ptr_t * GC_arobjfreelist;

mse * GC_typed_mark_proc();

mse * GC_array_mark_proc();

GC_descr GC_generic_array_descr;
/* Caller does not hold allocation lock. */
void GC_init_explicit_typing()
{
    register int i;
    DCL_LOCK_STATE;

#   ifdef PRINTSTATS
        if (sizeof(struct LeafDescriptor) % sizeof(word) != 0)
            ABORT("Bad leaf descriptor size");
#   endif
    DISABLE_SIGNALS();
    LOCK();
    if (GC_explicit_typing_initialized) {
        UNLOCK();
        ENABLE_SIGNALS();
        return;
    }
    GC_explicit_typing_initialized = TRUE;
    /* Set up object kind with simple indirect descriptor. */
      GC_eobjfreelist = (ptr_t *)
          GC_generic_malloc_inner((MAXOBJSZ+1)*sizeof(ptr_t), PTRFREE);
      if (GC_eobjfreelist == 0) ABORT("Couldn't allocate GC_eobjfreelist");
      BZERO(GC_eobjfreelist, (MAXOBJSZ+1)*sizeof(ptr_t));
      GC_explicit_kind = GC_n_kinds++;
      GC_obj_kinds[GC_explicit_kind].ok_freelist = GC_eobjfreelist;
      GC_obj_kinds[GC_explicit_kind].ok_reclaim_list = 0;
      GC_obj_kinds[GC_explicit_kind].ok_descriptor =
          (((word)WORDS_TO_BYTES(-1)) | DS_PER_OBJECT);
      GC_obj_kinds[GC_explicit_kind].ok_relocate_descr = TRUE;
      GC_obj_kinds[GC_explicit_kind].ok_init = TRUE;
          /* Descriptors are in the last word of the object. */
      GC_typed_mark_proc_index = GC_n_mark_procs;
      GC_mark_procs[GC_typed_mark_proc_index] = GC_typed_mark_proc;
      GC_n_mark_procs++;
          /* Moving this up breaks DEC AXP compiler. */
    /* Set up object kind with array descriptor. */
      GC_arobjfreelist = (ptr_t *)
          GC_generic_malloc_inner((MAXOBJSZ+1)*sizeof(ptr_t), PTRFREE);
      if (GC_arobjfreelist == 0) ABORT("Couldn't allocate GC_arobjfreelist");
      BZERO(GC_arobjfreelist, (MAXOBJSZ+1)*sizeof(ptr_t));
      if (GC_n_mark_procs >= MAX_MARK_PROCS)
          ABORT("No slot for array mark proc");
      GC_array_mark_proc_index = GC_n_mark_procs++;
      if (GC_n_kinds >= MAXOBJKINDS)
          ABORT("No kind available for array objects");
      GC_array_kind = GC_n_kinds++;
      GC_obj_kinds[GC_array_kind].ok_freelist = GC_arobjfreelist;
      GC_obj_kinds[GC_array_kind].ok_reclaim_list = 0;
      GC_obj_kinds[GC_array_kind].ok_descriptor =
          MAKE_PROC(GC_array_mark_proc_index, 0);
      GC_obj_kinds[GC_array_kind].ok_relocate_descr = FALSE;
      GC_obj_kinds[GC_array_kind].ok_init = TRUE;
          /* Descriptors are in the last word of the object. */
      GC_mark_procs[GC_array_mark_proc_index] = GC_array_mark_proc;
      for (i = 0; i < WORDSZ/2; i++) {
          GC_descr d = (((word)(-1)) >> (WORDSZ - i)) << (WORDSZ - i);
          d |= DS_BITMAP;
          GC_bm_table[i] = d;
      }
      GC_generic_array_descr = MAKE_PROC(GC_array_mark_proc_index, 0);
    UNLOCK();
    ENABLE_SIGNALS();
}
mse * GC_typed_mark_proc(addr, mark_stack_ptr, mark_stack_limit, env)
register word * addr;
register mse * mark_stack_ptr;
mse * mark_stack_limit;
word env;
{
    register word bm = GC_ext_descriptors[env].ed_bitmap;
    register word * current_p = addr;
    register word current;
    register ptr_t greatest_ha = GC_greatest_plausible_heap_addr;
    register ptr_t least_ha = GC_least_plausible_heap_addr;

    for (; bm != 0; bm >>= 1, current_p++) {
        if (bm & 1) {
            current = *current_p;
            if ((ptr_t)current >= least_ha && (ptr_t)current <= greatest_ha) {
                PUSH_CONTENTS(current, mark_stack_ptr,
                              mark_stack_limit, current_p, exit1);
            }
        }
    }
    if (GC_ext_descriptors[env].ed_continued) {
        /* Push an entry with the rest of the descriptor back onto the */
        /* stack.  Thus we never do too much work at once.  Note that  */
        /* we also can't overflow the mark stack unless we actually    */
        /* mark something.                                             */
        mark_stack_ptr++;
        if (mark_stack_ptr >= mark_stack_limit) {
            mark_stack_ptr = GC_signal_mark_stack_overflow(mark_stack_ptr);
        }
        mark_stack_ptr -> mse_start = addr + WORDSZ;
        mark_stack_ptr -> mse_descr =
            MAKE_PROC(GC_typed_mark_proc_index, env+1);
    }
    return(mark_stack_ptr);
}
/* Return the size of the object described by d.  It would be faster to */
/* store this directly, or to compute it as part of                     */
/* GC_push_complex_descriptor, but hopefully it doesn't matter.         */
word GC_descr_obj_size(d)
register complex_descriptor *d;
{
    switch(d -> TAG) {
      case LEAF_TAG:
        return(d -> ld.ld_nelements * d -> ld.ld_size);
      case ARRAY_TAG:
        return(d -> ad.ad_nelements
               * GC_descr_obj_size(d -> ad.ad_element_descr));
      case SEQUENCE_TAG:
        return(GC_descr_obj_size(d -> sd.sd_first)
               + GC_descr_obj_size(d -> sd.sd_second));
      default:
        ABORT("Bad complex descriptor");
        /*NOTREACHED*/ return 0; /*NOTREACHED*/
    }
}
/* Push descriptors for the object at addr with complex descriptor d */
/* onto the mark stack.  Return 0 if the mark stack overflowed.      */
mse * GC_push_complex_descriptor(addr, d, msp, msl)
word * addr;
register complex_descriptor *d;
register mse * msp;
mse * msl;
{
    register ptr_t current = (ptr_t) addr;
    register word nelements;
    register word sz;
    register word i;

    switch(d -> TAG) {
      case LEAF_TAG:
        {
          register GC_descr descr = d -> ld.ld_descriptor;

          nelements = d -> ld.ld_nelements;
          if (msl - msp <= (ptrdiff_t)nelements) return(0);
          sz = d -> ld.ld_size;
          for (i = 0; i < nelements; i++) {
              msp++;
              msp -> mse_start = (word *)current;
              msp -> mse_descr = descr;
              current += sz;
          }
          return(msp);
        }
      case ARRAY_TAG:
        {
          register complex_descriptor *descr = d -> ad.ad_element_descr;

          nelements = d -> ad.ad_nelements;
          sz = GC_descr_obj_size(descr);
          for (i = 0; i < nelements; i++) {
              msp = GC_push_complex_descriptor((word *)current, descr,
                                               msp, msl);
              if (msp == 0) return(0);
              current += sz;
          }
          return(msp);
        }
      case SEQUENCE_TAG:
        {
          sz = GC_descr_obj_size(d -> sd.sd_first);
          msp = GC_push_complex_descriptor((word *)current, d -> sd.sd_first,
                                           msp, msl);
          if (msp == 0) return(0);
          current += sz;
          msp = GC_push_complex_descriptor((word *)current, d -> sd.sd_second,
                                           msp, msl);
          return(msp);
        }
      default:
        ABORT("Bad complex descriptor");
        /*NOTREACHED*/ return 0; /*NOTREACHED*/
    }
}
/*ARGSUSED*/
mse * GC_array_mark_proc(addr, mark_stack_ptr, mark_stack_limit, env)
register word * addr;
register mse * mark_stack_ptr;
mse * mark_stack_limit;
word env;
{
    register hdr * hhdr = HDR(addr);
    register word sz = hhdr -> hb_sz;
    register complex_descriptor * descr = (complex_descriptor *)(addr[sz-1]);
    mse * orig_mark_stack_ptr = mark_stack_ptr;
    mse * new_mark_stack_ptr;

    if (descr == 0) {
        /* Found a reference to a free list entry.  Ignore it. */
        return(orig_mark_stack_ptr);
    }
    /* In use counts were already updated when array descriptor was  */
    /* pushed.  Here we only replace it by subobject descriptors, so */
    /* no update is necessary.                                       */
    new_mark_stack_ptr = GC_push_complex_descriptor(addr, descr,
                                                    mark_stack_ptr,
                                                    mark_stack_limit-1);
    if (new_mark_stack_ptr == 0) {
        /* Doesn't fit.  Conservatively push the whole array as a unit */
        /* and request a mark stack expansion.                         */
        /* This cannot cause a mark stack overflow, since it replaces  */
        /* the original array entry.                                   */
        GC_mark_stack_too_small = TRUE;
        new_mark_stack_ptr = orig_mark_stack_ptr + 1;
        new_mark_stack_ptr -> mse_start = addr;
        new_mark_stack_ptr -> mse_descr = WORDS_TO_BYTES(sz) | DS_LENGTH;
    } else {
        /* Push descriptor itself */
        new_mark_stack_ptr++;
        new_mark_stack_ptr -> mse_start = addr + sz - 1;
        new_mark_stack_ptr -> mse_descr = sizeof(word) | DS_LENGTH;
    }
    return(new_mark_stack_ptr);
}
#if defined(__STDC__) || defined(__cplusplus)
  GC_descr GC_make_descriptor(GC_bitmap bm, size_t len)
#else
  GC_descr GC_make_descriptor(bm, len)
  GC_bitmap bm;
  size_t len;
#endif
{
    register signed_word last_set_bit = len - 1;
    register word result;
    register int i;
#   define HIGH_BIT (((word)1) << (WORDSZ - 1))

    if (!GC_explicit_typing_initialized) GC_init_explicit_typing();
    while (last_set_bit >= 0 && !GC_get_bit(bm, last_set_bit)) last_set_bit --;
    if (last_set_bit < 0) return(0 /* no pointers */);
#   if ALIGNMENT == CPP_WORDSZ/8
    {
      register GC_bool all_bits_set = TRUE;
      for (i = 0; i < last_set_bit; i++) {
        if (!GC_get_bit(bm, i)) {
            all_bits_set = FALSE;
            break;
        }
      }
      if (all_bits_set) {
        /* An initial section contains all pointers.  Use length descriptor. */
        return(WORDS_TO_BYTES(last_set_bit+1) | DS_LENGTH);
      }
    }
#   endif
    if (last_set_bit < BITMAP_BITS) {
        /* Hopefully the common case.                   */
        /* Build bitmap descriptor (with bits reversed) */
        result = HIGH_BIT;
        for (i = last_set_bit - 1; i >= 0; i--) {
            result >>= 1;
            if (GC_get_bit(bm, i)) result |= HIGH_BIT;
        }
        result |= DS_BITMAP;
        return(result);
    } else {
        signed_word index;

        index = GC_add_ext_descriptor(bm, (word)last_set_bit+1);
        if (index == -1) return(WORDS_TO_BYTES(last_set_bit+1) | DS_LENGTH);
                                /* Out of memory: use conservative */
                                /* approximation.                  */
        result = MAKE_PROC(GC_typed_mark_proc_index, (word)index);
        return(result);
    }
}
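/* Worked example (illustrative, assuming 32-bit words): a bitmap with */
/* bits 0 and 2 set (words 0 and 2 are pointers, last_set_bit = 2)     */
/* produces the reversed bitmap 101 followed by zeros, i.e.            */
/* HIGH_BIT | (HIGH_BIT >> 2), tagged with DS_BITMAP.                  */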
ptr_t GC_clear_stack();

#define GENERAL_MALLOC(lb,k) \
    (GC_PTR)GC_clear_stack(GC_generic_malloc((word)lb, k))

#define GENERAL_MALLOC_IOP(lb,k) \
    (GC_PTR)GC_clear_stack(GC_generic_malloc_ignore_off_page((word)lb, k))
#if defined(__STDC__) || defined(__cplusplus)
  void * GC_malloc_explicitly_typed(size_t lb, GC_descr d)
#else
  char * GC_malloc_explicitly_typed(lb, d)
  size_t lb;
  GC_descr d;
#endif
{
register ptr_t op;
register ptr_t * opp;
register word lw;
DCL_LOCK_STATE;

    lb += EXTRA_BYTES;
    if( SMALL_OBJ(lb) ) {
#       ifdef MERGE_SIZES
          lw = GC_size_map[lb];
#       else
          lw = ALIGNED_WORDS(lb);
#       endif
        opp = &(GC_eobjfreelist[lw]);
        FASTLOCK();
        if( !FASTLOCK_SUCCEEDED() || (op = *opp) == 0 ) {
            FASTUNLOCK();
            op = (ptr_t)GENERAL_MALLOC((word)lb, GC_explicit_kind);
            if (0 == op) return(0);
#           ifdef MERGE_SIZES
                lw = GC_size_map[lb];   /* May have been uninitialized. */
#           endif
        } else {
            *opp = obj_link(op);
            GC_words_allocd += lw;
            FASTUNLOCK();
        }
    } else {
        op = (ptr_t)GENERAL_MALLOC((word)lb, GC_explicit_kind);
        if (op != NULL)
            lw = BYTES_TO_WORDS(GC_size(op));
    }
    if (op != NULL)
        ((word *)op)[lw - 1] = d;
    return((GC_PTR) op);
}
#if defined(__STDC__) || defined(__cplusplus)
  void * GC_malloc_explicitly_typed_ignore_off_page(size_t lb, GC_descr d)
#else
  char * GC_malloc_explicitly_typed_ignore_off_page(lb, d)
  size_t lb;
  GC_descr d;
#endif
{
register ptr_t op;
register ptr_t * opp;
register word lw;
DCL_LOCK_STATE;

    lb += EXTRA_BYTES;
    if( SMALL_OBJ(lb) ) {
#       ifdef MERGE_SIZES
          lw = GC_size_map[lb];
#       else
          lw = ALIGNED_WORDS(lb);
#       endif
        opp = &(GC_eobjfreelist[lw]);
        FASTLOCK();
        if( !FASTLOCK_SUCCEEDED() || (op = *opp) == 0 ) {
            FASTUNLOCK();
            op = (ptr_t)GENERAL_MALLOC_IOP((word)lb, GC_explicit_kind);
#           ifdef MERGE_SIZES
                lw = GC_size_map[lb];   /* May have been uninitialized. */
#           endif
        } else {
            *opp = obj_link(op);
            GC_words_allocd += lw;
            FASTUNLOCK();
        }
    } else {
        op = (ptr_t)GENERAL_MALLOC_IOP((word)lb, GC_explicit_kind);
        if (op != NULL)
            lw = BYTES_TO_WORDS(GC_size(op));
    }
    if (op != NULL)
        ((word *)op)[lw - 1] = d;
    return((GC_PTR) op);
}
#if defined(__STDC__) || defined(__cplusplus)
  void * GC_calloc_explicitly_typed(size_t n,
                                    size_t lb,
                                    GC_descr d)
#else
  char * GC_calloc_explicitly_typed(n, lb, d)
  size_t n;
  size_t lb;
  GC_descr d;
#endif
{
register ptr_t op;
register ptr_t * opp;
register word lw;
GC_descr simple_descr;
complex_descriptor *complex_descr;
register int descr_type;
struct LeafDescriptor leaf;
DCL_LOCK_STATE;

    descr_type = GC_make_array_descriptor((word)n, (word)lb, d,
                                          &simple_descr, &complex_descr, &leaf);
    switch(descr_type) {
        case NO_MEM: return(0);
        case SIMPLE: return(GC_malloc_explicitly_typed(n*lb, simple_descr));
        case LEAF:
            lb *= n;
            lb += sizeof(struct LeafDescriptor) + EXTRA_BYTES;
            break;
        case COMPLEX:
            lb *= n;
            lb += EXTRA_BYTES;
            break;
    }
    if( SMALL_OBJ(lb) ) {
#       ifdef MERGE_SIZES
          lw = GC_size_map[lb];
#       else
          lw = ALIGNED_WORDS(lb);
#       endif
        opp = &(GC_arobjfreelist[lw]);
        FASTLOCK();
        if( !FASTLOCK_SUCCEEDED() || (op = *opp) == 0 ) {
            FASTUNLOCK();
            op = (ptr_t)GENERAL_MALLOC((word)lb, GC_array_kind);
            if (0 == op) return(0);
#           ifdef MERGE_SIZES
                lw = GC_size_map[lb];   /* May have been uninitialized. */
#           endif
        } else {
            *opp = obj_link(op);
            GC_words_allocd += lw;
            FASTUNLOCK();
        }
    } else {
        op = (ptr_t)GENERAL_MALLOC((word)lb, GC_array_kind);
        if (0 == op) return(0);
        lw = BYTES_TO_WORDS(GC_size(op));
    }
    if (descr_type == LEAF) {
        /* Set up the descriptor inside the object itself. */
        VOLATILE struct LeafDescriptor * lp =
            (struct LeafDescriptor *)
            ((word *)op
                 + lw - (BYTES_TO_WORDS(sizeof(struct LeafDescriptor)) + 1));

        lp -> ld_tag = LEAF_TAG;
        lp -> ld_size = leaf.ld_size;
        lp -> ld_nelements = leaf.ld_nelements;
        lp -> ld_descriptor = leaf.ld_descriptor;
        ((VOLATILE word *)op)[lw - 1] = (word)lp;
    } else {
        extern unsigned GC_finalization_failures;
        unsigned ff = GC_finalization_failures;

        ((word *)op)[lw - 1] = (word)complex_descr;
        /* Make sure the descriptor is cleared once there is any danger */
        /* it may have been collected.                                  */
        (void)
          GC_general_register_disappearing_link((GC_PTR *)
                                                    ((word *)op+lw-1),
                                                (GC_PTR) op);
        if (ff != GC_finalization_failures) {
            /* Couldn't register it due to lack of memory.  Punt.      */
            /* This will probably fail too, but gives the recovery code */
            /* a chance.                                               */
            return(GC_malloc(n*lb));
        }
    }
    return((GC_PTR) op);
}
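/*
 * Illustrative usage sketch for the array interface (likewise kept out of
 * the build): the same kind of bitmap is used to allocate a whole typed
 * vector in one call.  The structure and names are assumptions made only
 * for illustration; GC_make_descriptor() and GC_calloc_explicitly_typed()
 * are the entry points defined above.
 */
#if 0
struct example_elem {
    struct example_elem * link; /* word 0 holds a pointer  */
    GC_word payload;            /* word 1 is not a pointer */
};

static struct example_elem * new_example_vector(size_t n)
{
    GC_word bm[1];
    GC_descr d;

    bm[0] = (GC_word)1;     /* only the first word of each element is a pointer */
    d = GC_make_descriptor(bm, sizeof(struct example_elem)/sizeof(GC_word));
    /* The collector records d (or a complex descriptor built from it) */
    /* in the object itself, so only the pointer words are scanned.    */
    return((struct example_elem *)
               GC_calloc_explicitly_typed(n, sizeof(struct example_elem), d));
}
#endif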