[official-gcc.git] / boehm-gc / typd_mlc.c
/*
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 * Copyright (c) 1999-2000 by Hewlett-Packard Company.  All rights reserved.
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED.  ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 */
/*
 * Some simple primitives for allocation with explicit type information.
 * Simple objects are allocated such that they contain a GC_descr at the
 * end (in the last allocated word).  This descriptor may be a procedure
 * which then examines an extended descriptor passed as its environment.
 *
 * Arrays are treated as simple objects if they have sufficiently simple
 * structure.  Otherwise they are allocated from an array kind that supplies
 * a special mark procedure.  These arrays contain a pointer to a
 * complex_descriptor as their last word.
 * This is done because the environment field is too small, and the collector
 * must trace the complex_descriptor.
 *
 * Note that descriptors inside objects may appear cleared, if we encounter a
 * false reference to an object on a free list.  In the GC_descr case, this
 * is OK, since a 0 descriptor corresponds to examining no fields.
 * In the complex_descriptor case, we explicitly check for that case.
 *
 * MAJOR PARTS OF THIS CODE HAVE NOT BEEN TESTED AT ALL and are not testable,
 * since they are not accessible through the current interface.
 */
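/*
 * Illustrative sketch only (not part of this file): a typical client builds
 * a one-bit-per-word bitmap for its type, converts it to a GC_descr once
 * with GC_make_descriptor(), and then allocates with
 * GC_malloc_explicitly_typed().  The struct and variable names below are
 * hypothetical.
 *
 *      struct example_node { struct example_node *next; GC_word data; };
 *      GC_word bm[1] = { 0 };
 *      GC_descr d;
 *      struct example_node *p;
 *
 *      bm[0] |= (GC_word)1 << 0;       -- word 0 ("next") holds a pointer
 *      d = GC_make_descriptor(bm, 2);  -- the object is 2 words long
 *      p = (struct example_node *)
 *              GC_malloc_explicitly_typed(sizeof(struct example_node), d);
 */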
#include "private/gc_pmark.h"
#include "gc_typed.h"

# define TYPD_EXTRA_BYTES (sizeof(word) - EXTRA_BYTES)
GC_bool GC_explicit_typing_initialized = FALSE;

int GC_explicit_kind;   /* Object kind for objects with indirect       */
                        /* (possibly extended) descriptors.            */

int GC_array_kind;      /* Object kind for objects with complex        */
                        /* descriptors and GC_array_mark_proc.         */
/* Extended descriptors.  GC_typed_mark_proc understands these.        */
/* These are used for simple objects that are larger than what         */
/* can be described by a BITMAP_BITS sized bitmap.                     */
typedef struct {
        word ed_bitmap;         /* lsb corresponds to first word.      */
        GC_bool ed_continued;   /* next entry is continuation.         */
} ext_descr;
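/* A bitmap that does not fit in one word occupies consecutive ext_descr */
/* entries; every entry but the last has ed_continued set, and           */
/* GC_typed_mark_proc re-pushes itself with env+1 to process the rest.   */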
/* Array descriptors.  GC_array_mark_proc understands these.   */
/* We may eventually need to add provisions for headers and    */
/* trailers.  Hence we provide for tree structured descriptors,*/
/* though we don't really use them currently.                  */
typedef union ComplexDescriptor {
    struct LeafDescriptor {     /* Describes simple array      */
        word ld_tag;
#       define LEAF_TAG 1
        word ld_size;           /* bytes per element           */
                                /* multiple of ALIGNMENT       */
        word ld_nelements;      /* Number of elements.         */
        GC_descr ld_descriptor; /* A simple length, bitmap,    */
                                /* or procedure descriptor.    */
    } ld;
    struct ComplexArrayDescriptor {
        word ad_tag;
#       define ARRAY_TAG 2
        word ad_nelements;
        union ComplexDescriptor * ad_element_descr;
    } ad;
    struct SequenceDescriptor {
        word sd_tag;
#       define SEQUENCE_TAG 3
        union ComplexDescriptor * sd_first;
        union ComplexDescriptor * sd_second;
    } sd;
} complex_descriptor;
#define TAG ld.ld_tag
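/* A LeafDescriptor covers nelements contiguous elements, each described   */
/* by one simple GC_descr.  A ComplexArrayDescriptor repeats an arbitrary  */
/* element descriptor, and a SequenceDescriptor concatenates two           */
/* descriptors (e.g. a leading leaf followed by a single trailing element).*/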
ext_descr * GC_ext_descriptors;         /* Points to array of extended  */
                                        /* descriptors.                 */

word GC_ed_size = 0;    /* Current size of above arrays.        */
# define ED_INITIAL_SIZE 100

word GC_avail_descr = 0;        /* Next available slot.         */

int GC_typed_mark_proc_index;   /* Indices of my mark           */
int GC_array_mark_proc_index;   /* procedures.                  */
/* Add a multiword bitmap to GC_ext_descriptors arrays.  Return */
/* starting index.                                              */
/* Returns -1 on failure.                                       */
/* Caller does not hold allocation lock.                        */
signed_word GC_add_ext_descriptor(bm, nbits)
GC_bitmap bm;
word nbits;
{
    register size_t nwords = divWORDSZ(nbits + WORDSZ-1);
    register signed_word result;
    register word i;
    register word last_part;
    register int extra_bits;
    DCL_LOCK_STATE;

    DISABLE_SIGNALS();
    LOCK();
    while (GC_avail_descr + nwords >= GC_ed_size) {
        ext_descr * new;
        size_t new_size;
        word ed_size = GC_ed_size;

        UNLOCK();
        ENABLE_SIGNALS();
        if (ed_size == 0) {
            new_size = ED_INITIAL_SIZE;
        } else {
            new_size = 2 * ed_size;
            if (new_size > MAX_ENV) return(-1);
        }
        new = (ext_descr *) GC_malloc_atomic(new_size * sizeof(ext_descr));
        if (new == 0) return(-1);
        DISABLE_SIGNALS();
        LOCK();
        if (ed_size == GC_ed_size) {
            if (GC_avail_descr != 0) {
                BCOPY(GC_ext_descriptors, new,
                      GC_avail_descr * sizeof(ext_descr));
            }
            GC_ed_size = new_size;
            GC_ext_descriptors = new;
        }  /* else another thread already resized it in the meantime */
    }
    result = GC_avail_descr;
    for (i = 0; i < nwords-1; i++) {
        GC_ext_descriptors[result + i].ed_bitmap = bm[i];
        GC_ext_descriptors[result + i].ed_continued = TRUE;
    }
    last_part = bm[i];
    /* Clear irrelevant bits. */
    extra_bits = nwords * WORDSZ - nbits;
    last_part <<= extra_bits;
    last_part >>= extra_bits;
    GC_ext_descriptors[result + i].ed_bitmap = last_part;
    GC_ext_descriptors[result + i].ed_continued = FALSE;
    GC_avail_descr += nwords;
    UNLOCK();
    ENABLE_SIGNALS();
    return(result);
}
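/* Note on the resize loop above: the table is grown with the allocation */
/* lock dropped, since GC_malloc_atomic may itself need it.  After the   */
/* lock is reacquired, GC_ed_size is rechecked so that a concurrent      */
/* resize by another thread is not overwritten.                          */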
/* Table of bitmap descriptors for n word long all pointer objects.    */
GC_descr GC_bm_table[WORDSZ/2];

/* Return a descriptor for the concatenation of 2 nwords long objects, */
/* each of which is described by descriptor.                           */
/* The result is known to be short enough to fit into a bitmap         */
/* descriptor.                                                         */
/* Descriptor is a GC_DS_LENGTH or GC_DS_BITMAP descriptor.            */
GC_descr GC_double_descr(descriptor, nwords)
register GC_descr descriptor;
register word nwords;
{
    if ((descriptor & GC_DS_TAGS) == GC_DS_LENGTH) {
        descriptor = GC_bm_table[BYTES_TO_WORDS((word)descriptor)];
    }
    descriptor |= (descriptor & ~GC_DS_TAGS) >> nwords;
    return(descriptor);
}
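/* In a bitmap descriptor the most significant bit corresponds to the    */
/* first word, so shifting a copy of the bitmap right by nwords appends  */
/* a second, identically described object directly after the first.      */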
complex_descriptor * GC_make_sequence_descriptor();

/* Build a descriptor for an array with nelements elements,    */
/* each of which can be described by a simple descriptor.      */
/* We try to optimize some common cases.                       */
/* If the result is COMPLEX, then a complex_descr* is returned */
/* in *complex_d.                                              */
/* If the result is LEAF, then we built a LeafDescriptor in    */
/* the structure pointed to by leaf.                           */
/* The tag in the leaf structure is not set.                   */
/* If the result is SIMPLE, then a GC_descr                    */
/* is returned in *simple_d.                                   */
/* If the result is NO_MEM, then                               */
/* we failed to allocate the descriptor.                       */
/* The implementation knows that GC_DS_LENGTH is 0.            */
/* *leaf, *complex_d, and *simple_d may be used as temporaries */
/* during the construction.                                    */
# define COMPLEX 2
# define LEAF 1
# define SIMPLE 0
# define NO_MEM (-1)
int GC_make_array_descriptor(nelements, size, descriptor,
                             simple_d, complex_d, leaf)
word size;
word nelements;
GC_descr descriptor;
GC_descr *simple_d;
complex_descriptor **complex_d;
struct LeafDescriptor * leaf;
{
#   define OPT_THRESHOLD 50
        /* For larger arrays, we try to combine descriptors of adjacent */
        /* descriptors to speed up marking, and to reduce the amount    */
        /* of space needed on the mark stack.                           */
    if ((descriptor & GC_DS_TAGS) == GC_DS_LENGTH) {
      if ((word)descriptor == size) {
        *simple_d = nelements * descriptor;
        return(SIMPLE);
      } else if ((word)descriptor == 0) {
        *simple_d = (GC_descr)0;
        return(SIMPLE);
      }
    }
    if (nelements <= OPT_THRESHOLD) {
      if (nelements <= 1) {
        if (nelements == 1) {
            *simple_d = descriptor;
            return(SIMPLE);
        } else {
            *simple_d = (GC_descr)0;
            return(SIMPLE);
        }
      }
    } else if (size <= BITMAP_BITS/2
               && (descriptor & GC_DS_TAGS) != GC_DS_PROC
               && (size & (sizeof(word)-1)) == 0) {
      int result =
          GC_make_array_descriptor(nelements/2, 2*size,
                                   GC_double_descr(descriptor,
                                                   BYTES_TO_WORDS(size)),
                                   simple_d, complex_d, leaf);
      if ((nelements & 1) == 0) {
          return(result);
      } else {
          struct LeafDescriptor * one_element =
              (struct LeafDescriptor *)
                GC_malloc_atomic(sizeof(struct LeafDescriptor));

          if (result == NO_MEM || one_element == 0) return(NO_MEM);
          one_element -> ld_tag = LEAF_TAG;
          one_element -> ld_size = size;
          one_element -> ld_nelements = 1;
          one_element -> ld_descriptor = descriptor;
          switch(result) {
            case SIMPLE:
            {
              struct LeafDescriptor * beginning =
                  (struct LeafDescriptor *)
                    GC_malloc_atomic(sizeof(struct LeafDescriptor));
              if (beginning == 0) return(NO_MEM);
              beginning -> ld_tag = LEAF_TAG;
              beginning -> ld_size = size;
              beginning -> ld_nelements = 1;
              beginning -> ld_descriptor = *simple_d;
              *complex_d = GC_make_sequence_descriptor(
                                (complex_descriptor *)beginning,
                                (complex_descriptor *)one_element);
              break;
            }
            case LEAF:
            {
              struct LeafDescriptor * beginning =
                  (struct LeafDescriptor *)
                    GC_malloc_atomic(sizeof(struct LeafDescriptor));
              if (beginning == 0) return(NO_MEM);
              beginning -> ld_tag = LEAF_TAG;
              beginning -> ld_size = leaf -> ld_size;
              beginning -> ld_nelements = leaf -> ld_nelements;
              beginning -> ld_descriptor = leaf -> ld_descriptor;
              *complex_d = GC_make_sequence_descriptor(
                                (complex_descriptor *)beginning,
                                (complex_descriptor *)one_element);
              break;
            }
            case COMPLEX:
              *complex_d = GC_make_sequence_descriptor(
                                *complex_d,
                                (complex_descriptor *)one_element);
              break;
          }
          return(COMPLEX);
      }
    }

    leaf -> ld_size = size;
    leaf -> ld_nelements = nelements;
    leaf -> ld_descriptor = descriptor;
    return(LEAF);
}
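/* The recursion above repeatedly doubles the element size and halves the */
/* element count while the doubled element still fits in a bitmap         */
/* descriptor; an odd leftover element is appended as a one-element leaf  */
/* via GC_make_sequence_descriptor.                                       */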
complex_descriptor * GC_make_sequence_descriptor(first, second)
complex_descriptor * first;
complex_descriptor * second;
{
    struct SequenceDescriptor * result =
        (struct SequenceDescriptor *)
                GC_malloc(sizeof(struct SequenceDescriptor));
    /* Can't result in overly conservative marking, since tags are     */
    /* very small integers. Probably faster than maintaining type      */
    /* info.                                                           */
    if (result != 0) {
        result -> sd_tag = SEQUENCE_TAG;
        result -> sd_first = first;
        result -> sd_second = second;
    }
    return((complex_descriptor *)result);
}
#ifdef UNDEFINED
complex_descriptor * GC_make_complex_array_descriptor(nelements, descr)
word nelements;
complex_descriptor * descr;
{
    struct ComplexArrayDescriptor * result =
        (struct ComplexArrayDescriptor *)
                GC_malloc(sizeof(struct ComplexArrayDescriptor));

    if (result != 0) {
        result -> ad_tag = ARRAY_TAG;
        result -> ad_nelements = nelements;
        result -> ad_element_descr = descr;
    }
    return((complex_descriptor *)result);
}
#endif
ptr_t * GC_eobjfreelist;

ptr_t * GC_arobjfreelist;

mse * GC_typed_mark_proc GC_PROTO((register word * addr,
                                   register mse * mark_stack_ptr,
                                   mse * mark_stack_limit,
                                   word env));

mse * GC_array_mark_proc GC_PROTO((register word * addr,
                                   register mse * mark_stack_ptr,
                                   mse * mark_stack_limit,
                                   word env));
/* Caller does not hold allocation lock. */
void GC_init_explicit_typing()
{
    register int i;
    DCL_LOCK_STATE;

#   ifdef PRINTSTATS
        if (sizeof(struct LeafDescriptor) % sizeof(word) != 0)
            ABORT("Bad leaf descriptor size");
#   endif
    DISABLE_SIGNALS();
    LOCK();
    if (GC_explicit_typing_initialized) {
        UNLOCK();
        ENABLE_SIGNALS();
        return;
    }
    GC_explicit_typing_initialized = TRUE;
    /* Set up object kind with simple indirect descriptor. */
    GC_eobjfreelist = (ptr_t *)GC_new_free_list_inner();
    GC_explicit_kind = GC_new_kind_inner(
                            (void **)GC_eobjfreelist,
                            (((word)WORDS_TO_BYTES(-1)) | GC_DS_PER_OBJECT),
                            TRUE, TRUE);
                /* Descriptors are in the last word of the object. */
    GC_typed_mark_proc_index = GC_new_proc_inner(GC_typed_mark_proc);
    /* Set up object kind with array descriptor. */
    GC_arobjfreelist = (ptr_t *)GC_new_free_list_inner();
    GC_array_mark_proc_index = GC_new_proc_inner(GC_array_mark_proc);
    GC_array_kind = GC_new_kind_inner(
                            (void **)GC_arobjfreelist,
                            GC_MAKE_PROC(GC_array_mark_proc_index, 0),
                            FALSE, TRUE);
    for (i = 0; i < WORDSZ/2; i++) {
        GC_descr d = (((word)(-1)) >> (WORDSZ - i)) << (WORDSZ - i);
        d |= GC_DS_BITMAP;
        GC_bm_table[i] = d;
    }
    UNLOCK();
    ENABLE_SIGNALS();
}
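/* GC_bm_table[i], as initialized above, is a bitmap descriptor for an   */
/* object whose first i words are all pointers: the i most significant   */
/* bits of the descriptor are set.                                       */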
# if defined(__STDC__) || defined(__cplusplus)
    mse * GC_typed_mark_proc(register word * addr,
                             register mse * mark_stack_ptr,
                             mse * mark_stack_limit,
                             word env)
# else
    mse * GC_typed_mark_proc(addr, mark_stack_ptr, mark_stack_limit, env)
    register word * addr;
    register mse * mark_stack_ptr;
    mse * mark_stack_limit;
    word env;
# endif
{
    register word bm = GC_ext_descriptors[env].ed_bitmap;
    register word * current_p = addr;
    register word current;
    register ptr_t greatest_ha = GC_greatest_plausible_heap_addr;
    register ptr_t least_ha = GC_least_plausible_heap_addr;

    for (; bm != 0; bm >>= 1, current_p++) {
        if (bm & 1) {
            current = *current_p;
            FIXUP_POINTER(current);
            if ((ptr_t)current >= least_ha && (ptr_t)current <= greatest_ha) {
                PUSH_CONTENTS((ptr_t)current, mark_stack_ptr,
                              mark_stack_limit, current_p, exit1);
            }
        }
    }
    if (GC_ext_descriptors[env].ed_continued) {
        /* Push an entry with the rest of the descriptor back onto the */
        /* stack.  Thus we never do too much work at once.  Note that  */
        /* we also can't overflow the mark stack unless we actually    */
        /* mark something.                                             */
        mark_stack_ptr++;
        if (mark_stack_ptr >= mark_stack_limit) {
            mark_stack_ptr = GC_signal_mark_stack_overflow(mark_stack_ptr);
        }
        mark_stack_ptr -> mse_start = addr + WORDSZ;
        mark_stack_ptr -> mse_descr =
                GC_MAKE_PROC(GC_typed_mark_proc_index, env+1);
    }
    return(mark_stack_ptr);
}
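/* The env argument of GC_typed_mark_proc is an index into               */
/* GC_ext_descriptors.  Each invocation handles one bitmap word, i.e.    */
/* WORDSZ words of the object; the continuation entry pushed above       */
/* resumes at addr + WORDSZ with descriptor index env + 1.               */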
/* Return the size of the object described by d.  It would be faster to */
/* store this directly, or to compute it as part of                     */
/* GC_push_complex_descriptor, but hopefully it doesn't matter.         */
word GC_descr_obj_size(d)
register complex_descriptor *d;
{
    switch(d -> TAG) {
      case LEAF_TAG:
        return(d -> ld.ld_nelements * d -> ld.ld_size);
      case ARRAY_TAG:
        return(d -> ad.ad_nelements
               * GC_descr_obj_size(d -> ad.ad_element_descr));
      case SEQUENCE_TAG:
        return(GC_descr_obj_size(d -> sd.sd_first)
               + GC_descr_obj_size(d -> sd.sd_second));
      default:
        ABORT("Bad complex descriptor");
        /*NOTREACHED*/ return 0;
    }
}
/* Push descriptors for the object at addr with complex descriptor d   */
/* onto the mark stack.  Return 0 if the mark stack overflowed.        */
mse * GC_push_complex_descriptor(addr, d, msp, msl)
word * addr;
register complex_descriptor *d;
register mse * msp;
mse * msl;
{
    register ptr_t current = (ptr_t) addr;
    register word nelements;
    register word sz;
    register word i;

    switch(d -> TAG) {
      case LEAF_TAG:
        {
          register GC_descr descr = d -> ld.ld_descriptor;

          nelements = d -> ld.ld_nelements;
          if (msl - msp <= (ptrdiff_t)nelements) return(0);
          sz = d -> ld.ld_size;
          for (i = 0; i < nelements; i++) {
              msp++;
              msp -> mse_start = (word *)current;
              msp -> mse_descr = descr;
              current += sz;
          }
          return(msp);
        }
      case ARRAY_TAG:
        {
          register complex_descriptor *descr = d -> ad.ad_element_descr;

          nelements = d -> ad.ad_nelements;
          sz = GC_descr_obj_size(descr);
          for (i = 0; i < nelements; i++) {
              msp = GC_push_complex_descriptor((word *)current, descr,
                                               msp, msl);
              if (msp == 0) return(0);
              current += sz;
          }
          return(msp);
        }
      case SEQUENCE_TAG:
        {
          sz = GC_descr_obj_size(d -> sd.sd_first);
          msp = GC_push_complex_descriptor((word *)current, d -> sd.sd_first,
                                           msp, msl);
          if (msp == 0) return(0);
          current += sz;
          msp = GC_push_complex_descriptor((word *)current, d -> sd.sd_second,
                                           msp, msl);
          return(msp);
        }
      default:
        ABORT("Bad complex descriptor");
        /*NOTREACHED*/ return 0;
    }
}
/*ARGSUSED*/
# if defined(__STDC__) || defined(__cplusplus)
    mse * GC_array_mark_proc(register word * addr,
                             register mse * mark_stack_ptr,
                             mse * mark_stack_limit,
                             word env)
# else
    mse * GC_array_mark_proc(addr, mark_stack_ptr, mark_stack_limit, env)
    register word * addr;
    register mse * mark_stack_ptr;
    mse * mark_stack_limit;
    word env;
# endif
{
    register hdr * hhdr = HDR(addr);
    register word sz = hhdr -> hb_sz;
    register complex_descriptor * descr = (complex_descriptor *)(addr[sz-1]);
    mse * orig_mark_stack_ptr = mark_stack_ptr;
    mse * new_mark_stack_ptr;

    if (descr == 0) {
        /* Found a reference to a free list entry.  Ignore it. */
        return(orig_mark_stack_ptr);
    }
    /* In use counts were already updated when array descriptor was    */
    /* pushed.  Here we only replace it by subobject descriptors, so   */
    /* no update is necessary.                                         */
    new_mark_stack_ptr = GC_push_complex_descriptor(addr, descr,
                                                    mark_stack_ptr,
                                                    mark_stack_limit-1);
    if (new_mark_stack_ptr == 0) {
        /* Doesn't fit.  Conservatively push the whole array as a unit */
        /* and request a mark stack expansion.                         */
        /* This cannot cause a mark stack overflow, since it replaces  */
        /* the original array entry.                                   */
        GC_mark_stack_too_small = TRUE;
        new_mark_stack_ptr = orig_mark_stack_ptr + 1;
        new_mark_stack_ptr -> mse_start = addr;
        new_mark_stack_ptr -> mse_descr = WORDS_TO_BYTES(sz) | GC_DS_LENGTH;
    } else {
        /* Push descriptor itself */
        new_mark_stack_ptr++;
        new_mark_stack_ptr -> mse_start = addr + sz - 1;
        new_mark_stack_ptr -> mse_descr = sizeof(word) | GC_DS_LENGTH;
    }
    return(new_mark_stack_ptr);
}
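/* GC_array_mark_proc passes mark_stack_limit-1 above so that one mark   */
/* stack slot always remains free for re-pushing the descriptor word     */
/* itself.                                                               */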
#if defined(__STDC__) || defined(__cplusplus)
  GC_descr GC_make_descriptor(GC_bitmap bm, size_t len)
#else
  GC_descr GC_make_descriptor(bm, len)
  GC_bitmap bm;
  size_t len;
#endif
{
    register signed_word last_set_bit = len - 1;
    register word result;
    register int i;
#   define HIGH_BIT (((word)1) << (WORDSZ - 1))

    if (!GC_explicit_typing_initialized) GC_init_explicit_typing();
    while (last_set_bit >= 0 && !GC_get_bit(bm, last_set_bit)) last_set_bit --;
    if (last_set_bit < 0) return(0 /* no pointers */);
#   if ALIGNMENT == CPP_WORDSZ/8
    {
      register GC_bool all_bits_set = TRUE;
      for (i = 0; i < last_set_bit; i++) {
        if (!GC_get_bit(bm, i)) {
            all_bits_set = FALSE;
            break;
        }
      }
      if (all_bits_set) {
        /* An initial section contains all pointers.  Use length descriptor. */
        return(WORDS_TO_BYTES(last_set_bit+1) | GC_DS_LENGTH);
      }
    }
#   endif
    if (last_set_bit < BITMAP_BITS) {
        /* Hopefully the common case.                   */
        /* Build bitmap descriptor (with bits reversed) */
        result = HIGH_BIT;
        for (i = last_set_bit - 1; i >= 0; i--) {
            result >>= 1;
            if (GC_get_bit(bm, i)) result |= HIGH_BIT;
        }
        result |= GC_DS_BITMAP;
        return(result);
    } else {
        signed_word index;

        index = GC_add_ext_descriptor(bm, (word)last_set_bit+1);
        if (index == -1) return(WORDS_TO_BYTES(last_set_bit+1) | GC_DS_LENGTH);
                        /* Out of memory: use conservative      */
                        /* approximation.                       */
        result = GC_MAKE_PROC(GC_typed_mark_proc_index, (word)index);
        return(result);
    }
}
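/* Summary of the possible results above, for illustration: when pointer  */
/* alignment equals the word size and the set bits form an initial run of */
/* length k, a simple length descriptor (WORDS_TO_BYTES(k) | GC_DS_LENGTH)*/
/* is returned; a bitmap shorter than BITMAP_BITS is reversed into the    */
/* high-order bits of a GC_DS_BITMAP descriptor; anything longer becomes  */
/* a GC_DS_PROC descriptor that runs GC_typed_mark_proc with an index     */
/* into GC_ext_descriptors.                                               */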
ptr_t GC_clear_stack();

#define GENERAL_MALLOC(lb,k) \
    (GC_PTR)GC_clear_stack(GC_generic_malloc((word)lb, k))

#define GENERAL_MALLOC_IOP(lb,k) \
    (GC_PTR)GC_clear_stack(GC_generic_malloc_ignore_off_page(lb, k))
#if defined(__STDC__) || defined(__cplusplus)
  void * GC_malloc_explicitly_typed(size_t lb, GC_descr d)
#else
  char * GC_malloc_explicitly_typed(lb, d)
  size_t lb;
  GC_descr d;
#endif
{
    register ptr_t op;
    register ptr_t * opp;
    register word lw;
    DCL_LOCK_STATE;

    lb += TYPD_EXTRA_BYTES;
    if( SMALL_OBJ(lb) ) {
#       ifdef MERGE_SIZES
          lw = GC_size_map[lb];
#       else
          lw = ALIGNED_WORDS(lb);
#       endif
        opp = &(GC_eobjfreelist[lw]);
        FASTLOCK();
        if( !FASTLOCK_SUCCEEDED() || (op = *opp) == 0 ) {
            FASTUNLOCK();
            op = (ptr_t)GENERAL_MALLOC((word)lb, GC_explicit_kind);
            if (0 == op) return 0;
#           ifdef MERGE_SIZES
                lw = GC_size_map[lb];   /* May have been uninitialized. */
#           endif
        } else {
            *opp = obj_link(op);
            obj_link(op) = 0;
            GC_words_allocd += lw;
            FASTUNLOCK();
        }
    } else {
        op = (ptr_t)GENERAL_MALLOC((word)lb, GC_explicit_kind);
        if (op != NULL)
            lw = BYTES_TO_WORDS(GC_size(op));
    }
    if (op != NULL)
        ((word *)op)[lw - 1] = d;
    return((GC_PTR) op);
}
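/* The descriptor d is stored in the last word of the object; the        */
/* TYPD_EXTRA_BYTES added to the request above reserve the space for it. */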
#if defined(__STDC__) || defined(__cplusplus)
  void * GC_malloc_explicitly_typed_ignore_off_page(size_t lb, GC_descr d)
#else
  char * GC_malloc_explicitly_typed_ignore_off_page(lb, d)
  size_t lb;
  GC_descr d;
#endif
{
    register ptr_t op;
    register ptr_t * opp;
    register word lw;
    DCL_LOCK_STATE;

    lb += TYPD_EXTRA_BYTES;
    if( SMALL_OBJ(lb) ) {
#       ifdef MERGE_SIZES
          lw = GC_size_map[lb];
#       else
          lw = ALIGNED_WORDS(lb);
#       endif
        opp = &(GC_eobjfreelist[lw]);
        FASTLOCK();
        if( !FASTLOCK_SUCCEEDED() || (op = *opp) == 0 ) {
            FASTUNLOCK();
            op = (ptr_t)GENERAL_MALLOC_IOP(lb, GC_explicit_kind);
#           ifdef MERGE_SIZES
                lw = GC_size_map[lb];   /* May have been uninitialized. */
#           endif
        } else {
            *opp = obj_link(op);
            obj_link(op) = 0;
            GC_words_allocd += lw;
            FASTUNLOCK();
        }
    } else {
        op = (ptr_t)GENERAL_MALLOC_IOP(lb, GC_explicit_kind);
        if (op != NULL)
            lw = BYTES_TO_WORDS(GC_size(op));
    }
    if (op != NULL)
        ((word *)op)[lw - 1] = d;
    return((GC_PTR) op);
}
#if defined(__STDC__) || defined(__cplusplus)
  void * GC_calloc_explicitly_typed(size_t n,
                                    size_t lb,
                                    GC_descr d)
#else
  char * GC_calloc_explicitly_typed(n, lb, d)
  size_t n;
  size_t lb;
  GC_descr d;
#endif
{
    register ptr_t op;
    register ptr_t * opp;
    register word lw;
    GC_descr simple_descr;
    complex_descriptor *complex_descr;
    register int descr_type;
    struct LeafDescriptor leaf;
    DCL_LOCK_STATE;

    descr_type = GC_make_array_descriptor((word)n, (word)lb, d,
                                          &simple_descr, &complex_descr, &leaf);
    switch(descr_type) {
        case NO_MEM: return(0);
        case SIMPLE: return(GC_malloc_explicitly_typed(n*lb, simple_descr));
        case LEAF:
            lb *= n;
            lb += sizeof(struct LeafDescriptor) + TYPD_EXTRA_BYTES;
            break;
        case COMPLEX:
            lb *= n;
            lb += TYPD_EXTRA_BYTES;
            break;
    }
    if( SMALL_OBJ(lb) ) {
#       ifdef MERGE_SIZES
          lw = GC_size_map[lb];
#       else
          lw = ALIGNED_WORDS(lb);
#       endif
        opp = &(GC_arobjfreelist[lw]);
        FASTLOCK();
        if( !FASTLOCK_SUCCEEDED() || (op = *opp) == 0 ) {
            FASTUNLOCK();
            op = (ptr_t)GENERAL_MALLOC((word)lb, GC_array_kind);
            if (0 == op) return(0);
#           ifdef MERGE_SIZES
                lw = GC_size_map[lb];   /* May have been uninitialized. */
#           endif
        } else {
            *opp = obj_link(op);
            obj_link(op) = 0;
            GC_words_allocd += lw;
            FASTUNLOCK();
        }
    } else {
        op = (ptr_t)GENERAL_MALLOC((word)lb, GC_array_kind);
        if (0 == op) return(0);
        lw = BYTES_TO_WORDS(GC_size(op));
    }
    if (descr_type == LEAF) {
        /* Set up the descriptor inside the object itself. */
        VOLATILE struct LeafDescriptor * lp =
            (struct LeafDescriptor *)
            ((word *)op
             + lw - (BYTES_TO_WORDS(sizeof(struct LeafDescriptor)) + 1));

        lp -> ld_tag = LEAF_TAG;
        lp -> ld_size = leaf.ld_size;
        lp -> ld_nelements = leaf.ld_nelements;
        lp -> ld_descriptor = leaf.ld_descriptor;
        ((VOLATILE word *)op)[lw - 1] = (word)lp;
    } else {
        extern unsigned GC_finalization_failures;
        unsigned ff = GC_finalization_failures;

        ((word *)op)[lw - 1] = (word)complex_descr;
        /* Make sure the descriptor is cleared once there is any danger */
        /* it may have been collected.                                  */
        (void)
          GC_general_register_disappearing_link((GC_PTR *)
                                                  ((word *)op+lw-1),
                                                  (GC_PTR) op);
        if (ff != GC_finalization_failures) {
            /* Couldn't register it due to lack of memory.  Punt.       */
            /* This will probably fail too, but gives the recovery code */
            /* a chance.                                                */
            return(GC_malloc(n*lb));
        }
    }
    return((GC_PTR) op);
}
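/* For LEAF results the descriptor is copied into the object itself, just */
/* ahead of the final word that points to it.  For COMPLEX results the    */
/* last word points to a separately allocated complex_descriptor, and a   */
/* disappearing link is registered so that the pointer is cleared once    */
/* the object itself is no longer reachable.                              */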