1 /* C-compiler utilities for types and variables storage layout
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "flags.h"
29 #include "function.h"
30 #include "expr.h"
31 #include "toplev.h"
32 #include "ggc.h"
34 /* Set to one when set_sizetype has been called. */
35 static int sizetype_set;
37 /* List of types created before set_sizetype has been called. We do not
38 make this a GGC root since we want these nodes to be reclaimed. */
39 static tree early_type_list;
41 /* Data type for the expressions representing sizes of data types.
42 It is the first integer type laid out. */
43 tree sizetype_tab[(int) TYPE_KIND_LAST];
45 /* If nonzero, this is an upper limit on alignment of structure fields.
46 The value is measured in bits. */
47 unsigned int maximum_field_alignment;
49 /* If non-zero, the alignment of a bitstring or (power-)set value, in bits.
50 May be overridden by front-ends. */
51 unsigned int set_alignment = 0;
53 /* Nonzero if all REFERENCE_TYPEs are internal and hence should be
54 allocated in Pmode, not ptr_mode. Set only by internal_reference_types,
55 which is called only by a front end. */
56 static int reference_types_internal = 0;
58 static void finalize_record_size PARAMS ((record_layout_info));
59 static void finalize_type_size PARAMS ((tree));
60 static void place_union_field PARAMS ((record_layout_info, tree));
61 extern void debug_rli PARAMS ((record_layout_info));
63 /* SAVE_EXPRs for sizes of types and decls, waiting to be expanded. */
65 static tree pending_sizes;
67 /* Nonzero means cannot safely call expand_expr now,
68 so put variable sizes onto `pending_sizes' instead. */
70 int immediate_size_expand;
72 /* Show that REFERENCE_TYPES are internal and should be Pmode. Called only
73 by a front end. */
75 void
76 internal_reference_types ()
78 reference_types_internal = 1;
81 /* Get a list of all the objects put on the pending sizes list. */
83 tree
84 get_pending_sizes ()
86 tree chain = pending_sizes;
87 tree t;
89 /* Put each SAVE_EXPR into the current function. */
90 for (t = chain; t; t = TREE_CHAIN (t))
91 SAVE_EXPR_CONTEXT (TREE_VALUE (t)) = current_function_decl;
93 pending_sizes = 0;
94 return chain;
97 /* Put a chain of objects into the pending sizes list, which must be
98 empty. */
100 void
101 put_pending_sizes (chain)
102 tree chain;
104 if (pending_sizes)
105 abort ();
107 pending_sizes = chain;
110 /* Given a size SIZE that may not be a constant, return a SAVE_EXPR
111 to serve as the actual size-expression for a type or decl. */
113 tree
114 variable_size (size)
115 tree size;
117 /* If the language-processor is to take responsibility for variable-sized
118 items (e.g., languages which have elaboration procedures like Ada),
119 just return SIZE unchanged. Likewise for self-referential sizes. */
120 if (TREE_CONSTANT (size)
121 || global_bindings_p () < 0 || contains_placeholder_p (size))
122 return size;
124 size = save_expr (size);
126 /* If an array with a variable number of elements is declared, and
127 the elements require destruction, we will emit a cleanup for the
128 array. That cleanup is run both on normal exit from the block
129 and in the exception-handler for the block. Normally, when code
130 is used in both ordinary code and in an exception handler it is
131 `unsaved', i.e., all SAVE_EXPRs are recalculated. However, we do
132 not wish to do that here; the array-size is the same in both
133 places. */
134 if (TREE_CODE (size) == SAVE_EXPR)
135 SAVE_EXPR_PERSISTENT_P (size) = 1;
137 if (global_bindings_p ())
139 if (TREE_CONSTANT (size))
140 error ("type size can't be explicitly evaluated");
141 else
142 error ("variable-size type declared outside of any function");
144 return size_one_node;
147 if (immediate_size_expand)
148 /* NULL_RTX is not defined; neither is the rtx type.
149 Also, we would like to pass const0_rtx here, but don't have it. */
150 expand_expr (size, expand_expr (integer_zero_node, NULL_PTR, VOIDmode, 0),
151 VOIDmode, 0);
152 else if (cfun != 0 && cfun->x_dont_save_pending_sizes_p)
153 /* The front-end doesn't want us to keep a list of the expressions
154 that determine sizes for variable size objects. */
156 else if (TREE_CODE (size) == SAVE_EXPR)
157 pending_sizes = tree_cons (NULL_TREE, size, pending_sizes);
159 return size;
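/* A minimal usage sketch, mirroring layout_decl and finalize_type_size
   below: any size that is not an INTEGER_CST is funneled through
   variable_size exactly once, so every later use shares one SAVE_EXPR:

     if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
       DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));  */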
162 #ifndef MAX_FIXED_MODE_SIZE
163 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
164 #endif
166 /* Return the machine mode to use for a nonscalar of SIZE bits.
167 The mode must be in class CLASS, and have exactly that many bits.
168 If LIMIT is nonzero, modes of wider than MAX_FIXED_MODE_SIZE will not
169 be used. */
171 enum machine_mode
172 mode_for_size (size, class, limit)
173 unsigned int size;
174 enum mode_class class;
175 int limit;
177 register enum machine_mode mode;
179 if (limit && size > MAX_FIXED_MODE_SIZE)
180 return BLKmode;
182 /* Get the first mode which has this size, in the specified class. */
183 for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
184 mode = GET_MODE_WIDER_MODE (mode))
185 if (GET_MODE_BITSIZE (mode) == size)
186 return mode;
188 return BLKmode;
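/* For example, assuming the standard 8-bit units and mode set,
   mode_for_size (32, MODE_INT, 0) returns SImode, while a request for
   24 bits returns BLKmode because no integer mode is exactly that wide.  */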
191 /* Similar, except passed a tree node. */
193 enum machine_mode
194 mode_for_size_tree (size, class, limit)
195 tree size;
196 enum mode_class class;
197 int limit;
199 if (TREE_CODE (size) != INTEGER_CST
200 /* What we really want to say here is that the size can fit in a
201 host integer, but we know there's no way we'd find a mode for
202 this many bits, so there's no point in doing the precise test. */
203 || compare_tree_int (size, 1000) > 0)
204 return BLKmode;
205 else
206 return mode_for_size (TREE_INT_CST_LOW (size), class, limit);
209 /* Similar, but never return BLKmode; return the narrowest mode that
210 contains at least the requested number of bits. */
212 enum machine_mode
213 smallest_mode_for_size (size, class)
214 unsigned int size;
215 enum mode_class class;
217 register enum machine_mode mode;
219 /* Get the first mode which has at least this size, in the
220 specified class. */
221 for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
222 mode = GET_MODE_WIDER_MODE (mode))
223 if (GET_MODE_BITSIZE (mode) >= size)
224 return mode;
226 abort ();
229 /* Find an integer mode of the exact same size, or BLKmode on failure. */
231 enum machine_mode
232 int_mode_for_mode (mode)
233 enum machine_mode mode;
235 switch (GET_MODE_CLASS (mode))
237 case MODE_INT:
238 case MODE_PARTIAL_INT:
239 break;
241 case MODE_COMPLEX_INT:
242 case MODE_COMPLEX_FLOAT:
243 case MODE_FLOAT:
244 case MODE_VECTOR_INT:
245 case MODE_VECTOR_FLOAT:
246 mode = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 0);
247 break;
249 case MODE_RANDOM:
250 if (mode == BLKmode)
251 break;
253 /* ... fall through ... */
255 case MODE_CC:
256 default:
257 abort ();
260 return mode;
263 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
264 This can only be applied to objects of a sizetype. */
266 tree
267 round_up (value, divisor)
268 tree value;
269 int divisor;
271 tree arg = size_int_type (divisor, TREE_TYPE (value));
273 return size_binop (MULT_EXPR, size_binop (CEIL_DIV_EXPR, value, arg), arg);
276 /* Likewise, but round down. */
278 tree
279 round_down (value, divisor)
280 tree value;
281 int divisor;
283 tree arg = size_int_type (divisor, TREE_TYPE (value));
285 return size_binop (MULT_EXPR, size_binop (FLOOR_DIV_EXPR, value, arg), arg);
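/* For instance, round_up (bitsize_int (37), 8) folds to the constant 40
   (CEIL_DIV_EXPR yields 5, times 8), and round_down on the same arguments
   folds to 32.  */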
288 /* Set the size, mode and alignment of a ..._DECL node.
289 TYPE_DECL does need this for C++.
290 Note that LABEL_DECL and CONST_DECL nodes do not need this,
291 and FUNCTION_DECL nodes have them set up in a special (and simple) way.
292 Don't call layout_decl for them.
294 KNOWN_ALIGN is the amount of alignment we can assume this
295 decl has with no special effort. It is relevant only for FIELD_DECLs
296 and depends on the previous fields.
297 All that matters about KNOWN_ALIGN is which powers of 2 divide it.
298 If KNOWN_ALIGN is 0, it means, "as much alignment as you like":
299 the record will be aligned to suit. */
301 void
302 layout_decl (decl, known_align)
303 tree decl;
304 unsigned int known_align;
306 register tree type = TREE_TYPE (decl);
307 register enum tree_code code = TREE_CODE (decl);
309 if (code == CONST_DECL)
310 return;
311 else if (code != VAR_DECL && code != PARM_DECL && code != RESULT_DECL
312 && code != TYPE_DECL && code != FIELD_DECL)
313 abort ();
315 if (type == error_mark_node)
316 type = void_type_node;
318 /* Usually the size and mode come from the data type without change,
319 however, the front-end may set the explicit width of the field, so its
320 size may not be the same as the size of its type. This happens with
321 bitfields, of course (an `int' bitfield may be only 2 bits, say), but it
322 also happens with other fields. For example, the C++ front-end creates
323 zero-sized fields corresponding to empty base classes, and depends on
324 layout_type setting DECL_FIELD_BITPOS correctly for the field. Set the
325 size in bytes from the size in bits. If we have already set the mode,
326 don't set it again since we can be called twice for FIELD_DECLs. */
328 TREE_UNSIGNED (decl) = TREE_UNSIGNED (type);
329 if (DECL_MODE (decl) == VOIDmode)
330 DECL_MODE (decl) = TYPE_MODE (type);
332 if (DECL_SIZE (decl) == 0)
334 DECL_SIZE (decl) = TYPE_SIZE (type);
335 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
337 else
338 DECL_SIZE_UNIT (decl)
339 = convert (sizetype, size_binop (CEIL_DIV_EXPR, DECL_SIZE (decl),
340 bitsize_unit_node));
342 /* Force alignment required for the data type.
343 But if the decl itself wants greater alignment, don't override that.
344 Likewise, if the decl is packed, don't override it. */
345 if (! (code == FIELD_DECL && DECL_BIT_FIELD (decl))
346 && (DECL_ALIGN (decl) == 0
347 || (! (code == FIELD_DECL && DECL_PACKED (decl))
348 && TYPE_ALIGN (type) > DECL_ALIGN (decl))))
350 DECL_ALIGN (decl) = TYPE_ALIGN (type);
351 DECL_USER_ALIGN (decl) = TYPE_USER_ALIGN (type);
354 /* For fields, set the bit field type and update the alignment. */
355 if (code == FIELD_DECL)
357 DECL_BIT_FIELD_TYPE (decl) = DECL_BIT_FIELD (decl) ? type : 0;
358 if (maximum_field_alignment != 0)
359 DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), maximum_field_alignment);
360 else if (DECL_PACKED (decl))
362 DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), BITS_PER_UNIT);
363 DECL_USER_ALIGN (decl) = 0;
367 /* See if we can use an ordinary integer mode for a bit-field.
368 Conditions are: a fixed size that is correct for another mode
369 and occupying a complete byte or bytes on proper boundary. */
370 if (code == FIELD_DECL && DECL_BIT_FIELD (decl)
371 && TYPE_SIZE (type) != 0
372 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
373 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT)
375 register enum machine_mode xmode
376 = mode_for_size_tree (DECL_SIZE (decl), MODE_INT, 1);
378 if (xmode != BLKmode && known_align >= GET_MODE_ALIGNMENT (xmode))
380 DECL_ALIGN (decl) = MAX (GET_MODE_ALIGNMENT (xmode),
381 DECL_ALIGN (decl));
382 DECL_MODE (decl) = xmode;
383 DECL_BIT_FIELD (decl) = 0;
387 /* Turn off DECL_BIT_FIELD if we won't need it set. */
388 if (code == FIELD_DECL && DECL_BIT_FIELD (decl)
389 && TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
390 && known_align >= TYPE_ALIGN (type)
391 && DECL_ALIGN (decl) >= TYPE_ALIGN (type)
392 && DECL_SIZE_UNIT (decl) != 0)
393 DECL_BIT_FIELD (decl) = 0;
395 /* Evaluate nonconstant size only once, either now or as soon as safe. */
396 if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
397 DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));
398 if (DECL_SIZE_UNIT (decl) != 0
399 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST)
400 DECL_SIZE_UNIT (decl) = variable_size (DECL_SIZE_UNIT (decl));
402 /* If requested, warn about definitions of large data objects. */
403 if (warn_larger_than
404 && (code == VAR_DECL || code == PARM_DECL)
405 && ! DECL_EXTERNAL (decl))
407 tree size = DECL_SIZE_UNIT (decl);
409 if (size != 0 && TREE_CODE (size) == INTEGER_CST
410 && compare_tree_int (size, larger_than_size) > 0)
412 unsigned int size_as_int = TREE_INT_CST_LOW (size);
414 if (compare_tree_int (size, size_as_int) == 0)
415 warning_with_decl (decl, "size of `%s' is %d bytes", size_as_int);
416 else
417 warning_with_decl (decl, "size of `%s' is larger than %d bytes",
418 larger_than_size);
423 /* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
424 QUAL_UNION_TYPE. Return a pointer to a struct record_layout_info which
425 is to be passed to all other layout functions for this record. It is the
426 responsibility of the caller to call `free' for the storage returned.
427 Note that garbage collection is not permitted until we finish laying
428 out the record. */
430 record_layout_info
431 start_record_layout (t)
432 tree t;
434 record_layout_info rli
435 = (record_layout_info) xmalloc (sizeof (struct record_layout_info_s));
437 rli->t = t;
439 /* If the type has a minimum specified alignment (via an attribute
440 declaration, for example) use it -- otherwise, start with a
441 one-byte alignment. */
442 rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
443 rli->unpacked_align = rli->record_align;
444 rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);
446 #ifdef STRUCTURE_SIZE_BOUNDARY
447 /* Packed structures don't need to have minimum size. */
448 if (! TYPE_PACKED (t))
449 rli->record_align = MAX (rli->record_align, STRUCTURE_SIZE_BOUNDARY);
450 #endif
452 rli->offset = size_zero_node;
453 rli->bitpos = bitsize_zero_node;
454 rli->pending_statics = 0;
455 rli->packed_maybe_necessary = 0;
457 return rli;
460 /* These four routines perform computations that convert between
461 the offset/bitpos forms and byte and bit offsets. */
463 tree
464 bit_from_pos (offset, bitpos)
465 tree offset, bitpos;
467 return size_binop (PLUS_EXPR, bitpos,
468 size_binop (MULT_EXPR, convert (bitsizetype, offset),
469 bitsize_unit_node));
472 tree
473 byte_from_pos (offset, bitpos)
474 tree offset, bitpos;
476 return size_binop (PLUS_EXPR, offset,
477 convert (sizetype,
478 size_binop (TRUNC_DIV_EXPR, bitpos,
479 bitsize_unit_node)));
482 void
483 pos_from_byte (poffset, pbitpos, off_align, pos)
484 tree *poffset, *pbitpos;
485 unsigned int off_align;
486 tree pos;
488 *poffset
489 = size_binop (MULT_EXPR,
490 convert (sizetype,
491 size_binop (FLOOR_DIV_EXPR, pos,
492 bitsize_int (off_align
493 / BITS_PER_UNIT))),
494 size_int (off_align / BITS_PER_UNIT));
495 *pbitpos = size_binop (MULT_EXPR,
496 size_binop (FLOOR_MOD_EXPR, pos,
497 bitsize_int (off_align / BITS_PER_UNIT)),
498 bitsize_unit_node);
501 void
502 pos_from_bit (poffset, pbitpos, off_align, pos)
503 tree *poffset, *pbitpos;
504 unsigned int off_align;
505 tree pos;
507 *poffset = size_binop (MULT_EXPR,
508 convert (sizetype,
509 size_binop (FLOOR_DIV_EXPR, pos,
510 bitsize_int (off_align))),
511 size_int (off_align / BITS_PER_UNIT));
512 *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, bitsize_int (off_align));
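/* Worked example, assuming 8-bit units: with OFF_ALIGN == 32 and POS == 70
   bits, pos_from_bit stores *POFFSET == 8 (bytes) and *PBITPOS == 6 (bits);
   bit_from_pos (8, 6) then recovers 8 * 8 + 6 == 70, and byte_from_pos (8, 6)
   gives 8.  */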
515 /* Given a pointer to bit and byte offsets and an offset alignment,
516 normalize the offsets so they are within the alignment. */
518 void
519 normalize_offset (poffset, pbitpos, off_align)
520 tree *poffset, *pbitpos;
521 unsigned int off_align;
523 /* If the bit position is now larger than it should be, adjust it
524 downwards. */
525 if (compare_tree_int (*pbitpos, off_align) >= 0)
527 tree extra_aligns = size_binop (FLOOR_DIV_EXPR, *pbitpos,
528 bitsize_int (off_align));
530 *poffset
531 = size_binop (PLUS_EXPR, *poffset,
532 size_binop (MULT_EXPR, convert (sizetype, extra_aligns),
533 size_int (off_align / BITS_PER_UNIT)));
535 *pbitpos
536 = size_binop (FLOOR_MOD_EXPR, *pbitpos, bitsize_int (off_align));
540 /* Print debugging information about the information in RLI. */
542 void
543 debug_rli (rli)
544 record_layout_info rli;
546 print_node_brief (stderr, "type", rli->t, 0);
547 print_node_brief (stderr, "\noffset", rli->offset, 0);
548 print_node_brief (stderr, " bitpos", rli->bitpos, 0);
550 fprintf (stderr, "\nrec_align = %u, unpack_align = %u, off_align = %u\n",
551 rli->record_align, rli->unpacked_align, rli->offset_align);
552 if (rli->packed_maybe_necessary)
553 fprintf (stderr, "packed may be necessary\n");
555 if (rli->pending_statics)
557 fprintf (stderr, "pending statics:\n");
558 debug_tree (rli->pending_statics);
562 /* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and
563 BITPOS if necessary to keep BITPOS below OFFSET_ALIGN. */
565 void
566 normalize_rli (rli)
567 record_layout_info rli;
569 normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align);
572 /* Returns the size in bytes allocated so far. */
574 tree
575 rli_size_unit_so_far (rli)
576 record_layout_info rli;
578 return byte_from_pos (rli->offset, rli->bitpos);
581 /* Returns the size in bits allocated so far. */
583 tree
584 rli_size_so_far (rli)
585 record_layout_info rli;
587 return bit_from_pos (rli->offset, rli->bitpos);
590 /* Called from place_field to handle unions. */
592 static void
593 place_union_field (rli, field)
594 record_layout_info rli;
595 tree field;
597 unsigned int desired_align;
599 layout_decl (field, 0);
601 DECL_FIELD_OFFSET (field) = size_zero_node;
602 DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
603 SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT);
605 desired_align = DECL_ALIGN (field);
607 #ifdef BIGGEST_FIELD_ALIGNMENT
608 /* Some targets (e.g. i386) limit union field alignment
609 to a lower boundary than alignment of variables unless
610 it was overridden by attribute aligned. */
611 if (! DECL_USER_ALIGN (field))
612 desired_align =
613 MIN (desired_align, (unsigned) BIGGEST_FIELD_ALIGNMENT);
614 #endif
616 /* Union must be at least as aligned as any field requires. */
617 rli->record_align = MAX (rli->record_align, desired_align);
619 #ifdef PCC_BITFIELD_TYPE_MATTERS
620 /* On the m88000, a bit field of declared type `int' forces the
621 entire union to have `int' alignment. */
622 if (PCC_BITFIELD_TYPE_MATTERS && DECL_BIT_FIELD_TYPE (field))
623 rli->record_align = MAX (rli->record_align,
624 TYPE_ALIGN (TREE_TYPE (field)));
625 #endif
627 /* We assume the union's size will be a multiple of a byte so we don't
628 bother with BITPOS. */
629 if (TREE_CODE (rli->t) == UNION_TYPE)
630 rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field));
631 else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
632 rli->offset = fold (build (COND_EXPR, sizetype,
633 DECL_QUALIFIER (field),
634 DECL_SIZE_UNIT (field), rli->offset));
637 /* RLI contains information about the layout of a RECORD_TYPE. FIELD
638 is a FIELD_DECL to be added after those fields already present in
639 T. (FIELD is not actually added to the TYPE_FIELDS list here;
640 callers that desire that behavior must manually perform that step.) */
642 void
643 place_field (rli, field)
644 record_layout_info rli;
645 tree field;
647 /* The alignment required for FIELD. */
648 unsigned int desired_align;
649 /* The alignment FIELD would have if we just dropped it into the
650 record as it presently stands. */
651 unsigned int known_align;
652 unsigned int actual_align;
653 unsigned int user_align;
654 /* The type of this field. */
655 tree type = TREE_TYPE (field);
657 if (TREE_CODE (field) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
658 return;
660 /* If FIELD is static, then treat it like a separate variable, not
661 really like a structure field. If it is a FUNCTION_DECL, it's a
662 method. In both cases, all we do is lay out the decl, and we do
663 it *after* the record is laid out. */
664 if (TREE_CODE (field) == VAR_DECL)
666 rli->pending_statics = tree_cons (NULL_TREE, field,
667 rli->pending_statics);
668 return;
671 /* Enumerators and enum types which are local to this class need not
672 be laid out. Likewise for initialized constant fields. */
673 else if (TREE_CODE (field) != FIELD_DECL)
674 return;
676 /* Unions are laid out very differently than records, so split
677 that code off to another function. */
678 else if (TREE_CODE (rli->t) != RECORD_TYPE)
680 place_union_field (rli, field);
681 return;
684 /* Work out the known alignment so far. Note that A & (-A) is the
685 value of the least-significant bit in A that is one. */
686 if (! integer_zerop (rli->bitpos))
687 known_align = (tree_low_cst (rli->bitpos, 1)
688 & - tree_low_cst (rli->bitpos, 1));
689 else if (integer_zerop (rli->offset))
690 known_align = BIGGEST_ALIGNMENT;
691 else if (host_integerp (rli->offset, 1))
692 known_align = (BITS_PER_UNIT
693 * (tree_low_cst (rli->offset, 1)
694 & - tree_low_cst (rli->offset, 1)));
695 else
696 known_align = rli->offset_align;
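/* For example, if rli->bitpos is 24 bits, then 24 & -24 == 8, so only an
   8-bit boundary can be assumed here; if both offset and bitpos are zero,
   the field is still free to be placed at BIGGEST_ALIGNMENT.  */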
698 /* Lay out the field so we know what alignment it needs. For a
699 packed field, use the alignment as specified, disregarding what
700 the type would want. */
701 desired_align = DECL_ALIGN (field);
702 user_align = DECL_USER_ALIGN (field);
703 layout_decl (field, known_align);
704 if (! DECL_PACKED (field))
706 desired_align = DECL_ALIGN (field);
707 user_align = DECL_USER_ALIGN (field);
710 /* Some targets (e.g. i386, VMS) limit struct field alignment
711 to a lower boundary than alignment of variables unless
712 it was overridden by attribute aligned. */
713 #ifdef BIGGEST_FIELD_ALIGNMENT
714 if (! user_align)
715 desired_align
716 = MIN (desired_align, (unsigned) BIGGEST_FIELD_ALIGNMENT);
717 #endif
719 #ifdef ADJUST_FIELD_ALIGN
720 desired_align = ADJUST_FIELD_ALIGN (field, desired_align);
721 #endif
723 /* Record must have at least as much alignment as any field.
724 Otherwise, the alignment of the field within the record is
725 meaningless. */
726 #ifdef PCC_BITFIELD_TYPE_MATTERS
727 if (PCC_BITFIELD_TYPE_MATTERS && type != error_mark_node
728 && DECL_BIT_FIELD_TYPE (field)
729 && ! integer_zerop (TYPE_SIZE (type)))
731 /* For these machines, a zero-length field does not
732 affect the alignment of the structure as a whole.
733 It does, however, affect the alignment of the next field
734 within the structure. */
735 if (! integer_zerop (DECL_SIZE (field)))
736 rli->record_align = MAX (rli->record_align, desired_align);
737 else if (! DECL_PACKED (field))
738 desired_align = TYPE_ALIGN (type);
740 /* A named bit field of declared type `int'
741 forces the entire structure to have `int' alignment. */
742 if (DECL_NAME (field) != 0)
744 unsigned int type_align = TYPE_ALIGN (type);
746 if (maximum_field_alignment != 0)
747 type_align = MIN (type_align, maximum_field_alignment);
748 else if (DECL_PACKED (field))
749 type_align = MIN (type_align, BITS_PER_UNIT);
751 rli->record_align = MAX (rli->record_align, type_align);
752 if (warn_packed)
753 rli->unpacked_align = MAX (rli->unpacked_align,
754 TYPE_ALIGN (type));
757 else
758 #endif
760 rli->record_align = MAX (rli->record_align, desired_align);
761 rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
764 if (warn_packed && DECL_PACKED (field))
766 if (known_align > TYPE_ALIGN (type))
768 if (TYPE_ALIGN (type) > desired_align)
770 if (STRICT_ALIGNMENT)
771 warning_with_decl (field, "packed attribute causes inefficient alignment for `%s'");
772 else
773 warning_with_decl (field, "packed attribute is unnecessary for `%s'");
776 else
777 rli->packed_maybe_necessary = 1;
780 /* Does this field automatically have the alignment it needs by virtue
781 of the fields that precede it and the record's own alignment? */
782 if (known_align < desired_align)
784 /* No, we need to skip space before this field.
785 Bump the cumulative size to a multiple of the field alignment. */
787 if (warn_padded)
788 warning_with_decl (field, "padding struct to align `%s'");
790 /* If the alignment is still within offset_align, just align
791 the bit position. */
792 if (desired_align < rli->offset_align)
793 rli->bitpos = round_up (rli->bitpos, desired_align);
794 else
796 /* First adjust OFFSET by the partial bits, then align. */
797 rli->offset
798 = size_binop (PLUS_EXPR, rli->offset,
799 convert (sizetype,
800 size_binop (CEIL_DIV_EXPR, rli->bitpos,
801 bitsize_unit_node)));
802 rli->bitpos = bitsize_zero_node;
804 rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
807 if (! TREE_CONSTANT (rli->offset))
808 rli->offset_align = desired_align;
812 /* Handle compatibility with PCC. Note that if the record has any
813 variable-sized fields, we need not worry about compatibility. */
814 #ifdef PCC_BITFIELD_TYPE_MATTERS
815 if (PCC_BITFIELD_TYPE_MATTERS
816 && TREE_CODE (field) == FIELD_DECL
817 && type != error_mark_node
818 && DECL_BIT_FIELD (field)
819 && ! DECL_PACKED (field)
820 && maximum_field_alignment == 0
821 && ! integer_zerop (DECL_SIZE (field))
822 && host_integerp (DECL_SIZE (field), 1)
823 && host_integerp (rli->offset, 1)
824 && host_integerp (TYPE_SIZE (type), 1))
826 unsigned int type_align = TYPE_ALIGN (type);
827 tree dsize = DECL_SIZE (field);
828 HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
829 HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
830 HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);
832 /* A bit field may not span more units of alignment of its type
833 than its type itself. Advance to next boundary if necessary. */
834 if ((((offset * BITS_PER_UNIT + bit_offset + field_size +
835 type_align - 1)
836 / type_align)
837 - (offset * BITS_PER_UNIT + bit_offset) / type_align)
838 > tree_low_cst (TYPE_SIZE (type), 1) / type_align)
839 rli->bitpos = round_up (rli->bitpos, type_align);
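/* Worked example, assuming a 32-bit, 32-bit-aligned `int': a 16-bit
   bit-field placed at offset 0, bit 24 would span bits 24..39 and thus
   touch two 32-bit units, while the type itself fits in one, so rli->bitpos
   is first advanced to 32.  */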
841 #endif
843 #ifdef BITFIELD_NBYTES_LIMITED
844 if (BITFIELD_NBYTES_LIMITED
845 && TREE_CODE (field) == FIELD_DECL
846 && type != error_mark_node
847 && DECL_BIT_FIELD_TYPE (field)
848 && ! DECL_PACKED (field)
849 && ! integer_zerop (DECL_SIZE (field))
850 && host_integerp (DECL_SIZE (field), 1)
851 && host_integerp (rli->offset, 1)
852 && host_integerp (TYPE_SIZE (type), 1))
854 unsigned int type_align = TYPE_ALIGN (type);
855 tree dsize = DECL_SIZE (field);
856 HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
857 HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
858 HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);
860 if (maximum_field_alignment != 0)
861 type_align = MIN (type_align, maximum_field_alignment);
862 /* ??? This test is opposite the test in the containing if
863 statement, so this code is unreachable currently. */
864 else if (DECL_PACKED (field))
865 type_align = MIN (type_align, BITS_PER_UNIT);
867 /* A bit field may not span the unit of alignment of its type.
868 Advance to next boundary if necessary. */
869 /* ??? This code should match the code above for the
870 PCC_BITFIELD_TYPE_MATTERS case. */
871 if ((offset * BITS_PER_UNIT + bit_offset) / type_align
872 != ((offset * BITS_PER_UNIT + bit_offset + field_size - 1)
873 / type_align))
874 rli->bitpos = round_up (rli->bitpos, type_align);
876 #endif
878 /* Offset so far becomes the position of this field after normalizing. */
879 normalize_rli (rli);
880 DECL_FIELD_OFFSET (field) = rli->offset;
881 DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
882 SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
884 /* If this field ended up more aligned than we thought it would be (we
885 approximate this by seeing if its position changed), lay out the field
886 again; perhaps we can use an integral mode for it now. */
887 if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
888 actual_align = (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
889 & - tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1));
890 else if (integer_zerop (DECL_FIELD_OFFSET (field)))
891 actual_align = BIGGEST_ALIGNMENT;
892 else if (host_integerp (DECL_FIELD_OFFSET (field), 1))
893 actual_align = (BITS_PER_UNIT
894 * (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
895 & - tree_low_cst (DECL_FIELD_OFFSET (field), 1)));
896 else
897 actual_align = DECL_OFFSET_ALIGN (field);
899 if (known_align != actual_align)
900 layout_decl (field, actual_align);
902 /* Now add size of this field to the size of the record. If the size is
903 not constant, treat the field as being a multiple of bytes and just
904 adjust the offset, resetting the bit position. Otherwise, apportion the
905 size amongst the bit position and offset. First handle the case of an
906 unspecified size, which can happen when we have an invalid nested struct
907 definition, such as struct j { struct j { int i; } }. The error message
908 is printed in finish_struct. */
909 if (DECL_SIZE (field) == 0)
910 /* Do nothing. */;
911 else if (TREE_CODE (DECL_SIZE_UNIT (field)) != INTEGER_CST
912 || TREE_CONSTANT_OVERFLOW (DECL_SIZE_UNIT (field)))
914 rli->offset
915 = size_binop (PLUS_EXPR, rli->offset,
916 convert (sizetype,
917 size_binop (CEIL_DIV_EXPR, rli->bitpos,
918 bitsize_unit_node)));
919 rli->offset
920 = size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
921 rli->bitpos = bitsize_zero_node;
922 rli->offset_align = MIN (rli->offset_align, DECL_ALIGN (field));
924 else
926 rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
927 normalize_rli (rli);
931 /* Assuming that all the fields have been laid out, this function uses
932 RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type
933 indicated by RLI. */
935 static void
936 finalize_record_size (rli)
937 record_layout_info rli;
939 tree unpadded_size, unpadded_size_unit;
941 /* Now we want just byte and bit offsets, so set the offset alignment
942 to be a byte and then normalize. */
943 rli->offset_align = BITS_PER_UNIT;
944 normalize_rli (rli);
946 /* Determine the desired alignment. */
947 #ifdef ROUND_TYPE_ALIGN
948 TYPE_ALIGN (rli->t) = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
949 rli->record_align);
950 #else
951 TYPE_ALIGN (rli->t) = MAX (TYPE_ALIGN (rli->t), rli->record_align);
952 #endif
953 TYPE_USER_ALIGN (rli->t) = 1;
955 /* Compute the size so far. Be sure to allow for extra bits in the
956 size in bytes. We have guaranteed above that it will be no more
957 than a single byte. */
958 unpadded_size = rli_size_so_far (rli);
959 unpadded_size_unit = rli_size_unit_so_far (rli);
960 if (! integer_zerop (rli->bitpos))
961 unpadded_size_unit
962 = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node);
964 /* Record the un-rounded size in the binfo node. But first we check
965 the size of TYPE_BINFO to make sure that BINFO_SIZE is available. */
966 if (TYPE_BINFO (rli->t) && TREE_VEC_LENGTH (TYPE_BINFO (rli->t)) > 6)
968 TYPE_BINFO_SIZE (rli->t) = unpadded_size;
969 TYPE_BINFO_SIZE_UNIT (rli->t) = unpadded_size_unit;
972 /* Round the size up to be a multiple of the required alignment */
973 #ifdef ROUND_TYPE_SIZE
974 TYPE_SIZE (rli->t) = ROUND_TYPE_SIZE (rli->t, unpadded_size,
975 TYPE_ALIGN (rli->t));
976 TYPE_SIZE_UNIT (rli->t)
977 = ROUND_TYPE_SIZE_UNIT (rli->t, unpadded_size_unit,
978 TYPE_ALIGN (rli->t) / BITS_PER_UNIT);
979 #else
980 TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
981 TYPE_SIZE_UNIT (rli->t) = round_up (unpadded_size_unit,
982 TYPE_ALIGN (rli->t) / BITS_PER_UNIT);
983 #endif
985 if (warn_padded && TREE_CONSTANT (unpadded_size)
986 && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0)
987 warning ("padding struct size to alignment boundary");
989 if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE
990 && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary
991 && TREE_CONSTANT (unpadded_size))
993 tree unpacked_size;
995 #ifdef ROUND_TYPE_ALIGN
996 rli->unpacked_align
997 = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align);
998 #else
999 rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
1000 #endif
1002 #ifdef ROUND_TYPE_SIZE
1003 unpacked_size = ROUND_TYPE_SIZE (rli->t, TYPE_SIZE (rli->t),
1004 rli->unpacked_align);
1005 #else
1006 unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align);
1007 #endif
1009 if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
1011 TYPE_PACKED (rli->t) = 0;
1013 if (TYPE_NAME (rli->t))
1015 const char *name;
1017 if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE)
1018 name = IDENTIFIER_POINTER (TYPE_NAME (rli->t));
1019 else
1020 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (rli->t)));
1022 if (STRICT_ALIGNMENT)
1023 warning ("packed attribute causes inefficient alignment for `%s'", name);
1024 else
1025 warning ("packed attribute is unnecessary for `%s'", name);
1027 else
1029 if (STRICT_ALIGNMENT)
1030 warning ("packed attribute causes inefficient alignment");
1031 else
1032 warning ("packed attribute is unnecessary");
1038 /* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE). */
1040 void
1041 compute_record_mode (type)
1042 tree type;
1044 tree field;
1045 enum machine_mode mode = VOIDmode;
1047 /* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
1048 However, if possible, we use a mode that fits in a register
1049 instead, in order to allow for better optimization down the
1050 line. */
1051 TYPE_MODE (type) = BLKmode;
1053 if (! host_integerp (TYPE_SIZE (type), 1))
1054 return;
1056 /* A record which has any BLKmode members must itself be
1057 BLKmode; it can't go in a register. Unless the member is
1058 BLKmode only because it isn't aligned. */
1059 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1061 unsigned HOST_WIDE_INT bitpos;
1063 if (TREE_CODE (field) != FIELD_DECL)
1064 continue;
1066 if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK
1067 || (TYPE_MODE (TREE_TYPE (field)) == BLKmode
1068 && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field)))
1069 || ! host_integerp (bit_position (field), 1)
1070 || ! host_integerp (DECL_SIZE (field), 1))
1071 return;
1073 bitpos = int_bit_position (field);
1075 /* Must be BLKmode if any field crosses a word boundary,
1076 since extract_bit_field can't handle that in registers. */
1077 if (bitpos / BITS_PER_WORD
1078 != ((tree_low_cst (DECL_SIZE (field), 1) + bitpos - 1)
1079 / BITS_PER_WORD)
1080 /* But there is no problem if the field is entire words. */
1081 && tree_low_cst (DECL_SIZE (field), 1) % BITS_PER_WORD != 0)
1082 return;
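/* For example, with 32-bit words a 16-bit field at bit position 24 crosses
   the first word boundary, so the record stays BLKmode; a 64-bit field at
   position 0 also straddles words but is exempt because it occupies whole
   words.  */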
1084 /* If this field is the whole struct, remember its mode so
1085 that, say, we can put a double in a class into a DF
1086 register instead of forcing it to live in the stack. */
1087 if (simple_cst_equal (TYPE_SIZE (type), DECL_SIZE (field)))
1088 mode = DECL_MODE (field);
1090 #ifdef STRUCT_FORCE_BLK
1091 /* With some targets, e.g. c4x, it is sub-optimal
1092 to access an aligned BLKmode structure as a scalar. */
1093 if (mode == VOIDmode && STRUCT_FORCE_BLK (field))
1094 return;
1095 #endif /* STRUCT_FORCE_BLK */
1098 /* If we only have one real field, use its mode. This only applies to
1099 RECORD_TYPE. This does not apply to unions. */
1100 if (TREE_CODE (type) == RECORD_TYPE && mode != VOIDmode)
1101 TYPE_MODE (type) = mode;
1102 else
1103 TYPE_MODE (type) = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);
1105 /* If the structure's known alignment is less than what the scalar
1106 mode would need, and it matters, then stick with BLKmode. */
1107 if (TYPE_MODE (type) != BLKmode
1108 && STRICT_ALIGNMENT
1109 && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
1110 || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (TYPE_MODE (type))))
1112 /* If this is the only reason this type is BLKmode, then
1113 don't force containing types to be BLKmode. */
1114 TYPE_NO_FORCE_BLK (type) = 1;
1115 TYPE_MODE (type) = BLKmode;
1119 /* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid
1120 out. */
1122 static void
1123 finalize_type_size (type)
1124 tree type;
1126 /* Normally, use the alignment corresponding to the mode chosen.
1127 However, where strict alignment is not required, avoid
1128 over-aligning structures, since most compilers do not do this
1129 alignment. */
1131 if (TYPE_MODE (type) != BLKmode && TYPE_MODE (type) != VOIDmode
1132 && (STRICT_ALIGNMENT
1133 || (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
1134 && TREE_CODE (type) != QUAL_UNION_TYPE
1135 && TREE_CODE (type) != ARRAY_TYPE)))
1137 TYPE_ALIGN (type) = GET_MODE_ALIGNMENT (TYPE_MODE (type));
1138 TYPE_USER_ALIGN (type) = 0;
1141 /* Do machine-dependent extra alignment. */
1142 #ifdef ROUND_TYPE_ALIGN
1143 TYPE_ALIGN (type)
1144 = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT);
1145 #endif
1147 /* If we failed to find a simple way to calculate the unit size
1148 of the type, find it by division. */
1149 if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0)
1150 /* TYPE_SIZE (type) is computed in bitsizetype. After the division, the
1151 result will fit in sizetype. We will get more efficient code using
1152 sizetype, so we force a conversion. */
1153 TYPE_SIZE_UNIT (type)
1154 = convert (sizetype,
1155 size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
1156 bitsize_unit_node));
1158 if (TYPE_SIZE (type) != 0)
1160 #ifdef ROUND_TYPE_SIZE
1161 TYPE_SIZE (type)
1162 = ROUND_TYPE_SIZE (type, TYPE_SIZE (type), TYPE_ALIGN (type));
1163 TYPE_SIZE_UNIT (type)
1164 = ROUND_TYPE_SIZE_UNIT (type, TYPE_SIZE_UNIT (type),
1165 TYPE_ALIGN (type) / BITS_PER_UNIT);
1166 #else
1167 TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
1168 TYPE_SIZE_UNIT (type)
1169 = round_up (TYPE_SIZE_UNIT (type), TYPE_ALIGN (type) / BITS_PER_UNIT);
1170 #endif
1173 /* Evaluate nonconstant sizes only once, either now or as soon as safe. */
1174 if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1175 TYPE_SIZE (type) = variable_size (TYPE_SIZE (type));
1176 if (TYPE_SIZE_UNIT (type) != 0
1177 && TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)
1178 TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type));
1180 /* Also lay out any other variants of the type. */
1181 if (TYPE_NEXT_VARIANT (type)
1182 || type != TYPE_MAIN_VARIANT (type))
1184 tree variant;
1185 /* Record layout info of this variant. */
1186 tree size = TYPE_SIZE (type);
1187 tree size_unit = TYPE_SIZE_UNIT (type);
1188 unsigned int align = TYPE_ALIGN (type);
1189 unsigned int user_align = TYPE_USER_ALIGN (type);
1190 enum machine_mode mode = TYPE_MODE (type);
1192 /* Copy it into all variants. */
1193 for (variant = TYPE_MAIN_VARIANT (type);
1194 variant != 0;
1195 variant = TYPE_NEXT_VARIANT (variant))
1197 TYPE_SIZE (variant) = size;
1198 TYPE_SIZE_UNIT (variant) = size_unit;
1199 TYPE_ALIGN (variant) = align;
1200 TYPE_USER_ALIGN (variant) = user_align;
1201 TYPE_MODE (variant) = mode;
1206 /* Do all of the work required to layout the type indicated by RLI,
1207 once the fields have been laid out. This function will call `free'
1208 for RLI. */
1210 void
1211 finish_record_layout (rli)
1212 record_layout_info rli;
1214 /* Compute the final size. */
1215 finalize_record_size (rli);
1217 /* Compute the TYPE_MODE for the record. */
1218 compute_record_mode (rli->t);
1220 /* Perform any last tweaks to the TYPE_SIZE, etc. */
1221 finalize_type_size (rli->t);
1223 /* Lay out any static members. This is done now because their type
1224 may use the record's type. */
1225 while (rli->pending_statics)
1227 layout_decl (TREE_VALUE (rli->pending_statics), 0);
1228 rli->pending_statics = TREE_CHAIN (rli->pending_statics);
1231 /* Clean up. */
1232 free (rli);
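/* The RECORD_TYPE case of layout_type below is the canonical caller of
   this interface:

     rli = start_record_layout (type);
     for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
       place_field (rli, field);
     finish_record_layout (rli);  */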
1235 /* Calculate the mode, size, and alignment for TYPE.
1236 For an array type, calculate the element separation as well.
1237 Record TYPE on the chain of permanent or temporary types
1238 so that dbxout will find out about it.
1240 TYPE_SIZE of a type is nonzero if the type has been laid out already.
1241 layout_type does nothing on such a type.
1243 If the type is incomplete, its TYPE_SIZE remains zero. */
1245 void
1246 layout_type (type)
1247 tree type;
1249 if (type == 0)
1250 abort ();
1252 /* Do nothing if type has been laid out before. */
1253 if (TYPE_SIZE (type))
1254 return;
1256 switch (TREE_CODE (type))
1258 case LANG_TYPE:
1259 /* This kind of type is the responsibility
1260 of the language-specific code. */
1261 abort ();
1263 case BOOLEAN_TYPE: /* Used for Java, Pascal, and Chill. */
1264 if (TYPE_PRECISION (type) == 0)
1265 TYPE_PRECISION (type) = 1; /* default to one byte/boolean. */
1267 /* ... fall through ... */
1269 case INTEGER_TYPE:
1270 case ENUMERAL_TYPE:
1271 case CHAR_TYPE:
1272 if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
1273 && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
1274 TREE_UNSIGNED (type) = 1;
1276 TYPE_MODE (type) = smallest_mode_for_size (TYPE_PRECISION (type),
1277 MODE_INT);
1278 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1279 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1280 break;
1282 case REAL_TYPE:
1283 TYPE_MODE (type) = mode_for_size (TYPE_PRECISION (type), MODE_FLOAT, 0);
1284 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1285 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1286 break;
1288 case COMPLEX_TYPE:
1289 TREE_UNSIGNED (type) = TREE_UNSIGNED (TREE_TYPE (type));
1290 TYPE_MODE (type)
1291 = mode_for_size (2 * TYPE_PRECISION (TREE_TYPE (type)),
1292 (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
1293 ? MODE_COMPLEX_INT : MODE_COMPLEX_FLOAT),
1295 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1296 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1297 break;
1299 case VECTOR_TYPE:
1301 tree subtype;
1303 subtype = TREE_TYPE (type);
1304 TREE_UNSIGNED (type) = TREE_UNSIGNED (subtype);
1305 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1306 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1308 break;
1310 case VOID_TYPE:
1311 /* This is an incomplete type and so doesn't have a size. */
1312 TYPE_ALIGN (type) = 1;
1313 TYPE_USER_ALIGN (type) = 0;
1314 TYPE_MODE (type) = VOIDmode;
1315 break;
1317 case OFFSET_TYPE:
1318 TYPE_SIZE (type) = bitsize_int (POINTER_SIZE);
1319 TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE / BITS_PER_UNIT);
1320 TYPE_MODE (type) = ptr_mode;
1321 break;
1323 case FUNCTION_TYPE:
1324 case METHOD_TYPE:
1325 TYPE_MODE (type) = mode_for_size (2 * POINTER_SIZE, MODE_INT, 0);
1326 TYPE_SIZE (type) = bitsize_int (2 * POINTER_SIZE);
1327 TYPE_SIZE_UNIT (type) = size_int ((2 * POINTER_SIZE) / BITS_PER_UNIT);
1328 break;
1330 case POINTER_TYPE:
1331 case REFERENCE_TYPE:
1333 int nbits = ((TREE_CODE (type) == REFERENCE_TYPE
1334 && reference_types_internal)
1335 ? GET_MODE_BITSIZE (Pmode) : POINTER_SIZE);
1337 TYPE_MODE (type) = nbits == POINTER_SIZE ? ptr_mode : Pmode;
1338 TYPE_SIZE (type) = bitsize_int (nbits);
1339 TYPE_SIZE_UNIT (type) = size_int (nbits / BITS_PER_UNIT);
1340 TREE_UNSIGNED (type) = 1;
1341 TYPE_PRECISION (type) = nbits;
1343 break;
1345 case ARRAY_TYPE:
1347 register tree index = TYPE_DOMAIN (type);
1348 register tree element = TREE_TYPE (type);
1350 build_pointer_type (element);
1352 /* We need to know both bounds in order to compute the size. */
1353 if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index)
1354 && TYPE_SIZE (element))
1356 tree ub = TYPE_MAX_VALUE (index);
1357 tree lb = TYPE_MIN_VALUE (index);
1358 tree length;
1359 tree element_size;
1361 /* The initial subtraction should happen in the original type so
1362 that (possible) negative values are handled appropriately. */
1363 length = size_binop (PLUS_EXPR, size_one_node,
1364 convert (sizetype,
1365 fold (build (MINUS_EXPR,
1366 TREE_TYPE (lb),
1367 ub, lb))));
1369 /* Special handling for arrays of bits (for Chill). */
1370 element_size = TYPE_SIZE (element);
1371 if (TYPE_PACKED (type) && INTEGRAL_TYPE_P (element)
1372 && (integer_zerop (TYPE_MAX_VALUE (element))
1373 || integer_onep (TYPE_MAX_VALUE (element)))
1374 && host_integerp (TYPE_MIN_VALUE (element), 1))
1376 HOST_WIDE_INT maxvalue
1377 = tree_low_cst (TYPE_MAX_VALUE (element), 1);
1378 HOST_WIDE_INT minvalue
1379 = tree_low_cst (TYPE_MIN_VALUE (element), 1);
1381 if (maxvalue - minvalue == 1
1382 && (maxvalue == 1 || maxvalue == 0))
1383 element_size = integer_one_node;
1386 TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
1387 convert (bitsizetype, length));
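/* For example, a domain of 0 .. 9 gives LENGTH == (9 - 0) + 1 == 10, so a
   32-bit element type yields TYPE_SIZE == 320 and, below,
   TYPE_SIZE_UNIT == 40 bytes (assuming 8-bit units).  */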
1389 /* If we know the size of the element, calculate the total
1390 size directly, rather than deriving it by division below.
1391 This optimization helps Fortran assumed-size arrays
1392 (where the size of the array is determined at runtime)
1393 substantially.
1394 Note that we can't do this in the case where the size of
1395 the elements is one bit since TYPE_SIZE_UNIT cannot be
1396 set correctly in that case. */
1397 if (TYPE_SIZE_UNIT (element) != 0 && ! integer_onep (element_size))
1398 TYPE_SIZE_UNIT (type)
1399 = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length);
1402 /* Now round the alignment and size,
1403 using machine-dependent criteria if any. */
1405 #ifdef ROUND_TYPE_ALIGN
1406 TYPE_ALIGN (type)
1407 = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (element), BITS_PER_UNIT);
1408 #else
1409 TYPE_ALIGN (type) = MAX (TYPE_ALIGN (element), BITS_PER_UNIT);
1410 #endif
1412 #ifdef ROUND_TYPE_SIZE
1413 if (TYPE_SIZE (type) != 0)
1415 tree tmp
1416 = ROUND_TYPE_SIZE (type, TYPE_SIZE (type), TYPE_ALIGN (type));
1418 /* If the rounding changed the size of the type, remove any
1419 pre-calculated TYPE_SIZE_UNIT. */
1420 if (simple_cst_equal (TYPE_SIZE (type), tmp) != 1)
1421 TYPE_SIZE_UNIT (type) = NULL;
1423 TYPE_SIZE (type) = tmp;
1425 #endif
1427 TYPE_MODE (type) = BLKmode;
1428 if (TYPE_SIZE (type) != 0
1429 /* BLKmode elements force BLKmode aggregate;
1430 else extract/store fields may lose. */
1431 && (TYPE_MODE (TREE_TYPE (type)) != BLKmode
1432 || TYPE_NO_FORCE_BLK (TREE_TYPE (type))))
1434 TYPE_MODE (type)
1435 = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);
1437 if (TYPE_MODE (type) != BLKmode
1438 && STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT
1439 && TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type))
1440 && TYPE_MODE (type) != BLKmode)
1442 TYPE_NO_FORCE_BLK (type) = 1;
1443 TYPE_MODE (type) = BLKmode;
1446 break;
1449 case RECORD_TYPE:
1450 case UNION_TYPE:
1451 case QUAL_UNION_TYPE:
1453 tree field;
1454 record_layout_info rli;
1456 /* Initialize the layout information. */
1457 rli = start_record_layout (type);
1459 /* If this is a QUAL_UNION_TYPE, we want to process the fields
1460 in the reverse order in building the COND_EXPR that denotes
1461 its size. We reverse them again later. */
1462 if (TREE_CODE (type) == QUAL_UNION_TYPE)
1463 TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
1465 /* Place all the fields. */
1466 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1467 place_field (rli, field);
1469 if (TREE_CODE (type) == QUAL_UNION_TYPE)
1470 TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
1472 /* Finish laying out the record. */
1473 finish_record_layout (rli);
1475 break;
1477 case SET_TYPE: /* Used by Chill and Pascal. */
1478 if (TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST
1479 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
1480 abort();
1481 else
1483 #ifndef SET_WORD_SIZE
1484 #define SET_WORD_SIZE BITS_PER_WORD
1485 #endif
1486 unsigned int alignment
1487 = set_alignment ? set_alignment : SET_WORD_SIZE;
1488 int size_in_bits
1489 = (TREE_INT_CST_LOW (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
1490 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) + 1);
1491 int rounded_size
1492 = ((size_in_bits + alignment - 1) / alignment) * alignment;
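/* For instance, a set with domain 0 .. 99 and SET_WORD_SIZE == 32 has
   SIZE_IN_BITS == 100 and ROUNDED_SIZE == 128; since that exceeds the
   32-bit alignment, the set gets BLKmode below.  */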
1494 if (rounded_size > (int) alignment)
1495 TYPE_MODE (type) = BLKmode;
1496 else
1497 TYPE_MODE (type) = mode_for_size (alignment, MODE_INT, 1);
1499 TYPE_SIZE (type) = bitsize_int (rounded_size);
1500 TYPE_SIZE_UNIT (type) = size_int (rounded_size / BITS_PER_UNIT);
1501 TYPE_ALIGN (type) = alignment;
1502 TYPE_USER_ALIGN (type) = 0;
1503 TYPE_PRECISION (type) = size_in_bits;
1505 break;
1507 case FILE_TYPE:
1508 /* The size may vary in different languages, so the language front end
1509 should fill in the size. */
1510 TYPE_ALIGN (type) = BIGGEST_ALIGNMENT;
1511 TYPE_USER_ALIGN (type) = 0;
1512 TYPE_MODE (type) = BLKmode;
1513 break;
1515 default:
1516 abort ();
1519 /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE. For
1520 records and unions, finish_record_layout already called this
1521 function. */
1522 if (TREE_CODE (type) != RECORD_TYPE
1523 && TREE_CODE (type) != UNION_TYPE
1524 && TREE_CODE (type) != QUAL_UNION_TYPE)
1525 finalize_type_size (type);
1527 /* If this type is created before sizetype has been permanently set,
1528 record it so set_sizetype can fix it up. */
1529 if (! sizetype_set)
1530 early_type_list = tree_cons (NULL_TREE, type, early_type_list);
1532 /* If an alias set has been set for this aggregate when it was incomplete,
1533 force it into alias set 0.
1534 This is too conservative, but we cannot call record_component_aliases
1535 here because some frontends still change the aggregates after
1536 layout_type. */
1537 if (AGGREGATE_TYPE_P (type) && TYPE_ALIAS_SET_KNOWN_P (type))
1538 TYPE_ALIAS_SET (type) = 0;
1541 /* Create and return a type for signed integers of PRECISION bits. */
1543 tree
1544 make_signed_type (precision)
1545 int precision;
1547 register tree type = make_node (INTEGER_TYPE);
1549 TYPE_PRECISION (type) = precision;
1551 fixup_signed_type (type);
1552 return type;
1555 /* Create and return a type for unsigned integers of PRECISION bits. */
1557 tree
1558 make_unsigned_type (precision)
1559 int precision;
1561 register tree type = make_node (INTEGER_TYPE);
1563 TYPE_PRECISION (type) = precision;
1565 fixup_unsigned_type (type);
1566 return type;
1569 /* Initialize sizetype and bitsizetype to a reasonable and temporary
1570 value to enable integer types to be created. */
1572 void
1573 initialize_sizetypes ()
1575 tree t = make_node (INTEGER_TYPE);
1577 /* Set this so we do something reasonable for the build_int_2 calls
1578 below. */
1579 integer_type_node = t;
1581 TYPE_MODE (t) = SImode;
1582 TYPE_ALIGN (t) = GET_MODE_ALIGNMENT (SImode);
1583 TYPE_USER_ALIGN (t) = 0;
1584 TYPE_SIZE (t) = build_int_2 (GET_MODE_BITSIZE (SImode), 0);
1585 TYPE_SIZE_UNIT (t) = build_int_2 (GET_MODE_SIZE (SImode), 0);
1586 TREE_UNSIGNED (t) = 1;
1587 TYPE_PRECISION (t) = GET_MODE_BITSIZE (SImode);
1588 TYPE_MIN_VALUE (t) = build_int_2 (0, 0);
1589 TYPE_IS_SIZETYPE (t) = 1;
1591 /* 1000 avoids problems with possible overflow and is certainly
1592 larger than any size value we'd want to be storing. */
1593 TYPE_MAX_VALUE (t) = build_int_2 (1000, 0);
1595 /* These two must be different nodes because of the caching done in
1596 size_int_wide. */
1597 sizetype = t;
1598 bitsizetype = copy_node (t);
1599 integer_type_node = 0;
1602 /* Set sizetype to TYPE, and initialize *sizetype accordingly.
1603 Also update the type of any standard type's sizes made so far. */
1605 void
1606 set_sizetype (type)
1607 tree type;
1609 int oprecision = TYPE_PRECISION (type);
1610 /* The *bitsizetype types use a precision that avoids overflows when
1611 calculating signed sizes / offsets in bits. However, when
1612 cross-compiling from a 32 bit to a 64 bit host, we are limited to 64 bit
1613 precision. */
1614 int precision = MIN (oprecision + BITS_PER_UNIT_LOG + 1,
1615 2 * HOST_BITS_PER_WIDE_INT);
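/* For example, with 8-bit units (BITS_PER_UNIT_LOG == 3) and a 32-bit
   sizetype, PRECISION becomes MIN (32 + 3 + 1, 2 * HOST_BITS_PER_WIDE_INT)
   == 36, enough to express any byte size in bits with a sign bit to
   spare.  */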
1616 unsigned int i;
1617 tree t;
1619 if (sizetype_set)
1620 abort ();
1622 /* Make copies of nodes since we'll be setting TYPE_IS_SIZETYPE. */
1623 sizetype = copy_node (type);
1624 TYPE_DOMAIN (sizetype) = type;
1625 TYPE_IS_SIZETYPE (sizetype) = 1;
1626 bitsizetype = make_node (INTEGER_TYPE);
1627 TYPE_NAME (bitsizetype) = TYPE_NAME (type);
1628 TYPE_PRECISION (bitsizetype) = precision;
1629 TYPE_IS_SIZETYPE (bitsizetype) = 1;
1631 if (TREE_UNSIGNED (type))
1632 fixup_unsigned_type (bitsizetype);
1633 else
1634 fixup_signed_type (bitsizetype);
1636 layout_type (bitsizetype);
1638 if (TREE_UNSIGNED (type))
1640 usizetype = sizetype;
1641 ubitsizetype = bitsizetype;
1642 ssizetype = copy_node (make_signed_type (oprecision));
1643 sbitsizetype = copy_node (make_signed_type (precision));
1645 else
1647 ssizetype = sizetype;
1648 sbitsizetype = bitsizetype;
1649 usizetype = copy_node (make_unsigned_type (oprecision));
1650 ubitsizetype = copy_node (make_unsigned_type (precision));
1653 TYPE_NAME (bitsizetype) = get_identifier ("bit_size_type");
1655 /* Show that each is a sizetype, is its own main variant, and has no pointers to it. */
1656 for (i = 0; i < ARRAY_SIZE (sizetype_tab); i++)
1658 TYPE_IS_SIZETYPE (sizetype_tab[i]) = 1;
1659 TYPE_MAIN_VARIANT (sizetype_tab[i]) = sizetype_tab[i];
1660 TYPE_NEXT_VARIANT (sizetype_tab[i]) = 0;
1661 TYPE_POINTER_TO (sizetype_tab[i]) = 0;
1662 TYPE_REFERENCE_TO (sizetype_tab[i]) = 0;
1665 ggc_add_tree_root ((tree *) &sizetype_tab,
1666 sizeof sizetype_tab / sizeof (tree));
1668 /* Go down each of the types we already made and set the proper type
1669 for the sizes in them. */
1670 for (t = early_type_list; t != 0; t = TREE_CHAIN (t))
1672 if (TREE_CODE (TREE_VALUE (t)) != INTEGER_TYPE)
1673 abort ();
1675 TREE_TYPE (TYPE_SIZE (TREE_VALUE (t))) = bitsizetype;
1676 TREE_TYPE (TYPE_SIZE_UNIT (TREE_VALUE (t))) = sizetype;
1679 early_type_list = 0;
1680 sizetype_set = 1;
1683 /* Set the extreme values of TYPE based on its precision in bits,
1684 then lay it out. Used when make_signed_type won't do
1685 because the tree code is not INTEGER_TYPE.
1686 E.g. for Pascal, when the -fsigned-char option is given. */
1688 void
1689 fixup_signed_type (type)
1690 tree type;
1692 register int precision = TYPE_PRECISION (type);
1694 TYPE_MIN_VALUE (type)
1695 = build_int_2 ((precision - HOST_BITS_PER_WIDE_INT > 0
1696 ? 0 : (HOST_WIDE_INT) (-1) << (precision - 1)),
1697 (((HOST_WIDE_INT) (-1)
1698 << (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
1699 ? precision - HOST_BITS_PER_WIDE_INT - 1
1700 : 0))));
1701 TYPE_MAX_VALUE (type)
1702 = build_int_2 ((precision - HOST_BITS_PER_WIDE_INT > 0
1703 ? -1 : ((HOST_WIDE_INT) 1 << (precision - 1)) - 1),
1704 (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
1705 ? (((HOST_WIDE_INT) 1
1706 << (precision - HOST_BITS_PER_WIDE_INT - 1))) - 1
1707 : 0));
1709 TREE_TYPE (TYPE_MIN_VALUE (type)) = type;
1710 TREE_TYPE (TYPE_MAX_VALUE (type)) = type;
1712 /* Lay out the type: set its alignment, size, etc. */
1713 layout_type (type);
1716 /* Set the extreme values of TYPE based on its precision in bits,
1717 then lay it out. This is used both in `make_unsigned_type'
1718 and for enumeral types. */
1720 void
1721 fixup_unsigned_type (type)
1722 tree type;
1724 register int precision = TYPE_PRECISION (type);
1726 TYPE_MIN_VALUE (type) = build_int_2 (0, 0);
1727 TYPE_MAX_VALUE (type)
1728 = build_int_2 (precision - HOST_BITS_PER_WIDE_INT >= 0
1729 ? -1 : ((HOST_WIDE_INT) 1 << precision) - 1,
1730 precision - HOST_BITS_PER_WIDE_INT > 0
1731 ? ((unsigned HOST_WIDE_INT) ~0
1732 >> (HOST_BITS_PER_WIDE_INT
1733 - (precision - HOST_BITS_PER_WIDE_INT)))
1734 : 0);
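/* For instance, with PRECISION == 16 and a 64-bit HOST_WIDE_INT this
   produces build_int_2 (65535, 0), i.e. 2**16 - 1.  */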
1735 TREE_TYPE (TYPE_MIN_VALUE (type)) = type;
1736 TREE_TYPE (TYPE_MAX_VALUE (type)) = type;
1738 /* Lay out the type: set its alignment, size, etc. */
1739 layout_type (type);
1742 /* Find the best machine mode to use when referencing a bit field of length
1743 BITSIZE bits starting at BITPOS.
1745 The underlying object is known to be aligned to a boundary of ALIGN bits.
1746 If LARGEST_MODE is not VOIDmode, it means that we should not use a mode
1747 larger than LARGEST_MODE (usually SImode).
1749 If no mode meets all these conditions, we return VOIDmode. Otherwise, if
1750 VOLATILEP is true or SLOW_BYTE_ACCESS is false, we return the smallest
1751 mode meeting these conditions.
1753 Otherwise (VOLATILEP is false and SLOW_BYTE_ACCESS is true), we return
1754 the largest mode (but a mode no wider than UNITS_PER_WORD) that meets
1755 all the conditions. */
1757 enum machine_mode
1758 get_best_mode (bitsize, bitpos, align, largest_mode, volatilep)
1759 int bitsize, bitpos;
1760 unsigned int align;
1761 enum machine_mode largest_mode;
1762 int volatilep;
1764 enum machine_mode mode;
1765 unsigned int unit = 0;
1767 /* Find the narrowest integer mode that contains the bit field. */
1768 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1769 mode = GET_MODE_WIDER_MODE (mode))
1771 unit = GET_MODE_BITSIZE (mode);
1772 if ((bitpos % unit) + bitsize <= unit)
1773 break;
1776 if (mode == VOIDmode
1777 /* It is tempting to omit the following line
1778 if STRICT_ALIGNMENT is true.
1779 But that is incorrect, since if the bitfield uses part of 3 bytes
1780 and we use a 4-byte mode, we could get a spurious segv
1781 if the extra 4th byte is past the end of memory.
1782 (Though at least one Unix compiler ignores this problem:
1783 that on the Sequent 386 machine.)  */
1784 || MIN (unit, BIGGEST_ALIGNMENT) > align
1785 || (largest_mode != VOIDmode && unit > GET_MODE_BITSIZE (largest_mode)))
1786 return VOIDmode;
1788 if (SLOW_BYTE_ACCESS && ! volatilep)
1790 enum machine_mode wide_mode = VOIDmode, tmode;
1792 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); tmode != VOIDmode;
1793 tmode = GET_MODE_WIDER_MODE (tmode))
1795 unit = GET_MODE_BITSIZE (tmode);
1796 if (bitpos / unit == (bitpos + bitsize - 1) / unit
1797 && unit <= BITS_PER_WORD
1798 && unit <= MIN (align, BIGGEST_ALIGNMENT)
1799 && (largest_mode == VOIDmode
1800 || unit <= GET_MODE_BITSIZE (largest_mode)))
1801 wide_mode = tmode;
1804 if (wide_mode != VOIDmode)
1805 return wide_mode;
1808 return mode;
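/* For example, get_best_mode (8, 4, 32, VOIDmode, 0) -- an 8-bit field at
   bit 4 of a 32-bit-aligned, non-volatile object -- returns SImode when
   SLOW_BYTE_ACCESS is set and HImode otherwise, assuming 32-bit words and
   no LARGEST_MODE restriction.  */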
1811 /* Return the alignment of MODE. This will be bounded by 1 and
1812 BIGGEST_ALIGNMENT. */
1814 unsigned int
1815 get_mode_alignment (mode)
1816 enum machine_mode mode;
1818 unsigned int alignment = GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT;
1820 /* Extract the LSB of the size. */
1821 alignment = alignment & -alignment;
1823 alignment = MIN (BIGGEST_ALIGNMENT, MAX (1, alignment));
1824 return alignment;
1827 /* This function is run once to initialize stor-layout.c. */
1829 void
1830 init_stor_layout_once ()
1832 ggc_add_tree_root (&pending_sizes, 1);