gcc/stor-layout.c
1 /* C-compiler utilities for types and variables storage layout
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1998,
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "flags.h"
29 #include "function.h"
30 #include "expr.h"
31 #include "toplev.h"
32 #include "ggc.h"
34 /* Set to one when set_sizetype has been called. */
35 static int sizetype_set;
37 /* List of types created before set_sizetype has been called. We do not
38 make this a GGC root since we want these nodes to be reclaimed. */
39 static tree early_type_list;
41 /* Data type for the expressions representing sizes of data types.
42 It is the first integer type laid out. */
43 tree sizetype_tab[(int) TYPE_KIND_LAST];
45 /* If nonzero, this is an upper limit on alignment of structure fields.
46 The value is measured in bits. */
47 unsigned int maximum_field_alignment;
49 /* If non-zero, the alignment of a bitstring or (power-)set value, in bits.
50 May be overridden by front-ends. */
51 unsigned int set_alignment = 0;
53 static void finalize_record_size PARAMS ((record_layout_info));
54 static void finalize_type_size PARAMS ((tree));
55 static void place_union_field PARAMS ((record_layout_info, tree));
56 extern void debug_rli PARAMS ((record_layout_info));
58 /* SAVE_EXPRs for sizes of types and decls, waiting to be expanded. */
60 static tree pending_sizes;
62 /* Nonzero means cannot safely call expand_expr now,
63 so put variable sizes onto `pending_sizes' instead. */
65 int immediate_size_expand;
67 /* Get a list of all the objects put on the pending sizes list. */
69 tree
70 get_pending_sizes ()
72 tree chain = pending_sizes;
73 tree t;
75 /* Put each SAVE_EXPR into the current function. */
76 for (t = chain; t; t = TREE_CHAIN (t))
77 SAVE_EXPR_CONTEXT (TREE_VALUE (t)) = current_function_decl;
79 pending_sizes = 0;
80 return chain;
83 /* Put a chain of objects into the pending sizes list, which must be
84 empty. */
86 void
87 put_pending_sizes (chain)
88 tree chain;
90 if (pending_sizes)
91 abort ();
93 pending_sizes = chain;
96 /* Given a size SIZE that may not be a constant, return a SAVE_EXPR
97 to serve as the actual size-expression for a type or decl. */
99 tree
100 variable_size (size)
101 tree size;
103 /* If the language-processor is to take responsibility for variable-sized
104 items (e.g., languages which have elaboration procedures like Ada),
105 just return SIZE unchanged. Likewise for self-referential sizes. */
106 if (TREE_CONSTANT (size)
107 || global_bindings_p () < 0 || contains_placeholder_p (size))
108 return size;
110 size = save_expr (size);
112 /* If an array with a variable number of elements is declared, and
113 the elements require destruction, we will emit a cleanup for the
114 array. That cleanup is run both on normal exit from the block
115 and in the exception-handler for the block. Normally, when code
116 is used in both ordinary code and in an exception handler it is
117 `unsaved', i.e., all SAVE_EXPRs are recalculated. However, we do
118 not wish to do that here; the array-size is the same in both
119 places. */
120 if (TREE_CODE (size) == SAVE_EXPR)
121 SAVE_EXPR_PERSISTENT_P (size) = 1;
123 if (global_bindings_p ())
125 if (TREE_CONSTANT (size))
126 error ("type size can't be explicitly evaluated");
127 else
128 error ("variable-size type declared outside of any function");
130 return size_one_node;
133 if (immediate_size_expand)
134 /* NULL_RTX is not defined; neither is the rtx type.
135 Also, we would like to pass const0_rtx here, but don't have it. */
136 expand_expr (size, expand_expr (integer_zero_node, NULL_PTR, VOIDmode, 0),
137 VOIDmode, 0);
138 else if (cfun != 0 && cfun->x_dont_save_pending_sizes_p)
139 /* The front-end doesn't want us to keep a list of the expressions
140 that determine sizes for variable size objects. */
142 else if (TREE_CODE (size) == SAVE_EXPR)
143 pending_sizes = tree_cons (NULL_TREE, size, pending_sizes);
145 return size;
148 #ifndef MAX_FIXED_MODE_SIZE
149 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
150 #endif
152 /* Return the machine mode to use for a nonscalar of SIZE bits.
153 The mode must be in class CLASS, and have exactly that many bits.
154 If LIMIT is nonzero, modes of wider than MAX_FIXED_MODE_SIZE will not
155 be used. */
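/* Illustrative example, assuming a typical 32-bit target where QImode,
   HImode, SImode and DImode are 8, 16, 32 and 64 bits wide:
   mode_for_size (32, MODE_INT, 0) would return SImode, while a request
   for 24 bits would return BLKmode because no integer mode has exactly
   that width.  */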
157 enum machine_mode
158 mode_for_size (size, class, limit)
159 unsigned int size;
160 enum mode_class class;
161 int limit;
163 register enum machine_mode mode;
165 if (limit && size > MAX_FIXED_MODE_SIZE)
166 return BLKmode;
168 /* Get the first mode which has this size, in the specified class. */
169 for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
170 mode = GET_MODE_WIDER_MODE (mode))
171 if (GET_MODE_BITSIZE (mode) == size)
172 return mode;
174 return BLKmode;
177 /* Similar, except passed a tree node. */
179 enum machine_mode
180 mode_for_size_tree (size, class, limit)
181 tree size;
182 enum mode_class class;
183 int limit;
185 if (TREE_CODE (size) != INTEGER_CST
186 /* What we really want to say here is that the size can fit in a
187 host integer, but we know there's no way we'd find a mode for
188 this many bits, so there's no point in doing the precise test. */
189 || compare_tree_int (size, 1000) > 0)
190 return BLKmode;
191 else
192 return mode_for_size (TREE_INT_CST_LOW (size), class, limit);
195 /* Similar, but never return BLKmode; return the narrowest mode that
196 contains at least the requested number of bits. */
198 enum machine_mode
199 smallest_mode_for_size (size, class)
200 unsigned int size;
201 enum mode_class class;
203 register enum machine_mode mode;
205 /* Get the first mode which has at least this size, in the
206 specified class. */
207 for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
208 mode = GET_MODE_WIDER_MODE (mode))
209 if (GET_MODE_BITSIZE (mode) >= size)
210 return mode;
212 abort ();
215 /* Find an integer mode of the exact same size, or BLKmode on failure. */
217 enum machine_mode
218 int_mode_for_mode (mode)
219 enum machine_mode mode;
221 switch (GET_MODE_CLASS (mode))
223 case MODE_INT:
224 case MODE_PARTIAL_INT:
225 break;
227 case MODE_COMPLEX_INT:
228 case MODE_COMPLEX_FLOAT:
229 case MODE_FLOAT:
230 case MODE_VECTOR_INT:
231 case MODE_VECTOR_FLOAT:
232 mode = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 0);
233 break;
235 case MODE_RANDOM:
236 if (mode == BLKmode)
237 break;
239 /* ... fall through ... */
241 case MODE_CC:
242 default:
243 abort ();
246 return mode;
249 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
250 This can only be applied to objects of a sizetype. */
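/* Worked example: with VALUE the sizetype constant 37 and DIVISOR 8,
   the CEIL_DIV_EXPR yields 5 and the result is the constant 40.  */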
252 tree
253 round_up (value, divisor)
254 tree value;
255 int divisor;
257 tree arg = size_int_type (divisor, TREE_TYPE (value));
259 return size_binop (MULT_EXPR, size_binop (CEIL_DIV_EXPR, value, arg), arg);
262 /* Likewise, but round down. */
264 tree
265 round_down (value, divisor)
266 tree value;
267 int divisor;
269 tree arg = size_int_type (divisor, TREE_TYPE (value));
271 return size_binop (MULT_EXPR, size_binop (FLOOR_DIV_EXPR, value, arg), arg);
274 /* Set the size, mode and alignment of a ..._DECL node.
275 TYPE_DECL does need this for C++.
276 Note that LABEL_DECL and CONST_DECL nodes do not need this,
277 and FUNCTION_DECL nodes have them set up in a special (and simple) way.
278 Don't call layout_decl for them.
280 KNOWN_ALIGN is the amount of alignment we can assume this
281 decl has with no special effort. It is relevant only for FIELD_DECLs
282 and depends on the previous fields.
283 All that matters about KNOWN_ALIGN is which powers of 2 divide it.
284 If KNOWN_ALIGN is 0, it means, "as much alignment as you like":
285 the record will be aligned to suit. */
287 void
288 layout_decl (decl, known_align)
289 tree decl;
290 unsigned int known_align;
292 register tree type = TREE_TYPE (decl);
293 register enum tree_code code = TREE_CODE (decl);
295 if (code == CONST_DECL)
296 return;
297 else if (code != VAR_DECL && code != PARM_DECL && code != RESULT_DECL
298 && code != TYPE_DECL && code != FIELD_DECL)
299 abort ();
301 if (type == error_mark_node)
302 type = void_type_node;
304 /* Usually the size and mode come from the data type without change,
305 however, the front-end may set the explicit width of the field, so its
306 size may not be the same as the size of its type. This happens with
307 bitfields, of course (an `int' bitfield may be only 2 bits, say), but it
308 also happens with other fields. For example, the C++ front-end creates
309 zero-sized fields corresponding to empty base classes, and depends on
310 layout_type setting DECL_FIELD_BITPOS correctly for the field. Set the
311 size in bytes from the size in bits. If we have already set the mode,
312 don't set it again since we can be called twice for FIELD_DECLs. */
314 TREE_UNSIGNED (decl) = TREE_UNSIGNED (type);
315 if (DECL_MODE (decl) == VOIDmode)
316 DECL_MODE (decl) = TYPE_MODE (type);
318 if (DECL_SIZE (decl) == 0)
320 DECL_SIZE (decl) = TYPE_SIZE (type);
321 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
323 else
324 DECL_SIZE_UNIT (decl)
325 = convert (sizetype, size_binop (CEIL_DIV_EXPR, DECL_SIZE (decl),
326 bitsize_unit_node));
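/* For example, assuming BITS_PER_UNIT is 8, a field whose DECL_SIZE is the
   19-bit constant gets a DECL_SIZE_UNIT of 3, since 19 divided by 8 rounds
   up to 3.  */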
328 /* Force alignment required for the data type.
329 But if the decl itself wants greater alignment, don't override that.
330 Likewise, if the decl is packed, don't override it. */
331 if (! (code == FIELD_DECL && DECL_BIT_FIELD (decl))
332 && (DECL_ALIGN (decl) == 0
333 || (! (code == FIELD_DECL && DECL_PACKED (decl))
334 && TYPE_ALIGN (type) > DECL_ALIGN (decl))))
336 DECL_ALIGN (decl) = TYPE_ALIGN (type);
337 DECL_USER_ALIGN (decl) = TYPE_USER_ALIGN (type);
340 /* For fields, set the bit field type and update the alignment. */
341 if (code == FIELD_DECL)
343 DECL_BIT_FIELD_TYPE (decl) = DECL_BIT_FIELD (decl) ? type : 0;
344 if (maximum_field_alignment != 0)
345 DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), maximum_field_alignment);
346 else if (DECL_PACKED (decl))
348 DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), BITS_PER_UNIT);
349 DECL_USER_ALIGN (decl) = 0;
353 /* See if we can use an ordinary integer mode for a bit-field.
354 Conditions are: a fixed size that is correct for another mode
355 and occupying a complete byte or bytes on proper boundary. */
356 if (code == FIELD_DECL && DECL_BIT_FIELD (decl)
357 && TYPE_SIZE (type) != 0
358 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
359 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT)
361 register enum machine_mode xmode
362 = mode_for_size_tree (DECL_SIZE (decl), MODE_INT, 1);
364 if (xmode != BLKmode && known_align >= GET_MODE_ALIGNMENT (xmode))
366 DECL_ALIGN (decl) = MAX (GET_MODE_ALIGNMENT (xmode),
367 DECL_ALIGN (decl));
368 DECL_MODE (decl) = xmode;
369 DECL_BIT_FIELD (decl) = 0;
373 /* Turn off DECL_BIT_FIELD if we won't need it set. */
374 if (code == FIELD_DECL && DECL_BIT_FIELD (decl)
375 && TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
376 && known_align >= TYPE_ALIGN (type)
377 && DECL_ALIGN (decl) >= TYPE_ALIGN (type)
378 && DECL_SIZE_UNIT (decl) != 0)
379 DECL_BIT_FIELD (decl) = 0;
381 /* Evaluate nonconstant size only once, either now or as soon as safe. */
382 if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
383 DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));
384 if (DECL_SIZE_UNIT (decl) != 0
385 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST)
386 DECL_SIZE_UNIT (decl) = variable_size (DECL_SIZE_UNIT (decl));
388 /* If requested, warn about definitions of large data objects. */
389 if (warn_larger_than
390 && (code == VAR_DECL || code == PARM_DECL)
391 && ! DECL_EXTERNAL (decl))
393 tree size = DECL_SIZE_UNIT (decl);
395 if (size != 0 && TREE_CODE (size) == INTEGER_CST
396 && compare_tree_int (size, larger_than_size) > 0)
398 unsigned int size_as_int = TREE_INT_CST_LOW (size);
400 if (compare_tree_int (size, size_as_int) == 0)
401 warning_with_decl (decl, "size of `%s' is %d bytes", size_as_int);
402 else
403 warning_with_decl (decl, "size of `%s' is larger than %d bytes",
404 larger_than_size);
409 /* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
410 QUAL_UNION_TYPE. Return a pointer to a struct record_layout_info which
411 is to be passed to all other layout functions for this record. It is the
412 responsibility of the caller to call `free' for the storage returned.
413 Note that garbage collection is not permitted until we finish laying
414 out the record. */
416 record_layout_info
417 start_record_layout (t)
418 tree t;
420 record_layout_info rli
421 = (record_layout_info) xmalloc (sizeof (struct record_layout_info_s));
423 rli->t = t;
425 /* If the type has a minimum specified alignment (via an attribute
426 declaration, for example) use it -- otherwise, start with a
427 one-byte alignment. */
428 rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
429 rli->unpacked_align = rli->record_align;
430 rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);
432 #ifdef STRUCTURE_SIZE_BOUNDARY
433 /* Packed structures don't need to have minimum size. */
434 if (! TYPE_PACKED (t))
435 rli->record_align = MAX (rli->record_align, STRUCTURE_SIZE_BOUNDARY);
436 #endif
438 rli->offset = size_zero_node;
439 rli->bitpos = bitsize_zero_node;
440 rli->pending_statics = 0;
441 rli->packed_maybe_necessary = 0;
443 return rli;
446 /* These four routines perform computations that convert between
447 the offset/bitpos forms and byte and bit offsets. */
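/* Worked example, assuming BITS_PER_UNIT is 8: an offset of 3 bytes and a
   bitpos of 5 bits denote bit position 3 * 8 + 5 = 29; bit_from_pos
   returns 29 and byte_from_pos returns 3, since the 5 remaining bits
   truncate away in the byte division.  */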
449 tree
450 bit_from_pos (offset, bitpos)
451 tree offset, bitpos;
453 return size_binop (PLUS_EXPR, bitpos,
454 size_binop (MULT_EXPR, convert (bitsizetype, offset),
455 bitsize_unit_node));
458 tree
459 byte_from_pos (offset, bitpos)
460 tree offset, bitpos;
462 return size_binop (PLUS_EXPR, offset,
463 convert (sizetype,
464 size_binop (TRUNC_DIV_EXPR, bitpos,
465 bitsize_unit_node)));
468 void
469 pos_from_byte (poffset, pbitpos, off_align, pos)
470 tree *poffset, *pbitpos;
471 unsigned int off_align;
472 tree pos;
474 *poffset
475 = size_binop (MULT_EXPR,
476 convert (sizetype,
477 size_binop (FLOOR_DIV_EXPR, pos,
478 bitsize_int (off_align
479 / BITS_PER_UNIT))),
480 size_int (off_align / BITS_PER_UNIT));
481 *pbitpos = size_binop (MULT_EXPR,
482 size_binop (FLOOR_MOD_EXPR, pos,
483 bitsize_int (off_align / BITS_PER_UNIT)),
484 bitsize_unit_node);
487 void
488 pos_from_bit (poffset, pbitpos, off_align, pos)
489 tree *poffset, *pbitpos;
490 unsigned int off_align;
491 tree pos;
493 *poffset = size_binop (MULT_EXPR,
494 convert (sizetype,
495 size_binop (FLOOR_DIV_EXPR, pos,
496 bitsize_int (off_align))),
497 size_int (off_align / BITS_PER_UNIT));
498 *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, bitsize_int (off_align));
501 /* Given a pointer to bit and byte offsets and an offset alignment,
502 normalize the offsets so they are within the alignment. */
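/* Worked example, assuming BITS_PER_UNIT is 8: with OFF_ALIGN 32, a bit
   position of 75 contains 75 / 32 = 2 whole alignment units, so the byte
   offset grows by 2 * (32 / 8) = 8 and the bit position becomes
   75 mod 32 = 11.  */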
504 void
505 normalize_offset (poffset, pbitpos, off_align)
506 tree *poffset, *pbitpos;
507 unsigned int off_align;
509 /* If the bit position is now larger than it should be, adjust it
510 downwards. */
511 if (compare_tree_int (*pbitpos, off_align) >= 0)
513 tree extra_aligns = size_binop (FLOOR_DIV_EXPR, *pbitpos,
514 bitsize_int (off_align));
516 *poffset
517 = size_binop (PLUS_EXPR, *poffset,
518 size_binop (MULT_EXPR, convert (sizetype, extra_aligns),
519 size_int (off_align / BITS_PER_UNIT)));
521 *pbitpos
522 = size_binop (FLOOR_MOD_EXPR, *pbitpos, bitsize_int (off_align));
526 /* Print debugging information about the information in RLI. */
528 void
529 debug_rli (rli)
530 record_layout_info rli;
532 print_node_brief (stderr, "type", rli->t, 0);
533 print_node_brief (stderr, "\noffset", rli->offset, 0);
534 print_node_brief (stderr, " bitpos", rli->bitpos, 0);
536 fprintf (stderr, "\nrec_align = %u, unpack_align = %u, off_align = %u\n",
537 rli->record_align, rli->unpacked_align, rli->offset_align);
538 if (rli->packed_maybe_necessary)
539 fprintf (stderr, "packed may be necessary\n");
541 if (rli->pending_statics)
543 fprintf (stderr, "pending statics:\n");
544 debug_tree (rli->pending_statics);
548 /* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and
549 BITPOS if necessary to keep BITPOS below OFFSET_ALIGN. */
551 void
552 normalize_rli (rli)
553 record_layout_info rli;
555 normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align);
558 /* Returns the size in bytes allocated so far. */
560 tree
561 rli_size_unit_so_far (rli)
562 record_layout_info rli;
564 return byte_from_pos (rli->offset, rli->bitpos);
567 /* Returns the size in bits allocated so far. */
569 tree
570 rli_size_so_far (rli)
571 record_layout_info rli;
573 return bit_from_pos (rli->offset, rli->bitpos);
576 /* Called from place_field to handle unions. */
578 static void
579 place_union_field (rli, field)
580 record_layout_info rli;
581 tree field;
583 unsigned int desired_align;
585 layout_decl (field, 0);
587 DECL_FIELD_OFFSET (field) = size_zero_node;
588 DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
589 SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT);
591 desired_align = DECL_ALIGN (field);
593 #ifdef BIGGEST_FIELD_ALIGNMENT
594 /* Some targets (e.g. i386) limit union field alignment
595 to a lower boundary than alignment of variables unless
596 it was overridden by attribute aligned. */
597 if (! DECL_USER_ALIGN (field))
598 desired_align =
599 MIN (desired_align, (unsigned) BIGGEST_FIELD_ALIGNMENT);
600 #endif
602 /* Union must be at least as aligned as any field requires. */
603 rli->record_align = MAX (rli->record_align, desired_align);
605 #ifdef PCC_BITFIELD_TYPE_MATTERS
606 /* On the m88000, a bit field of declared type `int' forces the
607 entire union to have `int' alignment. */
608 if (PCC_BITFIELD_TYPE_MATTERS && DECL_BIT_FIELD_TYPE (field))
609 rli->record_align = MAX (rli->record_align,
610 TYPE_ALIGN (TREE_TYPE (field)));
611 #endif
613 /* We assume the union's size will be a multiple of a byte so we don't
614 bother with BITPOS. */
615 if (TREE_CODE (rli->t) == UNION_TYPE)
616 rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field));
617 else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
618 rli->offset = fold (build (COND_EXPR, sizetype,
619 DECL_QUALIFIER (field),
620 DECL_SIZE_UNIT (field), rli->offset));
623 /* RLI contains information about the layout of a RECORD_TYPE. FIELD
624 is a FIELD_DECL to be added after those fields already present in
625 T. (FIELD is not actually added to the TYPE_FIELDS list here;
626 callers that desire that behavior must manually perform that step.) */
628 void
629 place_field (rli, field)
630 record_layout_info rli;
631 tree field;
633 /* The alignment required for FIELD. */
634 unsigned int desired_align;
635 /* The alignment FIELD would have if we just dropped it into the
636 record as it presently stands. */
637 unsigned int known_align;
638 unsigned int actual_align;
639 unsigned int user_align;
640 /* The type of this field. */
641 tree type = TREE_TYPE (field);
643 if (TREE_CODE (field) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
644 return;
646 /* If FIELD is static, then treat it like a separate variable, not
647 really like a structure field. If it is a FUNCTION_DECL, it's a
648 method. In both cases, all we do is lay out the decl, and we do
649 it *after* the record is laid out. */
650 if (TREE_CODE (field) == VAR_DECL)
652 rli->pending_statics = tree_cons (NULL_TREE, field,
653 rli->pending_statics);
654 return;
657 /* Enumerators and enum types which are local to this class need not
658 be laid out. Likewise for initialized constant fields. */
659 else if (TREE_CODE (field) != FIELD_DECL)
660 return;
662 /* Unions are laid out very differently than records, so split
663 that code off to another function. */
664 else if (TREE_CODE (rli->t) != RECORD_TYPE)
666 place_union_field (rli, field);
667 return;
670 /* Work out the known alignment so far. Note that A & (-A) is the
671 value of the least-significant bit in A that is one. */
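/* For example, if the bit position so far is 24 (binary 11000), then
   24 & -24 = 8, so the next field is known to start on (at least) an
   8-bit boundary.  */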
672 if (! integer_zerop (rli->bitpos))
673 known_align = (tree_low_cst (rli->bitpos, 1)
674 & - tree_low_cst (rli->bitpos, 1));
675 else if (integer_zerop (rli->offset))
676 known_align = BIGGEST_ALIGNMENT;
677 else if (host_integerp (rli->offset, 1))
678 known_align = (BITS_PER_UNIT
679 * (tree_low_cst (rli->offset, 1)
680 & - tree_low_cst (rli->offset, 1)));
681 else
682 known_align = rli->offset_align;
684 /* Lay out the field so we know what alignment it needs. For a
685 packed field, use the alignment as specified, disregarding what
686 the type would want. */
687 desired_align = DECL_ALIGN (field);
688 user_align = DECL_USER_ALIGN (field);
689 layout_decl (field, known_align);
690 if (! DECL_PACKED (field))
692 desired_align = DECL_ALIGN (field);
693 user_align = DECL_USER_ALIGN (field);
696 #ifdef BIGGEST_FIELD_ALIGNMENT
697 /* Some targets (e.g. i386, VMS) limit struct field alignment
698 to a lower boundary than alignment of variables unless
699 it was overridden by attribute aligned. */
700 if (! user_align)
701 desired_align =
702 MIN (desired_align, (unsigned) BIGGEST_FIELD_ALIGNMENT);
703 #endif
704 #ifdef ADJUST_FIELD_ALIGN
705 desired_align = ADJUST_FIELD_ALIGN (field, desired_align);
706 #endif
708 /* Record must have at least as much alignment as any field.
709 Otherwise, the alignment of the field within the record is
710 meaningless. */
711 #ifdef PCC_BITFIELD_TYPE_MATTERS
712 if (PCC_BITFIELD_TYPE_MATTERS && type != error_mark_node
713 && DECL_BIT_FIELD_TYPE (field)
714 && ! integer_zerop (TYPE_SIZE (type)))
716 /* For these machines, a zero-length field does not
717 affect the alignment of the structure as a whole.
718 It does, however, affect the alignment of the next field
719 within the structure. */
720 if (! integer_zerop (DECL_SIZE (field)))
721 rli->record_align = MAX (rli->record_align, desired_align);
722 else if (! DECL_PACKED (field))
723 desired_align = TYPE_ALIGN (type);
725 /* A named bit field of declared type `int'
726 forces the entire structure to have `int' alignment. */
727 if (DECL_NAME (field) != 0)
729 unsigned int type_align = TYPE_ALIGN (type);
731 if (maximum_field_alignment != 0)
732 type_align = MIN (type_align, maximum_field_alignment);
733 else if (DECL_PACKED (field))
734 type_align = MIN (type_align, BITS_PER_UNIT);
736 rli->record_align = MAX (rli->record_align, type_align);
737 if (warn_packed)
738 rli->unpacked_align = MAX (rli->unpacked_align,
739 TYPE_ALIGN (type));
742 else
743 #endif
745 rli->record_align = MAX (rli->record_align, desired_align);
746 rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
749 if (warn_packed && DECL_PACKED (field))
751 if (known_align > TYPE_ALIGN (type))
753 if (TYPE_ALIGN (type) > desired_align)
755 if (STRICT_ALIGNMENT)
756 warning_with_decl (field, "packed attribute causes inefficient alignment for `%s'");
757 else
758 warning_with_decl (field, "packed attribute is unnecessary for `%s'");
761 else
762 rli->packed_maybe_necessary = 1;
765 /* Does this field automatically have alignment it needs by virtue
766 of the fields that precede it and the record's own alignment? */
767 if (known_align < desired_align)
769 /* No, we need to skip space before this field.
770 Bump the cumulative size to multiple of field alignment. */
772 if (warn_padded)
773 warning_with_decl (field, "padding struct to align `%s'");
775 /* If the alignment is still within offset_align, just align
776 the bit position. */
777 if (desired_align < rli->offset_align)
778 rli->bitpos = round_up (rli->bitpos, desired_align);
779 else
781 /* First adjust OFFSET by the partial bits, then align. */
782 rli->offset
783 = size_binop (PLUS_EXPR, rli->offset,
784 convert (sizetype,
785 size_binop (CEIL_DIV_EXPR, rli->bitpos,
786 bitsize_unit_node)));
787 rli->bitpos = bitsize_zero_node;
789 rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
792 if (! TREE_CONSTANT (rli->offset))
793 rli->offset_align = desired_align;
797 /* Handle compatibility with PCC. Note that if the record has any
798 variable-sized fields, we need not worry about compatibility. */
799 #ifdef PCC_BITFIELD_TYPE_MATTERS
800 if (PCC_BITFIELD_TYPE_MATTERS
801 && TREE_CODE (field) == FIELD_DECL
802 && type != error_mark_node
803 && DECL_BIT_FIELD (field)
804 && ! DECL_PACKED (field)
805 && maximum_field_alignment == 0
806 && ! integer_zerop (DECL_SIZE (field))
807 && host_integerp (DECL_SIZE (field), 1)
808 && host_integerp (rli->offset, 1)
809 && host_integerp (TYPE_SIZE (type), 1))
811 unsigned int type_align = TYPE_ALIGN (type);
812 tree dsize = DECL_SIZE (field);
813 HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
814 HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
815 HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);
817 /* A bit field may not span more units of alignment of its type
818 than its type itself. Advance to next boundary if necessary. */
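/* Worked example, assuming BITS_PER_UNIT is 8: a 10-bit field of a 32-bit,
   32-bit-aligned type placed at an offset of 3 bytes plus 2 bits would
   occupy bits 26..35 and thus touch 2 alignment units of the type, while
   the type itself spans only 1; the bit position is therefore rounded up
   to the next 32-bit boundary.  */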
819 if ((((offset * BITS_PER_UNIT + bit_offset + field_size +
820 type_align - 1)
821 / type_align)
822 - (offset * BITS_PER_UNIT + bit_offset) / type_align)
823 > tree_low_cst (TYPE_SIZE (type), 1) / type_align)
824 rli->bitpos = round_up (rli->bitpos, type_align);
826 #endif
828 #ifdef BITFIELD_NBYTES_LIMITED
829 if (BITFIELD_NBYTES_LIMITED
830 && TREE_CODE (field) == FIELD_DECL
831 && type != error_mark_node
832 && DECL_BIT_FIELD_TYPE (field)
833 && ! DECL_PACKED (field)
834 && ! integer_zerop (DECL_SIZE (field))
835 && host_integerp (DECL_SIZE (field), 1)
836 && host_integerp (rli->offset, 1)
837 && host_integerp (TYPE_SIZE (type), 1))
839 unsigned int type_align = TYPE_ALIGN (type);
840 tree dsize = DECL_SIZE (field);
841 HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
842 HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
843 HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);
845 if (maximum_field_alignment != 0)
846 type_align = MIN (type_align, maximum_field_alignment);
847 /* ??? This test is opposite the test in the containing if
848 statement, so this code is unreachable currently. */
849 else if (DECL_PACKED (field))
850 type_align = MIN (type_align, BITS_PER_UNIT);
852 /* A bit field may not span the unit of alignment of its type.
853 Advance to next boundary if necessary. */
854 /* ??? This code should match the code above for the
855 PCC_BITFIELD_TYPE_MATTERS case. */
856 if ((offset * BITS_PER_UNIT + bit_offset) / type_align
857 != ((offset * BITS_PER_UNIT + bit_offset + field_size - 1)
858 / type_align))
859 rli->bitpos = round_up (rli->bitpos, type_align);
861 #endif
863 /* Offset so far becomes the position of this field after normalizing. */
864 normalize_rli (rli);
865 DECL_FIELD_OFFSET (field) = rli->offset;
866 DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
867 SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
869 /* If this field ended up more aligned than we thought it would be (we
870 approximate this by seeing if its position changed), lay out the field
871 again; perhaps we can use an integral mode for it now. */
872 if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
873 actual_align = (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
874 & - tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1));
875 else if (integer_zerop (DECL_FIELD_OFFSET (field)))
876 actual_align = BIGGEST_ALIGNMENT;
877 else if (host_integerp (DECL_FIELD_OFFSET (field), 1))
878 actual_align = (BITS_PER_UNIT
879 * (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
880 & - tree_low_cst (DECL_FIELD_OFFSET (field), 1)));
881 else
882 actual_align = DECL_OFFSET_ALIGN (field);
884 if (known_align != actual_align)
885 layout_decl (field, actual_align);
887 /* Now add size of this field to the size of the record. If the size is
888 not constant, treat the field as being a multiple of bytes and just
889 adjust the offset, resetting the bit position. Otherwise, apportion the
890 size amongst the bit position and offset. First handle the case of an
891 unspecified size, which can happen when we have an invalid nested struct
892 definition, such as struct j { struct j { int i; } }. The error message
893 is printed in finish_struct. */
894 if (DECL_SIZE (field) == 0)
895 /* Do nothing. */;
896 else if (! TREE_CONSTANT (DECL_SIZE_UNIT (field)))
898 rli->offset
899 = size_binop (PLUS_EXPR, rli->offset,
900 convert (sizetype,
901 size_binop (CEIL_DIV_EXPR, rli->bitpos,
902 bitsize_unit_node)));
903 rli->offset
904 = size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
905 rli->bitpos = bitsize_zero_node;
906 rli->offset_align = MIN (rli->offset_align, DECL_ALIGN (field));
908 else
910 rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
911 normalize_rli (rli);
915 /* Assuming that all the fields have been laid out, this function uses
916 RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type
917 indicated by RLI. */
919 static void
920 finalize_record_size (rli)
921 record_layout_info rli;
923 tree unpadded_size, unpadded_size_unit;
925 /* Now we want just byte and bit offsets, so set the offset alignment
926 to be a byte and then normalize. */
927 rli->offset_align = BITS_PER_UNIT;
928 normalize_rli (rli);
930 /* Determine the desired alignment. */
931 #ifdef ROUND_TYPE_ALIGN
932 TYPE_ALIGN (rli->t) = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
933 rli->record_align);
934 #else
935 TYPE_ALIGN (rli->t) = MAX (TYPE_ALIGN (rli->t), rli->record_align);
936 #endif
937 TYPE_USER_ALIGN (rli->t) = 1;
939 /* Compute the size so far. Be sure to allow for extra bits in the
940 size in bytes. We have guaranteed above that it will be no more
941 than a single byte. */
942 unpadded_size = rli_size_so_far (rli);
943 unpadded_size_unit = rli_size_unit_so_far (rli);
944 if (! integer_zerop (rli->bitpos))
945 unpadded_size_unit
946 = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node);
948 /* Record the un-rounded size in the binfo node. But first we check
949 the size of TYPE_BINFO to make sure that BINFO_SIZE is available. */
950 if (TYPE_BINFO (rli->t) && TREE_VEC_LENGTH (TYPE_BINFO (rli->t)) > 6)
952 TYPE_BINFO_SIZE (rli->t) = unpadded_size;
953 TYPE_BINFO_SIZE_UNIT (rli->t) = unpadded_size_unit;
956 /* Round the size up to be a multiple of the required alignment */
957 #ifdef ROUND_TYPE_SIZE
958 TYPE_SIZE (rli->t) = ROUND_TYPE_SIZE (rli->t, unpadded_size,
959 TYPE_ALIGN (rli->t));
960 TYPE_SIZE_UNIT (rli->t)
961 = ROUND_TYPE_SIZE_UNIT (rli->t, unpadded_size_unit,
962 TYPE_ALIGN (rli->t) / BITS_PER_UNIT);
963 #else
964 TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
965 TYPE_SIZE_UNIT (rli->t) = round_up (unpadded_size_unit,
966 TYPE_ALIGN (rli->t) / BITS_PER_UNIT);
967 #endif
969 if (warn_padded && TREE_CONSTANT (unpadded_size)
970 && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0)
971 warning ("padding struct size to alignment boundary");
973 if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE
974 && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary
975 && TREE_CONSTANT (unpadded_size))
977 tree unpacked_size;
979 #ifdef ROUND_TYPE_ALIGN
980 rli->unpacked_align
981 = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align);
982 #else
983 rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
984 #endif
986 #ifdef ROUND_TYPE_SIZE
987 unpacked_size = ROUND_TYPE_SIZE (rli->t, TYPE_SIZE (rli->t),
988 rli->unpacked_align);
989 #else
990 unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align);
991 #endif
993 if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
995 TYPE_PACKED (rli->t) = 0;
997 if (TYPE_NAME (rli->t))
999 const char *name;
1001 if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE)
1002 name = IDENTIFIER_POINTER (TYPE_NAME (rli->t));
1003 else
1004 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (rli->t)));
1006 if (STRICT_ALIGNMENT)
1007 warning ("packed attribute causes inefficient alignment for `%s'", name);
1008 else
1009 warning ("packed attribute is unnecessary for `%s'", name);
1011 else
1013 if (STRICT_ALIGNMENT)
1014 warning ("packed attribute causes inefficient alignment");
1015 else
1016 warning ("packed attribute is unnecessary");
1022 /* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE). */
1024 void
1025 compute_record_mode (type)
1026 tree type;
1028 tree field;
1029 enum machine_mode mode = VOIDmode;
1031 /* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
1032 However, if possible, we use a mode that fits in a register
1033 instead, in order to allow for better optimization down the
1034 line. */
1035 TYPE_MODE (type) = BLKmode;
1037 if (! host_integerp (TYPE_SIZE (type), 1))
1038 return;
1040 /* A record which has any BLKmode members must itself be
1041 BLKmode; it can't go in a register. Unless the member is
1042 BLKmode only because it isn't aligned. */
1043 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1045 unsigned HOST_WIDE_INT bitpos;
1047 if (TREE_CODE (field) != FIELD_DECL)
1048 continue;
1050 if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK
1051 || (TYPE_MODE (TREE_TYPE (field)) == BLKmode
1052 && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field)))
1053 || ! host_integerp (bit_position (field), 1)
1054 || ! host_integerp (DECL_SIZE (field), 1))
1055 return;
1057 bitpos = int_bit_position (field);
1059 /* Must be BLKmode if any field crosses a word boundary,
1060 since extract_bit_field can't handle that in registers. */
1061 if (bitpos / BITS_PER_WORD
1062 != ((tree_low_cst (DECL_SIZE (field), 1) + bitpos - 1)
1063 / BITS_PER_WORD)
1064 /* But there is no problem if the field is entire words. */
1065 && tree_low_cst (DECL_SIZE (field), 1) % BITS_PER_WORD != 0)
1066 return;
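/* For example, assuming BITS_PER_WORD is 32: a 16-bit field at bit
   position 24 would occupy bits 24..39, crossing the first word boundary
   without covering whole words, so the record must stay BLKmode.  */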
1068 /* If this field is the whole struct, remember its mode so
1069 that, say, we can put a double in a class into a DF
1070 register instead of forcing it to live in the stack. */
1071 if (simple_cst_equal (TYPE_SIZE (type), DECL_SIZE (field)))
1072 mode = DECL_MODE (field);
1074 #ifdef STRUCT_FORCE_BLK
1075 /* With some targets, e.g. c4x, it is sub-optimal
1076 to access an aligned BLKmode structure as a scalar. */
1077 if (mode == VOIDmode && STRUCT_FORCE_BLK (field))
1078 return;
1079 #endif /* STRUCT_FORCE_BLK */
1082 /* If we only have one real field, use its mode. This only applies to
1083 RECORD_TYPEs; it does not apply to unions. */
1084 if (TREE_CODE (type) == RECORD_TYPE && mode != VOIDmode)
1085 TYPE_MODE (type) = mode;
1086 else
1087 TYPE_MODE (type) = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);
1089 /* If structure's known alignment is less than what the scalar
1090 mode would need, and it matters, then stick with BLKmode. */
1091 if (TYPE_MODE (type) != BLKmode
1092 && STRICT_ALIGNMENT
1093 && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
1094 || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (TYPE_MODE (type))))
1096 /* If this is the only reason this type is BLKmode, then
1097 don't force containing types to be BLKmode. */
1098 TYPE_NO_FORCE_BLK (type) = 1;
1099 TYPE_MODE (type) = BLKmode;
1103 /* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid
1104 out. */
1106 static void
1107 finalize_type_size (type)
1108 tree type;
1110 /* Normally, use the alignment corresponding to the mode chosen.
1111 However, where strict alignment is not required, avoid
1112 over-aligning structures, since most compilers do not do this
1113 alignment. */
1115 if (TYPE_MODE (type) != BLKmode && TYPE_MODE (type) != VOIDmode
1116 && (STRICT_ALIGNMENT
1117 || (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
1118 && TREE_CODE (type) != QUAL_UNION_TYPE
1119 && TREE_CODE (type) != ARRAY_TYPE)))
1121 TYPE_ALIGN (type) = GET_MODE_ALIGNMENT (TYPE_MODE (type));
1122 TYPE_USER_ALIGN (type) = 0;
1125 /* Do machine-dependent extra alignment. */
1126 #ifdef ROUND_TYPE_ALIGN
1127 TYPE_ALIGN (type)
1128 = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT);
1129 #endif
1131 /* If we failed to find a simple way to calculate the unit size
1132 of the type, find it by division. */
1133 if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0)
1134 /* TYPE_SIZE (type) is computed in bitsizetype. After the division, the
1135 result will fit in sizetype. We will get more efficient code using
1136 sizetype, so we force a conversion. */
1137 TYPE_SIZE_UNIT (type)
1138 = convert (sizetype,
1139 size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
1140 bitsize_unit_node));
1142 if (TYPE_SIZE (type) != 0)
1144 #ifdef ROUND_TYPE_SIZE
1145 TYPE_SIZE (type)
1146 = ROUND_TYPE_SIZE (type, TYPE_SIZE (type), TYPE_ALIGN (type));
1147 TYPE_SIZE_UNIT (type)
1148 = ROUND_TYPE_SIZE_UNIT (type, TYPE_SIZE_UNIT (type),
1149 TYPE_ALIGN (type) / BITS_PER_UNIT);
1150 #else
1151 TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
1152 TYPE_SIZE_UNIT (type)
1153 = round_up (TYPE_SIZE_UNIT (type), TYPE_ALIGN (type) / BITS_PER_UNIT);
1154 #endif
1157 /* Evaluate nonconstant sizes only once, either now or as soon as safe. */
1158 if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1159 TYPE_SIZE (type) = variable_size (TYPE_SIZE (type));
1160 if (TYPE_SIZE_UNIT (type) != 0
1161 && TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)
1162 TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type));
1164 /* Also layout any other variants of the type. */
1165 if (TYPE_NEXT_VARIANT (type)
1166 || type != TYPE_MAIN_VARIANT (type))
1168 tree variant;
1169 /* Record layout info of this variant. */
1170 tree size = TYPE_SIZE (type);
1171 tree size_unit = TYPE_SIZE_UNIT (type);
1172 unsigned int align = TYPE_ALIGN (type);
1173 unsigned int user_align = TYPE_USER_ALIGN (type);
1174 enum machine_mode mode = TYPE_MODE (type);
1176 /* Copy it into all variants. */
1177 for (variant = TYPE_MAIN_VARIANT (type);
1178 variant != 0;
1179 variant = TYPE_NEXT_VARIANT (variant))
1181 TYPE_SIZE (variant) = size;
1182 TYPE_SIZE_UNIT (variant) = size_unit;
1183 TYPE_ALIGN (variant) = align;
1184 TYPE_USER_ALIGN (variant) = user_align;
1185 TYPE_MODE (variant) = mode;
1190 /* Do all of the work required to layout the type indicated by RLI,
1191 once the fields have been laid out. This function will call `free'
1192 for RLI. */
1194 void
1195 finish_record_layout (rli)
1196 record_layout_info rli;
1198 /* Compute the final size. */
1199 finalize_record_size (rli);
1201 /* Compute the TYPE_MODE for the record. */
1202 compute_record_mode (rli->t);
1204 /* Perform any last tweaks to the TYPE_SIZE, etc. */
1205 finalize_type_size (rli->t);
1207 /* Lay out any static members. This is done now because their type
1208 may use the record's type. */
1209 while (rli->pending_statics)
1211 layout_decl (TREE_VALUE (rli->pending_statics), 0);
1212 rli->pending_statics = TREE_CHAIN (rli->pending_statics);
1215 /* Clean up. */
1216 free (rli);
1219 /* Calculate the mode, size, and alignment for TYPE.
1220 For an array type, calculate the element separation as well.
1221 Record TYPE on the chain of permanent or temporary types
1222 so that dbxout will find out about it.
1224 TYPE_SIZE of a type is nonzero if the type has been laid out already.
1225 layout_type does nothing on such a type.
1227 If the type is incomplete, its TYPE_SIZE remains zero. */
1229 void
1230 layout_type (type)
1231 tree type;
1233 if (type == 0)
1234 abort ();
1236 /* Do nothing if type has been laid out before. */
1237 if (TYPE_SIZE (type))
1238 return;
1240 switch (TREE_CODE (type))
1242 case LANG_TYPE:
1243 /* This kind of type is the responsibility
1244 of the language-specific code. */
1245 abort ();
1247 case BOOLEAN_TYPE: /* Used for Java, Pascal, and Chill. */
1248 if (TYPE_PRECISION (type) == 0)
1249 TYPE_PRECISION (type) = 1; /* default to one byte/boolean. */
1251 /* ... fall through ... */
1253 case INTEGER_TYPE:
1254 case ENUMERAL_TYPE:
1255 case CHAR_TYPE:
1256 if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
1257 && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
1258 TREE_UNSIGNED (type) = 1;
1260 TYPE_MODE (type) = smallest_mode_for_size (TYPE_PRECISION (type),
1261 MODE_INT);
1262 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1263 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1264 break;
1266 case REAL_TYPE:
1267 TYPE_MODE (type) = mode_for_size (TYPE_PRECISION (type), MODE_FLOAT, 0);
1268 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1269 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1270 break;
1272 case COMPLEX_TYPE:
1273 TREE_UNSIGNED (type) = TREE_UNSIGNED (TREE_TYPE (type));
1274 TYPE_MODE (type)
1275 = mode_for_size (2 * TYPE_PRECISION (TREE_TYPE (type)),
1276 (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
1277 ? MODE_COMPLEX_INT : MODE_COMPLEX_FLOAT),
1279 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1280 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1281 break;
1283 case VECTOR_TYPE:
1285 tree subtype;
1287 subtype = TREE_TYPE (type);
1288 TREE_UNSIGNED (type) = TREE_UNSIGNED (subtype);
1289 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1290 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1292 break;
1294 case VOID_TYPE:
1295 /* This is an incomplete type and so doesn't have a size. */
1296 TYPE_ALIGN (type) = 1;
1297 TYPE_USER_ALIGN (type) = 0;
1298 TYPE_MODE (type) = VOIDmode;
1299 break;
1301 case OFFSET_TYPE:
1302 TYPE_SIZE (type) = bitsize_int (POINTER_SIZE);
1303 TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE / BITS_PER_UNIT);
1304 TYPE_MODE (type) = ptr_mode;
1305 break;
1307 case FUNCTION_TYPE:
1308 case METHOD_TYPE:
1309 TYPE_MODE (type) = mode_for_size (2 * POINTER_SIZE, MODE_INT, 0);
1310 TYPE_SIZE (type) = bitsize_int (2 * POINTER_SIZE);
1311 TYPE_SIZE_UNIT (type) = size_int ((2 * POINTER_SIZE) / BITS_PER_UNIT);
1312 break;
1314 case POINTER_TYPE:
1315 case REFERENCE_TYPE:
1316 TYPE_MODE (type) = ptr_mode;
1317 TYPE_SIZE (type) = bitsize_int (POINTER_SIZE);
1318 TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE / BITS_PER_UNIT);
1319 TREE_UNSIGNED (type) = 1;
1320 TYPE_PRECISION (type) = POINTER_SIZE;
1321 break;
1323 case ARRAY_TYPE:
1325 register tree index = TYPE_DOMAIN (type);
1326 register tree element = TREE_TYPE (type);
1328 build_pointer_type (element);
1330 /* We need to know both bounds in order to compute the size. */
1331 if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index)
1332 && TYPE_SIZE (element))
1334 tree ub = TYPE_MAX_VALUE (index);
1335 tree lb = TYPE_MIN_VALUE (index);
1336 tree length;
1337 tree element_size;
1339 /* The initial subtraction should happen in the original type so
1340 that (possible) negative values are handled appropriately. */
1341 length = size_binop (PLUS_EXPR, size_one_node,
1342 convert (sizetype,
1343 fold (build (MINUS_EXPR,
1344 TREE_TYPE (lb),
1345 ub, lb))));
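/* For example, an index domain of 0 .. 9 gives a length of
   1 + (9 - 0) = 10 elements, and TYPE_SIZE below becomes the element
   size multiplied by 10.  */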
1347 /* Special handling for arrays of bits (for Chill). */
1348 element_size = TYPE_SIZE (element);
1349 if (TYPE_PACKED (type) && INTEGRAL_TYPE_P (element)
1350 && (integer_zerop (TYPE_MAX_VALUE (element))
1351 || integer_onep (TYPE_MAX_VALUE (element)))
1352 && host_integerp (TYPE_MIN_VALUE (element), 1))
1354 HOST_WIDE_INT maxvalue
1355 = tree_low_cst (TYPE_MAX_VALUE (element), 1);
1356 HOST_WIDE_INT minvalue
1357 = tree_low_cst (TYPE_MIN_VALUE (element), 1);
1359 if (maxvalue - minvalue == 1
1360 && (maxvalue == 1 || maxvalue == 0))
1361 element_size = integer_one_node;
1364 TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
1365 convert (bitsizetype, length));
1367 /* If we know the size of the element, calculate the total
1368 size directly, rather than do some division thing below.
1369 This optimization helps Fortran assumed-size arrays
1370 (where the size of the array is determined at runtime)
1371 substantially.
1372 Note that we can't do this in the case where the size of
1373 the elements is one bit since TYPE_SIZE_UNIT cannot be
1374 set correctly in that case. */
1375 if (TYPE_SIZE_UNIT (element) != 0 && ! integer_onep (element_size))
1376 TYPE_SIZE_UNIT (type)
1377 = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length);
1380 /* Now round the alignment and size,
1381 using machine-dependent criteria if any. */
1383 #ifdef ROUND_TYPE_ALIGN
1384 TYPE_ALIGN (type)
1385 = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (element), BITS_PER_UNIT);
1386 #else
1387 TYPE_ALIGN (type) = MAX (TYPE_ALIGN (element), BITS_PER_UNIT);
1388 #endif
1390 #ifdef ROUND_TYPE_SIZE
1391 if (TYPE_SIZE (type) != 0)
1393 tree tmp
1394 = ROUND_TYPE_SIZE (type, TYPE_SIZE (type), TYPE_ALIGN (type));
1396 /* If the rounding changed the size of the type, remove any
1397 pre-calculated TYPE_SIZE_UNIT. */
1398 if (simple_cst_equal (TYPE_SIZE (type), tmp) != 1)
1399 TYPE_SIZE_UNIT (type) = NULL;
1401 TYPE_SIZE (type) = tmp;
1403 #endif
1405 TYPE_MODE (type) = BLKmode;
1406 if (TYPE_SIZE (type) != 0
1407 /* BLKmode elements force BLKmode aggregate;
1408 else extract/store fields may lose. */
1409 && (TYPE_MODE (TREE_TYPE (type)) != BLKmode
1410 || TYPE_NO_FORCE_BLK (TREE_TYPE (type))))
1412 TYPE_MODE (type)
1413 = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);
1415 if (TYPE_MODE (type) != BLKmode
1416 && STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT
1417 && TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type))
1418 && TYPE_MODE (type) != BLKmode)
1420 TYPE_NO_FORCE_BLK (type) = 1;
1421 TYPE_MODE (type) = BLKmode;
1424 break;
1427 case RECORD_TYPE:
1428 case UNION_TYPE:
1429 case QUAL_UNION_TYPE:
1431 tree field;
1432 record_layout_info rli;
1434 /* Initialize the layout information. */
1435 rli = start_record_layout (type);
1437 /* If this is a QUAL_UNION_TYPE, we want to process the fields
1438 in the reverse order in building the COND_EXPR that denotes
1439 its size. We reverse them again later. */
1440 if (TREE_CODE (type) == QUAL_UNION_TYPE)
1441 TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
1443 /* Place all the fields. */
1444 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1445 place_field (rli, field);
1447 if (TREE_CODE (type) == QUAL_UNION_TYPE)
1448 TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
1450 /* Finish laying out the record. */
1451 finish_record_layout (rli);
1453 break;
1455 case SET_TYPE: /* Used by Chill and Pascal. */
1456 if (TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST
1457 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
1458 abort();
1459 else
1461 #ifndef SET_WORD_SIZE
1462 #define SET_WORD_SIZE BITS_PER_WORD
1463 #endif
1464 unsigned int alignment
1465 = set_alignment ? set_alignment : SET_WORD_SIZE;
1466 int size_in_bits
1467 = (TREE_INT_CST_LOW (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
1468 - TREE_INT_CST_LOW (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) + 1);
1469 int rounded_size
1470 = ((size_in_bits + alignment - 1) / alignment) * alignment;
1472 if (rounded_size > (int) alignment)
1473 TYPE_MODE (type) = BLKmode;
1474 else
1475 TYPE_MODE (type) = mode_for_size (alignment, MODE_INT, 1);
1477 TYPE_SIZE (type) = bitsize_int (rounded_size);
1478 TYPE_SIZE_UNIT (type) = size_int (rounded_size / BITS_PER_UNIT);
1479 TYPE_ALIGN (type) = alignment;
1480 TYPE_USER_ALIGN (type) = 0;
1481 TYPE_PRECISION (type) = size_in_bits;
1483 break;
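/* Worked example, assuming SET_WORD_SIZE (and hence the alignment) is 32
   and BITS_PER_UNIT is 8: a set with domain 0 .. 39 has 40 members, so
   size_in_bits is 40 and rounded_size is 64; since 64 exceeds the 32-bit
   alignment the set gets BLKmode, an 8-byte size and a precision of 40.  */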
1485 case FILE_TYPE:
1486 /* The size may vary in different languages, so the language front end
1487 should fill in the size. */
1488 TYPE_ALIGN (type) = BIGGEST_ALIGNMENT;
1489 TYPE_USER_ALIGN (type) = 0;
1490 TYPE_MODE (type) = BLKmode;
1491 break;
1493 default:
1494 abort ();
1497 /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE. For
1498 records and unions, finish_record_layout already called this
1499 function. */
1500 if (TREE_CODE (type) != RECORD_TYPE
1501 && TREE_CODE (type) != UNION_TYPE
1502 && TREE_CODE (type) != QUAL_UNION_TYPE)
1503 finalize_type_size (type);
1505 /* If this type is created before sizetype has been permanently set,
1506 record it so set_sizetype can fix it up. */
1507 if (! sizetype_set)
1508 early_type_list = tree_cons (NULL_TREE, type, early_type_list);
1510 /* If an alias set has been set for this aggregate when it was incomplete,
1511 force it into alias set 0.
1512 This is too conservative, but we cannot call record_component_aliases
1513 here because some frontends still change the aggregates after
1514 layout_type. */
1515 if (AGGREGATE_TYPE_P (type) && TYPE_ALIAS_SET_KNOWN_P (type))
1516 TYPE_ALIAS_SET (type) = 0;
1519 /* Create and return a type for signed integers of PRECISION bits. */
1521 tree
1522 make_signed_type (precision)
1523 int precision;
1525 register tree type = make_node (INTEGER_TYPE);
1527 TYPE_PRECISION (type) = precision;
1529 fixup_signed_type (type);
1530 return type;
1533 /* Create and return a type for unsigned integers of PRECISION bits. */
1535 tree
1536 make_unsigned_type (precision)
1537 int precision;
1539 register tree type = make_node (INTEGER_TYPE);
1541 TYPE_PRECISION (type) = precision;
1543 fixup_unsigned_type (type);
1544 return type;
1547 /* Initialize sizetype and bitsizetype to a reasonable and temporary
1548 value to enable integer types to be created. */
1550 void
1551 initialize_sizetypes ()
1553 tree t = make_node (INTEGER_TYPE);
1555 /* Set this so we do something reasonable for the build_int_2 calls
1556 below. */
1557 integer_type_node = t;
1559 TYPE_MODE (t) = SImode;
1560 TYPE_ALIGN (t) = GET_MODE_ALIGNMENT (SImode);
1561 TYPE_USER_ALIGN (t) = 0;
1562 TYPE_SIZE (t) = build_int_2 (GET_MODE_BITSIZE (SImode), 0);
1563 TYPE_SIZE_UNIT (t) = build_int_2 (GET_MODE_SIZE (SImode), 0);
1564 TREE_UNSIGNED (t) = 1;
1565 TYPE_PRECISION (t) = GET_MODE_BITSIZE (SImode);
1566 TYPE_MIN_VALUE (t) = build_int_2 (0, 0);
1567 TYPE_IS_SIZETYPE (t) = 1;
1569 /* 1000 avoids problems with possible overflow and is certainly
1570 larger than any size value we'd want to be storing. */
1571 TYPE_MAX_VALUE (t) = build_int_2 (1000, 0);
1573 /* These two must be different nodes because of the caching done in
1574 size_int_wide. */
1575 sizetype = t;
1576 bitsizetype = copy_node (t);
1577 integer_type_node = 0;
1580 /* Set sizetype to TYPE, and initialize *sizetype accordingly.
1581 Also update the type of any standard type's sizes made so far. */
1583 void
1584 set_sizetype (type)
1585 tree type;
1587 int oprecision = TYPE_PRECISION (type);
1588 /* The *bitsizetype types use a precision that avoids overflows when
1589 calculating signed sizes / offsets in bits. However, when
1590 cross-compiling from a 32 bit to a 64 bit host, we are limited to 64 bit
1591 precision. */
1592 int precision = MIN (oprecision + BITS_PER_UNIT_LOG + 1,
1593 2 * HOST_BITS_PER_WIDE_INT);
1594 unsigned int i;
1595 tree t;
1597 if (sizetype_set)
1598 abort ();
1600 /* Make copies of nodes since we'll be setting TYPE_IS_SIZETYPE. */
1601 sizetype = copy_node (type);
1602 TYPE_DOMAIN (sizetype) = type;
1603 TYPE_IS_SIZETYPE (sizetype) = 1;
1604 bitsizetype = make_node (INTEGER_TYPE);
1605 TYPE_NAME (bitsizetype) = TYPE_NAME (type);
1606 TYPE_PRECISION (bitsizetype) = precision;
1607 TYPE_IS_SIZETYPE (bitsizetype) = 1;
1609 if (TREE_UNSIGNED (type))
1610 fixup_unsigned_type (bitsizetype);
1611 else
1612 fixup_signed_type (bitsizetype);
1614 layout_type (bitsizetype);
1616 if (TREE_UNSIGNED (type))
1618 usizetype = sizetype;
1619 ubitsizetype = bitsizetype;
1620 ssizetype = copy_node (make_signed_type (oprecision));
1621 sbitsizetype = copy_node (make_signed_type (precision));
1623 else
1625 ssizetype = sizetype;
1626 sbitsizetype = bitsizetype;
1627 usizetype = copy_node (make_unsigned_type (oprecision));
1628 ubitsizetype = copy_node (make_unsigned_type (precision));
1631 TYPE_NAME (bitsizetype) = get_identifier ("bit_size_type");
1633 /* Show each is a sizetype, is its own main variant, and has no pointers to it. */
1634 for (i = 0; i < ARRAY_SIZE (sizetype_tab); i++)
1636 TYPE_IS_SIZETYPE (sizetype_tab[i]) = 1;
1637 TYPE_MAIN_VARIANT (sizetype_tab[i]) = sizetype_tab[i];
1638 TYPE_NEXT_VARIANT (sizetype_tab[i]) = 0;
1639 TYPE_POINTER_TO (sizetype_tab[i]) = 0;
1640 TYPE_REFERENCE_TO (sizetype_tab[i]) = 0;
1643 ggc_add_tree_root ((tree *) &sizetype_tab,
1644 sizeof sizetype_tab / sizeof (tree));
1646 /* Go down each of the types we already made and set the proper type
1647 for the sizes in them. */
1648 for (t = early_type_list; t != 0; t = TREE_CHAIN (t))
1650 if (TREE_CODE (TREE_VALUE (t)) != INTEGER_TYPE)
1651 abort ();
1653 TREE_TYPE (TYPE_SIZE (TREE_VALUE (t))) = bitsizetype;
1654 TREE_TYPE (TYPE_SIZE_UNIT (TREE_VALUE (t))) = sizetype;
1657 early_type_list = 0;
1658 sizetype_set = 1;
1661 /* Set the extreme values of TYPE based on its precision in bits,
1662 then lay it out. Used when make_signed_type won't do
1663 because the tree code is not INTEGER_TYPE.
1664 E.g. for Pascal, when the -fsigned-char option is given. */
1666 void
1667 fixup_signed_type (type)
1668 tree type;
1670 register int precision = TYPE_PRECISION (type);
1672 TYPE_MIN_VALUE (type)
1673 = build_int_2 ((precision - HOST_BITS_PER_WIDE_INT > 0
1674 ? 0 : (HOST_WIDE_INT) (-1) << (precision - 1)),
1675 (((HOST_WIDE_INT) (-1)
1676 << (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
1677 ? precision - HOST_BITS_PER_WIDE_INT - 1
1678 : 0))));
1679 TYPE_MAX_VALUE (type)
1680 = build_int_2 ((precision - HOST_BITS_PER_WIDE_INT > 0
1681 ? -1 : ((HOST_WIDE_INT) 1 << (precision - 1)) - 1),
1682 (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
1683 ? (((HOST_WIDE_INT) 1
1684 << (precision - HOST_BITS_PER_WIDE_INT - 1))) - 1
1685 : 0));
1687 TREE_TYPE (TYPE_MIN_VALUE (type)) = type;
1688 TREE_TYPE (TYPE_MAX_VALUE (type)) = type;
1690 /* Lay out the type: set its alignment, size, etc. */
1691 layout_type (type);
1694 /* Set the extreme values of TYPE based on its precision in bits,
1695 then lay it out. This is used both in `make_unsigned_type'
1696 and for enumeral types. */
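/* For example, assuming HOST_BITS_PER_WIDE_INT is 32, a precision of 8
   gives a maximum value with low word (1 << 8) - 1 = 255 and high word 0,
   while a precision of 40 gives low word -1 (all bits set) and high word
   (~0 >> (32 - 8)) = 255.  */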
1698 void
1699 fixup_unsigned_type (type)
1700 tree type;
1702 register int precision = TYPE_PRECISION (type);
1704 TYPE_MIN_VALUE (type) = build_int_2 (0, 0);
1705 TYPE_MAX_VALUE (type)
1706 = build_int_2 (precision - HOST_BITS_PER_WIDE_INT >= 0
1707 ? -1 : ((HOST_WIDE_INT) 1 << precision) - 1,
1708 precision - HOST_BITS_PER_WIDE_INT > 0
1709 ? ((unsigned HOST_WIDE_INT) ~0
1710 >> (HOST_BITS_PER_WIDE_INT
1711 - (precision - HOST_BITS_PER_WIDE_INT)))
1712 : 0);
1713 TREE_TYPE (TYPE_MIN_VALUE (type)) = type;
1714 TREE_TYPE (TYPE_MAX_VALUE (type)) = type;
1716 /* Lay out the type: set its alignment, size, etc. */
1717 layout_type (type);
1720 /* Find the best machine mode to use when referencing a bit field of length
1721 BITSIZE bits starting at BITPOS.
1723 The underlying object is known to be aligned to a boundary of ALIGN bits.
1724 If LARGEST_MODE is not VOIDmode, it means that we should not use a mode
1725 larger than LARGEST_MODE (usually SImode).
1727 If no mode meets all these conditions, we return VOIDmode. Otherwise, if
1728 VOLATILEP is true or SLOW_BYTE_ACCESS is false, we return the smallest
1729 mode meeting these conditions.
1731 Otherwise (VOLATILEP is false and SLOW_BYTE_ACCESS is true), we return
1732 the largest mode (but a mode no wider than UNITS_PER_WORD) that meets
1733 all the conditions. */
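/* Illustrative example: for a 3-bit field at bit position 13 with byte
   alignment, QImode already suffices, since (13 % 8) + 3 = 8 fits in 8
   bits; had the field started at bit 14, (14 % 8) + 3 = 9 would overflow
   a byte and the search would move on to a wider mode.  */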
1735 enum machine_mode
1736 get_best_mode (bitsize, bitpos, align, largest_mode, volatilep)
1737 int bitsize, bitpos;
1738 unsigned int align;
1739 enum machine_mode largest_mode;
1740 int volatilep;
1742 enum machine_mode mode;
1743 unsigned int unit = 0;
1745 /* Find the narrowest integer mode that contains the bit field. */
1746 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1747 mode = GET_MODE_WIDER_MODE (mode))
1749 unit = GET_MODE_BITSIZE (mode);
1750 if ((bitpos % unit) + bitsize <= unit)
1751 break;
1754 if (mode == VOIDmode
1755 /* It is tempting to omit the following line
1756 if STRICT_ALIGNMENT is true.
1757 But that is incorrect, since if the bitfield uses part of 3 bytes
1758 and we use a 4-byte mode, we could get a spurious segv
1759 if the extra 4th byte is past the end of memory.
1760 (Though at least one Unix compiler ignores this problem:
1761 that on the Sequent 386 machine.) */
1762 || MIN (unit, BIGGEST_ALIGNMENT) > align
1763 || (largest_mode != VOIDmode && unit > GET_MODE_BITSIZE (largest_mode)))
1764 return VOIDmode;
1766 if (SLOW_BYTE_ACCESS && ! volatilep)
1768 enum machine_mode wide_mode = VOIDmode, tmode;
1770 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); tmode != VOIDmode;
1771 tmode = GET_MODE_WIDER_MODE (tmode))
1773 unit = GET_MODE_BITSIZE (tmode);
1774 if (bitpos / unit == (bitpos + bitsize - 1) / unit
1775 && unit <= BITS_PER_WORD
1776 && unit <= MIN (align, BIGGEST_ALIGNMENT)
1777 && (largest_mode == VOIDmode
1778 || unit <= GET_MODE_BITSIZE (largest_mode)))
1779 wide_mode = tmode;
1782 if (wide_mode != VOIDmode)
1783 return wide_mode;
1786 return mode;
1789 /* Return the alignment of MODE. This will be bounded by 1 and
1790 BIGGEST_ALIGNMENT. */
1792 unsigned int
1793 get_mode_alignment (mode)
1794 enum machine_mode mode;
1796 unsigned int alignment = GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT;
1798 /* Extract the LSB of the size. */
1799 alignment = alignment & -alignment;
1801 alignment = MIN (BIGGEST_ALIGNMENT, MAX (1, alignment));
1802 return alignment;
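/* For example, a mode whose unit occupies 12 bytes yields 96 bits;
   96 & -96 = 32, so the reported alignment is 32 bits (assuming
   BIGGEST_ALIGNMENT is at least that large).  */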
1805 /* This function is run once to initialize stor-layout.c. */
1807 void
1808 init_stor_layout_once ()
1810 ggc_add_tree_root (&pending_sizes, 1);