[official-gcc.git] / gcc / stor-layout.c  (blob 87df45ce2ca1895c81fc98c7a33b1c8da217c4f1)
1 /* C-compiler utilities for types and variables storage layout
 2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "rtl.h"
29 #include "tm_p.h"
30 #include "flags.h"
31 #include "function.h"
32 #include "expr.h"
33 #include "output.h"
34 #include "toplev.h"
35 #include "ggc.h"
36 #include "target.h"
37 #include "langhooks.h"
38 #include "regs.h"
39 #include "params.h"
41 /* Data type for the expressions representing sizes of data types.
42 It is the first integer type laid out. */
43 tree sizetype_tab[(int) TYPE_KIND_LAST];
45 /* If nonzero, this is an upper limit on alignment of structure fields.
46 The value is measured in bits. */
47 unsigned int maximum_field_alignment = TARGET_DEFAULT_PACK_STRUCT * BITS_PER_UNIT;
48 /* ... and its original value in bytes, specified via -fpack-struct=<value>. */
49 unsigned int initial_max_fld_align = TARGET_DEFAULT_PACK_STRUCT;
51 /* Nonzero if all REFERENCE_TYPEs are internal and hence should be
52 allocated in Pmode, not ptr_mode. Set only by internal_reference_types
53 called only by a front end. */
54 static int reference_types_internal = 0;
56 static void finalize_record_size (record_layout_info);
57 static void finalize_type_size (tree);
58 static void place_union_field (record_layout_info, tree);
59 #if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
60 static int excess_unit_span (HOST_WIDE_INT, HOST_WIDE_INT, HOST_WIDE_INT,
61 HOST_WIDE_INT, tree);
62 #endif
63 extern void debug_rli (record_layout_info);
65 /* SAVE_EXPRs for sizes of types and decls, waiting to be expanded. */
67 static GTY(()) tree pending_sizes;
69 /* Show that REFERENCE_TYPES are internal and should be Pmode. Called only
70 by front end. */
72 void
73 internal_reference_types (void)
75 reference_types_internal = 1;
78 /* Get a list of all the objects put on the pending sizes list. */
80 tree
81 get_pending_sizes (void)
83 tree chain = pending_sizes;
85 pending_sizes = 0;
86 return chain;
89 /* Add EXPR to the pending sizes list. */
91 void
92 put_pending_size (tree expr)
94 /* Strip any simple arithmetic from EXPR to see if it has an underlying
95 SAVE_EXPR. */
96 expr = skip_simple_arithmetic (expr);
98 if (TREE_CODE (expr) == SAVE_EXPR)
99 pending_sizes = tree_cons (NULL_TREE, expr, pending_sizes);
102 /* Put a chain of objects into the pending sizes list, which must be
103 empty. */
105 void
106 put_pending_sizes (tree chain)
108 gcc_assert (!pending_sizes);
109 pending_sizes = chain;
112 /* Given a size SIZE that may not be a constant, return a SAVE_EXPR
113 to serve as the actual size-expression for a type or decl. */
115 tree
116 variable_size (tree size)
118 tree save;
120 /* If the language-processor is to take responsibility for variable-sized
121 items (e.g., languages which have elaboration procedures like Ada),
122 just return SIZE unchanged. Likewise for self-referential sizes and
123 constant sizes. */
124 if (TREE_CONSTANT (size)
125 || lang_hooks.decls.global_bindings_p () < 0
126 || CONTAINS_PLACEHOLDER_P (size))
127 return size;
129 size = save_expr (size);
131 /* If an array with a variable number of elements is declared, and
132 the elements require destruction, we will emit a cleanup for the
133 array. That cleanup is run both on normal exit from the block
134 and in the exception-handler for the block. Normally, when code
135 is used in both ordinary code and in an exception handler it is
136 `unsaved', i.e., all SAVE_EXPRs are recalculated. However, we do
137 not wish to do that here; the array-size is the same in both
138 places. */
139 save = skip_simple_arithmetic (size);
141 if (cfun && cfun->x_dont_save_pending_sizes_p)
142 /* The front-end doesn't want us to keep a list of the expressions
143 that determine sizes for variable size objects. Trust it. */
144 return size;
146 if (lang_hooks.decls.global_bindings_p ())
148 if (TREE_CONSTANT (size))
149 error ("type size can%'t be explicitly evaluated");
150 else
151 error ("variable-size type declared outside of any function");
153 return size_one_node;
156 put_pending_size (save);
158 return size;
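/* A hypothetical illustration (assuming a C front end and BITS_PER_UNIT == 8):
   for a declaration such as

     void f (int n) { char buf[n]; }

   the size of buf is the non-constant expression n * BITS_PER_UNIT bits.
   Passing that expression through variable_size wraps it in a SAVE_EXPR so
   it is evaluated only once, and, unless the front end set
   cfun->x_dont_save_pending_sizes_p, queues the SAVE_EXPR on pending_sizes
   via put_pending_size for later expansion.  */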
161 #ifndef MAX_FIXED_MODE_SIZE
162 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
163 #endif
165 /* Return the machine mode to use for a nonscalar of SIZE bits. The
166 mode must be in class CLASS, and have exactly that many value bits;
 167 it may have padding as well. If LIMIT is nonzero, modes wider
168 than MAX_FIXED_MODE_SIZE will not be used. */
170 enum machine_mode
171 mode_for_size (unsigned int size, enum mode_class class, int limit)
173 enum machine_mode mode;
175 if (limit && size > MAX_FIXED_MODE_SIZE)
176 return BLKmode;
178 /* Get the first mode which has this size, in the specified class. */
179 for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
180 mode = GET_MODE_WIDER_MODE (mode))
181 if (GET_MODE_PRECISION (mode) == size)
182 return mode;
184 return BLKmode;
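/* A minimal usage sketch, assuming a typical 32-bit target whose integer
   modes are QImode/HImode/SImode/DImode with 8/16/32/64 value bits:

     mode_for_size (32, MODE_INT, 0)  yields SImode;
     mode_for_size (24, MODE_INT, 0)  yields BLKmode (no 24-bit integer mode);
     mode_for_size (64, MODE_INT, 1)  yields DImode, or BLKmode if the target
       defines MAX_FIXED_MODE_SIZE below 64 bits.  */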
187 /* Similar, except passed a tree node. */
189 enum machine_mode
190 mode_for_size_tree (tree size, enum mode_class class, int limit)
192 unsigned HOST_WIDE_INT uhwi;
193 unsigned int ui;
195 if (!host_integerp (size, 1))
196 return BLKmode;
197 uhwi = tree_low_cst (size, 1);
198 ui = uhwi;
199 if (uhwi != ui)
200 return BLKmode;
201 return mode_for_size (ui, class, limit);
204 /* Similar, but never return BLKmode; return the narrowest mode that
205 contains at least the requested number of value bits. */
207 enum machine_mode
208 smallest_mode_for_size (unsigned int size, enum mode_class class)
210 enum machine_mode mode;
212 /* Get the first mode which has at least this size, in the
213 specified class. */
214 for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
215 mode = GET_MODE_WIDER_MODE (mode))
216 if (GET_MODE_PRECISION (mode) >= size)
217 return mode;
219 gcc_unreachable ();
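/* Contrast with mode_for_size above: under the same 32-bit assumptions,
   smallest_mode_for_size (24, MODE_INT) returns SImode, the narrowest
   integer mode with at least 24 value bits, where mode_for_size would
   return BLKmode; if no mode in the class is wide enough, this function
   aborts via gcc_unreachable.  */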
222 /* Find an integer mode of the exact same size, or BLKmode on failure. */
224 enum machine_mode
225 int_mode_for_mode (enum machine_mode mode)
227 switch (GET_MODE_CLASS (mode))
229 case MODE_INT:
230 case MODE_PARTIAL_INT:
231 break;
233 case MODE_COMPLEX_INT:
234 case MODE_COMPLEX_FLOAT:
235 case MODE_FLOAT:
236 case MODE_DECIMAL_FLOAT:
237 case MODE_VECTOR_INT:
238 case MODE_VECTOR_FLOAT:
239 mode = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 0);
240 break;
242 case MODE_RANDOM:
243 if (mode == BLKmode)
244 break;
246 /* ... fall through ... */
248 case MODE_CC:
249 default:
250 gcc_unreachable ();
253 return mode;
256 /* Return the alignment of MODE. This will be bounded by 1 and
257 BIGGEST_ALIGNMENT. */
259 unsigned int
260 get_mode_alignment (enum machine_mode mode)
262 return MIN (BIGGEST_ALIGNMENT, MAX (1, mode_base_align[mode]*BITS_PER_UNIT));
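/* Illustrative values only: if mode_base_align[SImode] is 4, then
   get_mode_alignment (SImode) is MIN (BIGGEST_ALIGNMENT, 4 * 8) = 32 bits
   on a BITS_PER_UNIT == 8 target; the MAX (1, ...) guard keeps modes whose
   recorded base alignment is 0 from claiming zero alignment.  */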
266 /* Subroutine of layout_decl: Force alignment required for the data type.
267 But if the decl itself wants greater alignment, don't override that. */
269 static inline void
270 do_type_align (tree type, tree decl)
272 if (TYPE_ALIGN (type) > DECL_ALIGN (decl))
274 DECL_ALIGN (decl) = TYPE_ALIGN (type);
275 if (TREE_CODE (decl) == FIELD_DECL)
276 DECL_USER_ALIGN (decl) = TYPE_USER_ALIGN (type);
280 /* Set the size, mode and alignment of a ..._DECL node.
281 TYPE_DECL does need this for C++.
282 Note that LABEL_DECL and CONST_DECL nodes do not need this,
283 and FUNCTION_DECL nodes have them set up in a special (and simple) way.
284 Don't call layout_decl for them.
286 KNOWN_ALIGN is the amount of alignment we can assume this
287 decl has with no special effort. It is relevant only for FIELD_DECLs
288 and depends on the previous fields.
289 All that matters about KNOWN_ALIGN is which powers of 2 divide it.
290 If KNOWN_ALIGN is 0, it means, "as much alignment as you like":
291 the record will be aligned to suit. */
293 void
294 layout_decl (tree decl, unsigned int known_align)
296 tree type = TREE_TYPE (decl);
297 enum tree_code code = TREE_CODE (decl);
298 rtx rtl = NULL_RTX;
300 if (code == CONST_DECL)
301 return;
303 gcc_assert (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL
 304 || code == TYPE_DECL || code == FIELD_DECL);
306 rtl = DECL_RTL_IF_SET (decl);
308 if (type == error_mark_node)
309 type = void_type_node;
311 /* Usually the size and mode come from the data type without change,
312 however, the front-end may set the explicit width of the field, so its
313 size may not be the same as the size of its type. This happens with
314 bitfields, of course (an `int' bitfield may be only 2 bits, say), but it
315 also happens with other fields. For example, the C++ front-end creates
316 zero-sized fields corresponding to empty base classes, and depends on
317 layout_type setting DECL_FIELD_BITPOS correctly for the field. Set the
318 size in bytes from the size in bits. If we have already set the mode,
319 don't set it again since we can be called twice for FIELD_DECLs. */
321 DECL_UNSIGNED (decl) = TYPE_UNSIGNED (type);
322 if (DECL_MODE (decl) == VOIDmode)
323 DECL_MODE (decl) = TYPE_MODE (type);
325 if (DECL_SIZE (decl) == 0)
327 DECL_SIZE (decl) = TYPE_SIZE (type);
328 DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
330 else if (DECL_SIZE_UNIT (decl) == 0)
331 DECL_SIZE_UNIT (decl)
332 = fold_convert (sizetype, size_binop (CEIL_DIV_EXPR, DECL_SIZE (decl),
333 bitsize_unit_node));
335 if (code != FIELD_DECL)
336 /* For non-fields, update the alignment from the type. */
337 do_type_align (type, decl);
338 else
339 /* For fields, it's a bit more complicated... */
341 bool old_user_align = DECL_USER_ALIGN (decl);
342 bool zero_bitfield = false;
343 bool packed_p = DECL_PACKED (decl);
344 unsigned int mfa;
346 if (DECL_BIT_FIELD (decl))
348 DECL_BIT_FIELD_TYPE (decl) = type;
350 /* A zero-length bit-field affects the alignment of the next
351 field. In essence such bit-fields are not influenced by
352 any packing due to #pragma pack or attribute packed. */
353 if (integer_zerop (DECL_SIZE (decl))
354 && ! targetm.ms_bitfield_layout_p (DECL_FIELD_CONTEXT (decl)))
356 zero_bitfield = true;
357 packed_p = false;
358 #ifdef PCC_BITFIELD_TYPE_MATTERS
359 if (PCC_BITFIELD_TYPE_MATTERS)
360 do_type_align (type, decl);
361 else
362 #endif
364 #ifdef EMPTY_FIELD_BOUNDARY
365 if (EMPTY_FIELD_BOUNDARY > DECL_ALIGN (decl))
367 DECL_ALIGN (decl) = EMPTY_FIELD_BOUNDARY;
368 DECL_USER_ALIGN (decl) = 0;
370 #endif
374 /* See if we can use an ordinary integer mode for a bit-field.
 375 Conditions are: a fixed size that matches some integer mode,
 376 occupying a complete byte or bytes on a proper boundary. */
377 if (TYPE_SIZE (type) != 0
378 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
379 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT)
381 enum machine_mode xmode
382 = mode_for_size_tree (DECL_SIZE (decl), MODE_INT, 1);
384 if (xmode != BLKmode
385 && (known_align == 0
386 || known_align >= GET_MODE_ALIGNMENT (xmode)))
388 DECL_ALIGN (decl) = MAX (GET_MODE_ALIGNMENT (xmode),
389 DECL_ALIGN (decl));
390 DECL_MODE (decl) = xmode;
391 DECL_BIT_FIELD (decl) = 0;
395 /* Turn off DECL_BIT_FIELD if we won't need it set. */
396 if (TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
397 && known_align >= TYPE_ALIGN (type)
398 && DECL_ALIGN (decl) >= TYPE_ALIGN (type))
399 DECL_BIT_FIELD (decl) = 0;
401 else if (packed_p && DECL_USER_ALIGN (decl))
402 /* Don't touch DECL_ALIGN. For other packed fields, go ahead and
403 round up; we'll reduce it again below. We want packing to
404 supersede USER_ALIGN inherited from the type, but defer to
405 alignment explicitly specified on the field decl. */;
406 else
407 do_type_align (type, decl);
409 /* If the field is of variable size, we can't misalign it since we
410 have no way to make a temporary to align the result. But this
411 isn't an issue if the decl is not addressable. Likewise if it
412 is of unknown size.
414 Note that do_type_align may set DECL_USER_ALIGN, so we need to
415 check old_user_align instead. */
416 if (packed_p
417 && !old_user_align
418 && (DECL_NONADDRESSABLE_P (decl)
419 || DECL_SIZE_UNIT (decl) == 0
420 || TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST))
421 DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), BITS_PER_UNIT);
423 if (! packed_p && ! DECL_USER_ALIGN (decl))
 425 /* Some targets (e.g. i386, VMS) limit struct field alignment
426 to a lower boundary than alignment of variables unless
427 it was overridden by attribute aligned. */
428 #ifdef BIGGEST_FIELD_ALIGNMENT
429 DECL_ALIGN (decl)
430 = MIN (DECL_ALIGN (decl), (unsigned) BIGGEST_FIELD_ALIGNMENT);
431 #endif
432 #ifdef ADJUST_FIELD_ALIGN
433 DECL_ALIGN (decl) = ADJUST_FIELD_ALIGN (decl, DECL_ALIGN (decl));
434 #endif
437 if (zero_bitfield)
438 mfa = initial_max_fld_align * BITS_PER_UNIT;
439 else
440 mfa = maximum_field_alignment;
441 /* Should this be controlled by DECL_USER_ALIGN, too? */
442 if (mfa != 0)
443 DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), mfa);
446 /* Evaluate nonconstant size only once, either now or as soon as safe. */
447 if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
448 DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));
449 if (DECL_SIZE_UNIT (decl) != 0
450 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST)
451 DECL_SIZE_UNIT (decl) = variable_size (DECL_SIZE_UNIT (decl));
453 /* If requested, warn about definitions of large data objects. */
454 if (warn_larger_than
455 && (code == VAR_DECL || code == PARM_DECL)
456 && ! DECL_EXTERNAL (decl))
458 tree size = DECL_SIZE_UNIT (decl);
460 if (size != 0 && TREE_CODE (size) == INTEGER_CST
461 && compare_tree_int (size, larger_than_size) > 0)
463 int size_as_int = TREE_INT_CST_LOW (size);
465 if (compare_tree_int (size, size_as_int) == 0)
466 warning (0, "size of %q+D is %d bytes", decl, size_as_int);
467 else
468 warning (0, "size of %q+D is larger than %wd bytes",
469 decl, larger_than_size);
473 /* If the RTL was already set, update its mode and mem attributes. */
474 if (rtl)
476 PUT_MODE (rtl, DECL_MODE (decl));
477 SET_DECL_RTL (decl, 0);
478 set_mem_attributes (rtl, decl, 1);
479 SET_DECL_RTL (decl, rtl);
483 /* Given a VAR_DECL, PARM_DECL or RESULT_DECL, clears the results of
484 a previous call to layout_decl and calls it again. */
486 void
487 relayout_decl (tree decl)
489 DECL_SIZE (decl) = DECL_SIZE_UNIT (decl) = 0;
490 DECL_MODE (decl) = VOIDmode;
491 if (!DECL_USER_ALIGN (decl))
492 DECL_ALIGN (decl) = 0;
493 SET_DECL_RTL (decl, 0);
495 layout_decl (decl, 0);
498 /* Hook for a front-end function that can modify the record layout as needed
499 immediately before it is finalized. */
501 static void (*lang_adjust_rli) (record_layout_info) = 0;
503 void
504 set_lang_adjust_rli (void (*f) (record_layout_info))
506 lang_adjust_rli = f;
509 /* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
510 QUAL_UNION_TYPE. Return a pointer to a struct record_layout_info which
511 is to be passed to all other layout functions for this record. It is the
512 responsibility of the caller to call `free' for the storage returned.
513 Note that garbage collection is not permitted until we finish laying
514 out the record. */
516 record_layout_info
517 start_record_layout (tree t)
519 record_layout_info rli = xmalloc (sizeof (struct record_layout_info_s));
521 rli->t = t;
523 /* If the type has a minimum specified alignment (via an attribute
524 declaration, for example) use it -- otherwise, start with a
525 one-byte alignment. */
526 rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
527 rli->unpacked_align = rli->record_align;
528 rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);
530 #ifdef STRUCTURE_SIZE_BOUNDARY
531 /* Packed structures don't need to have minimum size. */
532 if (! TYPE_PACKED (t))
533 rli->record_align = MAX (rli->record_align, (unsigned) STRUCTURE_SIZE_BOUNDARY);
534 #endif
536 rli->offset = size_zero_node;
537 rli->bitpos = bitsize_zero_node;
538 rli->prev_field = 0;
539 rli->pending_statics = 0;
540 rli->packed_maybe_necessary = 0;
541 rli->remaining_in_alignment = 0;
543 return rli;
546 /* These four routines perform computations that convert between
547 the offset/bitpos forms and byte and bit offsets. */
549 tree
550 bit_from_pos (tree offset, tree bitpos)
552 return size_binop (PLUS_EXPR, bitpos,
553 size_binop (MULT_EXPR,
554 fold_convert (bitsizetype, offset),
555 bitsize_unit_node));
558 tree
559 byte_from_pos (tree offset, tree bitpos)
561 return size_binop (PLUS_EXPR, offset,
562 fold_convert (sizetype,
563 size_binop (TRUNC_DIV_EXPR, bitpos,
564 bitsize_unit_node)));
567 void
568 pos_from_bit (tree *poffset, tree *pbitpos, unsigned int off_align,
569 tree pos)
571 *poffset = size_binop (MULT_EXPR,
572 fold_convert (sizetype,
573 size_binop (FLOOR_DIV_EXPR, pos,
574 bitsize_int (off_align))),
575 size_int (off_align / BITS_PER_UNIT));
576 *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, bitsize_int (off_align));
579 /* Given a pointer to bit and byte offsets and an offset alignment,
580 normalize the offsets so they are within the alignment. */
582 void
583 normalize_offset (tree *poffset, tree *pbitpos, unsigned int off_align)
585 /* If the bit position is now larger than it should be, adjust it
586 downwards. */
587 if (compare_tree_int (*pbitpos, off_align) >= 0)
589 tree extra_aligns = size_binop (FLOOR_DIV_EXPR, *pbitpos,
590 bitsize_int (off_align));
592 *poffset
593 = size_binop (PLUS_EXPR, *poffset,
594 size_binop (MULT_EXPR,
595 fold_convert (sizetype, extra_aligns),
596 size_int (off_align / BITS_PER_UNIT)));
598 *pbitpos
599 = size_binop (FLOOR_MOD_EXPR, *pbitpos, bitsize_int (off_align));
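/* A worked example of the offset/bitpos split, assuming BITS_PER_UNIT == 8
   and off_align == 32: a raw bit position of 70 becomes
   offset = (70 / 32) * (32 / 8) = 8 bytes and bitpos = 70 % 32 = 6 bits
   (pos_from_bit); bit_from_pos (8, 6) recovers 8 * 8 + 6 = 70 and
   byte_from_pos (8, 6) gives 8.  normalize_offset applies the same folding
   whenever an incremented bitpos reaches off_align or more.  */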
603 /* Print debugging information about the information in RLI. */
605 void
606 debug_rli (record_layout_info rli)
608 print_node_brief (stderr, "type", rli->t, 0);
609 print_node_brief (stderr, "\noffset", rli->offset, 0);
610 print_node_brief (stderr, " bitpos", rli->bitpos, 0);
612 fprintf (stderr, "\naligns: rec = %u, unpack = %u, off = %u\n",
613 rli->record_align, rli->unpacked_align,
614 rli->offset_align);
 616 /* The ms_struct code is the only code that uses this. */
617 if (targetm.ms_bitfield_layout_p (rli->t))
618 fprintf (stderr, "remaining in alignment = %u\n", rli->remaining_in_alignment);
620 if (rli->packed_maybe_necessary)
621 fprintf (stderr, "packed may be necessary\n");
623 if (rli->pending_statics)
625 fprintf (stderr, "pending statics:\n");
626 debug_tree (rli->pending_statics);
630 /* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and
631 BITPOS if necessary to keep BITPOS below OFFSET_ALIGN. */
633 void
634 normalize_rli (record_layout_info rli)
636 normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align);
639 /* Returns the size in bytes allocated so far. */
641 tree
642 rli_size_unit_so_far (record_layout_info rli)
644 return byte_from_pos (rli->offset, rli->bitpos);
647 /* Returns the size in bits allocated so far. */
649 tree
650 rli_size_so_far (record_layout_info rli)
652 return bit_from_pos (rli->offset, rli->bitpos);
655 /* FIELD is about to be added to RLI->T. The alignment (in bits) of
656 the next available location within the record is given by KNOWN_ALIGN.
657 Update the variable alignment fields in RLI, and return the alignment
658 to give the FIELD. */
660 unsigned int
661 update_alignment_for_field (record_layout_info rli, tree field,
662 unsigned int known_align)
664 /* The alignment required for FIELD. */
665 unsigned int desired_align;
666 /* The type of this field. */
667 tree type = TREE_TYPE (field);
668 /* True if the field was explicitly aligned by the user. */
669 bool user_align;
670 bool is_bitfield;
672 /* Do not attempt to align an ERROR_MARK node */
673 if (TREE_CODE (type) == ERROR_MARK)
674 return 0;
676 /* Lay out the field so we know what alignment it needs. */
677 layout_decl (field, known_align);
678 desired_align = DECL_ALIGN (field);
679 user_align = DECL_USER_ALIGN (field);
681 is_bitfield = (type != error_mark_node
682 && DECL_BIT_FIELD_TYPE (field)
683 && ! integer_zerop (TYPE_SIZE (type)));
685 /* Record must have at least as much alignment as any field.
686 Otherwise, the alignment of the field within the record is
687 meaningless. */
688 if (targetm.ms_bitfield_layout_p (rli->t))
690 /* Here, the alignment of the underlying type of a bitfield can
691 affect the alignment of a record; even a zero-sized field
692 can do this. The alignment should be to the alignment of
693 the type, except that for zero-size bitfields this only
694 applies if there was an immediately prior, nonzero-size
695 bitfield. (That's the way it is, experimentally.) */
696 if ((!is_bitfield && !DECL_PACKED (field))
697 || (!integer_zerop (DECL_SIZE (field))
698 ? !DECL_PACKED (field)
699 : (rli->prev_field
700 && DECL_BIT_FIELD_TYPE (rli->prev_field)
701 && ! integer_zerop (DECL_SIZE (rli->prev_field)))))
703 unsigned int type_align = TYPE_ALIGN (type);
704 type_align = MAX (type_align, desired_align);
705 if (maximum_field_alignment != 0)
706 type_align = MIN (type_align, maximum_field_alignment);
707 rli->record_align = MAX (rli->record_align, type_align);
708 rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
711 #ifdef PCC_BITFIELD_TYPE_MATTERS
712 else if (is_bitfield && PCC_BITFIELD_TYPE_MATTERS)
714 /* Named bit-fields cause the entire structure to have the
715 alignment implied by their type. Some targets also apply the same
716 rules to unnamed bitfields. */
717 if (DECL_NAME (field) != 0
718 || targetm.align_anon_bitfield ())
720 unsigned int type_align = TYPE_ALIGN (type);
722 #ifdef ADJUST_FIELD_ALIGN
723 if (! TYPE_USER_ALIGN (type))
724 type_align = ADJUST_FIELD_ALIGN (field, type_align);
725 #endif
 727 /* Targets might choose to handle unnamed and hence possibly
 728 zero-width bitfields. Those are not influenced by #pragmas
729 or packed attributes. */
730 if (integer_zerop (DECL_SIZE (field)))
732 if (initial_max_fld_align)
733 type_align = MIN (type_align,
734 initial_max_fld_align * BITS_PER_UNIT);
736 else if (maximum_field_alignment != 0)
737 type_align = MIN (type_align, maximum_field_alignment);
738 else if (DECL_PACKED (field))
739 type_align = MIN (type_align, BITS_PER_UNIT);
741 /* The alignment of the record is increased to the maximum
742 of the current alignment, the alignment indicated on the
743 field (i.e., the alignment specified by an __aligned__
744 attribute), and the alignment indicated by the type of
745 the field. */
746 rli->record_align = MAX (rli->record_align, desired_align);
747 rli->record_align = MAX (rli->record_align, type_align);
749 if (warn_packed)
750 rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
751 user_align |= TYPE_USER_ALIGN (type);
754 #endif
755 else
757 rli->record_align = MAX (rli->record_align, desired_align);
758 rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
761 TYPE_USER_ALIGN (rli->t) |= user_align;
763 return desired_align;
766 /* Called from place_field to handle unions. */
768 static void
769 place_union_field (record_layout_info rli, tree field)
771 update_alignment_for_field (rli, field, /*known_align=*/0);
773 DECL_FIELD_OFFSET (field) = size_zero_node;
774 DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
775 SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT);
777 /* If this is an ERROR_MARK return *after* having set the
778 field at the start of the union. This helps when parsing
779 invalid fields. */
780 if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK)
781 return;
783 /* We assume the union's size will be a multiple of a byte so we don't
784 bother with BITPOS. */
785 if (TREE_CODE (rli->t) == UNION_TYPE)
786 rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field));
787 else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
788 rli->offset = fold_build3 (COND_EXPR, sizetype,
789 DECL_QUALIFIER (field),
790 DECL_SIZE_UNIT (field), rli->offset);
793 #if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
794 /* A bitfield of SIZE with a required access alignment of ALIGN is allocated
795 at BYTE_OFFSET / BIT_OFFSET. Return nonzero if the field would span more
796 units of alignment than the underlying TYPE. */
797 static int
798 excess_unit_span (HOST_WIDE_INT byte_offset, HOST_WIDE_INT bit_offset,
799 HOST_WIDE_INT size, HOST_WIDE_INT align, tree type)
801 /* Note that the calculation of OFFSET might overflow; we calculate it so
802 that we still get the right result as long as ALIGN is a power of two. */
803 unsigned HOST_WIDE_INT offset = byte_offset * BITS_PER_UNIT + bit_offset;
805 offset = offset % align;
806 return ((offset + size + align - 1) / align
807 > ((unsigned HOST_WIDE_INT) tree_low_cst (TYPE_SIZE (type), 1)
808 / align));
810 #endif
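/* A worked example, assuming a 32-bit int with 32-bit alignment: a 6-bit
   int bit-field at byte_offset 3, bit_offset 6 starts at bit
   3 * 8 + 6 = 30.  Then offset % align = 30, and
   (30 + 6 + 32 - 1) / 32 = 2 alignment units, while the type itself spans
   32 / 32 = 1 unit, so excess_unit_span returns nonzero and place_field
   will advance the field to the next 32-bit boundary.  */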
812 /* RLI contains information about the layout of a RECORD_TYPE. FIELD
813 is a FIELD_DECL to be added after those fields already present in
814 T. (FIELD is not actually added to the TYPE_FIELDS list here;
815 callers that desire that behavior must manually perform that step.) */
817 void
818 place_field (record_layout_info rli, tree field)
820 /* The alignment required for FIELD. */
821 unsigned int desired_align;
822 /* The alignment FIELD would have if we just dropped it into the
823 record as it presently stands. */
824 unsigned int known_align;
825 unsigned int actual_align;
826 /* The type of this field. */
827 tree type = TREE_TYPE (field);
829 gcc_assert (TREE_CODE (field) != ERROR_MARK);
831 /* If FIELD is static, then treat it like a separate variable, not
832 really like a structure field. If it is a FUNCTION_DECL, it's a
833 method. In both cases, all we do is lay out the decl, and we do
834 it *after* the record is laid out. */
835 if (TREE_CODE (field) == VAR_DECL)
837 rli->pending_statics = tree_cons (NULL_TREE, field,
838 rli->pending_statics);
839 return;
842 /* Enumerators and enum types which are local to this class need not
843 be laid out. Likewise for initialized constant fields. */
844 else if (TREE_CODE (field) != FIELD_DECL)
845 return;
847 /* Unions are laid out very differently than records, so split
848 that code off to another function. */
849 else if (TREE_CODE (rli->t) != RECORD_TYPE)
851 place_union_field (rli, field);
852 return;
855 else if (TREE_CODE (type) == ERROR_MARK)
857 /* Place this field at the current allocation position, so we
858 maintain monotonicity. */
859 DECL_FIELD_OFFSET (field) = rli->offset;
860 DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
861 SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
862 return;
865 /* Work out the known alignment so far. Note that A & (-A) is the
866 value of the least-significant bit in A that is one. */
867 if (! integer_zerop (rli->bitpos))
868 known_align = (tree_low_cst (rli->bitpos, 1)
869 & - tree_low_cst (rli->bitpos, 1));
870 else if (integer_zerop (rli->offset))
871 known_align = 0;
872 else if (host_integerp (rli->offset, 1))
873 known_align = (BITS_PER_UNIT
874 * (tree_low_cst (rli->offset, 1)
875 & - tree_low_cst (rli->offset, 1)));
876 else
877 known_align = rli->offset_align;
879 desired_align = update_alignment_for_field (rli, field, known_align);
880 if (known_align == 0)
881 known_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
883 if (warn_packed && DECL_PACKED (field))
885 if (known_align >= TYPE_ALIGN (type))
887 if (TYPE_ALIGN (type) > desired_align)
889 if (STRICT_ALIGNMENT)
890 warning (OPT_Wattributes, "packed attribute causes "
891 "inefficient alignment for %q+D", field);
892 else
893 warning (OPT_Wattributes, "packed attribute is "
894 "unnecessary for %q+D", field);
897 else
898 rli->packed_maybe_necessary = 1;
901 /* Does this field automatically have alignment it needs by virtue
902 of the fields that precede it and the record's own alignment?
903 We already align ms_struct fields, so don't re-align them. */
904 if (known_align < desired_align
905 && !targetm.ms_bitfield_layout_p (rli->t))
907 /* No, we need to skip space before this field.
908 Bump the cumulative size to multiple of field alignment. */
910 warning (OPT_Wpadded, "padding struct to align %q+D", field);
912 /* If the alignment is still within offset_align, just align
913 the bit position. */
914 if (desired_align < rli->offset_align)
915 rli->bitpos = round_up (rli->bitpos, desired_align);
916 else
918 /* First adjust OFFSET by the partial bits, then align. */
919 rli->offset
920 = size_binop (PLUS_EXPR, rli->offset,
921 fold_convert (sizetype,
922 size_binop (CEIL_DIV_EXPR, rli->bitpos,
923 bitsize_unit_node)));
924 rli->bitpos = bitsize_zero_node;
926 rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
929 if (! TREE_CONSTANT (rli->offset))
930 rli->offset_align = desired_align;
934 /* Handle compatibility with PCC. Note that if the record has any
935 variable-sized fields, we need not worry about compatibility. */
936 #ifdef PCC_BITFIELD_TYPE_MATTERS
937 if (PCC_BITFIELD_TYPE_MATTERS
938 && ! targetm.ms_bitfield_layout_p (rli->t)
939 && TREE_CODE (field) == FIELD_DECL
940 && type != error_mark_node
941 && DECL_BIT_FIELD (field)
942 && ! DECL_PACKED (field)
943 && maximum_field_alignment == 0
944 && ! integer_zerop (DECL_SIZE (field))
945 && host_integerp (DECL_SIZE (field), 1)
946 && host_integerp (rli->offset, 1)
947 && host_integerp (TYPE_SIZE (type), 1))
949 unsigned int type_align = TYPE_ALIGN (type);
950 tree dsize = DECL_SIZE (field);
951 HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
952 HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
953 HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);
955 #ifdef ADJUST_FIELD_ALIGN
956 if (! TYPE_USER_ALIGN (type))
957 type_align = ADJUST_FIELD_ALIGN (field, type_align);
958 #endif
960 /* A bit field may not span more units of alignment of its type
961 than its type itself. Advance to next boundary if necessary. */
962 if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
963 rli->bitpos = round_up (rli->bitpos, type_align);
965 TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
967 #endif
969 #ifdef BITFIELD_NBYTES_LIMITED
970 if (BITFIELD_NBYTES_LIMITED
971 && ! targetm.ms_bitfield_layout_p (rli->t)
972 && TREE_CODE (field) == FIELD_DECL
973 && type != error_mark_node
974 && DECL_BIT_FIELD_TYPE (field)
975 && ! DECL_PACKED (field)
976 && ! integer_zerop (DECL_SIZE (field))
977 && host_integerp (DECL_SIZE (field), 1)
978 && host_integerp (rli->offset, 1)
979 && host_integerp (TYPE_SIZE (type), 1))
981 unsigned int type_align = TYPE_ALIGN (type);
982 tree dsize = DECL_SIZE (field);
983 HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
984 HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
985 HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);
987 #ifdef ADJUST_FIELD_ALIGN
988 if (! TYPE_USER_ALIGN (type))
989 type_align = ADJUST_FIELD_ALIGN (field, type_align);
990 #endif
992 if (maximum_field_alignment != 0)
993 type_align = MIN (type_align, maximum_field_alignment);
994 /* ??? This test is opposite the test in the containing if
995 statement, so this code is unreachable currently. */
996 else if (DECL_PACKED (field))
997 type_align = MIN (type_align, BITS_PER_UNIT);
999 /* A bit field may not span the unit of alignment of its type.
1000 Advance to next boundary if necessary. */
1001 if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
1002 rli->bitpos = round_up (rli->bitpos, type_align);
1004 TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
1006 #endif
1008 /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details.
1009 A subtlety:
1010 When a bit field is inserted into a packed record, the whole
1011 size of the underlying type is used by one or more same-size
 1012 adjacent bitfields. (That is, if it's long:3, 32 bits is
1013 used in the record, and any additional adjacent long bitfields are
1014 packed into the same chunk of 32 bits. However, if the size
1015 changes, a new field of that size is allocated.) In an unpacked
1016 record, this is the same as using alignment, but not equivalent
1017 when packing.
1019 Note: for compatibility, we use the type size, not the type alignment
 1020 to determine alignment, since that matches the documentation. */
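/* A worked example of the rule above, assuming ms_struct layout, a 32-bit
   long and an 8-bit char:

     struct s { long a : 3; long b : 4; char c : 2; };

   Field a starts a run and reserves the full 32 bits of long
   (remaining_in_alignment = 32 - 3 = 29); b has the same type size, so it
   packs into the same 32-bit chunk (remaining_in_alignment drops to 25);
   c has a different type size, so the run ends, the 25 unused bits are
   skipped, and a new chunk is started at char's alignment.  */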
1022 if (targetm.ms_bitfield_layout_p (rli->t))
1024 tree prev_saved = rli->prev_field;
1025 tree prev_type = prev_saved ? DECL_BIT_FIELD_TYPE (prev_saved) : NULL;
1027 /* This is a bitfield if it exists. */
1028 if (rli->prev_field)
1030 /* If both are bitfields, nonzero, and the same size, this is
1031 the middle of a run. Zero declared size fields are special
1032 and handled as "end of run". (Note: it's nonzero declared
1033 size, but equal type sizes!) (Since we know that both
1034 the current and previous fields are bitfields by the
1035 time we check it, DECL_SIZE must be present for both.) */
1036 if (DECL_BIT_FIELD_TYPE (field)
1037 && !integer_zerop (DECL_SIZE (field))
1038 && !integer_zerop (DECL_SIZE (rli->prev_field))
1039 && host_integerp (DECL_SIZE (rli->prev_field), 0)
1040 && host_integerp (TYPE_SIZE (type), 0)
1041 && simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type)))
1043 /* We're in the middle of a run of equal type size fields; make
1044 sure we realign if we run out of bits. (Not decl size,
1045 type size!) */
1046 HOST_WIDE_INT bitsize = tree_low_cst (DECL_SIZE (field), 1);
1048 if (rli->remaining_in_alignment < bitsize)
1050 HOST_WIDE_INT typesize = tree_low_cst (TYPE_SIZE (type), 1);
1052 /* out of bits; bump up to next 'word'. */
1053 rli->bitpos
1054 = size_binop (PLUS_EXPR, rli->bitpos,
1055 bitsize_int (rli->remaining_in_alignment));
1056 rli->prev_field = field;
1057 if (typesize < bitsize)
1058 rli->remaining_in_alignment = 0;
1059 else
1060 rli->remaining_in_alignment = typesize - bitsize;
1062 else
1063 rli->remaining_in_alignment -= bitsize;
1065 else
1067 /* End of a run: if leaving a run of bitfields of the same type
1068 size, we have to "use up" the rest of the bits of the type
1069 size.
1071 Compute the new position as the sum of the size for the prior
1072 type and where we first started working on that type.
 1073 Note: since the beginning of the field was aligned, the end
 1074 will be too, so no rounding is needed. */
1076 if (!integer_zerop (DECL_SIZE (rli->prev_field)))
1078 rli->bitpos
1079 = size_binop (PLUS_EXPR, rli->bitpos,
1080 bitsize_int (rli->remaining_in_alignment));
1082 else
1083 /* We "use up" size zero fields; the code below should behave
1084 as if the prior field was not a bitfield. */
1085 prev_saved = NULL;
1087 /* Cause a new bitfield to be captured, either this time (if
1088 currently a bitfield) or next time we see one. */
1089 if (!DECL_BIT_FIELD_TYPE(field)
1090 || integer_zerop (DECL_SIZE (field)))
1091 rli->prev_field = NULL;
1094 normalize_rli (rli);
1097 /* If we're starting a new run of same size type bitfields
1098 (or a run of non-bitfields), set up the "first of the run"
1099 fields.
1101 That is, if the current field is not a bitfield, or if there
 1102 was a prior bitfield and the type sizes differ, or if there wasn't
 1103 a prior bitfield and the size of the current field is nonzero.
1105 Note: we must be sure to test ONLY the type size if there was
1106 a prior bitfield and ONLY for the current field being zero if
1107 there wasn't. */
1109 if (!DECL_BIT_FIELD_TYPE (field)
1110 || (prev_saved != NULL
1111 ? !simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (prev_type))
1112 : !integer_zerop (DECL_SIZE (field)) ))
1114 /* Never smaller than a byte for compatibility. */
1115 unsigned int type_align = BITS_PER_UNIT;
1117 /* (When not a bitfield), we could be seeing a flex array (with
1118 no DECL_SIZE). Since we won't be using remaining_in_alignment
1119 until we see a bitfield (and come by here again) we just skip
1120 calculating it. */
1121 if (DECL_SIZE (field) != NULL
1122 && host_integerp (TYPE_SIZE (TREE_TYPE (field)), 0)
1123 && host_integerp (DECL_SIZE (field), 0))
1125 HOST_WIDE_INT bitsize = tree_low_cst (DECL_SIZE (field), 1);
1126 HOST_WIDE_INT typesize
1127 = tree_low_cst (TYPE_SIZE (TREE_TYPE (field)), 1);
1129 if (typesize < bitsize)
1130 rli->remaining_in_alignment = 0;
1131 else
1132 rli->remaining_in_alignment = typesize - bitsize;
1135 /* Now align (conventionally) for the new type. */
1136 type_align = TYPE_ALIGN (TREE_TYPE (field));
1138 if (maximum_field_alignment != 0)
1139 type_align = MIN (type_align, maximum_field_alignment);
1141 rli->bitpos = round_up (rli->bitpos, type_align);
1143 /* If we really aligned, don't allow subsequent bitfields
1144 to undo that. */
1145 rli->prev_field = NULL;
1149 /* Offset so far becomes the position of this field after normalizing. */
1150 normalize_rli (rli);
1151 DECL_FIELD_OFFSET (field) = rli->offset;
1152 DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
1153 SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
1155 /* If this field ended up more aligned than we thought it would be (we
1156 approximate this by seeing if its position changed), lay out the field
1157 again; perhaps we can use an integral mode for it now. */
1158 if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
1159 actual_align = (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1160 & - tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1));
1161 else if (integer_zerop (DECL_FIELD_OFFSET (field)))
1162 actual_align = MAX (BIGGEST_ALIGNMENT, rli->record_align);
1163 else if (host_integerp (DECL_FIELD_OFFSET (field), 1))
1164 actual_align = (BITS_PER_UNIT
1165 * (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
1166 & - tree_low_cst (DECL_FIELD_OFFSET (field), 1)));
1167 else
1168 actual_align = DECL_OFFSET_ALIGN (field);
 1169 /* ACTUAL_ALIGN is still the actual alignment *within the record*.
 1170 Store / extract bit field operations will check the alignment of the
1171 record against the mode of bit fields. */
1173 if (known_align != actual_align)
1174 layout_decl (field, actual_align);
1176 if (rli->prev_field == NULL && DECL_BIT_FIELD_TYPE (field))
1177 rli->prev_field = field;
1179 /* Now add size of this field to the size of the record. If the size is
1180 not constant, treat the field as being a multiple of bytes and just
1181 adjust the offset, resetting the bit position. Otherwise, apportion the
1182 size amongst the bit position and offset. First handle the case of an
1183 unspecified size, which can happen when we have an invalid nested struct
1184 definition, such as struct j { struct j { int i; } }. The error message
1185 is printed in finish_struct. */
1186 if (DECL_SIZE (field) == 0)
1187 /* Do nothing. */;
1188 else if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1189 || TREE_CONSTANT_OVERFLOW (DECL_SIZE (field)))
1191 rli->offset
1192 = size_binop (PLUS_EXPR, rli->offset,
1193 fold_convert (sizetype,
1194 size_binop (CEIL_DIV_EXPR, rli->bitpos,
1195 bitsize_unit_node)));
1196 rli->offset
1197 = size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
1198 rli->bitpos = bitsize_zero_node;
1199 rli->offset_align = MIN (rli->offset_align, desired_align);
1201 else if (targetm.ms_bitfield_layout_p (rli->t))
1203 rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
1205 /* If we ended a bitfield before the full length of the type then
1206 pad the struct out to the full length of the last type. */
1207 if ((TREE_CHAIN (field) == NULL
1208 || TREE_CODE (TREE_CHAIN (field)) != FIELD_DECL)
1209 && DECL_BIT_FIELD_TYPE (field)
1210 && !integer_zerop (DECL_SIZE (field)))
1211 rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos,
1212 bitsize_int (rli->remaining_in_alignment));
1214 normalize_rli (rli);
1216 else
1218 rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
1219 normalize_rli (rli);
1223 /* Assuming that all the fields have been laid out, this function uses
1224 RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type
1225 indicated by RLI. */
1227 static void
1228 finalize_record_size (record_layout_info rli)
1230 tree unpadded_size, unpadded_size_unit;
1232 /* Now we want just byte and bit offsets, so set the offset alignment
1233 to be a byte and then normalize. */
1234 rli->offset_align = BITS_PER_UNIT;
1235 normalize_rli (rli);
1237 /* Determine the desired alignment. */
1238 #ifdef ROUND_TYPE_ALIGN
1239 TYPE_ALIGN (rli->t) = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
1240 rli->record_align);
1241 #else
1242 TYPE_ALIGN (rli->t) = MAX (TYPE_ALIGN (rli->t), rli->record_align);
1243 #endif
1245 /* Compute the size so far. Be sure to allow for extra bits in the
1246 size in bytes. We have guaranteed above that it will be no more
1247 than a single byte. */
1248 unpadded_size = rli_size_so_far (rli);
1249 unpadded_size_unit = rli_size_unit_so_far (rli);
1250 if (! integer_zerop (rli->bitpos))
1251 unpadded_size_unit
1252 = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node);
1254 /* Round the size up to be a multiple of the required alignment. */
1255 TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
1256 TYPE_SIZE_UNIT (rli->t)
1257 = round_up (unpadded_size_unit, TYPE_ALIGN_UNIT (rli->t));
1259 if (TREE_CONSTANT (unpadded_size)
1260 && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0)
1261 warning (OPT_Wpadded, "padding struct size to alignment boundary");
1263 if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE
1264 && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary
1265 && TREE_CONSTANT (unpadded_size))
1267 tree unpacked_size;
1269 #ifdef ROUND_TYPE_ALIGN
1270 rli->unpacked_align
1271 = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align);
1272 #else
1273 rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
1274 #endif
1276 unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align);
1277 if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
1279 TYPE_PACKED (rli->t) = 0;
1281 if (TYPE_NAME (rli->t))
1283 const char *name;
1285 if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE)
1286 name = IDENTIFIER_POINTER (TYPE_NAME (rli->t));
1287 else
1288 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (rli->t)));
1290 if (STRICT_ALIGNMENT)
1291 warning (OPT_Wpacked, "packed attribute causes inefficient "
1292 "alignment for %qs", name);
1293 else
1294 warning (OPT_Wpacked,
1295 "packed attribute is unnecessary for %qs", name);
1297 else
1299 if (STRICT_ALIGNMENT)
1300 warning (OPT_Wpacked,
1301 "packed attribute causes inefficient alignment");
1302 else
1303 warning (OPT_Wpacked, "packed attribute is unnecessary");
1309 /* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE). */
1311 void
1312 compute_record_mode (tree type)
1314 tree field;
1315 enum machine_mode mode = VOIDmode;
1317 /* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
1318 However, if possible, we use a mode that fits in a register
1319 instead, in order to allow for better optimization down the
1320 line. */
1321 TYPE_MODE (type) = BLKmode;
1323 if (! host_integerp (TYPE_SIZE (type), 1))
1324 return;
1326 /* A record which has any BLKmode members must itself be
 1327 BLKmode; it can't go in a register, unless the member is
1328 BLKmode only because it isn't aligned. */
1329 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1331 if (TREE_CODE (field) != FIELD_DECL)
1332 continue;
1334 if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK
1335 || (TYPE_MODE (TREE_TYPE (field)) == BLKmode
1336 && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field))
1337 && !(TYPE_SIZE (TREE_TYPE (field)) != 0
1338 && integer_zerop (TYPE_SIZE (TREE_TYPE (field)))))
1339 || ! host_integerp (bit_position (field), 1)
1340 || DECL_SIZE (field) == 0
1341 || ! host_integerp (DECL_SIZE (field), 1))
1342 return;
1344 /* If this field is the whole struct, remember its mode so
1345 that, say, we can put a double in a class into a DF
1346 register instead of forcing it to live in the stack. */
1347 if (simple_cst_equal (TYPE_SIZE (type), DECL_SIZE (field)))
1348 mode = DECL_MODE (field);
1350 #ifdef MEMBER_TYPE_FORCES_BLK
 1351 /* With some targets, e.g. c4x, it is sub-optimal
1352 to access an aligned BLKmode structure as a scalar. */
1354 if (MEMBER_TYPE_FORCES_BLK (field, mode))
1355 return;
1356 #endif /* MEMBER_TYPE_FORCES_BLK */
 1359 /* If we only have one real field, use its mode if that mode's size
1360 matches the type's size. This only applies to RECORD_TYPE. This
1361 does not apply to unions. */
1362 if (TREE_CODE (type) == RECORD_TYPE && mode != VOIDmode
1363 && host_integerp (TYPE_SIZE (type), 1)
1364 && GET_MODE_BITSIZE (mode) == TREE_INT_CST_LOW (TYPE_SIZE (type)))
1365 TYPE_MODE (type) = mode;
1366 else
1367 TYPE_MODE (type) = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);
1369 /* If structure's known alignment is less than what the scalar
1370 mode would need, and it matters, then stick with BLKmode. */
1371 if (TYPE_MODE (type) != BLKmode
1372 && STRICT_ALIGNMENT
1373 && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
1374 || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (TYPE_MODE (type))))
1376 /* If this is the only reason this type is BLKmode, then
1377 don't force containing types to be BLKmode. */
1378 TYPE_NO_FORCE_BLK (type) = 1;
1379 TYPE_MODE (type) = BLKmode;
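/* An illustrative outcome, assuming a target with a 64-bit DFmode double:
   for  struct wrap { double d; };  the single field's DECL_SIZE equals the
   struct's TYPE_SIZE, so the loop records DFmode and the RECORD_TYPE check
   above keeps it, letting the struct live in a floating-point register.
   With two int fields the mode instead comes from mode_for_size_tree on
   the total size (typically DImode for 8 bytes), and any BLKmode member
   that is not merely under-aligned forces the whole record to BLKmode.  */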
1383 /* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid
1384 out. */
1386 static void
1387 finalize_type_size (tree type)
1389 /* Normally, use the alignment corresponding to the mode chosen.
1390 However, where strict alignment is not required, avoid
1391 over-aligning structures, since most compilers do not do this
1392 alignment. */
1394 if (TYPE_MODE (type) != BLKmode && TYPE_MODE (type) != VOIDmode
1395 && (STRICT_ALIGNMENT
1396 || (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
1397 && TREE_CODE (type) != QUAL_UNION_TYPE
1398 && TREE_CODE (type) != ARRAY_TYPE)))
1400 unsigned mode_align = GET_MODE_ALIGNMENT (TYPE_MODE (type));
1402 /* Don't override a larger alignment requirement coming from a user
1403 alignment of one of the fields. */
1404 if (mode_align >= TYPE_ALIGN (type))
1406 TYPE_ALIGN (type) = mode_align;
1407 TYPE_USER_ALIGN (type) = 0;
1411 /* Do machine-dependent extra alignment. */
1412 #ifdef ROUND_TYPE_ALIGN
1413 TYPE_ALIGN (type)
1414 = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT);
1415 #endif
1417 /* If we failed to find a simple way to calculate the unit size
1418 of the type, find it by division. */
1419 if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0)
1420 /* TYPE_SIZE (type) is computed in bitsizetype. After the division, the
1421 result will fit in sizetype. We will get more efficient code using
1422 sizetype, so we force a conversion. */
1423 TYPE_SIZE_UNIT (type)
1424 = fold_convert (sizetype,
1425 size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
1426 bitsize_unit_node));
1428 if (TYPE_SIZE (type) != 0)
1430 TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
1431 TYPE_SIZE_UNIT (type) = round_up (TYPE_SIZE_UNIT (type),
1432 TYPE_ALIGN_UNIT (type));
1435 /* Evaluate nonconstant sizes only once, either now or as soon as safe. */
1436 if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1437 TYPE_SIZE (type) = variable_size (TYPE_SIZE (type));
1438 if (TYPE_SIZE_UNIT (type) != 0
1439 && TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)
1440 TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type));
 1442 /* Also lay out any other variants of the type. */
1443 if (TYPE_NEXT_VARIANT (type)
1444 || type != TYPE_MAIN_VARIANT (type))
1446 tree variant;
1447 /* Record layout info of this variant. */
1448 tree size = TYPE_SIZE (type);
1449 tree size_unit = TYPE_SIZE_UNIT (type);
1450 unsigned int align = TYPE_ALIGN (type);
1451 unsigned int user_align = TYPE_USER_ALIGN (type);
1452 enum machine_mode mode = TYPE_MODE (type);
1454 /* Copy it into all variants. */
1455 for (variant = TYPE_MAIN_VARIANT (type);
1456 variant != 0;
1457 variant = TYPE_NEXT_VARIANT (variant))
1459 TYPE_SIZE (variant) = size;
1460 TYPE_SIZE_UNIT (variant) = size_unit;
1461 TYPE_ALIGN (variant) = align;
1462 TYPE_USER_ALIGN (variant) = user_align;
1463 TYPE_MODE (variant) = mode;
 1468 /* Do all of the work required to lay out the type indicated by RLI,
1469 once the fields have been laid out. This function will call `free'
1470 for RLI, unless FREE_P is false. Passing a value other than false
1471 for FREE_P is bad practice; this option only exists to support the
1472 G++ 3.2 ABI. */
1474 void
1475 finish_record_layout (record_layout_info rli, int free_p)
1477 tree variant;
1479 /* Compute the final size. */
1480 finalize_record_size (rli);
1482 /* Compute the TYPE_MODE for the record. */
1483 compute_record_mode (rli->t);
1485 /* Perform any last tweaks to the TYPE_SIZE, etc. */
1486 finalize_type_size (rli->t);
1488 /* Propagate TYPE_PACKED to variants. With C++ templates,
1489 handle_packed_attribute is too early to do this. */
1490 for (variant = TYPE_NEXT_VARIANT (rli->t); variant;
1491 variant = TYPE_NEXT_VARIANT (variant))
1492 TYPE_PACKED (variant) = TYPE_PACKED (rli->t);
1494 /* Lay out any static members. This is done now because their type
1495 may use the record's type. */
1496 while (rli->pending_statics)
1498 layout_decl (TREE_VALUE (rli->pending_statics), 0);
1499 rli->pending_statics = TREE_CHAIN (rli->pending_statics);
1502 /* Clean up. */
1503 if (free_p)
1504 free (rli);
 1508 /* Finish processing a builtin RECORD_TYPE type TYPE. Its name is
1509 NAME, its fields are chained in reverse on FIELDS.
1511 If ALIGN_TYPE is non-null, it is given the same alignment as
1512 ALIGN_TYPE. */
1514 void
1515 finish_builtin_struct (tree type, const char *name, tree fields,
1516 tree align_type)
1518 tree tail, next;
1520 for (tail = NULL_TREE; fields; tail = fields, fields = next)
1522 DECL_FIELD_CONTEXT (fields) = type;
1523 next = TREE_CHAIN (fields);
1524 TREE_CHAIN (fields) = tail;
1526 TYPE_FIELDS (type) = tail;
1528 if (align_type)
1530 TYPE_ALIGN (type) = TYPE_ALIGN (align_type);
1531 TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (align_type);
1534 layout_type (type);
1535 #if 0 /* not yet, should get fixed properly later */
1536 TYPE_NAME (type) = make_type_decl (get_identifier (name), type);
1537 #else
1538 TYPE_NAME (type) = build_decl (TYPE_DECL, get_identifier (name), type);
1539 #endif
1540 TYPE_STUB_DECL (type) = TYPE_NAME (type);
1541 layout_decl (TYPE_NAME (type), 0);
1544 /* Calculate the mode, size, and alignment for TYPE.
1545 For an array type, calculate the element separation as well.
1546 Record TYPE on the chain of permanent or temporary types
1547 so that dbxout will find out about it.
1549 TYPE_SIZE of a type is nonzero if the type has been laid out already.
1550 layout_type does nothing on such a type.
1552 If the type is incomplete, its TYPE_SIZE remains zero. */
1554 void
1555 layout_type (tree type)
1557 gcc_assert (type);
1559 if (type == error_mark_node)
1560 return;
1562 /* Do nothing if type has been laid out before. */
1563 if (TYPE_SIZE (type))
1564 return;
1566 switch (TREE_CODE (type))
1568 case LANG_TYPE:
1569 /* This kind of type is the responsibility
1570 of the language-specific code. */
1571 gcc_unreachable ();
1573 case BOOLEAN_TYPE: /* Used for Java, Pascal, and Chill. */
1574 if (TYPE_PRECISION (type) == 0)
1575 TYPE_PRECISION (type) = 1; /* default to one byte/boolean. */
1577 /* ... fall through ... */
1579 case INTEGER_TYPE:
1580 case ENUMERAL_TYPE:
1581 if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
1582 && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
1583 TYPE_UNSIGNED (type) = 1;
1585 TYPE_MODE (type) = smallest_mode_for_size (TYPE_PRECISION (type),
1586 MODE_INT);
1587 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1588 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1589 break;
1591 case REAL_TYPE:
1592 TYPE_MODE (type) = mode_for_size (TYPE_PRECISION (type), MODE_FLOAT, 0);
1593 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1594 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1595 break;
1597 case COMPLEX_TYPE:
1598 TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
1599 TYPE_MODE (type)
1600 = mode_for_size (2 * TYPE_PRECISION (TREE_TYPE (type)),
1601 (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE
 1602 ? MODE_COMPLEX_FLOAT : MODE_COMPLEX_INT),
 1603 0);
1604 TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1605 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1606 break;
1608 case VECTOR_TYPE:
1610 int nunits = TYPE_VECTOR_SUBPARTS (type);
1611 tree nunits_tree = build_int_cst (NULL_TREE, nunits);
1612 tree innertype = TREE_TYPE (type);
1614 gcc_assert (!(nunits & (nunits - 1)));
1616 /* Find an appropriate mode for the vector type. */
1617 if (TYPE_MODE (type) == VOIDmode)
1619 enum machine_mode innermode = TYPE_MODE (innertype);
1620 enum machine_mode mode;
1622 /* First, look for a supported vector type. */
1623 if (SCALAR_FLOAT_MODE_P (innermode))
1624 mode = MIN_MODE_VECTOR_FLOAT;
1625 else
1626 mode = MIN_MODE_VECTOR_INT;
1628 for (; mode != VOIDmode ; mode = GET_MODE_WIDER_MODE (mode))
1629 if (GET_MODE_NUNITS (mode) == nunits
1630 && GET_MODE_INNER (mode) == innermode
1631 && targetm.vector_mode_supported_p (mode))
1632 break;
1634 /* For integers, try mapping it to a same-sized scalar mode. */
1635 if (mode == VOIDmode
1636 && GET_MODE_CLASS (innermode) == MODE_INT)
1637 mode = mode_for_size (nunits * GET_MODE_BITSIZE (innermode),
1638 MODE_INT, 0);
1640 if (mode == VOIDmode || !have_regs_of_mode[mode])
1641 TYPE_MODE (type) = BLKmode;
1642 else
1643 TYPE_MODE (type) = mode;
1646 TYPE_UNSIGNED (type) = TYPE_UNSIGNED (TREE_TYPE (type));
1647 TYPE_SIZE_UNIT (type) = int_const_binop (MULT_EXPR,
1648 TYPE_SIZE_UNIT (innertype),
1649 nunits_tree, 0);
1650 TYPE_SIZE (type) = int_const_binop (MULT_EXPR, TYPE_SIZE (innertype),
1651 nunits_tree, 0);
1653 /* Always naturally align vectors. This prevents ABI changes
1654 depending on whether or not native vector modes are supported. */
1655 TYPE_ALIGN (type) = tree_low_cst (TYPE_SIZE (type), 0);
1656 break;
1659 case VOID_TYPE:
1660 /* This is an incomplete type and so doesn't have a size. */
1661 TYPE_ALIGN (type) = 1;
1662 TYPE_USER_ALIGN (type) = 0;
1663 TYPE_MODE (type) = VOIDmode;
1664 break;
1666 case OFFSET_TYPE:
1667 TYPE_SIZE (type) = bitsize_int (POINTER_SIZE);
1668 TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE / BITS_PER_UNIT);
1669 /* A pointer might be MODE_PARTIAL_INT,
1670 but ptrdiff_t must be integral. */
1671 TYPE_MODE (type) = mode_for_size (POINTER_SIZE, MODE_INT, 0);
1672 break;
1674 case FUNCTION_TYPE:
1675 case METHOD_TYPE:
1676 /* It's hard to see what the mode and size of a function ought to
1677 be, but we do know the alignment is FUNCTION_BOUNDARY, so
1678 make it consistent with that. */
1679 TYPE_MODE (type) = mode_for_size (FUNCTION_BOUNDARY, MODE_INT, 0);
1680 TYPE_SIZE (type) = bitsize_int (FUNCTION_BOUNDARY);
1681 TYPE_SIZE_UNIT (type) = size_int (FUNCTION_BOUNDARY / BITS_PER_UNIT);
1682 break;
1684 case POINTER_TYPE:
1685 case REFERENCE_TYPE:
1688 enum machine_mode mode = ((TREE_CODE (type) == REFERENCE_TYPE
1689 && reference_types_internal)
1690 ? Pmode : TYPE_MODE (type));
1692 int nbits = GET_MODE_BITSIZE (mode);
1694 TYPE_SIZE (type) = bitsize_int (nbits);
1695 TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
1696 TYPE_UNSIGNED (type) = 1;
1697 TYPE_PRECISION (type) = nbits;
1699 break;
1701 case ARRAY_TYPE:
1703 tree index = TYPE_DOMAIN (type);
1704 tree element = TREE_TYPE (type);
1706 build_pointer_type (element);
1708 /* We need to know both bounds in order to compute the size. */
1709 if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index)
1710 && TYPE_SIZE (element))
1712 tree ub = TYPE_MAX_VALUE (index);
1713 tree lb = TYPE_MIN_VALUE (index);
1714 tree length;
1715 tree element_size;
1717 /* The initial subtraction should happen in the original type so
1718 that (possible) negative values are handled appropriately. */
1719 length = size_binop (PLUS_EXPR, size_one_node,
1720 fold_convert (sizetype,
1721 fold_build2 (MINUS_EXPR,
1722 TREE_TYPE (lb),
1723 ub, lb)));
1725 /* Special handling for arrays of bits (for Chill). */
1726 element_size = TYPE_SIZE (element);
1727 if (TYPE_PACKED (type) && INTEGRAL_TYPE_P (element)
1728 && (integer_zerop (TYPE_MAX_VALUE (element))
1729 || integer_onep (TYPE_MAX_VALUE (element)))
1730 && host_integerp (TYPE_MIN_VALUE (element), 1))
1732 HOST_WIDE_INT maxvalue
1733 = tree_low_cst (TYPE_MAX_VALUE (element), 1);
1734 HOST_WIDE_INT minvalue
1735 = tree_low_cst (TYPE_MIN_VALUE (element), 1);
1737 if (maxvalue - minvalue == 1
1738 && (maxvalue == 1 || maxvalue == 0))
1739 element_size = integer_one_node;
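/* Worked example (hypothetical Chill-style element type): a TYPE_PACKED
   array of an integral type whose only values are 0 and 1 gets a 1-bit
   element size here, so an array of 32 such elements is laid out as
   32 bits rather than 32 whole bytes.  */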
1742 /* If neither bound is a constant and sizetype is signed, make
1743 sure the size is never negative. We should really do this
1744 if *either* bound is non-constant, but this is the best
1745 compromise between C and Ada. */
1746 if (!TYPE_UNSIGNED (sizetype)
1747 && TREE_CODE (TYPE_MIN_VALUE (index)) != INTEGER_CST
1748 && TREE_CODE (TYPE_MAX_VALUE (index)) != INTEGER_CST)
1749 length = size_binop (MAX_EXPR, length, size_zero_node);
1751 TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
1752 fold_convert (bitsizetype,
1753 length));
1755 /* If we know the size of the element, calculate the total
1756 size directly, rather than deriving it by division below.
1757 This optimization helps Fortran assumed-size arrays
1758 (where the size of the array is determined at runtime)
1759 substantially.
1760 Note that we can't do this in the case where the size of
1761 the elements is one bit since TYPE_SIZE_UNIT cannot be
1762 set correctly in that case. */
1763 if (TYPE_SIZE_UNIT (element) != 0 && ! integer_onep (element_size))
1764 TYPE_SIZE_UNIT (type)
1765 = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length);
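/* Worked example (illustrative sizes): with 10 elements of a 4-byte,
   32-bit element type, TYPE_SIZE is 32 * 10 == 320 bits and
   TYPE_SIZE_UNIT is computed directly as 4 * 10 == 40 bytes.  */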
1768 /* Now round the alignment and size,
1769 using machine-dependent criteria if any. */
1771 #ifdef ROUND_TYPE_ALIGN
1772 TYPE_ALIGN (type)
1773 = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (element), BITS_PER_UNIT);
1774 #else
1775 TYPE_ALIGN (type) = MAX (TYPE_ALIGN (element), BITS_PER_UNIT);
1776 #endif
1777 TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element);
1778 TYPE_MODE (type) = BLKmode;
1779 if (TYPE_SIZE (type) != 0
1780 #ifdef MEMBER_TYPE_FORCES_BLK
1781 && ! MEMBER_TYPE_FORCES_BLK (type, VOIDmode)
1782 #endif
1783 /* BLKmode elements force BLKmode aggregate;
1784 else extracting or storing fields may not work correctly. */
1785 && (TYPE_MODE (TREE_TYPE (type)) != BLKmode
1786 || TYPE_NO_FORCE_BLK (TREE_TYPE (type))))
1788 /* One-element arrays get the component type's mode. */
1789 if (simple_cst_equal (TYPE_SIZE (type),
1790 TYPE_SIZE (TREE_TYPE (type))))
1791 TYPE_MODE (type) = TYPE_MODE (TREE_TYPE (type));
1792 else
1793 TYPE_MODE (type)
1794 = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);
1796 if (TYPE_MODE (type) != BLKmode
1797 && STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT
1798 && TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type))
1799 && TYPE_MODE (type) != BLKmode)
1801 TYPE_NO_FORCE_BLK (type) = 1;
1802 TYPE_MODE (type) = BLKmode;
1805 /* When the element size is constant, check that it is at least as
1806 large as the element alignment. */
1807 if (TYPE_SIZE_UNIT (element)
1808 && TREE_CODE (TYPE_SIZE_UNIT (element)) == INTEGER_CST
1809 /* If TYPE_SIZE_UNIT overflowed, then it is certainly larger than
1810 TYPE_ALIGN_UNIT. */
1811 && !TREE_CONSTANT_OVERFLOW (TYPE_SIZE_UNIT (element))
1812 && !integer_zerop (TYPE_SIZE_UNIT (element))
1813 && compare_tree_int (TYPE_SIZE_UNIT (element),
1814 TYPE_ALIGN_UNIT (element)) < 0)
1815 error ("alignment of array elements is greater than element size");
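/* Worked example (assuming an element over-aligned by attribute): an
   element type with a TYPE_SIZE_UNIT of 1 byte but an alignment of 2
   bytes is rejected here, since consecutive elements are spaced by the
   element size and so could not all keep that alignment.  */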
1816 break;
1819 case RECORD_TYPE:
1820 case UNION_TYPE:
1821 case QUAL_UNION_TYPE:
1823 tree field;
1824 record_layout_info rli;
1826 /* Initialize the layout information. */
1827 rli = start_record_layout (type);
1829 /* If this is a QUAL_UNION_TYPE, we want to process the fields
1830 in reverse order when building the COND_EXPR that denotes
1831 its size. We reverse them again later. */
1832 if (TREE_CODE (type) == QUAL_UNION_TYPE)
1833 TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
1835 /* Place all the fields. */
1836 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1837 place_field (rli, field);
1839 if (TREE_CODE (type) == QUAL_UNION_TYPE)
1840 TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
1842 if (lang_adjust_rli)
1843 (*lang_adjust_rli) (rli);
1845 /* Finish laying out the record. */
1846 finish_record_layout (rli, /*free_p=*/true);
1848 break;
1850 default:
1851 gcc_unreachable ();
1854 /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE. For
1855 records and unions, finish_record_layout already called this
1856 function. */
1857 if (TREE_CODE (type) != RECORD_TYPE
1858 && TREE_CODE (type) != UNION_TYPE
1859 && TREE_CODE (type) != QUAL_UNION_TYPE)
1860 finalize_type_size (type);
1862 /* If an alias set has been set for this aggregate when it was incomplete,
1863 force it into alias set 0.
1864 This is too conservative, but we cannot call record_component_aliases
1865 here because some frontends still change the aggregates after
1866 layout_type. */
1867 if (AGGREGATE_TYPE_P (type) && TYPE_ALIAS_SET_KNOWN_P (type))
1868 TYPE_ALIAS_SET (type) = 0;
1871 /* Create and return a type for signed integers of PRECISION bits. */
1873 tree
1874 make_signed_type (int precision)
1876 tree type = make_node (INTEGER_TYPE);
1878 TYPE_PRECISION (type) = precision;
1880 fixup_signed_type (type);
1881 return type;
1884 /* Create and return a type for unsigned integers of PRECISION bits. */
1886 tree
1887 make_unsigned_type (int precision)
1889 tree type = make_node (INTEGER_TYPE);
1891 TYPE_PRECISION (type) = precision;
1893 fixup_unsigned_type (type);
1894 return type;
1897 /* Initialize sizetype and bitsizetype to reasonable and temporary
1898 values to enable integer types to be created. */
1900 void
1901 initialize_sizetypes (bool signed_p)
1903 tree t = make_node (INTEGER_TYPE);
1904 int precision = GET_MODE_BITSIZE (SImode);
1906 TYPE_MODE (t) = SImode;
1907 TYPE_ALIGN (t) = GET_MODE_ALIGNMENT (SImode);
1908 TYPE_USER_ALIGN (t) = 0;
1909 TYPE_IS_SIZETYPE (t) = 1;
1910 TYPE_UNSIGNED (t) = !signed_p;
1911 TYPE_SIZE (t) = build_int_cst (t, precision);
1912 TYPE_SIZE_UNIT (t) = build_int_cst (t, GET_MODE_SIZE (SImode));
1913 TYPE_PRECISION (t) = precision;
1915 /* Set TYPE_MIN_VALUE and TYPE_MAX_VALUE. */
1916 set_min_and_max_values_for_integral_type (t, precision, !signed_p);
1918 sizetype = t;
1919 bitsizetype = build_distinct_type_copy (t);
1922 /* Make sizetype a version of TYPE, and initialize *sizetype
1923 accordingly. We do this by overwriting the stub sizetype and
1924 bitsizetype nodes created by initialize_sizetypes. This makes sure
1925 that (a) anything stubby about them no longer exists and (b) any
1926 INTEGER_CSTs created with such a type remain valid. */
1928 void
1929 set_sizetype (tree type)
1931 int oprecision = TYPE_PRECISION (type);
1932 /* The *bitsizetype types use a precision that avoids overflows when
1933 calculating signed sizes / offsets in bits. However, when
1934 cross-compiling from a 32-bit to a 64-bit host, we are limited to 64-bit
1935 precision. */
1936 int precision = MIN (MIN (oprecision + BITS_PER_UNIT_LOG + 1,
1937 MAX_FIXED_MODE_SIZE),
1938 2 * HOST_BITS_PER_WIDE_INT);
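/* Worked example (host/target numbers are assumptions): with
   oprecision == 64, BITS_PER_UNIT_LOG == 3, MAX_FIXED_MODE_SIZE == 128
   and a 64-bit HOST_WIDE_INT, this gives
   MIN (MIN (64 + 3 + 1, 128), 128) == 68 bits for bitsizetype.  */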
1939 tree t;
1941 gcc_assert (TYPE_UNSIGNED (type) == TYPE_UNSIGNED (sizetype));
1943 t = build_distinct_type_copy (type);
1944 /* We do want to use sizetype's cache, as we will be replacing that
1945 type. */
1946 TYPE_CACHED_VALUES (t) = TYPE_CACHED_VALUES (sizetype);
1947 TYPE_CACHED_VALUES_P (t) = TYPE_CACHED_VALUES_P (sizetype);
1948 TREE_TYPE (TYPE_CACHED_VALUES (t)) = type;
1949 TYPE_UID (t) = TYPE_UID (sizetype);
1950 TYPE_IS_SIZETYPE (t) = 1;
1952 /* Replace our original stub sizetype. */
1953 memcpy (sizetype, t, tree_size (sizetype));
1954 TYPE_MAIN_VARIANT (sizetype) = sizetype;
1956 t = make_node (INTEGER_TYPE);
1957 TYPE_NAME (t) = get_identifier ("bit_size_type");
1958 /* We do want to use bitsizetype's cache, as we will be replacing that
1959 type. */
1960 TYPE_CACHED_VALUES (t) = TYPE_CACHED_VALUES (bitsizetype);
1961 TYPE_CACHED_VALUES_P (t) = TYPE_CACHED_VALUES_P (bitsizetype);
1962 TYPE_PRECISION (t) = precision;
1963 TYPE_UID (t) = TYPE_UID (bitsizetype);
1964 TYPE_IS_SIZETYPE (t) = 1;
1966 /* Replace our original stub bitsizetype. */
1967 memcpy (bitsizetype, t, tree_size (bitsizetype));
1968 TYPE_MAIN_VARIANT (bitsizetype) = bitsizetype;
1970 if (TYPE_UNSIGNED (type))
1972 fixup_unsigned_type (bitsizetype);
1973 ssizetype = build_distinct_type_copy (make_signed_type (oprecision));
1974 TYPE_IS_SIZETYPE (ssizetype) = 1;
1975 sbitsizetype = build_distinct_type_copy (make_signed_type (precision));
1976 TYPE_IS_SIZETYPE (sbitsizetype) = 1;
1978 else
1980 fixup_signed_type (bitsizetype);
1981 ssizetype = sizetype;
1982 sbitsizetype = bitsizetype;
1985 /* If SIZETYPE is unsigned, we need to fix TYPE_MAX_VALUE so that
1986 it is sign extended in a way consistent with force_fit_type. */
1987 if (TYPE_UNSIGNED (type))
1989 tree orig_max, new_max;
1991 orig_max = TYPE_MAX_VALUE (sizetype);
1993 /* Build a new node with the same values, but a different type. */
1994 new_max = build_int_cst_wide (sizetype,
1995 TREE_INT_CST_LOW (orig_max),
1996 TREE_INT_CST_HIGH (orig_max));
1998 /* Now sign extend it using force_fit_type to ensure
1999 consistency. */
2000 new_max = force_fit_type (new_max, 0, 0, 0);
2001 TYPE_MAX_VALUE (sizetype) = new_max;
2005 /* TYPE is an integral type, i.e., an INTEGER_TYPE, ENUMERAL_TYPE
2006 or BOOLEAN_TYPE. Set TYPE_MIN_VALUE and TYPE_MAX_VALUE
2007 for TYPE, based on the PRECISION and whether or not the TYPE
2008 IS_UNSIGNED. PRECISION need not correspond to a width supported
2009 natively by the hardware; for example, on a machine with 8-bit,
2010 16-bit, and 32-bit register modes, PRECISION might be 7, 23, or
2011 61. */
2013 void
2014 set_min_and_max_values_for_integral_type (tree type,
2015 int precision,
2016 bool is_unsigned)
2018 tree min_value;
2019 tree max_value;
2021 if (is_unsigned)
2022 {
2023 min_value = build_int_cst (type, 0);
2024 max_value
2025 = build_int_cst_wide (type, precision - HOST_BITS_PER_WIDE_INT >= 0
2026 ? -1
2027 : ((HOST_WIDE_INT) 1 << precision) - 1,
2028 precision - HOST_BITS_PER_WIDE_INT > 0
2029 ? ((unsigned HOST_WIDE_INT) ~0
2030 >> (HOST_BITS_PER_WIDE_INT
2031 - (precision - HOST_BITS_PER_WIDE_INT)))
2032 : 0);
2033 }
2034 else
2035 {
2036 min_value
2037 = build_int_cst_wide (type,
2038 (precision - HOST_BITS_PER_WIDE_INT > 0
2039 ? 0
2040 : (HOST_WIDE_INT) (-1) << (precision - 1)),
2041 (((HOST_WIDE_INT) (-1)
2042 << (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
2043 ? precision - HOST_BITS_PER_WIDE_INT - 1
2044 : 0))));
2045 max_value
2046 = build_int_cst_wide (type,
2047 (precision - HOST_BITS_PER_WIDE_INT > 0
2048 ? -1
2049 : ((HOST_WIDE_INT) 1 << (precision - 1)) - 1),
2050 (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
2051 ? (((HOST_WIDE_INT) 1
2052 << (precision - HOST_BITS_PER_WIDE_INT - 1))) - 1
2053 : 0));
2054 }
2056 TYPE_MIN_VALUE (type) = min_value;
2057 TYPE_MAX_VALUE (type) = max_value;
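/* A minimal standalone sketch of the same bounds computation for
   precisions of at most 64 bits, using plain integers instead of
   INTEGER_CST nodes; it is illustrative only and not part of the
   compiler.  */
#if 0
#include <stdint.h>
#include <stdio.h>

static void
integral_bounds (int precision, int is_unsigned,
                 int64_t *min_value, uint64_t *max_value)
{
  if (is_unsigned)
    {
      *min_value = 0;
      /* Shift in two steps so precision == 64 does not over-shift.  */
      *max_value = (((uint64_t) 1 << (precision - 1)) << 1) - 1;
    }
  else
    {
      uint64_t umax = ((uint64_t) 1 << (precision - 1)) - 1;
      *max_value = umax;
      *min_value = -(int64_t) umax - 1;
    }
}

int
main (void)
{
  int64_t lo;
  uint64_t hi;

  /* A 7-bit signed type, as in the comment above: -64 .. 63.  */
  integral_bounds (7, 0, &lo, &hi);
  printf ("%lld .. %llu\n", (long long) lo, (unsigned long long) hi);
  return 0;
}
#endif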
2060 /* Set the extreme values of TYPE based on its precision in bits,
2061 then lay it out. Used when make_signed_type won't do
2062 because the tree code is not INTEGER_TYPE.
2063 E.g. for Pascal, when the -fsigned-char option is given. */
2065 void
2066 fixup_signed_type (tree type)
2068 int precision = TYPE_PRECISION (type);
2070 /* We cannot properly represent constants wider than
2071 2 * HOST_BITS_PER_WIDE_INT bits, but we still need such types,
2072 as they are used by i386 vector extensions and friends. */
2073 if (precision > HOST_BITS_PER_WIDE_INT * 2)
2074 precision = HOST_BITS_PER_WIDE_INT * 2;
2076 set_min_and_max_values_for_integral_type (type, precision,
2077 /*is_unsigned=*/false);
2079 /* Lay out the type: set its alignment, size, etc. */
2080 layout_type (type);
2083 /* Set the extreme values of TYPE based on its precision in bits,
2084 then lay it out. This is used both in `make_unsigned_type'
2085 and for enumeral types. */
2087 void
2088 fixup_unsigned_type (tree type)
2090 int precision = TYPE_PRECISION (type);
2092 /* We cannot properly represent constants wider than
2093 2 * HOST_BITS_PER_WIDE_INT bits, but we still need such types,
2094 as they are used by i386 vector extensions and friends. */
2095 if (precision > HOST_BITS_PER_WIDE_INT * 2)
2096 precision = HOST_BITS_PER_WIDE_INT * 2;
2098 TYPE_UNSIGNED (type) = 1;
2100 set_min_and_max_values_for_integral_type (type, precision,
2101 /*is_unsigned=*/true);
2103 /* Lay out the type: set its alignment, size, etc. */
2104 layout_type (type);
2107 /* Find the best machine mode to use when referencing a bit field of length
2108 BITSIZE bits starting at BITPOS.
2110 The underlying object is known to be aligned to a boundary of ALIGN bits.
2111 If LARGEST_MODE is not VOIDmode, it means that we should not use a mode
2112 larger than LARGEST_MODE (usually SImode).
2114 If no mode meets all these conditions, we return VOIDmode.
2116 If VOLATILEP is false and SLOW_BYTE_ACCESS is false, we return the
2117 smallest mode meeting these conditions.
2119 If VOLATILEP is false and SLOW_BYTE_ACCESS is true, we return the
2120 largest mode (but a mode no wider than UNITS_PER_WORD) that meets
2121 all the conditions.
2123 If VOLATILEP is true, the narrow_volatile_bitfield target hook is used to
2124 decide which of the above modes should be used. */
2126 enum machine_mode
2127 get_best_mode (int bitsize, int bitpos, unsigned int align,
2128 enum machine_mode largest_mode, int volatilep)
2130 enum machine_mode mode;
2131 unsigned int unit = 0;
2133 /* Find the narrowest integer mode that contains the bit field. */
2134 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2135 mode = GET_MODE_WIDER_MODE (mode))
2137 unit = GET_MODE_BITSIZE (mode);
2138 if ((bitpos % unit) + bitsize <= unit)
2139 break;
2142 if (mode == VOIDmode
2143 /* It is tempting to omit the following line
2144 if STRICT_ALIGNMENT is true.
2145 But that is incorrect, since if the bitfield uses part of 3 bytes
2146 and we use a 4-byte mode, we could get a spurious segv
2147 if the extra 4th byte is past the end of memory.
2148 (Though at least one Unix compiler ignores this problem:
2149 that on the Sequent 386 machine.) */
2150 || MIN (unit, BIGGEST_ALIGNMENT) > align
2151 || (largest_mode != VOIDmode && unit > GET_MODE_BITSIZE (largest_mode)))
2152 return VOIDmode;
2154 if ((SLOW_BYTE_ACCESS && ! volatilep)
2155 || (volatilep && !targetm.narrow_volatile_bitfield ()))
2157 enum machine_mode wide_mode = VOIDmode, tmode;
2159 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); tmode != VOIDmode;
2160 tmode = GET_MODE_WIDER_MODE (tmode))
2162 unit = GET_MODE_BITSIZE (tmode);
2163 if (bitpos / unit == (bitpos + bitsize - 1) / unit
2164 && unit <= BITS_PER_WORD
2165 && unit <= MIN (align, BIGGEST_ALIGNMENT)
2166 && (largest_mode == VOIDmode
2167 || unit <= GET_MODE_BITSIZE (largest_mode)))
2168 wide_mode = tmode;
2171 if (wide_mode != VOIDmode)
2172 return wide_mode;
2175 return mode;
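/* A standalone sketch of the narrowest-mode search above, with a
   power-of-two unit size in bits standing in for a machine mode; the
   helper name and the 8..64 unit range are assumptions, not GCC code.  */
#if 0
static unsigned int
narrowest_unit_for_bitfield (unsigned int bitsize, unsigned int bitpos,
                             unsigned int align)
{
  unsigned int unit;

  /* Walk from the narrowest unit upwards until the field fits.  */
  for (unit = 8; unit <= 64; unit *= 2)
    if (bitpos % unit + bitsize <= unit)
      break;

  /* Reject units wider than 64 bits or wider than the known alignment,
     mirroring the VOIDmode return above.  */
  if (unit > 64 || unit > align)
    return 0;

  return unit;
}

/* Example: bitsize == 10 at bitpos == 28 does not fit any 32-bit unit
   (28 % 32 + 10 == 38), so a 64-bit unit is needed; with align == 32
   that unit is rejected and 0 (the VOIDmode analogue) is returned.  */
#endif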
2178 /* Get the minimal and maximal values for MODE (signed or unsigned depending on
2179 SIGN). The returned constants are made to be usable in TARGET_MODE. */
2181 void
2182 get_mode_bounds (enum machine_mode mode, int sign,
2183 enum machine_mode target_mode,
2184 rtx *mmin, rtx *mmax)
2186 unsigned size = GET_MODE_BITSIZE (mode);
2187 unsigned HOST_WIDE_INT min_val, max_val;
2189 gcc_assert (size <= HOST_BITS_PER_WIDE_INT);
2191 if (sign)
2192 {
2193 min_val = -((unsigned HOST_WIDE_INT) 1 << (size - 1));
2194 max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1)) - 1;
2195 }
2196 else
2197 {
2198 min_val = 0;
2199 max_val = ((unsigned HOST_WIDE_INT) 1 << (size - 1) << 1) - 1;
2200 }
2202 *mmin = gen_int_mode (min_val, target_mode);
2203 *mmax = gen_int_mode (max_val, target_mode);
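/* Worked example (illustrative 16-bit mode): size == 16 gives
   [-32768, 32767] when SIGN is nonzero and [0, 65535] otherwise; the
   double shift above avoids shifting by the full width when size equals
   HOST_BITS_PER_WIDE_INT.  */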
2206 #include "gt-stor-layout.h"