/* C-compiler utilities for types and variables storage layout
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1996, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "ggc.h"
#include "target.h"
#include "langhooks.h"

/* Set to one when set_sizetype has been called.  */
static int sizetype_set;

/* List of types created before set_sizetype has been called.  We do not
   make this a GGC root since we want these nodes to be reclaimed.  */
static tree early_type_list;

/* Data type for the expressions representing sizes of data types.
   It is the first integer type laid out.  */
tree sizetype_tab[(int) TYPE_KIND_LAST];

/* If nonzero, this is an upper limit on alignment of structure fields.
   The value is measured in bits.  */
unsigned int maximum_field_alignment;

/* If nonzero, the alignment of a bitstring or (power-)set value, in bits.
   May be overridden by front-ends.  */
unsigned int set_alignment = 0;

/* Nonzero if all REFERENCE_TYPEs are internal and hence should be
   allocated in Pmode, not ptr_mode.  Set only by internal_reference_types
   called only by a front end.  */
static int reference_types_internal = 0;

static void finalize_record_size	PARAMS ((record_layout_info));
static void finalize_type_size		PARAMS ((tree));
static void place_union_field		PARAMS ((record_layout_info, tree));
#if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)
static int excess_unit_span		PARAMS ((HOST_WIDE_INT, HOST_WIDE_INT,
						 HOST_WIDE_INT, HOST_WIDE_INT,
						 tree));
#endif
static unsigned int update_alignment_for_field
					PARAMS ((record_layout_info, tree,
						 unsigned int));
extern void debug_rli			PARAMS ((record_layout_info));

/* SAVE_EXPRs for sizes of types and decls, waiting to be expanded.  */

static GTY(()) tree pending_sizes;

/* Nonzero means cannot safely call expand_expr now,
   so put variable sizes onto `pending_sizes' instead.  */

int immediate_size_expand;

/* Show that REFERENCE_TYPES are internal and should be Pmode.  Called only
   by front end.  */

void
internal_reference_types ()
{
  reference_types_internal = 1;
}

/* Get a list of all the objects put on the pending sizes list.  */

tree
get_pending_sizes ()
{
  tree chain = pending_sizes;
  tree t;

  /* Put each SAVE_EXPR into the current function.  */
  for (t = chain; t; t = TREE_CHAIN (t))
    SAVE_EXPR_CONTEXT (TREE_VALUE (t)) = current_function_decl;

  pending_sizes = 0;
  return chain;
}

/* Return nonzero if EXPR is present on the pending sizes list.  */

int
is_pending_size (expr)
     tree expr;
{
  tree t;

  for (t = pending_sizes; t; t = TREE_CHAIN (t))
    if (TREE_VALUE (t) == expr)
      return 1;
  return 0;
}

/* Add EXPR to the pending sizes list.  */

void
put_pending_size (expr)
     tree expr;
{
  /* Strip any simple arithmetic from EXPR to see if it has an underlying
     SAVE_EXPR.  */
  expr = skip_simple_arithmetic (expr);

  if (TREE_CODE (expr) == SAVE_EXPR)
    pending_sizes = tree_cons (NULL_TREE, expr, pending_sizes);
}

/* Put a chain of objects into the pending sizes list, which must be
   empty.  */

void
put_pending_sizes (chain)
     tree chain;
{
  if (pending_sizes)
    abort ();

  pending_sizes = chain;
}

/* Given a size SIZE that may not be a constant, return a SAVE_EXPR
   to serve as the actual size-expression for a type or decl.  */

tree
variable_size (size)
     tree size;
{
  tree save;

  /* If the language-processor is to take responsibility for variable-sized
     items (e.g., languages which have elaboration procedures like Ada),
     just return SIZE unchanged.  Likewise for self-referential sizes and
     constant sizes.  */
  if (TREE_CONSTANT (size)
      || (*lang_hooks.decls.global_bindings_p) () < 0
      || contains_placeholder_p (size))
    return size;

  if (TREE_CODE (size) == MINUS_EXPR && integer_onep (TREE_OPERAND (size, 1)))
    /* If this is the upper bound of a C array, leave the minus 1 outside
       the SAVE_EXPR so it can be folded away.  */
    TREE_OPERAND (size, 0) = save = save_expr (TREE_OPERAND (size, 0));
  else
    size = save = save_expr (size);

  /* If an array with a variable number of elements is declared, and
     the elements require destruction, we will emit a cleanup for the
     array.  That cleanup is run both on normal exit from the block
     and in the exception-handler for the block.  Normally, when code
     is used in both ordinary code and in an exception handler it is
     `unsaved', i.e., all SAVE_EXPRs are recalculated.  However, we do
     not wish to do that here; the array-size is the same in both
     places.  */
  if (TREE_CODE (save) == SAVE_EXPR)
    SAVE_EXPR_PERSISTENT_P (save) = 1;

  if ((*lang_hooks.decls.global_bindings_p) ())
    {
      if (TREE_CONSTANT (size))
	error ("type size can't be explicitly evaluated");
      else
	error ("variable-size type declared outside of any function");

      return size_one_node;
    }

  if (immediate_size_expand)
    expand_expr (save, const0_rtx, VOIDmode, 0);
  else if (cfun != 0 && cfun->x_dont_save_pending_sizes_p)
    /* The front-end doesn't want us to keep a list of the expressions
       that determine sizes for variable size objects.  */
    ;
  else
    put_pending_size (save);

  return size;
}

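/* Illustrative note (not in the original sources): for a C array
   `int a[n]', the upper bound of the index domain is `n - 1'.  The
   MINUS_EXPR special case above wraps only `n' in the SAVE_EXPR,
   yielding `SAVE_EXPR (n) - 1', so that fold can later cancel the
   `- 1' against the `+ 1' added back when the array length is
   computed in layout_type.  */
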
#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif

/* Return the machine mode to use for a nonscalar of SIZE bits.
   The mode must be in class CLASS, and have exactly that many bits.
   If LIMIT is nonzero, modes wider than MAX_FIXED_MODE_SIZE will not
   be used.  */

enum machine_mode
mode_for_size (size, class, limit)
     unsigned int size;
     enum mode_class class;
     int limit;
{
  enum machine_mode mode;

  if (limit && size > MAX_FIXED_MODE_SIZE)
    return BLKmode;

  /* Get the first mode which has this size, in the specified class.  */
  for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    if (GET_MODE_BITSIZE (mode) == size)
      return mode;

  return BLKmode;
}

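/* For example, on a typical target with 8-bit units,
   mode_for_size (32, MODE_INT, 0) yields SImode, while asking for 24
   bits finds no integer mode of exactly that width and so yields
   BLKmode.  */
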
/* Similar, except passed a tree node.  */

enum machine_mode
mode_for_size_tree (size, class, limit)
     tree size;
     enum mode_class class;
     int limit;
{
  if (TREE_CODE (size) != INTEGER_CST
      /* What we really want to say here is that the size can fit in a
	 host integer, but we know there's no way we'd find a mode for
	 this many bits, so there's no point in doing the precise test.  */
      || compare_tree_int (size, 1000) > 0)
    return BLKmode;
  else
    return mode_for_size (TREE_INT_CST_LOW (size), class, limit);
}

/* Similar, but never return BLKmode; return the narrowest mode that
   contains at least the requested number of bits.  */

enum machine_mode
smallest_mode_for_size (size, class)
     unsigned int size;
     enum mode_class class;
{
  enum machine_mode mode;

  /* Get the first mode which has at least this size, in the
     specified class.  */
  for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    if (GET_MODE_BITSIZE (mode) >= size)
      return mode;

  abort ();
}

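/* For example, smallest_mode_for_size (17, MODE_INT) walks past
   QImode (8) and HImode (16), both too narrow, and returns SImode
   (32) on a typical target; unlike mode_for_size, an inexact fit is
   acceptable here.  */
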
/* Find an integer mode of the exact same size, or BLKmode on failure.  */

enum machine_mode
int_mode_for_mode (mode)
     enum machine_mode mode;
{
  switch (GET_MODE_CLASS (mode))
    {
    case MODE_INT:
    case MODE_PARTIAL_INT:
      break;

    case MODE_COMPLEX_INT:
    case MODE_COMPLEX_FLOAT:
    case MODE_FLOAT:
    case MODE_VECTOR_INT:
    case MODE_VECTOR_FLOAT:
      mode = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 0);
      break;

    case MODE_RANDOM:
      if (mode == BLKmode)
	break;

      /* ... fall through ...  */

    case MODE_CC:
    default:
      abort ();
    }

  return mode;
}

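/* For example, on targets where SFmode and DFmode are 32 and 64 bits
   wide, int_mode_for_mode (SFmode) yields SImode and
   int_mode_for_mode (DFmode) yields DImode.  */
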
/* Return the alignment of MODE.  This will be bounded by 1 and
   BIGGEST_ALIGNMENT.  */

unsigned int
get_mode_alignment (mode)
     enum machine_mode mode;
{
  unsigned int alignment;

  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
      || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT)
    alignment = GET_MODE_UNIT_SIZE (mode);
  else
    alignment = GET_MODE_SIZE (mode);

  /* Extract the LSB of the size.  */
  alignment = alignment & -alignment;
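  /* For instance, a 12-byte mode (such as XFmode on ia32) gives
     12 & -12 == 4, so the reported alignment will be 4 * BITS_PER_UNIT
     == 32 bits rather than the full mode size; the lowest set bit of
     the size is the largest power of two that divides it.  */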
  alignment *= BITS_PER_UNIT;

  alignment = MIN (BIGGEST_ALIGNMENT, MAX (1, alignment));
  return alignment;
}

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (value, divisor)
     tree value;
     int divisor;
{
  tree arg = size_int_type (divisor, TREE_TYPE (value));

  return size_binop (MULT_EXPR, size_binop (CEIL_DIV_EXPR, value, arg), arg);
}

/* Likewise, but round down.  */

tree
round_down (value, divisor)
     tree value;
     int divisor;
{
  tree arg = size_int_type (divisor, TREE_TYPE (value));

  return size_binop (MULT_EXPR, size_binop (FLOOR_DIV_EXPR, value, arg), arg);
}

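/* For example, round_up applied to a size of 37 with divisor 8
   computes CEIL (37 / 8) * 8 == 40, while round_down computes
   FLOOR (37 / 8) * 8 == 32.  */
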
/* Subroutine of layout_decl: Force alignment required for the data type.
   But if the decl itself wants greater alignment, don't override that.  */

static inline void
do_type_align (tree type, tree decl)
{
  if (TYPE_ALIGN (type) > DECL_ALIGN (decl))
    {
      DECL_ALIGN (decl) = TYPE_ALIGN (type);
      DECL_USER_ALIGN (decl) = TYPE_USER_ALIGN (type);
    }
}

/* Set the size, mode and alignment of a ..._DECL node.
   TYPE_DECL does need this for C++.
   Note that LABEL_DECL and CONST_DECL nodes do not need this,
   and FUNCTION_DECL nodes have them set up in a special (and simple) way.
   Don't call layout_decl for them.

   KNOWN_ALIGN is the amount of alignment we can assume this
   decl has with no special effort.  It is relevant only for FIELD_DECLs
   and depends on the previous fields.
   All that matters about KNOWN_ALIGN is which powers of 2 divide it.
   If KNOWN_ALIGN is 0, it means, "as much alignment as you like":
   the record will be aligned to suit.  */

void
layout_decl (decl, known_align)
     tree decl;
     unsigned int known_align;
{
  tree type = TREE_TYPE (decl);
  enum tree_code code = TREE_CODE (decl);
  rtx rtl = NULL_RTX;

  if (code == CONST_DECL)
    return;
  else if (code != VAR_DECL && code != PARM_DECL && code != RESULT_DECL
	   && code != TYPE_DECL && code != FIELD_DECL)
    abort ();

  rtl = DECL_RTL_IF_SET (decl);

  if (type == error_mark_node)
    type = void_type_node;

  /* Usually the size and mode come from the data type without change,
     however, the front-end may set the explicit width of the field, so its
     size may not be the same as the size of its type.  This happens with
     bitfields, of course (an `int' bitfield may be only 2 bits, say), but it
     also happens with other fields.  For example, the C++ front-end creates
     zero-sized fields corresponding to empty base classes, and depends on
     layout_type setting DECL_FIELD_BITPOS correctly for the field.  Set the
     size in bytes from the size in bits.  If we have already set the mode,
     don't set it again since we can be called twice for FIELD_DECLs.  */

  TREE_UNSIGNED (decl) = TREE_UNSIGNED (type);
  if (DECL_MODE (decl) == VOIDmode)
    DECL_MODE (decl) = TYPE_MODE (type);

  if (DECL_SIZE (decl) == 0)
    {
      DECL_SIZE (decl) = TYPE_SIZE (type);
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
    }
  else if (DECL_SIZE_UNIT (decl) == 0)
    DECL_SIZE_UNIT (decl)
      = convert (sizetype, size_binop (CEIL_DIV_EXPR, DECL_SIZE (decl),
				       bitsize_unit_node));

  if (code != FIELD_DECL)
    /* For non-fields, update the alignment from the type.  */
    do_type_align (type, decl);
  else
    /* For fields, it's a bit more complicated...  */
    {
      if (DECL_BIT_FIELD (decl))
	{
	  DECL_BIT_FIELD_TYPE (decl) = type;

	  /* A zero-length bit-field affects the alignment of the next
	     field.  */
	  if (integer_zerop (DECL_SIZE (decl))
	      && ! DECL_PACKED (decl)
	      && ! (*targetm.ms_bitfield_layout_p) (DECL_FIELD_CONTEXT (decl)))
	    {
#ifdef PCC_BITFIELD_TYPE_MATTERS
	      if (PCC_BITFIELD_TYPE_MATTERS)
		do_type_align (type, decl);
	      else
#endif
		{
#ifdef EMPTY_FIELD_BOUNDARY
		  if (EMPTY_FIELD_BOUNDARY > DECL_ALIGN (decl))
		    {
		      DECL_ALIGN (decl) = EMPTY_FIELD_BOUNDARY;
		      DECL_USER_ALIGN (decl) = 0;
		    }
#endif
		}
	    }

	  /* See if we can use an ordinary integer mode for a bit-field.
	     Conditions are: a fixed size that is correct for another mode
	     and occupying a complete byte or bytes on proper boundary.  */
	  if (TYPE_SIZE (type) != 0
	      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	      && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT)
	    {
	      enum machine_mode xmode
		= mode_for_size_tree (DECL_SIZE (decl), MODE_INT, 1);

	      if (xmode != BLKmode && known_align >= GET_MODE_ALIGNMENT (xmode))
		{
		  DECL_ALIGN (decl) = MAX (GET_MODE_ALIGNMENT (xmode),
					   DECL_ALIGN (decl));
		  DECL_MODE (decl) = xmode;
		  DECL_BIT_FIELD (decl) = 0;
		}
	    }

	  /* Turn off DECL_BIT_FIELD if we won't need it set.  */
	  if (TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
	      && known_align >= TYPE_ALIGN (type)
	      && DECL_ALIGN (decl) >= TYPE_ALIGN (type))
	    DECL_BIT_FIELD (decl) = 0;
	}
      else if (DECL_PACKED (decl) && DECL_USER_ALIGN (decl))
	/* Don't touch DECL_ALIGN.  For other packed fields, go ahead and
	   round up; we'll reduce it again below.  */;
      else
	do_type_align (type, decl);

      /* If the field is of variable size, we can't misalign it since we
	 have no way to make a temporary to align the result.  But this
	 isn't an issue if the decl is not addressable.  Likewise if it
	 is of unknown size.  */
      if (DECL_PACKED (decl)
	  && !DECL_USER_ALIGN (decl)
	  && (DECL_NONADDRESSABLE_P (decl)
	      || DECL_SIZE_UNIT (decl) == 0
	      || TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST))
	DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), BITS_PER_UNIT);

      /* Should this be controlled by DECL_USER_ALIGN, too?  */
      if (maximum_field_alignment != 0)
	DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), maximum_field_alignment);
      if (! DECL_USER_ALIGN (decl))
	{
	  /* Some targets (i.e. i386, VMS) limit struct field alignment
	     to a lower boundary than alignment of variables unless
	     it was overridden by attribute aligned.  */
#ifdef BIGGEST_FIELD_ALIGNMENT
	  DECL_ALIGN (decl)
	    = MIN (DECL_ALIGN (decl), (unsigned) BIGGEST_FIELD_ALIGNMENT);
#endif
#ifdef ADJUST_FIELD_ALIGN
	  DECL_ALIGN (decl) = ADJUST_FIELD_ALIGN (decl, DECL_ALIGN (decl));
#endif
	}
    }

  /* Evaluate nonconstant size only once, either now or as soon as safe.  */
  if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));
  if (DECL_SIZE_UNIT (decl) != 0
      && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST)
    DECL_SIZE_UNIT (decl) = variable_size (DECL_SIZE_UNIT (decl));

  /* If requested, warn about definitions of large data objects.  */
  if (warn_larger_than
      && (code == VAR_DECL || code == PARM_DECL)
      && ! DECL_EXTERNAL (decl))
    {
      tree size = DECL_SIZE_UNIT (decl);

      if (size != 0 && TREE_CODE (size) == INTEGER_CST
	  && compare_tree_int (size, larger_than_size) > 0)
	{
	  unsigned int size_as_int = TREE_INT_CST_LOW (size);

	  if (compare_tree_int (size, size_as_int) == 0)
	    warning_with_decl (decl, "size of `%s' is %d bytes", size_as_int);
	  else
	    warning_with_decl (decl, "size of `%s' is larger than %d bytes",
			       larger_than_size);
	}
    }

  /* If the RTL was already set, update its mode and mem attributes.  */
  if (rtl)
    {
      PUT_MODE (rtl, DECL_MODE (decl));
      SET_DECL_RTL (decl, 0);
      set_mem_attributes (rtl, decl, 1);
      SET_DECL_RTL (decl, rtl);
    }
}

/* Hook for a front-end function that can modify the record layout as needed
   immediately before it is finalized.  */

void (*lang_adjust_rli) PARAMS ((record_layout_info)) = 0;

void
set_lang_adjust_rli (f)
     void (*f) PARAMS ((record_layout_info));
{
  lang_adjust_rli = f;
}

/* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
   QUAL_UNION_TYPE.  Return a pointer to a struct record_layout_info which
   is to be passed to all other layout functions for this record.  It is the
   responsibility of the caller to call `free' for the storage returned.
   Note that garbage collection is not permitted until we finish laying
   out the record.  */

record_layout_info
start_record_layout (t)
     tree t;
{
  record_layout_info rli
    = (record_layout_info) xmalloc (sizeof (struct record_layout_info_s));

  rli->t = t;

  /* If the type has a minimum specified alignment (via an attribute
     declaration, for example) use it -- otherwise, start with a
     one-byte alignment.  */
  rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
  rli->unpacked_align = rli->record_align;
  rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);

#ifdef STRUCTURE_SIZE_BOUNDARY
  /* Packed structures don't need to have minimum size.  */
  if (! TYPE_PACKED (t))
    rli->record_align = MAX (rli->record_align,
			     (unsigned) STRUCTURE_SIZE_BOUNDARY);
#endif

  rli->offset = size_zero_node;
  rli->bitpos = bitsize_zero_node;
  rli->prev_field = 0;
  rli->pending_statics = 0;
  rli->packed_maybe_necessary = 0;

  return rli;
}

/* These four routines perform computations that convert between
   the offset/bitpos forms and byte and bit offsets.  */

tree
bit_from_pos (offset, bitpos)
     tree offset, bitpos;
{
  return size_binop (PLUS_EXPR, bitpos,
		     size_binop (MULT_EXPR, convert (bitsizetype, offset),
				 bitsize_unit_node));
}

tree
byte_from_pos (offset, bitpos)
     tree offset, bitpos;
{
  return size_binop (PLUS_EXPR, offset,
		     convert (sizetype,
			      size_binop (TRUNC_DIV_EXPR, bitpos,
					  bitsize_unit_node)));
}

void
pos_from_bit (poffset, pbitpos, off_align, pos)
     tree *poffset, *pbitpos;
     unsigned int off_align;
     tree pos;
{
  *poffset = size_binop (MULT_EXPR,
			 convert (sizetype,
				  size_binop (FLOOR_DIV_EXPR, pos,
					      bitsize_int (off_align))),
			 size_int (off_align / BITS_PER_UNIT));
  *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, bitsize_int (off_align));
}

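/* Worked example: with OFF_ALIGN == 32, pos_from_bit splits a bit
   position of 70 into an offset of 8 bytes (FLOOR (70 / 32) * 4) and
   a bit position of 6 (70 mod 32); bit_from_pos (8, 6) then recovers
   6 + 8 * BITS_PER_UNIT == 70.  */
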
/* Given a pointer to bit and byte offsets and an offset alignment,
   normalize the offsets so they are within the alignment.  */

void
normalize_offset (poffset, pbitpos, off_align)
     tree *poffset, *pbitpos;
     unsigned int off_align;
{
  /* If the bit position is now larger than it should be, adjust it
     downwards.  */
  if (compare_tree_int (*pbitpos, off_align) >= 0)
    {
      tree extra_aligns = size_binop (FLOOR_DIV_EXPR, *pbitpos,
				      bitsize_int (off_align));

      *poffset
	= size_binop (PLUS_EXPR, *poffset,
		      size_binop (MULT_EXPR, convert (sizetype, extra_aligns),
				  size_int (off_align / BITS_PER_UNIT)));

      *pbitpos
	= size_binop (FLOOR_MOD_EXPR, *pbitpos, bitsize_int (off_align));
    }
}

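/* For example, with OFF_ALIGN == 32 and *PBITPOS == 33, one extra
   alignment unit is carried into the byte offset: *POFFSET grows by
   4 bytes and *PBITPOS becomes 1.  */
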
/* Print debugging information about the information in RLI.  */

void
debug_rli (rli)
     record_layout_info rli;
{
  print_node_brief (stderr, "type", rli->t, 0);
  print_node_brief (stderr, "\noffset", rli->offset, 0);
  print_node_brief (stderr, " bitpos", rli->bitpos, 0);

  fprintf (stderr, "\naligns: rec = %u, unpack = %u, off = %u\n",
	   rli->record_align, rli->unpacked_align,
	   rli->offset_align);
  if (rli->packed_maybe_necessary)
    fprintf (stderr, "packed may be necessary\n");

  if (rli->pending_statics)
    {
      fprintf (stderr, "pending statics:\n");
      debug_tree (rli->pending_statics);
    }
}

/* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and
   BITPOS if necessary to keep BITPOS below OFFSET_ALIGN.  */

void
normalize_rli (rli)
     record_layout_info rli;
{
  normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align);
}

/* Returns the size in bytes allocated so far.  */

tree
rli_size_unit_so_far (rli)
     record_layout_info rli;
{
  return byte_from_pos (rli->offset, rli->bitpos);
}

/* Returns the size in bits allocated so far.  */

tree
rli_size_so_far (rli)
     record_layout_info rli;
{
  return bit_from_pos (rli->offset, rli->bitpos);
}

/* FIELD is about to be added to RLI->T.  The alignment (in bits) of
   the next available location is given by KNOWN_ALIGN.  Update the
   variable alignment fields in RLI, and return the alignment to give
   the FIELD.  */

static unsigned int
update_alignment_for_field (rli, field, known_align)
     record_layout_info rli;
     tree field;
     unsigned int known_align;
{
  /* The alignment required for FIELD.  */
  unsigned int desired_align;
  /* The type of this field.  */
  tree type = TREE_TYPE (field);
  /* True if the field was explicitly aligned by the user.  */
  bool user_align;
  bool is_bitfield;

  /* Lay out the field so we know what alignment it needs.  */
  layout_decl (field, known_align);
  desired_align = DECL_ALIGN (field);
  user_align = DECL_USER_ALIGN (field);

  is_bitfield = (type != error_mark_node
		 && DECL_BIT_FIELD_TYPE (field)
		 && ! integer_zerop (TYPE_SIZE (type)));

  /* Record must have at least as much alignment as any field.
     Otherwise, the alignment of the field within the record is
     meaningless.  */
  if (is_bitfield && (* targetm.ms_bitfield_layout_p) (rli->t))
    {
      /* Here, the alignment of the underlying type of a bitfield can
	 affect the alignment of a record; even a zero-sized field
	 can do this.  The alignment should be to the alignment of
	 the type, except that for zero-size bitfields this only
	 applies if there was an immediately prior, nonzero-size
	 bitfield.  (That's the way it is, experimentally.)  */
      if (! integer_zerop (DECL_SIZE (field))
	  ? ! DECL_PACKED (field)
	  : (rli->prev_field
	     && DECL_BIT_FIELD_TYPE (rli->prev_field)
	     && ! integer_zerop (DECL_SIZE (rli->prev_field))))
	{
	  unsigned int type_align = TYPE_ALIGN (type);
	  type_align = MAX (type_align, desired_align);
	  if (maximum_field_alignment != 0)
	    type_align = MIN (type_align, maximum_field_alignment);
	  rli->record_align = MAX (rli->record_align, type_align);
	  rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
	}
    }
#ifdef PCC_BITFIELD_TYPE_MATTERS
  else if (is_bitfield && PCC_BITFIELD_TYPE_MATTERS)
    {
      /* Named bit-fields cause the entire structure to have the
	 alignment implied by their type.  */
      if (DECL_NAME (field) != 0)
	{
	  unsigned int type_align = TYPE_ALIGN (type);

#ifdef ADJUST_FIELD_ALIGN
	  if (! TYPE_USER_ALIGN (type))
	    type_align = ADJUST_FIELD_ALIGN (field, type_align);
#endif

	  if (maximum_field_alignment != 0)
	    type_align = MIN (type_align, maximum_field_alignment);
	  else if (DECL_PACKED (field))
	    type_align = MIN (type_align, BITS_PER_UNIT);

	  /* The alignment of the record is increased to the maximum
	     of the current alignment, the alignment indicated on the
	     field (i.e., the alignment specified by an __aligned__
	     attribute), and the alignment indicated by the type of
	     the field.  */
	  rli->record_align = MAX (rli->record_align, desired_align);
	  rli->record_align = MAX (rli->record_align, type_align);

	  if (warn_packed)
	    rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
	  user_align |= TYPE_USER_ALIGN (type);
	}
    }
#endif
  else
    {
      rli->record_align = MAX (rli->record_align, desired_align);
      rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
    }

  TYPE_USER_ALIGN (rli->t) |= user_align;

  return desired_align;
}

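/* For instance, with PCC_BITFIELD_TYPE_MATTERS defined (as on most
   SVR4-style targets), the named bit-field in

	struct s { char c; int i : 3; };

   forces the whole structure to the alignment of `int', even though
   only three bits of an `int' are actually used.  */
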
/* Called from place_field to handle unions.  */

static void
place_union_field (rli, field)
     record_layout_info rli;
     tree field;
{
  update_alignment_for_field (rli, field, /*known_align=*/0);

  DECL_FIELD_OFFSET (field) = size_zero_node;
  DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
  SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT);

  /* We assume the union's size will be a multiple of a byte so we don't
     bother with BITPOS.  */
  if (TREE_CODE (rli->t) == UNION_TYPE)
    rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field));
  else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
    rli->offset = fold (build (COND_EXPR, sizetype,
			       DECL_QUALIFIER (field),
			       DECL_SIZE_UNIT (field), rli->offset));
}

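/* For example, for `union u { char c; double d; }' each member is
   placed at offset zero; rli->offset accumulates the MAX of the
   member sizes (8 bytes here), and update_alignment_for_field has
   already raised the union's alignment to that of `double'.  */
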
#if defined (PCC_BITFIELD_TYPE_MATTERS) || defined (BITFIELD_NBYTES_LIMITED)

/* A bitfield of SIZE with a required access alignment of ALIGN is allocated
   at BYTE_OFFSET / BIT_OFFSET.  Return nonzero if the field would span more
   units of alignment than the underlying TYPE.  */
static int
excess_unit_span (byte_offset, bit_offset, size, align, type)
     HOST_WIDE_INT byte_offset, bit_offset, size, align;
     tree type;
{
  /* Note that the calculation of OFFSET might overflow; we calculate it so
     that we still get the right result as long as ALIGN is a power of two.  */
  unsigned HOST_WIDE_INT offset = byte_offset * BITS_PER_UNIT + bit_offset;

  offset = offset % align;
  return ((offset + size + align - 1) / align
	  > ((unsigned HOST_WIDE_INT) tree_low_cst (TYPE_SIZE (type), 1)
	     / align));
}
#endif

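/* Worked example: for a 32-bit `int' (TYPE_SIZE 32, ALIGN 32), a
   7-bit field at BYTE_OFFSET 3 / BIT_OFFSET 6 starts 30 bits into its
   alignment unit and would occupy (30 + 7 + 31) / 32 == 2 units,
   more than the 32 / 32 == 1 unit of the type itself, so the caller
   advances to the next boundary.  */
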
/* RLI contains information about the layout of a RECORD_TYPE.  FIELD
   is a FIELD_DECL to be added after those fields already present in
   T.  (FIELD is not actually added to the TYPE_FIELDS list here;
   callers that desire that behavior must manually perform that step.)  */

void
place_field (rli, field)
     record_layout_info rli;
     tree field;
{
  /* The alignment required for FIELD.  */
  unsigned int desired_align;
  /* The alignment FIELD would have if we just dropped it into the
     record as it presently stands.  */
  unsigned int known_align;
  unsigned int actual_align;
  /* The type of this field.  */
  tree type = TREE_TYPE (field);

  if (TREE_CODE (field) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    return;

  /* If FIELD is static, then treat it like a separate variable, not
     really like a structure field.  If it is a FUNCTION_DECL, it's a
     method.  In both cases, all we do is lay out the decl, and we do
     it *after* the record is laid out.  */
  if (TREE_CODE (field) == VAR_DECL)
    {
      rli->pending_statics = tree_cons (NULL_TREE, field,
					rli->pending_statics);
      return;
    }

  /* Enumerators and enum types which are local to this class need not
     be laid out.  Likewise for initialized constant fields.  */
  else if (TREE_CODE (field) != FIELD_DECL)
    return;

  /* Unions are laid out very differently than records, so split
     that code off to another function.  */
  else if (TREE_CODE (rli->t) != RECORD_TYPE)
    {
      place_union_field (rli, field);
      return;
    }

  /* Work out the known alignment so far.  Note that A & (-A) is the
     value of the least-significant bit in A that is one.  */
  if (! integer_zerop (rli->bitpos))
    known_align = (tree_low_cst (rli->bitpos, 1)
		   & - tree_low_cst (rli->bitpos, 1));
  else if (integer_zerop (rli->offset))
    known_align = BIGGEST_ALIGNMENT;
  else if (host_integerp (rli->offset, 1))
    known_align = (BITS_PER_UNIT
		   * (tree_low_cst (rli->offset, 1)
		      & - tree_low_cst (rli->offset, 1)));
  else
    known_align = rli->offset_align;
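
  /* For instance, if rli->bitpos is 24 (binary 11000), then
     24 & -24 == 8, so the next field is known to land on (at least)
     an 8-bit boundary.  */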

  desired_align = update_alignment_for_field (rli, field, known_align);

  if (warn_packed && DECL_PACKED (field))
    {
      if (known_align >= TYPE_ALIGN (type))
	{
	  if (TYPE_ALIGN (type) > desired_align)
	    {
	      if (STRICT_ALIGNMENT)
		warning_with_decl (field, "packed attribute causes inefficient alignment for `%s'");
	      else
		warning_with_decl (field, "packed attribute is unnecessary for `%s'");
	    }
	}
      else
	rli->packed_maybe_necessary = 1;
    }

  /* Does this field automatically have alignment it needs by virtue
     of the fields that precede it and the record's own alignment?  */
  if (known_align < desired_align)
    {
      /* No, we need to skip space before this field.
	 Bump the cumulative size to multiple of field alignment.  */

      if (warn_padded)
	warning_with_decl (field, "padding struct to align `%s'");

      /* If the alignment is still within offset_align, just align
	 the bit position.  */
      if (desired_align < rli->offset_align)
	rli->bitpos = round_up (rli->bitpos, desired_align);
      else
	{
	  /* First adjust OFFSET by the partial bits, then align.  */
	  rli->offset
	    = size_binop (PLUS_EXPR, rli->offset,
			  convert (sizetype,
				   size_binop (CEIL_DIV_EXPR, rli->bitpos,
					       bitsize_unit_node)));
	  rli->bitpos = bitsize_zero_node;

	  rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
	}

      if (! TREE_CONSTANT (rli->offset))
	rli->offset_align = desired_align;
    }

  /* Handle compatibility with PCC.  Note that if the record has any
     variable-sized fields, we need not worry about compatibility.  */
#ifdef PCC_BITFIELD_TYPE_MATTERS
  if (PCC_BITFIELD_TYPE_MATTERS
      && ! (* targetm.ms_bitfield_layout_p) (rli->t)
      && TREE_CODE (field) == FIELD_DECL
      && type != error_mark_node
      && DECL_BIT_FIELD (field)
      && ! DECL_PACKED (field)
      && maximum_field_alignment == 0
      && ! integer_zerop (DECL_SIZE (field))
      && host_integerp (DECL_SIZE (field), 1)
      && host_integerp (rli->offset, 1)
      && host_integerp (TYPE_SIZE (type), 1))
    {
      unsigned int type_align = TYPE_ALIGN (type);
      tree dsize = DECL_SIZE (field);
      HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
      HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
      HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);

#ifdef ADJUST_FIELD_ALIGN
      if (! TYPE_USER_ALIGN (type))
	type_align = ADJUST_FIELD_ALIGN (field, type_align);
#endif

      /* A bit field may not span more units of alignment of its type
	 than its type itself.  Advance to next boundary if necessary.  */
      if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
	rli->bitpos = round_up (rli->bitpos, type_align);

      TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
    }
#endif

#ifdef BITFIELD_NBYTES_LIMITED
  if (BITFIELD_NBYTES_LIMITED
      && ! (* targetm.ms_bitfield_layout_p) (rli->t)
      && TREE_CODE (field) == FIELD_DECL
      && type != error_mark_node
      && DECL_BIT_FIELD_TYPE (field)
      && ! DECL_PACKED (field)
      && ! integer_zerop (DECL_SIZE (field))
      && host_integerp (DECL_SIZE (field), 1)
      && host_integerp (rli->offset, 1)
      && host_integerp (TYPE_SIZE (type), 1))
    {
      unsigned int type_align = TYPE_ALIGN (type);
      tree dsize = DECL_SIZE (field);
      HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
      HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
      HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);

#ifdef ADJUST_FIELD_ALIGN
      if (! TYPE_USER_ALIGN (type))
	type_align = ADJUST_FIELD_ALIGN (field, type_align);
#endif

      if (maximum_field_alignment != 0)
	type_align = MIN (type_align, maximum_field_alignment);
      /* ??? This test is opposite the test in the containing if
	 statement, so this code is unreachable currently.  */
      else if (DECL_PACKED (field))
	type_align = MIN (type_align, BITS_PER_UNIT);

      /* A bit field may not span the unit of alignment of its type.
	 Advance to next boundary if necessary.  */
      if (excess_unit_span (offset, bit_offset, field_size, type_align, type))
	rli->bitpos = round_up (rli->bitpos, type_align);

      TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
    }
#endif

  /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details.
     A subtlety:
	When a bit field is inserted into a packed record, the whole
	size of the underlying type is used by one or more same-size
	adjacent bitfields.  (That is, if it's long:3, 32 bits is
	used in the record, and any additional adjacent long bitfields
	are packed into the same chunk of 32 bits.  However, if the
	size changes, a new field of that size is allocated.)  In an
	unpacked record, this is the same as using alignment, but not
	equivalent when packing.

     Note: for compatibility, we use the type size, not the type
     alignment, to determine alignment, since that matches the
     documentation.  */

  if ((* targetm.ms_bitfield_layout_p) (rli->t)
      && ((DECL_BIT_FIELD_TYPE (field) && ! DECL_PACKED (field))
	  || (rli->prev_field && ! DECL_PACKED (rli->prev_field))))
    {
      /* At this point, either the prior or current are bitfields,
	 (possibly both), and we're dealing with MS packing.  */
      tree prev_saved = rli->prev_field;

      /* Is the prior field a bitfield?  If so, handle "runs" of same
	 type size fields.  */
      if (rli->prev_field /* necessarily a bitfield if it exists.  */)
	{
	  /* If both are bitfields, nonzero, and the same size, this is
	     the middle of a run.  Zero declared size fields are special
	     and handled as "end of run".  (Note: it's nonzero declared
	     size, but equal type sizes!)  (Since we know that both
	     the current and previous fields are bitfields by the
	     time we check it, DECL_SIZE must be present for both.)  */
	  if (DECL_BIT_FIELD_TYPE (field)
	      && !integer_zerop (DECL_SIZE (field))
	      && !integer_zerop (DECL_SIZE (rli->prev_field))
	      && simple_cst_equal (TYPE_SIZE (type),
				   TYPE_SIZE (TREE_TYPE (rli->prev_field))))
	    {
	      /* We're in the middle of a run of equal type size fields; make
		 sure we realign if we run out of bits.  (Not decl size,
		 type size!)  */
	      int bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
	      tree type_size = TYPE_SIZE (TREE_TYPE (rli->prev_field));

	      if (rli->remaining_in_alignment < bitsize)
		{
		  /* Out of bits; bump up to next 'word'.  */
		  rli->offset = DECL_FIELD_OFFSET (rli->prev_field);
		  rli->bitpos
		    = size_binop (PLUS_EXPR, type_size,
				  DECL_FIELD_BIT_OFFSET (rli->prev_field));
		  rli->prev_field = field;
		  rli->remaining_in_alignment = TREE_INT_CST_LOW (type_size);
		}

	      rli->remaining_in_alignment -= bitsize;
	    }
	  else
	    {
	      /* End of a run: if leaving a run of bitfields of the same type
		 size, we have to "use up" the rest of the bits of the type
		 size.

		 Compute the new position as the sum of the size for the prior
		 type and where we first started working on that type.
		 Note: since the beginning of the field was aligned then
		 of course the end will be too.  No round needed.  */

	      if (!integer_zerop (DECL_SIZE (rli->prev_field)))
		{
		  tree type_size = TYPE_SIZE (TREE_TYPE (rli->prev_field));

		  rli->bitpos
		    = size_binop (PLUS_EXPR, type_size,
				  DECL_FIELD_BIT_OFFSET (rli->prev_field));
		}
	      else
		{
		  /* We "use up" size zero fields; the code below should
		     behave as if the prior field was not a bitfield.  */
		  prev_saved = NULL;
		}

	      /* Cause a new bitfield to be captured, either this time (if
		 currently a bitfield) or next time we see one.  */
	      if (!DECL_BIT_FIELD_TYPE (field)
		  || integer_zerop (DECL_SIZE (field)))
		{
		  rli->prev_field = NULL;
		}
	    }

	  normalize_rli (rli);
	}

      /* If we're starting a new run of same size type bitfields
	 (or a run of non-bitfields), set up the "first of the run"
	 fields.

	 That is, if the current field is not a bitfield, or if there
	 was a prior bitfield and the type sizes differ, or if there
	 wasn't a prior bitfield and the size of the current field is
	 nonzero.

	 Note: we must be sure to test ONLY the type size if there was
	 a prior bitfield and ONLY for the current field being zero if
	 there wasn't.  */

      if (!DECL_BIT_FIELD_TYPE (field)
	  || (prev_saved != NULL
	      ? !simple_cst_equal (TYPE_SIZE (type),
				   TYPE_SIZE (TREE_TYPE (prev_saved)))
	      : !integer_zerop (DECL_SIZE (field))))
	{
	  unsigned int type_align = 8; /* Never below 8 for compatibility */

	  /* (When not a bitfield), we could be seeing a flex array (with
	     no DECL_SIZE).  Since we won't be using remaining_in_alignment
	     until we see a bitfield (and come by here again) we just skip
	     calculating it.  */
	  if (DECL_SIZE (field) != NULL)
	    rli->remaining_in_alignment
	      = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (field)))
		- TREE_INT_CST_LOW (DECL_SIZE (field));

	  /* Now align (conventionally) for the new type.  */
	  if (!DECL_PACKED (field))
	    type_align = MAX (TYPE_ALIGN (type), type_align);

	  if (prev_saved
	      && DECL_BIT_FIELD_TYPE (prev_saved)
	      /* If the previous bit-field is zero-sized, we've already
		 accounted for its alignment needs (or ignored it, if
		 appropriate) while placing it.  */
	      && ! integer_zerop (DECL_SIZE (prev_saved)))
	    type_align = MAX (type_align,
			      TYPE_ALIGN (TREE_TYPE (prev_saved)));

	  if (maximum_field_alignment != 0)
	    type_align = MIN (type_align, maximum_field_alignment);

	  rli->bitpos = round_up (rli->bitpos, type_align);

	  /* If we really aligned, don't allow subsequent bitfields
	     to undo that.  */
	  rli->prev_field = NULL;
	}
    }

  /* Offset so far becomes the position of this field after normalizing.  */
  normalize_rli (rli);
  DECL_FIELD_OFFSET (field) = rli->offset;
  DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
  SET_DECL_OFFSET_ALIGN (field, rli->offset_align);

  /* If this field ended up more aligned than we thought it would be (we
     approximate this by seeing if its position changed), lay out the field
     again; perhaps we can use an integral mode for it now.  */
  if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
    actual_align = (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
		    & - tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1));
  else if (integer_zerop (DECL_FIELD_OFFSET (field)))
    actual_align = BIGGEST_ALIGNMENT;
  else if (host_integerp (DECL_FIELD_OFFSET (field), 1))
    actual_align = (BITS_PER_UNIT
		    * (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
		       & - tree_low_cst (DECL_FIELD_OFFSET (field), 1)));
  else
    actual_align = DECL_OFFSET_ALIGN (field);

  if (known_align != actual_align)
    layout_decl (field, actual_align);

  /* Only the MS bitfields use this.  */
  if (rli->prev_field == NULL && DECL_BIT_FIELD_TYPE (field))
    rli->prev_field = field;

  /* Now add size of this field to the size of the record.  If the size is
     not constant, treat the field as being a multiple of bytes and just
     adjust the offset, resetting the bit position.  Otherwise, apportion the
     size amongst the bit position and offset.  First handle the case of an
     unspecified size, which can happen when we have an invalid nested struct
     definition, such as struct j { struct j { int i; } }.  The error message
     is printed in finish_struct.  */
  if (DECL_SIZE (field) == 0)
    /* Do nothing.  */;
  else if (TREE_CODE (DECL_SIZE_UNIT (field)) != INTEGER_CST
	   || TREE_CONSTANT_OVERFLOW (DECL_SIZE_UNIT (field)))
    {
      rli->offset
	= size_binop (PLUS_EXPR, rli->offset,
		      convert (sizetype,
			       size_binop (CEIL_DIV_EXPR, rli->bitpos,
					   bitsize_unit_node)));
      rli->offset
	= size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
      rli->bitpos = bitsize_zero_node;
      rli->offset_align = MIN (rli->offset_align, DECL_ALIGN (field));
    }
  else
    {
      rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
      normalize_rli (rli);
    }
}

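/* Illustrative trace: for `struct { char a; char b; }', placing `a'
   leaves rli->bitpos at 8; `b' needs only byte alignment, so it is
   placed at bit offset 8 and rli->bitpos advances to 16 before the
   record size is finalized.  */
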
/* Assuming that all the fields have been laid out, this function uses
   RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type
   indicated by RLI.  */

static void
finalize_record_size (rli)
     record_layout_info rli;
{
  tree unpadded_size, unpadded_size_unit;

  /* Now we want just byte and bit offsets, so set the offset alignment
     to be a byte and then normalize.  */
  rli->offset_align = BITS_PER_UNIT;
  normalize_rli (rli);

  /* Determine the desired alignment.  */
#ifdef ROUND_TYPE_ALIGN
  TYPE_ALIGN (rli->t) = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
					  rli->record_align);
#else
  TYPE_ALIGN (rli->t) = MAX (TYPE_ALIGN (rli->t), rli->record_align);
#endif

  /* Compute the size so far.  Be sure to allow for extra bits in the
     size in bytes.  We have guaranteed above that it will be no more
     than a single byte.  */
  unpadded_size = rli_size_so_far (rli);
  unpadded_size_unit = rli_size_unit_so_far (rli);
  if (! integer_zerop (rli->bitpos))
    unpadded_size_unit
      = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node);

  /* Round the size up to be a multiple of the required alignment.  */
#ifdef ROUND_TYPE_SIZE
  TYPE_SIZE (rli->t) = ROUND_TYPE_SIZE (rli->t, unpadded_size,
					TYPE_ALIGN (rli->t));
  TYPE_SIZE_UNIT (rli->t)
    = ROUND_TYPE_SIZE_UNIT (rli->t, unpadded_size_unit,
			    TYPE_ALIGN (rli->t) / BITS_PER_UNIT);
#else
  TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
  TYPE_SIZE_UNIT (rli->t) = round_up (unpadded_size_unit,
				      TYPE_ALIGN (rli->t) / BITS_PER_UNIT);
#endif

  if (warn_padded && TREE_CONSTANT (unpadded_size)
      && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0)
    warning ("padding struct size to alignment boundary");

  if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE
      && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary
      && TREE_CONSTANT (unpadded_size))
    {
      tree unpacked_size;

#ifdef ROUND_TYPE_ALIGN
      rli->unpacked_align
	= ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align);
#else
      rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
#endif

#ifdef ROUND_TYPE_SIZE
      unpacked_size = ROUND_TYPE_SIZE (rli->t, TYPE_SIZE (rli->t),
				       rli->unpacked_align);
#else
      unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align);
#endif

      if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
	{
	  TYPE_PACKED (rli->t) = 0;

	  if (TYPE_NAME (rli->t))
	    {
	      const char *name;

	      if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE)
		name = IDENTIFIER_POINTER (TYPE_NAME (rli->t));
	      else
		name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (rli->t)));

	      if (STRICT_ALIGNMENT)
		warning ("packed attribute causes inefficient alignment for `%s'", name);
	      else
		warning ("packed attribute is unnecessary for `%s'", name);
	    }
	  else
	    {
	      if (STRICT_ALIGNMENT)
		warning ("packed attribute causes inefficient alignment");
	      else
		warning ("packed attribute is unnecessary");
	    }
	}
    }
}

/* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE).  */

void
compute_record_mode (type)
     tree type;
{
  tree field;
  enum machine_mode mode = VOIDmode;

  /* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
     However, if possible, we use a mode that fits in a register
     instead, in order to allow for better optimization down the
     line.  */
  TYPE_MODE (type) = BLKmode;

  if (! host_integerp (TYPE_SIZE (type), 1))
    return;

  /* A record which has any BLKmode members must itself be
     BLKmode; it can't go in a register.  Unless the member is
     BLKmode only because it isn't aligned.  */
  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    {
      unsigned HOST_WIDE_INT bitpos;

      if (TREE_CODE (field) != FIELD_DECL)
	continue;

      if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK
	  || (TYPE_MODE (TREE_TYPE (field)) == BLKmode
	      && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field)))
	  || ! host_integerp (bit_position (field), 1)
	  || DECL_SIZE (field) == 0
	  || ! host_integerp (DECL_SIZE (field), 1))
	return;

      bitpos = int_bit_position (field);

      /* Must be BLKmode if any field crosses a word boundary,
	 since extract_bit_field can't handle that in registers.  */
      if (bitpos / BITS_PER_WORD
	  != ((tree_low_cst (DECL_SIZE (field), 1) + bitpos - 1)
	      / BITS_PER_WORD)
	  /* But there is no problem if the field is entire words.  */
	  && tree_low_cst (DECL_SIZE (field), 1) % BITS_PER_WORD != 0)
	return;

      /* If this field is the whole struct, remember its mode so
	 that, say, we can put a double in a class into a DF
	 register instead of forcing it to live in the stack.  */
      if (simple_cst_equal (TYPE_SIZE (type), DECL_SIZE (field)))
	mode = DECL_MODE (field);

#ifdef MEMBER_TYPE_FORCES_BLK
      /* With some targets, eg. c4x, it is sub-optimal
	 to access an aligned BLKmode structure as a scalar.  */

      if (MEMBER_TYPE_FORCES_BLK (field, mode))
	return;
#endif /* MEMBER_TYPE_FORCES_BLK  */
    }

  /* If we only have one real field, use its mode.  This only applies to
     RECORD_TYPE.  This does not apply to unions.  */
  if (TREE_CODE (type) == RECORD_TYPE && mode != VOIDmode)
    TYPE_MODE (type) = mode;
  else
    TYPE_MODE (type) = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);

  /* If structure's known alignment is less than what the scalar
     mode would need, and it matters, then stick with BLKmode.  */
  if (TYPE_MODE (type) != BLKmode
      && STRICT_ALIGNMENT
      && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
	    || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (TYPE_MODE (type))))
    {
      /* If this is the only reason this type is BLKmode, then
	 don't force containing types to be BLKmode.  */
      TYPE_NO_FORCE_BLK (type) = 1;
      TYPE_MODE (type) = BLKmode;
    }
}

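/* For example, `struct { double d; }' has a single field whose size
   equals that of the whole record, so the struct gets DFmode and can
   live in a floating-point register, while on a typical 32-bit target
   `struct { int a; int b; }' falls through to mode_for_size_tree and
   gets DImode, provided MAX_FIXED_MODE_SIZE permits 64-bit scalars.  */
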
/* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid
   out.  */

static void
finalize_type_size (type)
     tree type;
{
  /* Normally, use the alignment corresponding to the mode chosen.
     However, where strict alignment is not required, avoid
     over-aligning structures, since most compilers do not do this
     alignment.  */

  if (TYPE_MODE (type) != BLKmode && TYPE_MODE (type) != VOIDmode
      && (STRICT_ALIGNMENT
	  || (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
	      && TREE_CODE (type) != QUAL_UNION_TYPE
	      && TREE_CODE (type) != ARRAY_TYPE)))
    {
      TYPE_ALIGN (type) = GET_MODE_ALIGNMENT (TYPE_MODE (type));
      TYPE_USER_ALIGN (type) = 0;
    }

  /* Do machine-dependent extra alignment.  */
#ifdef ROUND_TYPE_ALIGN
  TYPE_ALIGN (type)
    = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT);
#endif

  /* If we failed to find a simple way to calculate the unit size
     of the type, find it by division.  */
  if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0)
    /* TYPE_SIZE (type) is computed in bitsizetype.  After the division, the
       result will fit in sizetype.  We will get more efficient code using
       sizetype, so we force a conversion.  */
    TYPE_SIZE_UNIT (type)
      = convert (sizetype,
		 size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
			     bitsize_unit_node));

  if (TYPE_SIZE (type) != 0)
    {
#ifdef ROUND_TYPE_SIZE
      TYPE_SIZE (type)
	= ROUND_TYPE_SIZE (type, TYPE_SIZE (type), TYPE_ALIGN (type));
      TYPE_SIZE_UNIT (type)
	= ROUND_TYPE_SIZE_UNIT (type, TYPE_SIZE_UNIT (type),
				TYPE_ALIGN (type) / BITS_PER_UNIT);
#else
      TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
      TYPE_SIZE_UNIT (type)
	= round_up (TYPE_SIZE_UNIT (type), TYPE_ALIGN (type) / BITS_PER_UNIT);
#endif
    }

  /* Evaluate nonconstant sizes only once, either now or as soon as safe.  */
  if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    TYPE_SIZE (type) = variable_size (TYPE_SIZE (type));
  if (TYPE_SIZE_UNIT (type) != 0
      && TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)
    TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type));

  /* Also layout any other variants of the type.  */
  if (TYPE_NEXT_VARIANT (type)
      || type != TYPE_MAIN_VARIANT (type))
    {
      tree variant;
      /* Record layout info of this variant.  */
      tree size = TYPE_SIZE (type);
      tree size_unit = TYPE_SIZE_UNIT (type);
      unsigned int align = TYPE_ALIGN (type);
      unsigned int user_align = TYPE_USER_ALIGN (type);
      enum machine_mode mode = TYPE_MODE (type);

      /* Copy it into all variants.  */
      for (variant = TYPE_MAIN_VARIANT (type);
	   variant != 0;
	   variant = TYPE_NEXT_VARIANT (variant))
	{
	  TYPE_SIZE (variant) = size;
	  TYPE_SIZE_UNIT (variant) = size_unit;
	  TYPE_ALIGN (variant) = align;
	  TYPE_USER_ALIGN (variant) = user_align;
	  TYPE_MODE (variant) = mode;
	}
    }
}

/* Do all of the work required to layout the type indicated by RLI,
   once the fields have been laid out.  This function will call `free'
   for RLI, unless FREE_P is false.  Passing a value other than true
   for FREE_P is bad practice; this option only exists to support the
   G++ 3.2 ABI.  */

void
finish_record_layout (rli, free_p)
     record_layout_info rli;
     int free_p;
{
  /* Compute the final size.  */
  finalize_record_size (rli);

  /* Compute the TYPE_MODE for the record.  */
  compute_record_mode (rli->t);

  /* Perform any last tweaks to the TYPE_SIZE, etc.  */
  finalize_type_size (rli->t);

  /* Lay out any static members.  This is done now because their type
     may use the record's type.  */
  while (rli->pending_statics)
    {
      layout_decl (TREE_VALUE (rli->pending_statics), 0);
      rli->pending_statics = TREE_CHAIN (rli->pending_statics);
    }

  /* Clean up.  */
  if (free_p)
    free (rli);
}

/* Finish processing a builtin RECORD_TYPE type TYPE.  Its name is
   NAME, its fields are chained in reverse on FIELDS.

   If ALIGN_TYPE is non-null, it is given the same alignment as
   ALIGN_TYPE.  */

void
finish_builtin_struct (type, name, fields, align_type)
     tree type;
     const char *name;
     tree fields;
     tree align_type;
{
  tree tail, next;

  for (tail = NULL_TREE; fields; tail = fields, fields = next)
    {
      DECL_FIELD_CONTEXT (fields) = type;
      next = TREE_CHAIN (fields);
      TREE_CHAIN (fields) = tail;
    }
  TYPE_FIELDS (type) = tail;

  if (align_type)
    {
      TYPE_ALIGN (type) = TYPE_ALIGN (align_type);
      TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (align_type);
    }

  layout_type (type);
#if 0 /* not yet, should get fixed properly later */
  TYPE_NAME (type) = make_type_decl (get_identifier (name), type);
#else
  TYPE_NAME (type) = build_decl (TYPE_DECL, get_identifier (name), type);
#endif
  TYPE_STUB_DECL (type) = TYPE_NAME (type);
  layout_decl (TYPE_NAME (type), 0);
}

/* Calculate the mode, size, and alignment for TYPE.
   For an array type, calculate the element separation as well.
   Record TYPE on the chain of permanent or temporary types
   so that dbxout will find out about it.

   TYPE_SIZE of a type is nonzero if the type has been laid out already.
   layout_type does nothing on such a type.

   If the type is incomplete, its TYPE_SIZE remains zero.  */

void
layout_type (type)
     tree type;
{
  if (type == 0)
    abort ();

  /* Do nothing if type has been laid out before.  */
  if (TYPE_SIZE (type))
    return;

  switch (TREE_CODE (type))
    {
    case LANG_TYPE:
      /* This kind of type is the responsibility
	 of the language-specific code.  */
      abort ();

    case BOOLEAN_TYPE:  /* Used for Java, Pascal, and Chill.  */
      if (TYPE_PRECISION (type) == 0)
	TYPE_PRECISION (type) = 1; /* default to one byte/boolean.  */

      /* ... fall through ...  */

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case CHAR_TYPE:
      if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
	  && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
	TREE_UNSIGNED (type) = 1;

      TYPE_MODE (type) = smallest_mode_for_size (TYPE_PRECISION (type),
						 MODE_INT);
      TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
      TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
      break;

    case REAL_TYPE:
      TYPE_MODE (type) = mode_for_size (TYPE_PRECISION (type), MODE_FLOAT, 0);
      TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
      TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
      break;

    case COMPLEX_TYPE:
      TREE_UNSIGNED (type) = TREE_UNSIGNED (TREE_TYPE (type));
      TYPE_MODE (type)
	= mode_for_size (2 * TYPE_PRECISION (TREE_TYPE (type)),
			 (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
			  ? MODE_COMPLEX_INT : MODE_COMPLEX_FLOAT),
			 0);
      TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
      TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
      break;

    case VECTOR_TYPE:
      {
	tree subtype;

	subtype = TREE_TYPE (type);
	TREE_UNSIGNED (type) = TREE_UNSIGNED (subtype);
	TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
	TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
      }
      break;

    case VOID_TYPE:
      /* This is an incomplete type and so doesn't have a size.  */
      TYPE_ALIGN (type) = 1;
      TYPE_USER_ALIGN (type) = 0;
      TYPE_MODE (type) = VOIDmode;
      break;

    case OFFSET_TYPE:
      TYPE_SIZE (type) = bitsize_int (POINTER_SIZE);
      TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE / BITS_PER_UNIT);
      /* A pointer might be MODE_PARTIAL_INT,
	 but ptrdiff_t must be integral.  */
      TYPE_MODE (type) = mode_for_size (POINTER_SIZE, MODE_INT, 0);
      break;

    case FUNCTION_TYPE:
    case METHOD_TYPE:
      TYPE_MODE (type) = mode_for_size (2 * POINTER_SIZE, MODE_INT, 0);
      TYPE_SIZE (type) = bitsize_int (2 * POINTER_SIZE);
      TYPE_SIZE_UNIT (type) = size_int ((2 * POINTER_SIZE) / BITS_PER_UNIT);
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
	enum machine_mode mode = ((TREE_CODE (type) == REFERENCE_TYPE
				   && reference_types_internal)
				  ? Pmode : TYPE_MODE (type));

	int nbits = GET_MODE_BITSIZE (mode);

	TYPE_SIZE (type) = bitsize_int (nbits);
	TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (mode));
	TREE_UNSIGNED (type) = 1;
	TYPE_PRECISION (type) = nbits;
      }
      break;

    case ARRAY_TYPE:
      {
	tree index = TYPE_DOMAIN (type);
	tree element = TREE_TYPE (type);

	build_pointer_type (element);

	/* We need to know both bounds in order to compute the size.  */
	if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index)
	    && TYPE_SIZE (element))
	  {
	    tree ub = TYPE_MAX_VALUE (index);
	    tree lb = TYPE_MIN_VALUE (index);
	    tree length;
	    tree element_size;

	    /* The initial subtraction should happen in the original type so
	       that (possible) negative values are handled appropriately.  */
	    length = size_binop (PLUS_EXPR, size_one_node,
				 convert (sizetype,
					  fold (build (MINUS_EXPR,
						       TREE_TYPE (lb),
						       ub, lb))));

	    /* Special handling for arrays of bits (for Chill).  */
	    element_size = TYPE_SIZE (element);
	    if (TYPE_PACKED (type) && INTEGRAL_TYPE_P (element)
		&& (integer_zerop (TYPE_MAX_VALUE (element))
		    || integer_onep (TYPE_MAX_VALUE (element)))
		&& host_integerp (TYPE_MIN_VALUE (element), 1))
	      {
		HOST_WIDE_INT maxvalue
		  = tree_low_cst (TYPE_MAX_VALUE (element), 1);
		HOST_WIDE_INT minvalue
		  = tree_low_cst (TYPE_MIN_VALUE (element), 1);

		if (maxvalue - minvalue == 1
		    && (maxvalue == 1 || maxvalue == 0))
		  element_size = integer_one_node;
	      }

	    /* If neither bound is a constant and sizetype is signed, make
	       sure the size is never negative.  We should really do this
	       if *either* bound is non-constant, but this is the best
	       compromise between C and Ada.  */
	    if (! TREE_UNSIGNED (sizetype)
		&& TREE_CODE (TYPE_MIN_VALUE (index)) != INTEGER_CST
		&& TREE_CODE (TYPE_MAX_VALUE (index)) != INTEGER_CST)
	      length = size_binop (MAX_EXPR, length, size_zero_node);

	    TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
					   convert (bitsizetype, length));
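
	    /* For example, for `int a[10]' the domain is [0, 9], so
	       LENGTH is (9 - 0) + 1 == 10 and TYPE_SIZE is 10 * 32
	       == 320 bits, assuming a 32-bit `int'.  */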
            /* If we know the size of the element, calculate the total
               size directly, rather than do some division thing below.
               This optimization helps Fortran assumed-size arrays
               (where the size of the array is determined at runtime)
               substantially.
               Note that we can't do this in the case where the size of
               the elements is one bit since TYPE_SIZE_UNIT cannot be
               set correctly in that case.  */
            if (TYPE_SIZE_UNIT (element) != 0 && ! integer_onep (element_size))
              TYPE_SIZE_UNIT (type)
                = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length);
          }
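        /* Illustrative example (editorial): for a 4-byte element
           (TYPE_SIZE == 32 bits, TYPE_SIZE_UNIT == 4) and length == 10,
           the code above sets TYPE_SIZE to 320 bits and TYPE_SIZE_UNIT to
           40 bytes directly, so the generic finalization code does not
           have to derive the byte size by dividing the bit size by
           BITS_PER_UNIT.  */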
        /* Now round the alignment and size,
           using machine-dependent criteria if any.  */

#ifdef ROUND_TYPE_ALIGN
        TYPE_ALIGN (type)
          = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (element), BITS_PER_UNIT);
#else
        TYPE_ALIGN (type) = MAX (TYPE_ALIGN (element), BITS_PER_UNIT);
#endif
        TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element);

#ifdef ROUND_TYPE_SIZE
        if (TYPE_SIZE (type) != 0)
          {
            tree tmp
              = ROUND_TYPE_SIZE (type, TYPE_SIZE (type), TYPE_ALIGN (type));

            /* If the rounding changed the size of the type, remove any
               pre-calculated TYPE_SIZE_UNIT.  */
            if (simple_cst_equal (TYPE_SIZE (type), tmp) != 1)
              TYPE_SIZE_UNIT (type) = NULL;

            TYPE_SIZE (type) = tmp;
          }
#endif
        TYPE_MODE (type) = BLKmode;
        if (TYPE_SIZE (type) != 0
#ifdef MEMBER_TYPE_FORCES_BLK
            && ! MEMBER_TYPE_FORCES_BLK (type, VOIDmode)
#endif
            /* BLKmode elements force BLKmode aggregate;
               else extract/store fields may lose.  */
            && (TYPE_MODE (TREE_TYPE (type)) != BLKmode
                || TYPE_NO_FORCE_BLK (TREE_TYPE (type))))
          {
            /* One-element arrays get the component type's mode.  */
            if (simple_cst_equal (TYPE_SIZE (type),
                                  TYPE_SIZE (TREE_TYPE (type))))
              TYPE_MODE (type) = TYPE_MODE (TREE_TYPE (type));
            else
              TYPE_MODE (type)
                = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);

            /* If the chosen mode needs more alignment than the array
               actually has on a strict-alignment target, fall back to
               BLKmode.  */
            if (TYPE_MODE (type) != BLKmode
                && STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT
                && TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
              {
                TYPE_NO_FORCE_BLK (type) = 1;
                TYPE_MODE (type) = BLKmode;
              }
          }
        break;
      }
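      /* Illustrative example (editorial): for `int a[1]' TYPE_SIZE equals
         the element's TYPE_SIZE, so the array inherits the element's mode
         (e.g. SImode); `int a[2]' instead asks mode_for_size_tree for a
         64-bit integer mode, yielding DImode where MAX_FIXED_MODE_SIZE
         permits it and BLKmode otherwise.  */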
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree field;
        record_layout_info rli;

        /* Initialize the layout information.  */
        rli = start_record_layout (type);

        /* If this is a QUAL_UNION_TYPE, we want to process the fields
           in the reverse order in building the COND_EXPR that denotes
           its size.  We reverse them again later.  */
        if (TREE_CODE (type) == QUAL_UNION_TYPE)
          TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));

        /* Place all the fields.  */
        for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
          place_field (rli, field);

        if (TREE_CODE (type) == QUAL_UNION_TYPE)
          TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));

        if (lang_adjust_rli)
          (*lang_adjust_rli) (rli);

        /* Finish laying out the record.  */
        finish_record_layout (rli, /*free_p=*/true);
      }
      break;
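      /* Illustrative sketch of the pipeline above (editorial): for
         `struct { char c; int i; }' on a target where int wants 32-bit
         alignment, start_record_layout begins at bit 0, place_field puts
         `c' at offset 0 and pads so that `i' starts at bit 32, and
         finish_record_layout rounds the total size up to 64 bits
         (8 bytes).  */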
    case SET_TYPE:  /* Used by Chill and Pascal.  */
      if (TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST
          || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
        abort ();
      else
        {
#ifndef SET_WORD_SIZE
#define SET_WORD_SIZE BITS_PER_WORD
#endif
          unsigned int alignment
            = set_alignment ? set_alignment : SET_WORD_SIZE;
          int size_in_bits
            = (TREE_INT_CST_LOW (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
               - TREE_INT_CST_LOW (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) + 1);
          int rounded_size
            = ((size_in_bits + alignment - 1) / alignment) * alignment;

          if (rounded_size > (int) alignment)
            TYPE_MODE (type) = BLKmode;
          else
            TYPE_MODE (type) = mode_for_size (alignment, MODE_INT, 1);

          TYPE_SIZE (type) = bitsize_int (rounded_size);
          TYPE_SIZE_UNIT (type) = size_int (rounded_size / BITS_PER_UNIT);
          TYPE_ALIGN (type) = alignment;
          TYPE_USER_ALIGN (type) = 0;
          TYPE_PRECISION (type) = size_in_bits;
        }
      break;
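      /* Worked example (editorial): a set over the domain [0, 59] with
         SET_WORD_SIZE == 32 has size_in_bits == 60 and rounded_size ==
         ((60 + 31) / 32) * 32 == 64; since 64 > 32 the set is laid out
         in BLKmode, 8 bytes in all.  A set over [0, 31] fits in one word
         and gets the 32-bit integer mode instead.  */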
    case FILE_TYPE:
      /* The size may vary in different languages, so the language front end
         should fill in the size.  */
      TYPE_ALIGN (type) = BIGGEST_ALIGNMENT;
      TYPE_USER_ALIGN (type) = 0;
      TYPE_MODE (type) = BLKmode;
      break;
    default:
      abort ();
    }

  /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE.  For
     records and unions, finish_record_layout already called this
     function.  */
  if (TREE_CODE (type) != RECORD_TYPE
      && TREE_CODE (type) != UNION_TYPE
      && TREE_CODE (type) != QUAL_UNION_TYPE)
    finalize_type_size (type);

  /* If this type is created before sizetype has been permanently set,
     record it so set_sizetype can fix it up.  */
  if (! sizetype_set)
    early_type_list = tree_cons (NULL_TREE, type, early_type_list);

  /* If an alias set has been set for this aggregate while it was
     incomplete, force it into alias set 0.  This is too conservative,
     but we cannot call record_component_aliases here because some
     front ends still change their aggregates after layout_type.  */
  if (AGGREGATE_TYPE_P (type) && TYPE_ALIAS_SET_KNOWN_P (type))
    TYPE_ALIAS_SET (type) = 0;
}
/* Create and return a type for signed integers of PRECISION bits.  */

tree
make_signed_type (precision)
     int precision;
{
  tree type = make_node (INTEGER_TYPE);

  TYPE_PRECISION (type) = precision;

  fixup_signed_type (type);
  return type;
}
/* Create and return a type for unsigned integers of PRECISION bits.  */

tree
make_unsigned_type (precision)
     int precision;
{
  tree type = make_node (INTEGER_TYPE);

  TYPE_PRECISION (type) = precision;

  fixup_unsigned_type (type);
  return type;
}
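/* Usage example (editorial): make_signed_type (16) returns a laid-out
   INTEGER_TYPE with TYPE_PRECISION 16 and range [-32768, 32767], while
   make_unsigned_type (16) gives [0, 65535]; the fixup_* routines below
   compute those extreme values and then call layout_type.  */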
/* Initialize sizetype and bitsizetype to a reasonable and temporary
   value to enable integer types to be created.  */

void
initialize_sizetypes ()
{
  tree t = make_node (INTEGER_TYPE);

  /* Set this so we do something reasonable for the build_int_2 calls
     below.  */
  integer_type_node = t;

  TYPE_MODE (t) = SImode;
  TYPE_ALIGN (t) = GET_MODE_ALIGNMENT (SImode);
  TYPE_USER_ALIGN (t) = 0;
  TYPE_SIZE (t) = build_int_2 (GET_MODE_BITSIZE (SImode), 0);
  TYPE_SIZE_UNIT (t) = build_int_2 (GET_MODE_SIZE (SImode), 0);
  TREE_UNSIGNED (t) = 1;
  TYPE_PRECISION (t) = GET_MODE_BITSIZE (SImode);
  TYPE_MIN_VALUE (t) = build_int_2 (0, 0);
  TYPE_IS_SIZETYPE (t) = 1;

  /* 1000 avoids problems with possible overflow and is certainly
     larger than any size value we'd want to be storing.  */
  TYPE_MAX_VALUE (t) = build_int_2 (1000, 0);

  /* These two must be different nodes because of the caching done in
     size_int_wide.  */
  sizetype = t;
  bitsizetype = copy_node (t);
  integer_type_node = 0;
}
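/* Editorial elaboration of the comment above, under the assumption that
   size_int_wide keeps a per-type cache of INTEGER_CST nodes: if sizetype
   and bitsizetype were one node, byte-size and bit-size constants would
   share a cache entry and could come back carrying the wrong type once
   set_sizetype differentiates the two.  */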
/* Set sizetype to TYPE, and initialize *sizetype accordingly.
   Also update the type of any standard type's sizes made so far.  */

void
set_sizetype (type)
     tree type;
{
  int oprecision = TYPE_PRECISION (type);
  /* The *bitsizetype types use a precision that avoids overflows when
     calculating signed sizes / offsets in bits.  However, when
     cross-compiling from a 32 bit to a 64 bit host, we are limited to 64 bit
     precision.  */
  int precision = MIN (oprecision + BITS_PER_UNIT_LOG + 1,
                       2 * HOST_BITS_PER_WIDE_INT);
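  /* Worked example (editorial): for a 32-bit sizetype, oprecision == 32
     and BITS_PER_UNIT_LOG == 3, so precision == MIN (36, 2 * 64) == 36 on
     a 64-bit host: enough bits for any byte size scaled to bits, plus a
     sign bit.  */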
  unsigned int i;
  tree t;
  if (sizetype_set)
    abort ();

  /* Make copies of nodes since we'll be setting TYPE_IS_SIZETYPE.  */
  sizetype = copy_node (type);
  TYPE_DOMAIN (sizetype) = type;
  TYPE_IS_SIZETYPE (sizetype) = 1;
  bitsizetype = make_node (INTEGER_TYPE);
  TYPE_NAME (bitsizetype) = TYPE_NAME (type);
  TYPE_PRECISION (bitsizetype) = precision;
  TYPE_IS_SIZETYPE (bitsizetype) = 1;

  if (TREE_UNSIGNED (type))
    fixup_unsigned_type (bitsizetype);
  else
    fixup_signed_type (bitsizetype);

  layout_type (bitsizetype);

  if (TREE_UNSIGNED (type))
    {
      usizetype = sizetype;
      ubitsizetype = bitsizetype;
      ssizetype = copy_node (make_signed_type (oprecision));
      sbitsizetype = copy_node (make_signed_type (precision));
    }
  else
    {
      ssizetype = sizetype;
      sbitsizetype = bitsizetype;
      usizetype = copy_node (make_unsigned_type (oprecision));
      ubitsizetype = copy_node (make_unsigned_type (precision));
    }

  TYPE_NAME (bitsizetype) = get_identifier ("bit_size_type");
  /* Show that each of these types is a sizetype, is a main variant,
     and has no pointers to it.  */
  for (i = 0; i < ARRAY_SIZE (sizetype_tab); i++)
    {
      TYPE_IS_SIZETYPE (sizetype_tab[i]) = 1;
      TYPE_MAIN_VARIANT (sizetype_tab[i]) = sizetype_tab[i];
      TYPE_NEXT_VARIANT (sizetype_tab[i]) = 0;
      TYPE_POINTER_TO (sizetype_tab[i]) = 0;
      TYPE_REFERENCE_TO (sizetype_tab[i]) = 0;
    }
  /* Go down each of the types we already made and set the proper type
     for the sizes in them.  */
  for (t = early_type_list; t != 0; t = TREE_CHAIN (t))
    {
      if (TREE_CODE (TREE_VALUE (t)) != INTEGER_TYPE)
        abort ();

      TREE_TYPE (TYPE_SIZE (TREE_VALUE (t))) = bitsizetype;
      TREE_TYPE (TYPE_SIZE_UNIT (TREE_VALUE (t))) = sizetype;
    }

  early_type_list = 0;
  sizetype_set = 1;
}
/* Set the extreme values of TYPE based on its precision in bits,
   then lay it out.  Used when make_signed_type won't do
   because the tree code is not INTEGER_TYPE.
   E.g. for Pascal, when the -fsigned-char option is given.  */

void
fixup_signed_type (type)
     tree type;
{
  int precision = TYPE_PRECISION (type);

  /* We cannot properly represent constants wider than
     2 * HOST_BITS_PER_WIDE_INT bits, but we still need the types,
     as they are used by the i386 vector extensions and friends.  */
  if (precision > HOST_BITS_PER_WIDE_INT * 2)
    precision = HOST_BITS_PER_WIDE_INT * 2;
  TYPE_MIN_VALUE (type)
    = build_int_2 ((precision - HOST_BITS_PER_WIDE_INT > 0
                    ? 0 : (HOST_WIDE_INT) (-1) << (precision - 1)),
                   (((HOST_WIDE_INT) (-1)
                     << (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
                         ? precision - HOST_BITS_PER_WIDE_INT - 1
                         : 0))));
  TYPE_MAX_VALUE (type)
    = build_int_2 ((precision - HOST_BITS_PER_WIDE_INT > 0
                    ? -1 : ((HOST_WIDE_INT) 1 << (precision - 1)) - 1),
                   (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
                    ? (((HOST_WIDE_INT) 1
                        << (precision - HOST_BITS_PER_WIDE_INT - 1))) - 1
                    : 0));
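  /* Worked example (editorial): build_int_2 takes the low and high
     HOST_WIDE_INT halves of the constant.  With precision == 8 and a
     64-bit HOST_WIDE_INT, the minimum becomes
     build_int_2 ((HOST_WIDE_INT) (-1) << 7, -1), i.e. -128 sign-extended,
     and the maximum build_int_2 ((1 << 7) - 1, 0), i.e. 127.  */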
  TREE_TYPE (TYPE_MIN_VALUE (type)) = type;
  TREE_TYPE (TYPE_MAX_VALUE (type)) = type;

  /* Lay out the type: set its alignment, size, etc.  */
  layout_type (type);
}
/* Set the extreme values of TYPE based on its precision in bits,
   then lay it out.  This is used both in `make_unsigned_type'
   and for enumeral types.  */

void
fixup_unsigned_type (type)
     tree type;
{
  int precision = TYPE_PRECISION (type);

  /* We cannot properly represent constants wider than
     2 * HOST_BITS_PER_WIDE_INT bits, but we still need the types,
     as they are used by the i386 vector extensions and friends.  */
  if (precision > HOST_BITS_PER_WIDE_INT * 2)
    precision = HOST_BITS_PER_WIDE_INT * 2;
  TYPE_MIN_VALUE (type) = build_int_2 (0, 0);
  TYPE_MAX_VALUE (type)
    = build_int_2 (precision - HOST_BITS_PER_WIDE_INT >= 0
                   ? -1 : ((HOST_WIDE_INT) 1 << precision) - 1,
                   precision - HOST_BITS_PER_WIDE_INT > 0
                   ? ((unsigned HOST_WIDE_INT) ~0
                      >> (HOST_BITS_PER_WIDE_INT
                          - (precision - HOST_BITS_PER_WIDE_INT)))
                   : 0);
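  /* Worked example (editorial): with precision == 8 the maximum is
     build_int_2 ((1 << 8) - 1, 0), i.e. 255; with precision equal to
     HOST_BITS_PER_WIDE_INT the low half is all ones and the high half
     is 0.  */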
  TREE_TYPE (TYPE_MIN_VALUE (type)) = type;
  TREE_TYPE (TYPE_MAX_VALUE (type)) = type;

  /* Lay out the type: set its alignment, size, etc.  */
  layout_type (type);
}
/* Find the best machine mode to use when referencing a bit field of length
   BITSIZE bits starting at BITPOS.

   The underlying object is known to be aligned to a boundary of ALIGN bits.
   If LARGEST_MODE is not VOIDmode, it means that we should not use a mode
   larger than LARGEST_MODE (usually SImode).

   If no mode meets all these conditions, we return VOIDmode.  Otherwise, if
   VOLATILEP is true or SLOW_BYTE_ACCESS is false, we return the smallest
   mode meeting these conditions.

   Otherwise (VOLATILEP is false and SLOW_BYTE_ACCESS is true), we return
   the largest such mode, but no wider than a word (BITS_PER_WORD bits).  */
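/* Illustrative example (editorial): for bitsize == 3 at bitpos == 5 in an
   object aligned to 32 bits, the narrowest-mode loop below finds QImode,
   since 5 % 8 + 3 <= 8.  If SLOW_BYTE_ACCESS is set and the field is not
   volatile, the widening loop then keeps the largest mode that still
   covers bits 5..7 in a single unit -- SImode on a 32-bit-word target.  */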
enum machine_mode
get_best_mode (bitsize, bitpos, align, largest_mode, volatilep)
     int bitsize, bitpos;
     unsigned int align;
     enum machine_mode largest_mode;
     int volatilep;
{
  enum machine_mode mode;
  unsigned int unit = 0;

  /* Find the narrowest integer mode that contains the bit field.  */
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      unit = GET_MODE_BITSIZE (mode);
      if ((bitpos % unit) + bitsize <= unit)
        break;
    }
  if (mode == VOIDmode
      /* It is tempting to omit the following line
         if STRICT_ALIGNMENT is true.
         But that is incorrect, since if the bitfield uses part of 3 bytes
         and we use a 4-byte mode, we could get a spurious segv
         if the extra 4th byte is past the end of memory.
         (Though at least one Unix compiler ignores this problem:
         that on the Sequent 386 machine.)  */
      || MIN (unit, BIGGEST_ALIGNMENT) > align
      || (largest_mode != VOIDmode && unit > GET_MODE_BITSIZE (largest_mode)))
    return VOIDmode;
  if (SLOW_BYTE_ACCESS && ! volatilep)
    {
      enum machine_mode wide_mode = VOIDmode, tmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); tmode != VOIDmode;
           tmode = GET_MODE_WIDER_MODE (tmode))
        {
          unit = GET_MODE_BITSIZE (tmode);
          if (bitpos / unit == (bitpos + bitsize - 1) / unit
              && unit <= BITS_PER_WORD
              && unit <= MIN (align, BIGGEST_ALIGNMENT)
              && (largest_mode == VOIDmode
                  || unit <= GET_MODE_BITSIZE (largest_mode)))
            wide_mode = tmode;
        }

      if (wide_mode != VOIDmode)
        return wide_mode;
    }

  return mode;
}
#include "gt-stor-layout.h"