/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2003, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software; you can redistribute it and/or modify it under    *
 * terms of the GNU General Public License as published by the Free Soft-   *
 * ware Foundation; either version 2, or (at your option) any later ver-    *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY   *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for more details.  You should have received a copy of the GNU General    *
 * Public License distributed with GNAT; see file COPYING.  If not, write   *
 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston,    *
 * MA 02111-1307, USA.                                                      *
 *                                                                          *
 * GNAT was originally developed by the GNAT team at New York University.   *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/
#include "coretypes.h"

#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif

#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD  BITS_PER_WORD
#endif
/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

/* This listhead is used to record any global objects that need elaboration.
   TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
   initial value to assign.  */
static GTY(()) tree pending_elaborations;
/* This stack allows us to momentarily switch to generating elaboration
   lists for an inner context.  */
struct e_stack GTY(())
{
  struct e_stack *next;
  tree elab_list;
};

static GTY(()) struct e_stack *elist_stack;
/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];
/* For each binding contour we allocate a binding_level structure which records
   the entities defined or declared in that contour.  Contours include:

        the global one
        one for each subprogram definition
        one for each compound statement (declare block)

   Binding contours are used to create GCC tree BLOCK nodes.  */

struct binding_level GTY(())
{
  /* A chain of ..._DECL nodes for all variables, constants, functions,
     parameters and type declarations.  These ..._DECL nodes are chained
     through the TREE_CHAIN field.  Note that these ..._DECL nodes are stored
     in the reverse of the order supplied to be compatible with the
     back-end.  */
  tree names;

  /* For each level (except the global one), a chain of BLOCK nodes for all
     the levels that were entered and exited one level down from this one.  */
  tree blocks;

  /* The BLOCK node for this level, if one has been preallocated.
     If 0, the BLOCK is allocated (if needed) when the level is popped.  */
  tree this_block;

  /* The binding level containing this one (the enclosing binding level).  */
  struct binding_level *level_chain;
};
/* The binding level currently in effect.  */
static GTY(()) struct binding_level *current_binding_level;

/* A chain of binding_level structures awaiting reuse.  */
static GTY((deletable (""))) struct binding_level *free_binding_level;

/* The outermost binding level.  This binding level is created when the
   compiler is started and it will exist through the entire compilation.  */
static struct binding_level *global_binding_level;

/* Binding level structures are initialized by copying this one.  */
static struct binding_level clear_binding_level
  = {NULL, NULL, NULL, NULL};
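
/* (Illustrative note: given the member order declared in struct binding_level
   above, the four NULLs initialize, in order, the names, blocks, this_block
   and level_chain fields.)  */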
struct language_function GTY(())
{
  int unused;
};
static tree merge_sizes                 PARAMS ((tree, tree, tree, int, int));
static tree compute_related_constant    PARAMS ((tree, tree));
static tree split_plus                  PARAMS ((tree, tree *));
static int value_zerop                  PARAMS ((tree));
static tree float_type_for_size         PARAMS ((int, enum machine_mode));
static tree convert_to_fat_pointer      PARAMS ((tree, tree));
static tree convert_to_thin_pointer     PARAMS ((tree, tree));
static tree make_descriptor_field       PARAMS ((const char *, tree, tree,
                                                 tree));
/* Initialize the association of GNAT nodes to GCC trees.  */

  associate_gnat_to_gnu = (tree *) ggc_alloc (max_gnat_nodes * sizeof (tree));

  for (gnat_node = 0; gnat_node < max_gnat_nodes; gnat_node++)
    associate_gnat_to_gnu[gnat_node] = NULL_TREE;

  pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
/* GNAT_ENTITY is a GNAT tree node for an entity.  GNU_DECL is the GCC tree
   which is to be associated with GNAT_ENTITY.  Such GCC tree node is always
   a ..._DECL node.  If NO_CHECK is nonzero, the latter check is suppressed.

   If GNU_DECL is zero, a previous association is to be reset.  */

save_gnu_tree (gnat_entity, gnu_decl, no_check)
     Entity_Id gnat_entity;

      && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
          || (! no_check && ! DECL_P (gnu_decl))))

  associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
/* GNAT_ENTITY is a GNAT tree node for a defining identifier.
   Return the ..._DECL node that was associated with it.  If there is no tree
   node associated with GNAT_ENTITY, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

get_gnu_tree (gnat_entity)
     Entity_Id gnat_entity;

  if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])

  return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

present_gnu_tree (gnat_entity)
     Entity_Id gnat_entity;

  return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
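
/* The three routines above are the whole interface to the association
   table.  A typical caller, sketched here for illustration only, checks for
   an existing association before creating one:

     if (present_gnu_tree (gnat_entity))
       gnu_decl = get_gnu_tree (gnat_entity);
     else
       save_gnu_tree (gnat_entity, gnu_decl, 0);

   Passing a nonzero NO_CHECK allows a non-..._DECL node to be stored.  */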
/* Return non-zero if we are currently in the global binding level.  */

  return (force_global != 0 || current_binding_level == global_binding_level);
/* Return the list of declarations in the current level.  Note that this list
   is in reverse order (it has to be so for back-end compatibility).  */

  return current_binding_level->names;
/* Nonzero if the current level needs to have a BLOCK made.  */

  return (current_binding_level->names != 0);
/* Enter a new binding level.  The input parameter is ignored, but has to be
   specified for back-end compatibility.  */

     int ignore ATTRIBUTE_UNUSED;

  struct binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->level_chain;
    }
  else
    newlevel
      = (struct binding_level *) ggc_alloc (sizeof (struct binding_level));

  *newlevel = clear_binding_level;

  /* Add this level to the front of the chain (stack) of levels that are
     active.  */
  newlevel->level_chain = current_binding_level;
  current_binding_level = newlevel;
/* Exit a binding level.
   Pop the level off, and restore the state of the identifier-decl mappings
   that were in effect when this level was entered.

   If KEEP is nonzero, this level had explicit declarations, so create a
   "block" (a BLOCK node) for the level to record its declarations and
   subblocks for symbol table output.

   If FUNCTIONBODY is nonzero, this level is the body of a function,
   so create a block as if KEEP were set and also clear out all
   label names.

   If REVERSE is nonzero, reverse the order of decls before putting
   them into the BLOCK.  */
poplevel (keep, reverse, functionbody)

  /* Points to a GCC BLOCK tree node.  This is the BLOCK node constructed for
     the binding level that we are about to exit and which is returned by this
     routine.  */
  tree block = NULL_TREE;
  tree subblock_chain = current_binding_level->blocks;
  int block_previously_created;

  /* Reverse the list of XXXX_DECL nodes if desired.  Note that the ..._DECL
     nodes chained through the `names' field of current_binding_level are in
     reverse order except for PARM_DECL nodes, which are explicitly stored in
     the right order.  */
  current_binding_level->names
    = decl_chain = (reverse) ? nreverse (current_binding_level->names)
                             : current_binding_level->names;
  /* Output any nested inline functions within this block which must be
     compiled because their address is needed.  */
  for (decl_node = decl_chain; decl_node; decl_node = TREE_CHAIN (decl_node))
    if (TREE_CODE (decl_node) == FUNCTION_DECL
        && ! TREE_ASM_WRITTEN (decl_node) && TREE_ADDRESSABLE (decl_node)
        && DECL_INITIAL (decl_node) != 0)
      {
        push_function_context ();
        output_inline_function (decl_node);
        pop_function_context ();
      }
  block_previously_created = (current_binding_level->this_block != 0);
  if (block_previously_created)
    block = current_binding_level->this_block;
  else if (keep || functionbody)
    block = make_node (BLOCK);

      BLOCK_VARS (block) = keep ? decl_chain : 0;
      BLOCK_SUBBLOCKS (block) = subblock_chain;

  /* Record the BLOCK node just built as the subblock of its enclosing
     scope.  */
  for (subblock_node = subblock_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    BLOCK_SUPERCONTEXT (subblock_node) = block;
  /* Clear out the meanings of the local variables of this level.  */

  for (subblock_node = decl_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    if (DECL_NAME (subblock_node) != 0)
      /* If the identifier was used or addressed via a local extern decl,
         don't forget that fact.  */
      if (DECL_EXTERNAL (subblock_node))
        {
          if (TREE_USED (subblock_node))
            TREE_USED (DECL_NAME (subblock_node)) = 1;
          if (TREE_ADDRESSABLE (subblock_node))
            TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node)) = 1;
        }
  {
    /* Pop the current level, and free the structure for reuse.  */
    struct binding_level *level = current_binding_level;
    current_binding_level = current_binding_level->level_chain;

    level->level_chain = free_binding_level;
    free_binding_level = level;
  }
  /* This is the top level block of a function.  The ..._DECL chain stored
     in BLOCK_VARS are the function's parameters (PARM_DECL nodes).  Don't
     leave them in the BLOCK because they are found in the FUNCTION_DECL
     instead.  */
      DECL_INITIAL (current_function_decl) = block;
      BLOCK_VARS (block) = 0;

      if (!block_previously_created)
        current_binding_level->blocks
          = chainon (current_binding_level->blocks, block);

  /* If we did not make a block for the level just exited, any blocks made for
     inner levels (since they cannot be recorded as subblocks in that level)
     must be carried forward so they will later become subblocks of something
     else.  */
  else if (subblock_chain)
    current_binding_level->blocks
      = chainon (current_binding_level->blocks, subblock_chain);

  TREE_USED (block) = 1;
/* Insert BLOCK at the end of the list of subblocks of the
   current binding level.  This is used when a BIND_EXPR is expanded,
   to handle the BLOCK node inside the BIND_EXPR.  */

  TREE_USED (block) = 1;
  current_binding_level->blocks
    = chainon (current_binding_level->blocks, block);
/* Set the BLOCK node for the innermost scope
   (the one we are currently in).  */

  current_binding_level->this_block = block;
  current_binding_level->names = chainon (current_binding_level->names,
                                          BLOCK_VARS (block));
  current_binding_level->blocks = chainon (current_binding_level->blocks,
                                           BLOCK_SUBBLOCKS (block));
/* Records a ..._DECL node DECL as belonging to the current lexical scope.
   Returns the ..._DECL node.  */

  struct binding_level *b;

  /* If at top level, there is no context.  But PARM_DECLs always go in the
     level of its function.  */
  if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
    {
      b = global_binding_level;
      DECL_CONTEXT (decl) = 0;
    }
  else
    {
      b = current_binding_level;
      DECL_CONTEXT (decl) = current_function_decl;
    }

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later if necessary.  This needs to be
     this way for compatibility with the back-end.

     Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list.  They
     will cause trouble with the debugger and aren't needed anyway.  */
  if (TREE_CODE (decl) != TYPE_DECL
      || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
    {
      TREE_CHAIN (decl) = b->names;
      b->names = decl;
    }

  /* For the declaration of a type, set its name if it either is not already
     set, was set to an IDENTIFIER_NODE, indicating an internal name,
     or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in this
     function in c-decl.c makes a copy of the type node here, but that may
     cause us trouble with incomplete types, so let's not try it (at least
     for now).  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_NAME (decl) != 0
      && (TYPE_NAME (TREE_TYPE (decl)) == 0
          || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
          || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
              && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
              && ! DECL_ARTIFICIAL (decl))))
    TYPE_NAME (TREE_TYPE (decl)) = decl;

  return decl;
/* Do little here.  Set up the standard declarations later after the
   front end has been run.  */

gnat_init_decl_processing ()

  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;
  pushlevel (0);
  global_binding_level = current_binding_level;

  build_common_tree_nodes (0);

  /* In Ada, we use a signed type for SIZETYPE.  Use the signed type
     corresponding to the size of ptr_mode.  Make this here since we need
     this before we can expand the GNAT types.  */
  set_sizetype (gnat_type_for_size (GET_MODE_BITSIZE (ptr_mode), 0));
  build_common_tree_nodes_2 (0);

  pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));

  /* We need to make the integer type before doing anything else.
     We stitch this in to the appropriate GNAT type later.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
                        integer_type_node));
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
                        char_type_node));

  ptr_void_type_node = build_pointer_type (void_type_node);
/* Create the predefined scalar types such as `integer_type_node' needed
   in the gcc back-end and initialize the global binding level.  */

init_gigi_decls (long_long_float_type, exception_type)
     tree long_long_float_type, exception_type;
  /* Set the types that GCC and Gigi use from the front end.  We would like
     to do this for char_type_node, but it needs to correspond to the C
     char type.  */
  if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
    {
      /* In this case, the builtin floating point types are VAX float,
         so make up a type for use.  */
      longest_float_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
      layout_type (longest_float_type_node);
      pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
                            longest_float_type_node));
    }
  else
    longest_float_type_node = TREE_TYPE (long_long_float_type);

  except_type_node = TREE_TYPE (exception_type);
  unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
                        unsigned_type_node));

  void_type_decl_node
    = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
                            void_type_node));

  void_ftype = build_function_type (void_type_node, NULL_TREE);
  ptr_void_ftype = build_pointer_type (void_ftype);
  /* Now declare runtime functions.  */
  endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);

  /* malloc is a function declaration tree for a function to allocate
     memory.  */
  malloc_decl
    = create_subprog_decl (get_identifier ("__gnat_malloc"), NULL_TREE,
                           build_function_type (ptr_void_type_node,
                                                tree_cons (NULL_TREE,
                                                           sizetype,
                                                           endlink)),
                           NULL_TREE, 0, 1, 1, 0);

  /* free is a function declaration tree for a function to free memory.  */
  free_decl
    = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
                           build_function_type (void_type_node,
                                                tree_cons (NULL_TREE,
                                                           ptr_void_type_node,
                                                           endlink)),
                           NULL_TREE, 0, 1, 1, 0);
  /* Make the types and functions used for exception processing.  */
  jmpbuf_type
    = build_array_type (gnat_type_for_mode (Pmode, 0),
                        build_index_type (build_int_2 (5, 0)));
  pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
  jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
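
  /* build_int_2 (5, 0) gives an upper index of 5, so JMPBUF_T is laid out as
     an array of six Pmode-sized words; jmpbuf_ptr_type is the pointer type
     handed to the setjmp and jump-buffer soft-link routines below.  */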
  /* Functions to get and set the jumpbuf pointer for the current thread.  */
  get_jmpbuf_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
       NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
       NULL_TREE, 0, 1, 1, 0);

  set_jmpbuf_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
       NULL_TREE,
       build_function_type (void_type_node,
                            tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  /* Function to get the current exception.  */
  get_excptr_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__get_gnat_exception"),
       NULL_TREE,
       build_function_type (build_pointer_type (except_type_node), NULL_TREE),
       NULL_TREE, 0, 1, 1, 0);
  /* Functions that raise exceptions.  */
  raise_nodefer_decl
    = create_subprog_decl
      (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
       build_function_type (void_type_node,
                            tree_cons (NULL_TREE,
                                       build_pointer_type (except_type_node),
                                       endlink)),
       NULL_TREE, 0, 1, 1, 0);
  /* If in no exception handlers mode, all raise statements are redirected to
     __gnat_last_chance_handler.  No need to redefine raise_nodefer_decl, since
     this procedure will never be called in this mode.  */
  if (No_Exception_Handlers_Set ())
    {
      tree decl
        = create_subprog_decl
          (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
           build_function_type (void_type_node,
                                tree_cons (NULL_TREE,
                                           build_pointer_type (char_type_node),
                                           tree_cons (NULL_TREE,
                                                      integer_type_node,
                                                      endlink))),
           NULL_TREE, 0, 1, 1, 0);

      for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
        gnat_raise_decls[i] = decl;
    }
  else
    /* Otherwise, make one decl for each exception reason.  */
    for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
      {
        sprintf (name, "__gnat_rcheck_%.2d", i);
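        /* The format string gives two-digit names: reason code 3, for
           instance, yields "__gnat_rcheck_03".  */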
        gnat_raise_decls[i]
          = create_subprog_decl
            (get_identifier (name), NULL_TREE,
             build_function_type (void_type_node,
                                  tree_cons (NULL_TREE,
                                             build_pointer_type (char_type_node),
                                             tree_cons (NULL_TREE,
                                                        integer_type_node,
                                                        endlink))),
             NULL_TREE, 0, 1, 1, 0);
      }
  /* Indicate that these never return.  */
  TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
  TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
  TREE_TYPE (raise_nodefer_decl)
    = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
                            TYPE_QUAL_VOLATILE);

  for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
    {
      TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
      TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
      TREE_TYPE (gnat_raise_decls[i])
        = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
                                TYPE_QUAL_VOLATILE);
    }
  /* setjmp returns an integer and has one operand, which is a pointer to
     a jmpbuf.  */
  setjmp_decl
    = create_subprog_decl
      (get_identifier ("setjmp"), NULL_TREE,
       build_function_type (integer_type_node,
                            tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;

  main_identifier_node = get_identifier ("main");
/* This function is called indirectly from toplev.c to handle incomplete
   declarations, i.e. VAR_DECL nodes whose DECL_SIZE is zero.  To be precise,
   compile_file in toplev.c makes an indirect call through the function pointer
   incomplete_decl_finalize_hook which is initialized to this routine in
   init_decl_processing.  */

gnat_finish_incomplete_decl (dont_care)
     tree dont_care ATTRIBUTE_UNUSED;
/* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
   nodes (FIELDLIST), finish constructing the record or union type.
   If HAS_REP is nonzero, this record has a rep clause; don't call
   layout_type but merely set the size and alignment ourselves.
   If DEFER_DEBUG is nonzero, do not call the debugging routines
   on this type; it will be done later.  */

finish_record_type (record_type, fieldlist, has_rep, defer_debug)

  enum tree_code code = TREE_CODE (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  tree size_unit = size_zero_node;

  TYPE_FIELDS (record_type) = fieldlist;

  if (TYPE_NAME (record_type) != 0
      && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
    TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
  else
    TYPE_STUB_DECL (record_type)
      = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
                              record_type));

  /* We don't need both the typedef name and the record name output in
     the debugging information, since they are the same.  */
  DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (has_rep)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
      TYPE_MODE (record_type) = BLKmode;
      if (TYPE_SIZE (record_type) == 0)
        {
          TYPE_SIZE (record_type) = bitsize_zero_node;
          TYPE_SIZE_UNIT (record_type) = size_zero_node;
        }
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
         case where there is a rep clause but all fields have errors and
         no longer have a position.  */
      TYPE_SIZE (record_type) = 0;
      layout_type (record_type);
    }
  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type
     above.  We now make a pass through the fields (in reverse order for
     QUAL_UNION_TYPEs) to compute the Ada size; the GCC size and alignment
     (for rep'ed records that are not padding types); and the mode (for
     rep'ed records that are not padding types).  */

  if (code == QUAL_UNION_TYPE)
    fieldlist = nreverse (fieldlist);
  for (field = fieldlist; field; field = TREE_CHAIN (field))
      tree type = TREE_TYPE (field);
      tree this_size = DECL_SIZE (field);
      tree this_size_unit = DECL_SIZE_UNIT (field);
      tree this_ada_size = DECL_SIZE (field);

      /* We need to make an XVE/XVU record if any field has variable size,
         whether or not the record does.  For example, if we have a union,
         it may be that all fields, rounded up to the alignment, have the
         same size, in which case we'll use that size.  But the debug
         output routines (except Dwarf2) won't be able to output the fields,
         so we need to make the special record.  */
      if (TREE_CODE (this_size) != INTEGER_CST)
        var_size = 1;

      if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
           || TREE_CODE (type) == QUAL_UNION_TYPE)
          && ! TYPE_IS_FAT_POINTER_P (type)
          && ! TYPE_CONTAINS_TEMPLATE_P (type)
          && TYPE_ADA_SIZE (type) != 0)
        this_ada_size = TYPE_ADA_SIZE (type);

      if (has_rep && ! DECL_BIT_FIELD (field))
        TYPE_ALIGN (record_type)
          = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
          ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
          size = size_binop (MAX_EXPR, size, this_size);
          size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);

        case QUAL_UNION_TYPE:
          ada_size
            = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
                           this_ada_size, ada_size));
          size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
                              this_size, size));
          size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
                                   this_size_unit, size_unit));
          /* Since we know here that all fields are sorted in order of
             increasing bit position, the size of the record is one
             higher than the ending bit of the last field processed
             unless we have a rep clause, since in that case we might
             have a field outside a QUAL_UNION_TYPE that has a higher ending
             position.  So use a MAX in that case.  Also, if this field is a
             QUAL_UNION_TYPE, we need to take into account the previous size in
             the case of empty variants.  */
          ada_size
            = merge_sizes (ada_size, bit_position (field), this_ada_size,
                           TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
          size = merge_sizes (size, bit_position (field), this_size,
                              TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
          size_unit
            = merge_sizes (size_unit, byte_position (field), this_size_unit,
                           TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
  if (code == QUAL_UNION_TYPE)
    nreverse (fieldlist);

  /* If this is a padding record, we never want to make the size smaller than
     what was specified in it, if any.  */
  if (TREE_CODE (record_type) == RECORD_TYPE
      && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
    {
      size = TYPE_SIZE (record_type);
      size_unit = TYPE_SIZE_UNIT (record_type);
    }
  /* Now set any of the values we've just computed that apply.  */
  if (! TYPE_IS_FAT_POINTER_P (record_type)
      && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
    SET_TYPE_ADA_SIZE (record_type, ada_size);

  size = round_up (size, TYPE_ALIGN (record_type));
  size_unit = round_up (size_unit, TYPE_ALIGN (record_type) / BITS_PER_UNIT);

  if (has_rep
      && ! (TREE_CODE (record_type) == RECORD_TYPE
            && TYPE_IS_PADDING_P (record_type)
            && TREE_CODE (size) != INTEGER_CST
            && contains_placeholder_p (size)))
    {
      TYPE_SIZE (record_type) = size;
      TYPE_SIZE_UNIT (record_type) = size_unit;
    }

  compute_record_mode (record_type);
  /* If this record is of variable size, rename it so that the
     debugger knows it is and make a new, parallel, record
     that tells the debugger how the record is laid out.  See
     exp_dbug.ads.  */

      tree new_record_type
        = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
                     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
      tree new_id
        = concat_id_with_name (orig_id,
                               TREE_CODE (record_type) == QUAL_UNION_TYPE
                               ? "XVU" : "XVE");
      tree last_pos = bitsize_zero_node;

      TYPE_NAME (new_record_type) = new_id;
      TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
      TYPE_STUB_DECL (new_record_type)
        = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
      DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
        = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      /* Now scan all the fields, replacing each field with a new
         field corresponding to the new encoding.  */
      for (old_field = TYPE_FIELDS (record_type); old_field != 0;
           old_field = TREE_CHAIN (old_field))
        {
          tree field_type = TREE_TYPE (old_field);
          tree field_name = DECL_NAME (old_field);
          tree curpos = bit_position (old_field);
          tree pos, new_field;
          int var = 0;
          unsigned int align = 0;
          /* See how the position was modified from the last position.

             There are two basic cases we support: a value was added
             to the last position or the last position was rounded to
             a boundary and then something was added.  Check for the
             first case first.  If not, see if there is any evidence
             of rounding.  If so, round the last position and try
             again.

             If this is a union, the position can be taken as zero.  */
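          /* As an illustration of the two cases handled below: if the
             previous field ended at bit 32 and this field's position is
             (N * 64) + 8, the MULT_EXPR inside the PLUS_EXPR gives
             ALIGN = 64 and the position is encoded as "round the last
             position up to 64 bits, then add 8" (assuming the variable
             parts of the two positions match).  */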
          if (TREE_CODE (new_record_type) == UNION_TYPE)
            pos = bitsize_zero_node, align = 0;
          else
            pos = compute_related_constant (curpos, last_pos);

          if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
            {
              align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }
          else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
                   && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
                   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
                   && host_integerp (TREE_OPERAND
                                     (TREE_OPERAND (curpos, 0), 1),
                                     1))
            {
              align
                = tree_low_cst
                  (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }

          /* If we can't compute a position, set it to zero.

             ??? We really should abort here, but it's too much work
             to get this correct for all cases.  */
          if (pos == 0)
            pos = bitsize_zero_node;
          /* See if this type is variable-size and make a new type
             and indicate the indirection if so.  */
          if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
            {
              field_type = build_pointer_type (field_type);
              var = 1;
            }

          /* Make a new field name, if necessary.  */
          if (var || align != 0)
            {
              if (align != 0)
                sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
                         align / BITS_PER_UNIT);
              else
                strcpy (suffix, "XVL");

              field_name = concat_id_with_name (field_name, suffix);
            }
          new_field = create_field_decl (field_name, field_type,
                                         new_record_type, 0,
                                         DECL_SIZE (old_field), pos, 0);
          TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
          TYPE_FIELDS (new_record_type) = new_field;

          /* If old_field is a QUAL_UNION_TYPE, take its size as being
             zero.  The only time it's not the last field of the record
             is when there are other components at fixed positions after
             it (meaning there was a rep clause for every field) and we
             want to be able to encode them.  */
          last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
                                 (TREE_CODE (TREE_TYPE (old_field))
                                  == QUAL_UNION_TYPE)
                                 ? bitsize_zero_node
                                 : DECL_SIZE (old_field));
        }

      TYPE_FIELDS (new_record_type)
        = nreverse (TYPE_FIELDS (new_record_type));

      rest_of_type_compilation (new_record_type, global_bindings_p ());

  if (! defer_debug)
    rest_of_type_compilation (record_type, global_bindings_p ());
/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is nonzero
   if this represents a QUAL_UNION_TYPE in which case we must look for
   COND_EXPRs and replace a value of zero with the old size.  If HAS_REP
   is nonzero, we must take the MAX of the end position of this field
   with LAST_SIZE.  In all other cases, we use FIRST_BIT plus SIZE.

   We return an expression for the size.  */
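
/* As a worked example of the rules above: a field at FIRST_BIT 32 with
   SIZE 16 gives 32 + 16 = 48; if HAS_REP is set and LAST_SIZE is 64, the
   result is MAX (64, 48) = 64 instead.  */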
merge_sizes (last_size, first_bit, size, special, has_rep)
     tree first_bit, size;

  tree type = TREE_TYPE (last_size);

  if (! special || TREE_CODE (size) != COND_EXPR)
    {
      tree new = size_binop (PLUS_EXPR, first_bit, size);

      if (has_rep)
        new = size_binop (MAX_EXPR, last_size, new);

      return new;
    }

  return fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
                      integer_zerop (TREE_OPERAND (size, 1))
                      ? last_size : merge_sizes (last_size, first_bit,
                                                 TREE_OPERAND (size, 1),
                                                 special, has_rep),
                      integer_zerop (TREE_OPERAND (size, 2))
                      ? last_size : merge_sizes (last_size, first_bit,
                                                 TREE_OPERAND (size, 2),
                                                 special, has_rep)));
/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
   related by the addition of a constant.  Return that constant if so.  */
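
/* For example, if OP0 is V + 16 and OP1 is V + 8, the result is the bitsize
   constant 8.  When the two variable parts cannot be matched up, 0 is
   returned, which the caller in finish_record_type treats as "no relation
   found".  */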
compute_related_constant (op0, op1)

  tree op0_var, op1_var;
  tree op0_con = split_plus (op0, &op0_var);
  tree op1_con = split_plus (op1, &op1_var);
  tree result = size_binop (MINUS_EXPR, op0_con, op1_con);

  if (operand_equal_p (op0_var, op1_var, 0))
    return result;
  else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
    return result;
  else
    return 0;
/* Utility function of above to split a tree OP which may be a sum, into a
   constant part, which is returned, and a variable part, which is stored
   in *PVAR.  *PVAR may be size_zero_node.  All operations must be of
   bitsizetype.  */
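
/* For example, splitting V + 4 returns the constant 4 and stores the
   variable part V in *PVAR; splitting a plain INTEGER_CST returns it
   unchanged with a zero variable part.  */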
split_plus (in, pvar)

  tree result = bitsize_zero_node;

  while (TREE_CODE (in) == NON_LVALUE_EXPR)
    in = TREE_OPERAND (in, 0);

  *pvar = in;
  if (TREE_CODE (in) == INTEGER_CST)
    {
      *pvar = bitsize_zero_node;
      return in;
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      result = size_binop (PLUS_EXPR, result, lhs_con);
      result = size_binop (TREE_CODE (in), result, rhs_con);

      if (lhs_var == TREE_OPERAND (in, 0)
          && rhs_var == TREE_OPERAND (in, 1))
        return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return result;
    }
  else
    return bitsize_zero_node;
/* Return a FUNCTION_TYPE node.  RETURN_TYPE is the type returned by the
   subprogram.  If it is void_type_node, then we are dealing with a procedure,
   otherwise we are dealing with a function.  PARAM_DECL_LIST is a list of
   PARM_DECL nodes that are the subprogram arguments.  CICO_LIST is the
   copy-in/copy-out list to be stored into TYPE_CICO_LIST.
   RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
   object.  RETURNS_BY_REF is nonzero if the function returns by reference.
   RETURNS_WITH_DSP is nonzero if the function is to return with a
   depressed stack pointer.  */
create_subprog_type (return_type, param_decl_list, cico_list,
                     returns_unconstrained, returns_by_ref, returns_with_dsp)
     tree param_decl_list;
     int returns_unconstrained, returns_by_ref, returns_with_dsp;

  /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
     the subprogram formal parameters.  This list is generated by traversing the
     input list of PARM_DECL nodes.  */
  tree param_type_list = NULL;

  for (param_decl = param_decl_list; param_decl;
       param_decl = TREE_CHAIN (param_decl))
    param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
                                 param_type_list);

  /* The list of the function parameter types has to be terminated by the void
     type to signal to the back-end that we are not dealing with a variable
     parameter subprogram, but that the subprogram has a fixed number of
     parameters.  */
  param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);

  /* The list of argument types has been created in reverse
     order, so reverse it.  */
  param_type_list = nreverse (param_type_list);

  type = build_function_type (return_type, param_type_list);

  /* TYPE may have been shared since GCC hashes types.  If it has a CICO_LIST
     or the new type should, make a copy of TYPE.  Likewise for
     RETURNS_UNCONSTRAINED and RETURNS_BY_REF.  */
  if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
      || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
      || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
    type = copy_type (type);

  SET_TYPE_CI_CO_LIST (type, cico_list);
  TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
  TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
  TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;

  return type;
/* Return a copy of TYPE but safe to modify in any way.  */

  tree new = copy_node (type);

  /* copy_node clears this field instead of copying it, because it is
     aliased with TREE_CHAIN.  */
  TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);

  TYPE_POINTER_TO (new) = 0;
  TYPE_REFERENCE_TO (new) = 0;
  TYPE_MAIN_VARIANT (new) = new;
  TYPE_NEXT_VARIANT (new) = 0;

  return new;
/* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
   TYPE_INDEX_TYPE is INDEX.  */
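
/* For instance, an array indexed by 1 .. 10 might be built with
   create_index_type (size_int (1), size_int (10), gnu_index_subtype),
   where gnu_index_subtype stands for whatever GNAT index subtype the
   caller wants recorded (an illustrative call, not one from this file).  */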
create_index_type (min, max, index)

  /* First build a type for the desired range.  */
  tree type = build_index_2_type (min, max);

  /* If this type has the TYPE_INDEX_TYPE we want, return it.  Otherwise, if it
     doesn't have TYPE_INDEX_TYPE set, set it to INDEX.  If TYPE_INDEX_TYPE
     is set, but not to INDEX, make a copy of this type with the requested
     index type.  Note that we have no way of sharing these types, but that's
     only a small hole.  */
  if (TYPE_INDEX_TYPE (type) == index)
    return type;
  else if (TYPE_INDEX_TYPE (type) != 0)
    type = copy_type (type);

  SET_TYPE_INDEX_TYPE (type, index);
  return type;
/* Return a TYPE_DECL node.  TYPE_NAME gives the name of the type (a character
   string) and TYPE is a ..._TYPE node giving its data type.
   ARTIFICIAL_P is nonzero if this is a declaration that was generated
   by the compiler.  DEBUG_INFO_P is nonzero if we need to write debugging
   information about this type.  */
create_type_decl (type_name, type, attr_list, artificial_p, debug_info_p)
     struct attrib *attr_list;

  tree type_decl = build_decl (TYPE_DECL, type_name, type);
  enum tree_code code = TREE_CODE (type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;
  pushdecl (type_decl);
  process_attributes (type_decl, attr_list);

  /* Pass type declaration information to the debugger unless this is an
     UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
     and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
     a dummy type, which will be completed later, or a type for which
     debugging information was not requested.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
      || ! debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;
  else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
           && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
                 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
    rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);

  return type_decl;
/* Returns a GCC VAR_DECL node.  VAR_NAME gives the name of the variable.
   ASM_NAME is its assembler name (if provided).  TYPE is its data type
   (a GCC ..._TYPE node).  VAR_INIT is the GCC tree for an optional initial
   expression; NULL_TREE if none.

   CONST_FLAG is nonzero if this variable is constant.

   PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
   the current compilation unit.  This flag should be set when processing the
   variable definitions in a package specification.  EXTERN_FLAG is nonzero
   when processing an external variable declaration (as opposed to a
   definition: no storage is to be allocated for the variable here).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.  */
create_var_decl (var_name, asm_name, type, var_init, const_flag, public_flag,
                 extern_flag, static_flag, attr_list)
     struct attrib *attr_list;

  int init_const
    = (var_init == 0
       ? 0
       : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
          && (global_bindings_p () || static_flag
              ? 0 != initializer_constant_valid_p (var_init,
                                                   TREE_TYPE (var_init))
              : TREE_CONSTANT (var_init))));
  tree var_decl
    = build_decl ((const_flag && init_const
                   /* Only make a CONST_DECL for sufficiently-small objects.
                      We consider complex double "sufficiently-small".  */
                   && TYPE_SIZE (type) != 0
                   && host_integerp (TYPE_SIZE_UNIT (type), 1)
                   && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
                                             GET_MODE_SIZE (DCmode)))
                  ? CONST_DECL : VAR_DECL, var_name, type);
  tree assign_init = 0;
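
  /* (On most targets GET_MODE_SIZE (DCmode) is 16 bytes, i.e. two doubles,
     so the CONST_DECL path above is taken for constant objects up to the
     size of a complex double; the exact threshold is target-dependent.)  */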
  /* If this is external, throw away any initializations unless this is a
     CONST_DECL (meaning we have a constant); they will be done elsewhere.  If
     we are defining a global here, leave a constant initialization and save
     any variable elaborations for the elaboration routine.  Otherwise, if
     the initializing expression is not the same as TYPE, generate the
     initialization with an assignment statement, since it knows how
     to do the required adjustments.  If we are just annotating types,
     throw away the initialization if it isn't a constant.  */

  if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
      || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
    var_init = 0;

  if (global_bindings_p () && var_init != 0 && ! init_const)
    {
      add_pending_elaborations (var_decl, var_init);
      var_init = 0;
    }
  else if (var_init != 0
           && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
                != TYPE_MAIN_VARIANT (type))
               || (static_flag && ! init_const)))
    assign_init = var_init, var_init = 0;
  DECL_COMMON (var_decl) = !flag_no_common;
  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* At the global binding level we need to allocate static storage for the
     variable if and only if it's not external.  If we are not at the top level
     we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level and generate any
     needed code and RTL.  */
  var_decl = pushdecl (var_decl);
  expand_decl (var_decl);

  if (DECL_CONTEXT (var_decl) != 0)
    expand_decl_init (var_decl);

  /* If this is volatile, force it into memory.  */
  if (TREE_SIDE_EFFECTS (var_decl))
    gnat_mark_addressable (var_decl);

  if (TREE_CODE (var_decl) != CONST_DECL)
    rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);
  if (assign_init != 0)
    {
      /* If VAR_DECL has a padded type, convert it to the unpadded
         type so the assignment is done properly.  */
      tree lhs = var_decl;

      if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
          && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
        lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);

      expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
                                         assign_init));
    }

  return var_decl;
/* Returns a FIELD_DECL node.  FIELD_NAME is the field name, FIELD_TYPE is its
   type, and RECORD_TYPE is the type of the parent.  PACKED is nonzero if
   this field is in a record type with a "pragma pack".  If SIZE is nonzero
   it is the specified size for this field.  If POS is nonzero, it is the bit
   position.  If ADDRESSABLE is nonzero, it means we are allowed to take
   the address of this field for aliasing purposes.  */
create_field_decl (field_name, field_type, record_type, packed, size, pos,
                   addressable)

  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TREE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
     If it is a padding type where the inner field is of variable size, it
     must be at its natural alignment.  Just handle the packed case here; we
     will disallow non-aligned rep clauses elsewhere.  */
  if (packed && TYPE_MODE (field_type) == BLKmode)
    DECL_ALIGN (field_decl)
      = ((TREE_CODE (field_type) == RECORD_TYPE
          && TYPE_IS_PADDING_P (field_type)
          && ! TREE_CONSTANT (DECL_SIZE (TYPE_FIELDS (field_type))))
         ? TYPE_ALIGN (field_type) : BITS_PER_UNIT);
  /* If a size is specified, use it.  Otherwise, see if we have a size
     to use that may differ from the natural size of the object.  */
  if (size != 0)
    size = convert (bitsizetype, size);

      if (packed == 1 && ! operand_equal_p (rm_size (field_type),
                                            TYPE_SIZE (field_type), 0))
        size = rm_size (field_type);

  /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
     the nearest byte.  */
  if (size != 0 && TREE_CODE (size) == INTEGER_CST
      && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
    size = round_up (size, BITS_PER_UNIT);
  /* Make a bitfield if a size is specified for two reasons: first if the size
     differs from the natural size.  Second, if the alignment is insufficient.
     There are a number of ways the latter can be true.  But never make a
     bitfield if the type of the field has a nonconstant size.  */

  if (size != 0 && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
          || (pos != 0
              && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
                                            bitsize_int (TYPE_ALIGN
                                                         (field_type)))))
          || packed
          || (TYPE_ALIGN (record_type) != 0
              && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (! packed && pos == 0)
        DECL_ALIGN (field_decl)
          = (TYPE_ALIGN (record_type) != 0
             ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
             : TYPE_ALIGN (field_type));
    }

  DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
  DECL_ALIGN (field_decl)
    = MAX (DECL_ALIGN (field_decl),
           DECL_BIT_FIELD (field_decl) ? 1
           : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
           : TYPE_ALIGN (field_type));
      /* We need to pass in the alignment the DECL is known to have.
         This is the lowest-order bit set in POS, but no more than
         the alignment of the record, if one is specified.  Note
         that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
        known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
        known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
          && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
        known_align = TYPE_ALIGN (record_type);
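
      /* For example, a constant POS of 24 gives 24 & -24 == 8, so the field
         is only known to sit on a byte boundary.  */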
      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
                             host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
                             : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
                    &DECL_FIELD_BIT_OFFSET (field_decl),
                    DECL_OFFSET_ALIGN (field_decl), pos);

      DECL_HAS_REP_P (field_decl) = 1;

  /* If the field type is passed by reference, we will have pointers to the
     field, so it is addressable.  */
  if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
    addressable = 1;

  /* Mark the decl as nonaddressable if it either is indicated so semantically
     or if it is a bit field.  */
  DECL_NONADDRESSABLE_P (field_decl)
    = ! addressable || DECL_BIT_FIELD (field_decl);

  return field_decl;
/* Subroutine of previous function: return nonzero if EXP, ignoring any side
   effects, has the value of zero.  */

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    return value_zerop (TREE_OPERAND (exp, 1));

  return integer_zerop (exp);
/* Returns a PARM_DECL node.  PARAM_NAME is the name of the parameter,
   PARAM_TYPE is its type.  READONLY is nonzero if the parameter is
   readonly (either an IN parameter or an address of a pass-by-ref
   parameter).  */

create_param_decl (param_name, param_type, readonly)

  tree param_decl = build_decl (PARM_DECL, param_name, param_type);

  DECL_ARG_TYPE (param_decl) = param_type;
  DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
  TREE_READONLY (param_decl) = readonly;

  return param_decl;
/* Given a DECL and ATTR_LIST, process the listed attributes.  */

process_attributes (decl, attr_list)
     struct attrib *attr_list;

  for (; attr_list; attr_list = attr_list->next)
    switch (attr_list->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
        decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
                                           NULL_TREE),
                         ATTR_FLAG_TYPE_IN_PLACE);
        break;

      case ATTR_LINK_ALIAS:
        TREE_STATIC (decl) = 1;
        assemble_alias (decl, attr_list->name);
        break;

      case ATTR_WEAK_EXTERNAL:
#ifdef ASM_WEAKEN_LABEL
        declare_weak (decl);
#else
        post_error ("?weak declarations not supported on this target",
                    attr_list->error_point);
#endif
        break;

      case ATTR_LINK_SECTION:
#ifdef ASM_OUTPUT_SECTION_NAME
        DECL_SECTION_NAME (decl)
          = build_string (IDENTIFIER_LENGTH (attr_list->name),
                          IDENTIFIER_POINTER (attr_list->name));
        DECL_COMMON (decl) = 0;
#else
        post_error ("?section attributes are not supported for this target",
                    attr_list->error_point);
#endif
        break;
      }
/* Add some pending elaborations on the list.  */

add_pending_elaborations (var_decl, var_init)

  Check_Elaboration_Code_Allowed (error_gnat_node);

  pending_elaborations
    = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
/* Obtain any pending elaborations and clear the old list.  */

get_pending_elaborations ()

  /* Each thing added to the list went on the end; we want it on the
     beginning.  */
  tree result = TREE_CHAIN (pending_elaborations);

  TREE_CHAIN (pending_elaborations) = 0;
  return result;

/* Return nonzero if there are pending elaborations.  */

pending_elaborations_p ()

  return TREE_CHAIN (pending_elaborations) != 0;
/* Save a copy of the current pending elaboration list and make a new
   one.  */

push_pending_elaborations ()

  struct e_stack *p = (struct e_stack *) ggc_alloc (sizeof (struct e_stack));

  p->next = elist_stack;
  p->elab_list = pending_elaborations;
  elist_stack = p;
  pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
/* Pop the stack of pending elaborations.  */

pop_pending_elaborations ()

  struct e_stack *p = elist_stack;

  pending_elaborations = p->elab_list;
  elist_stack = p->next;
/* Return the current position in pending_elaborations so we can insert
   elaborations after that point.  */

get_elaboration_location ()

  return tree_last (pending_elaborations);

/* Insert the current elaborations after ELAB, which is in some elaboration
   list.  */

insert_elaboration_list (elab)

  tree next = TREE_CHAIN (elab);

  if (TREE_CHAIN (pending_elaborations))
    {
      TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
      TREE_CHAIN (tree_last (pending_elaborations)) = next;
      TREE_CHAIN (pending_elaborations) = 0;
    }
/* Returns a LABEL_DECL node for LABEL_NAME.  */

create_label_decl (label_name)

  tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);

  DECL_CONTEXT (label_decl) = current_function_decl;
  DECL_MODE (label_decl) = VOIDmode;
  DECL_SOURCE_LOCATION (label_decl) = input_location;

  return label_decl;
/* Returns a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the TREE_CHAIN field).

   INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
   appropriate fields in the FUNCTION_DECL.  */
create_subprog_decl (subprog_name, asm_name, subprog_type, param_decl_list,
                     inline_flag, public_flag, extern_flag, attr_list)
     tree param_decl_list;
     struct attrib *attr_list;

  tree return_type = TREE_TYPE (subprog_type);
  tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);

  /* If this is a function nested inside an inlined external function, it
     means we aren't going to compile the outer function unless it is
     actually inlined, so do the same for us.  */
  if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))
    extern_flag = 1;

  DECL_EXTERNAL (subprog_decl) = extern_flag;
  TREE_PUBLIC (subprog_decl) = public_flag;
  DECL_INLINE (subprog_decl) = inline_flag;
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;
  DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

  process_attributes (subprog_decl, attr_list);

  /* Add this decl to the current binding level.  */
  subprog_decl = pushdecl (subprog_decl);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);

  return subprog_decl;
/* Count how deep we are into nested functions.  This is because
   we shouldn't call the backend function context routines unless we
   are in a nested function.  */

static int function_nesting_depth;
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

begin_subprog_body (subprog_decl)

  tree param_decl_list;

  if (function_nesting_depth++ != 0)
    push_function_context ();

  announce_function (subprog_decl);

  /* Make this field nonzero so further routines know that this is not
     tentative.  error_mark_node is replaced below (in poplevel) with the
     BLOCK.  */
  DECL_INITIAL (subprog_decl) = error_mark_node;

  /* This function exists in static storage.  This does not mean `static' in
     the C sense!  */
  TREE_STATIC (subprog_decl) = 1;

  /* Enter a new binding level.  */
  current_function_decl = subprog_decl;
  pushlevel (0);

  /* Push all the PARM_DECL nodes onto the current scope (i.e. the scope of the
     subprogram body) so that they can be recognized as local variables in the
     subprogram.

     The list of PARM_DECL nodes is stored in the right order in
     DECL_ARGUMENTS.  Since ..._DECL nodes get stored in the reverse order in
     which they are transmitted to `pushdecl' we need to reverse the list of
     PARM_DECLs if we want it to be stored in the right order.  The reason why
     we want to make sure the PARM_DECLs are stored in the correct order is
     that this list will be retrieved in a few lines with a call to `getdecls'
     to store it back into the DECL_ARGUMENTS field.  */
  param_decl_list = nreverse (DECL_ARGUMENTS (subprog_decl));

  for (param_decl = param_decl_list; param_decl; param_decl = next_param)
    {
      next_param = TREE_CHAIN (param_decl);
      TREE_CHAIN (param_decl) = NULL;
      pushdecl (param_decl);
    }

  /* Store back the PARM_DECL nodes.  They appear in the right order.  */
  DECL_ARGUMENTS (subprog_decl) = getdecls ();

  init_function_start (subprog_decl);
  expand_function_start (subprog_decl, 0);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_ASSEMBLER_NAME (subprog_decl) != 0
      && MAIN_NAME_P (DECL_ASSEMBLER_NAME (subprog_decl))
      && DECL_CONTEXT (subprog_decl) == NULL_TREE)
    expand_main_function ();
/* Finish the definition of the current subprogram and compile it all the way
   to assembler language output.  */

  BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
    = current_function_decl;

  /* Mark the RESULT_DECL as being in this subprogram.  */
  DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;

  expand_function_end ();

  /* If this is a nested function, push a new GC context.  That will keep
     local variables on the stack from being collected while we're doing
     the compilation of this function.  */
  if (function_nesting_depth > 1)
    ggc_push_context ();

  rest_of_compilation (current_function_decl);

  if (function_nesting_depth > 1)
    ggc_pop_context ();

  /* Throw away any VAR_DECLs we made for OUT parameters; they must
     not be seen when we call this function and will be in
     unallocated memory anyway.  */
  for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
       cico_list != 0; cico_list = TREE_CHAIN (cico_list))
    TREE_VALUE (cico_list) = 0;

  if (DECL_SAVED_INSNS (current_function_decl) == 0)
    {
      /* Throw away DECL_RTL in any PARM_DECLs unless this function
         was saved for inline, in which case the DECL_RTLs are in
         preserved memory.  */
      for (decl = DECL_ARGUMENTS (current_function_decl);
           decl != 0; decl = TREE_CHAIN (decl))
        {
          SET_DECL_RTL (decl, 0);
          DECL_INCOMING_RTL (decl) = 0;
        }

      /* Similarly, discard DECL_RTL of the return value.  */
      SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);

      /* But DECL_INITIAL must remain nonzero so we know this
         was an actual function definition unless toplev.c decided not
         to inline it.  */
      if (DECL_INITIAL (current_function_decl) != 0)
        DECL_INITIAL (current_function_decl) = error_mark_node;

      DECL_ARGUMENTS (current_function_decl) = 0;
    }

  /* If we are not at the bottom of the function nesting stack, pop up to
     the containing function.  Otherwise show we aren't in any function.  */
  if (--function_nesting_depth != 0)
    pop_function_context ();
  else
    current_function_decl = 0;
/* Return a definition for a builtin function named NAME and whose data type
   is TYPE.  TYPE should be a function type with argument types.
   FUNCTION_CODE tells later passes how to compile calls to this function.
   See tree.h for its possible values.

   If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
   the name to be called if we can't opencode the function.  If
   ATTRS is nonzero, use that for the function attribute list.  */

tree
builtin_function (name, type, function_code, class, library_name, attrs)
     const char *name;
     tree type;
     int function_code;
     enum built_in_class class;
     const char *library_name;
     tree attrs;
{
  tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);

  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  if (library_name)
    SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));

  pushdecl (decl);
  DECL_BUILT_IN_CLASS (decl) = class;
  DECL_FUNCTION_CODE (decl) = function_code;
  if (attrs)
    decl_attributes (&decl, attrs, ATTR_FLAG_BUILT_IN);

  return decl;
}
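
/* Illustrative sketch (not part of the original source): how a front end
   might register a library builtin through the routine above.  The builtin
   name below is hypothetical; build_function_type and NOT_BUILT_IN are the
   usual GCC 3.x facilities.  */
#if 0
static void
register_example_builtin (void)
{
  /* A builtin with an unspecified argument list returning nothing.  */
  tree ftype = build_function_type (void_type_node, NULL_TREE);

  builtin_function ("__gnat_example_builtin", ftype, 0, NOT_BUILT_IN,
                    NULL, NULL_TREE);
}
#endif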
/* Return an integer type with the number of bits of precision given by
   PRECISION.  UNSIGNEDP is nonzero if the type is unsigned; otherwise
   it is a signed type.  */

tree
gnat_type_for_size (precision, unsignedp)
     unsigned precision;
     int unsignedp;
{
  tree t;
  char type_name[20];

  if (precision <= 2 * MAX_BITS_PER_WORD
      && signed_and_unsigned_types[precision][unsignedp] != 0)
    return signed_and_unsigned_types[precision][unsignedp];

  if (unsignedp)
    t = make_unsigned_type (precision);
  else
    t = make_signed_type (precision);

  if (precision <= 2 * MAX_BITS_PER_WORD)
    signed_and_unsigned_types[precision][unsignedp] = t;

  if (TYPE_NAME (t) == 0)
    {
      sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
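
/* Illustrative sketch (not part of the original source): because
   signed_and_unsigned_types caches one node per precision and signedness,
   asking twice for the same combination is expected to yield the very same
   TREE node.  The helper name is hypothetical.  */
#if 0
static int
type_for_size_cache_sketch (void)
{
  tree t1 = gnat_type_for_size (8, 1);  /* unsigned, 8 bits */
  tree t2 = gnat_type_for_size (8, 1);  /* same request again */

  return t1 == t2;                      /* expected to be true */
}
#endif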
/* Likewise for floating-point types.  */

static tree
float_type_for_size (precision, mode)
     int precision;
     enum machine_mode mode;
{
  tree t;
  char type_name[20];

  if (float_types[(int) mode] != 0)
    return float_types[(int) mode];

  float_types[(int) mode] = t = make_node (REAL_TYPE);
  TYPE_PRECISION (t) = precision;
  layout_type (t);

  if (TYPE_MODE (t) != mode)
    gigi_abort (414);

  if (TYPE_NAME (t) == 0)
    {
      sprintf (type_name, "FLOAT_%d", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
/* Return a data type that has machine mode MODE.  UNSIGNEDP selects
   an unsigned type; otherwise a signed type is returned.  */

tree
gnat_type_for_mode (mode, unsignedp)
     enum machine_mode mode;
     int unsignedp;
{
  if (GET_MODE_CLASS (mode) == MODE_FLOAT)
    return float_type_for_size (GET_MODE_BITSIZE (mode), mode);
  else
    return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
}
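
/* Illustrative sketch (not part of the original source): the routine above
   dispatches on the mode class, so integer modes go through
   gnat_type_for_size and float modes through float_type_for_size.  */
#if 0
static void
type_for_mode_sketch (void)
{
  tree si = gnat_type_for_mode (SImode, 0);  /* signed integer type */
  tree sf = gnat_type_for_mode (SFmode, 0);  /* single-precision float type */

  (void) si;
  (void) sf;
}
#endif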
/* Return the unsigned version of a TYPE_NODE, a scalar type.  */

tree
gnat_unsigned_type (type_node)
     tree type_node;
{
  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);

  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
    {
      type = copy_node (type);
      TREE_TYPE (type) = type_node;
    }
  else if (TREE_TYPE (type_node) != 0
           && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
           && TYPE_MODULAR_P (TREE_TYPE (type_node)))
    {
      type = copy_node (type);
      TREE_TYPE (type) = TREE_TYPE (type_node);
    }

  return type;
}
/* Return the signed version of a TYPE_NODE, a scalar type.  */

tree
gnat_signed_type (type_node)
     tree type_node;
{
  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);

  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
    {
      type = copy_node (type);
      TREE_TYPE (type) = type_node;
    }
  else if (TREE_TYPE (type_node) != 0
           && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
           && TYPE_MODULAR_P (TREE_TYPE (type_node)))
    {
      type = copy_node (type);
      TREE_TYPE (type) = TREE_TYPE (type_node);
    }

  return type;
}
/* Return a type the same as TYPE except unsigned or signed according to
   UNSIGNEDP.  */

tree
gnat_signed_or_unsigned_type (unsignedp, type)
     int unsignedp;
     tree type;
{
  if (! INTEGRAL_TYPE_P (type) || TREE_UNSIGNED (type) == unsignedp)
    return type;
  else
    return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
}
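
/* Illustrative sketch (not part of the original source): flip the signedness
   of an integral type while keeping its precision; non-integral types come
   back unchanged.  */
#if 0
static tree
flip_signedness_sketch (tree type)
{
  return gnat_signed_or_unsigned_type (! TREE_UNSIGNED (type), type);
}
#endif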
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if ! MAX_P) possible value of the discriminant.  */

tree
max_size (exp, max_p)
     tree exp;
     int max_p;
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case 'd':
    case 'c':
      return exp;

    case 'x':
      if (code == TREE_LIST)
        return tree_cons (TREE_PURPOSE (exp),
                          max_size (TREE_VALUE (exp), max_p),
                          TREE_CHAIN (exp) != 0
                          ? max_size (TREE_CHAIN (exp), max_p) : 0);
      break;

    case 'r':
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
         modify.  Otherwise, we abort since it is something we can't
         handle.  */
      if (! contains_placeholder_p (exp))
        gigi_abort (406);

      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
        max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);

    case '<':
      return max_p ? size_one_node : size_zero_node;

    case '1':
    case '2':
    case 'e':
      switch (TREE_CODE_LENGTH (code))
        {
        case 1:
          if (code == NON_LVALUE_EXPR)
            return max_size (TREE_OPERAND (exp, 0), max_p);
          else
            return
              fold (build1 (code, type,
                            max_size (TREE_OPERAND (exp, 0),
                                      code == NEGATE_EXPR ? ! max_p : max_p)));

        case 2:
          if (code == RTL_EXPR)
            gigi_abort (407);
          else if (code == COMPOUND_EXPR)
            return max_size (TREE_OPERAND (exp, 1), max_p);
          else if (code == WITH_RECORD_EXPR)
            return exp;

          {
            tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
            tree rhs = max_size (TREE_OPERAND (exp, 1),
                                 code == MINUS_EXPR ? ! max_p : max_p);

            /* Special-case wanting the maximum value of a MIN_EXPR.
               In that case, if one side overflows, return the other.
               sizetype is signed, but we know sizes are non-negative.
               Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
               overflowing or the maximum possible value and the RHS
               a variable.  */
            if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
              return lhs;
            else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
              return rhs;
            else if ((code == MINUS_EXPR || code == PLUS_EXPR)
                     && (TREE_OVERFLOW (lhs)
                         || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
                     && ! TREE_CONSTANT (rhs))
              return lhs;
            else
              return fold (build (code, type, lhs, rhs));
          }

        case 3:
          if (code == SAVE_EXPR)
            return exp;
          else if (code == COND_EXPR)
            return fold (build (MAX_EXPR, type,
                                max_size (TREE_OPERAND (exp, 1), max_p),
                                max_size (TREE_OPERAND (exp, 2), max_p)));
          else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
            return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
                          max_size (TREE_OPERAND (exp, 1), max_p));
        }
    }

  gigi_abort (408);
}
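
/* Illustrative sketch (not part of the original source): for a size given by
   a COND_EXPR, the maximum size is the MAX_EXPR of the two branches, per the
   COND_EXPR handling above.  */
#if 0
static tree
max_of_conditional_size_sketch (tree cond)
{
  tree size = build (COND_EXPR, sizetype, cond, size_int (8), size_int (32));

  return max_size (size, 1);  /* expected to fold to size_int (32) */
}
#endif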
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (template_type, array_type, expr)
     tree template_type;
     tree array_type;
     tree expr;
{
  tree template_elts = NULL_TREE;
  tree bound_list = NULL_TREE;
  tree field;

  if (TREE_CODE (array_type) == RECORD_TYPE
      && (TYPE_IS_PADDING_P (array_type)
          || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
          && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.  */

  for (field = TYPE_FIELDS (template_type); field;
       (bound_list != 0
        ? (bound_list = TREE_CHAIN (bound_list))
        : (array_type = TREE_TYPE (array_type))),
       field = TREE_CHAIN (TREE_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
         for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
         DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
         This will give us a maximum range.  */
      if (bound_list != 0)
        bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
        bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
               && DECL_BY_COMPONENT_PTR_P (expr))
        bounds = TREE_TYPE (field);
      else
        gigi_abort (411);

      min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
         surround them with a WITH_RECORD_EXPR giving EXPR as the
         OBJECT.  */
      if (! TREE_CONSTANT (min) && contains_placeholder_p (min))
        min = build (WITH_RECORD_EXPR, TREE_TYPE (min), min, expr);
      if (! TREE_CONSTANT (max) && contains_placeholder_p (max))
        max = build (WITH_RECORD_EXPR, TREE_TYPE (max), max, expr);

      template_elts = tree_cons (TREE_CHAIN (field), max,
                                 tree_cons (field, min, template_elts));
    }

  return gnat_build_constructor (template_type, nreverse (template_elts));
}
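
/* Illustrative sketch (not part of the original source): obtain the bounds
   template for a constrained array, extracting the template type from a fat
   pointer type the same way convert_to_fat_pointer does later in this
   file.  */
#if 0
static tree
bounds_template_sketch (tree fat_pointer_type, tree constrained_array_type)
{
  tree template_type
    = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (fat_pointer_type))));

  return build_template (template_type, constrained_array_type, NULL_TREE);
}
#endif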
/* Build a VMS descriptor from a Mechanism_Type, which must specify
   a descriptor type, and the GCC type of an object.  Each FIELD_DECL
   in the type contains in its DECL_INITIAL the expression to use when
   a constructor is made for the type.  GNAT_ENTITY is a gnat node used
   to print out an error message if the mechanism cannot be applied to
   an object of that type and also for the name.  */

tree
build_vms_descriptor (type, mech, gnat_entity)
     tree type;
     Mechanism_Type mech;
     Entity_Id gnat_entity;
{
  tree record_type = make_node (RECORD_TYPE);
  tree field_list = 0;
  int class;
  int dtype = 0;
  int ndim = 0;
  int i;
  tree *idx_arr;
  tree tem;
  tree inner_type;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
         TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
         && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
         ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = (tree *) alloca (ndim * sizeof (tree));

  if (mech != By_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
         i >= 0;
         i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
         i < ndim;
         i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      if (TYPE_VAX_FLOATING_POINT_P (type))
        switch ((int) TYPE_DIGITS_VALUE (type))
          {
            /* ... */
          }
      else
        switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
          {
          case 8:
            dtype = TREE_UNSIGNED (type) ? 2 : 6;
            break;
          case 16:
            dtype = TREE_UNSIGNED (type) ? 3 : 7;
            break;
          case 32:
            dtype = TREE_UNSIGNED (type) ? 4 : 8;
            break;
          case 64:
            dtype = TREE_UNSIGNED (type) ? 5 : 9;
            break;
          case 128:
            dtype = TREE_UNSIGNED (type) ? 25 : 26;
            break;
          }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
          && TYPE_VAX_FLOATING_POINT_P (type))
        switch ((int) TYPE_DIGITS_VALUE (type))
          {
            /* ... */
          }
      else
        dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    /* ... */

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
      class = 4;
      break;
    case By_Descriptor_NCA:
      class = 10;
      break;
    case By_Descriptor_SB:
      class = 15;
      break;
    default:
      class = 1;
      break;
    }

  /* Make the type for a descriptor for VMS.  The first four fields
     are the same for all types.  */
  field_list
    = chainon (field_list,
               make_descriptor_field
               ("LENGTH", gnat_type_for_size (16, 1), record_type,
                size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));

  field_list = chainon (field_list,
                        make_descriptor_field ("DTYPE",
                                               gnat_type_for_size (8, 1),
                                               record_type, size_int (dtype)));
  field_list = chainon (field_list,
                        make_descriptor_field ("CLASS",
                                               gnat_type_for_size (8, 1),
                                               record_type, size_int (class)));

  field_list
    = chainon (field_list,
               make_descriptor_field ("POINTER",
                                      build_pointer_type (type),
                                      record_type,
                                      build1 (ADDR_EXPR,
                                              build_pointer_type (type),
                                              build (PLACEHOLDER_EXPR,
                                                     type))));

  switch (mech)
    {
    case By_Descriptor_S:
      break;

    case By_Descriptor_SB:
      field_list
        = chainon (field_list,
                   make_descriptor_field
                   ("SB_L1", gnat_type_for_size (32, 1), record_type,
                    TREE_CODE (type) == ARRAY_TYPE
                    ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      field_list
        = chainon (field_list,
                   make_descriptor_field
                   ("SB_L2", gnat_type_for_size (32, 1), record_type,
                    TREE_CODE (type) == ARRAY_TYPE
                    ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      break;

    case By_Descriptor_A:
    case By_Descriptor_NCA:
      field_list = chainon (field_list,
                            make_descriptor_field ("SCALE",
                                                   gnat_type_for_size (8, 1),
                                                   record_type,
                                                   size_zero_node));

      field_list = chainon (field_list,
                            make_descriptor_field ("DIGITS",
                                                   gnat_type_for_size (8, 1),
                                                   record_type,
                                                   size_zero_node));

      field_list
        = chainon (field_list,
                   make_descriptor_field
                   ("AFLAGS", gnat_type_for_size (8, 1), record_type,
                    size_int (mech == By_Descriptor_NCA
                              ? 0
                              /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS.  */
                              : (TREE_CODE (type) == ARRAY_TYPE
                                 && TYPE_CONVENTION_FORTRAN_P (type)
                                 ? 224 : 192))));

      field_list = chainon (field_list,
                            make_descriptor_field ("DIMCT",
                                                   gnat_type_for_size (8, 1),
                                                   record_type,
                                                   size_int (ndim)));

      field_list = chainon (field_list,
                            make_descriptor_field ("ARSIZE",
                                                   gnat_type_for_size (32, 1),
                                                   record_type,
                                                   size_in_bytes (type)));

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
           i++, inner_type = TREE_TYPE (inner_type))
        tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
                     convert (TYPE_DOMAIN (inner_type), size_zero_node));

      field_list
        = chainon (field_list,
                   make_descriptor_field
                   ("A0", build_pointer_type (inner_type), record_type,
                    build1 (ADDR_EXPR, build_pointer_type (inner_type), tem)));

      /* Next come the addressing coefficients.  */
      tem = size_int (1);
      for (i = 0; i < ndim; i++)
        {
          char fname[3];
          tree idx_length
            = size_binop (MULT_EXPR, tem,
                          size_binop (PLUS_EXPR,
                                      size_binop (MINUS_EXPR,
                                                  TYPE_MAX_VALUE (idx_arr[i]),
                                                  TYPE_MIN_VALUE (idx_arr[i])),
                                      size_int (1)));

          fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
          fname[1] = '0' + i, fname[2] = 0;
          field_list
            = chainon (field_list,
                       make_descriptor_field (fname,
                                              gnat_type_for_size (32, 1),
                                              record_type, idx_length));

          if (mech == By_Descriptor_NCA)
            tem = idx_length;
        }

      /* Finally here are the bounds.  */
      for (i = 0; i < ndim; i++)
        {
          char fname[3];

          fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
          field_list
            = chainon (field_list,
                       make_descriptor_field
                       (fname, gnat_type_for_size (32, 1), record_type,
                        TYPE_MIN_VALUE (idx_arr[i])));

          fname[0] = 'U';
          field_list
            = chainon (field_list,
                       make_descriptor_field
                       (fname, gnat_type_for_size (32, 1), record_type,
                        TYPE_MAX_VALUE (idx_arr[i])));
        }
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  finish_record_type (record_type, field_list, 0, 1);
  pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
                        record_type));

  return record_type;
}
/* Utility routine for above code to make a field.  */

static tree
make_descriptor_field (name, type, rec_type, initial)
     const char *name;
     tree type;
     tree rec_type;
     tree initial;
{
  tree field
    = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);

  DECL_INITIAL (field) = initial;
  return field;
}
/* Build a type to be used to represent an aliased object whose nominal
   type is an unconstrained array.  This consists of a RECORD_TYPE containing
   a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
   ARRAY_TYPE.  If ARRAY_TYPE is that of the unconstrained array, this
   is used to represent an arbitrary unconstrained object.  Use NAME
   as the name of the record.  */

tree
build_unc_object_type (template_type, object_type, name)
     tree template_type;
     tree object_type;
     tree name;
{
  tree type = make_node (RECORD_TYPE);
  tree template_field = create_field_decl (get_identifier ("BOUNDS"),
                                           template_type, type, 0, 0, 0, 1);
  tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
                                        type, 0, 0, 0, 1);

  TYPE_NAME (type) = name;
  TYPE_CONTAINS_TEMPLATE_P (type) = 1;
  finish_record_type (type,
                      chainon (chainon (NULL_TREE, template_field),
                               array_field),
                      0, 0);

  return type;
}
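
/* Illustrative sketch (not part of the original source): build the record
   used for an aliased unconstrained object; the record name used here is
   hypothetical.  */
#if 0
static tree
unc_object_type_sketch (tree template_type, tree object_type)
{
  return build_unc_object_type (template_type, object_type,
                                get_identifier ("EXAMPLE_UNC"));
}
#endif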
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.  In
   the normal case this is just two adjustments, but we have more to do
   if NEW is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (old_type, new_type)
     tree old_type;
     tree new_type;
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree type;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
         type = TYPE_NEXT_VARIANT (type))
      update_pointer_to (type, new_type);

  /* If no pointer or reference, we are done.  Otherwise, get the new type with
     the same qualifiers as the old type and see if it is the same as the old
     type.  */
  if (ptr == 0 && ref == 0)
    return;

  new_type = build_qualified_type (new_type, TYPE_QUALS (old_type));
  if (old_type == new_type)
    return;

  /* First handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      if (ptr != 0)
        TREE_TYPE (ptr) = new_type;
      TYPE_POINTER_TO (new_type) = ptr;

      if (ref != 0)
        TREE_TYPE (ref) = new_type;
      TYPE_REFERENCE_TO (new_type) = ref;

      if (ptr != 0 && TYPE_NAME (ptr) != 0
          && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
          && TREE_CODE (new_type) != ENUMERAL_TYPE)
        rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
                                  global_bindings_p (), 0);
      if (ref != 0 && TYPE_NAME (ref) != 0
          && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
          && TREE_CODE (new_type) != ENUMERAL_TYPE)
        rest_of_decl_compilation (TYPE_NAME (ref), NULL,
                                  global_bindings_p (), 0);
    }

  /* Now deal with the unconstrained array case.  In this case the "pointer"
     is actually a RECORD_TYPE where the types of both fields are
     pointers to void.  In that case, copy the field list from the
     old type to the new one and update the fields' context.  */
  else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
    gigi_abort (412);

  else
    {
      tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
      tree ptr_temp_type;
      tree new_ref;
      tree var;

      TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
      DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
      DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;

      /* Rework the PLACEHOLDER_EXPR inside the reference to the
         template bounds.

         ??? This is now the only use of gnat_substitute_in_type, which
         is now a very "heavy" routine to do this, so it should be replaced
         at some point.  */
      ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
      new_ref = build (COMPONENT_REF, ptr_temp_type,
                       build (PLACEHOLDER_EXPR, ptr),
                       TREE_CHAIN (TYPE_FIELDS (ptr)));

      update_pointer_to
        (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
         gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
                                  TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));

      for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
        SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);

      TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
        = TREE_TYPE (new_type) = ptr;

      /* Now handle updating the allocation record, what the thin pointer
         points to.  Update all pointers from the old record into the new
         one, update the types of the fields, and recompute the size.  */

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);

      TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
      TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
        = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
        = TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
        = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));

      TYPE_SIZE (new_obj_rec)
        = size_binop (PLUS_EXPR,
                      DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
                      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      TYPE_SIZE_UNIT (new_obj_rec)
        = size_binop (PLUS_EXPR,
                      DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
                      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));

      rest_of_type_compilation (ptr, global_bindings_p ());
    }
}
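
/* Illustrative sketch (not part of the original source): once the full view
   of a deferred type is known, every pointer and reference built against the
   old (dummy) view is redirected in one call.  */
#if 0
static void
complete_type_sketch (tree dummy_type, tree full_type)
{
  update_pointer_to (dummy_type, full_type);
}
#endif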
/* Convert a pointer to a constrained array into a pointer to a fat
   pointer.  This involves making or finding a template.  */

static tree
convert_to_fat_pointer (type, expr)
     tree type;
     tree expr;
{
  tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
  tree template, template_addr;
  tree etype = TREE_TYPE (expr);

  /* If EXPR is a constant of zero, we make a fat pointer that has a null
     pointer to the template and array.  */
  if (integer_zerop (expr))
    return
      gnat_build_constructor
        (type,
         tree_cons (TYPE_FIELDS (type),
                    convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
                    tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
                               convert (build_pointer_type (template_type),
                                        expr),
                               NULL_TREE)));

  /* If EXPR is a thin pointer, make the template and data from the record.  */

  else if (TYPE_THIN_POINTER_P (etype))
    {
      tree fields = TYPE_FIELDS (TREE_TYPE (etype));

      expr = save_expr (expr);
      if (TREE_CODE (expr) == ADDR_EXPR)
        expr = TREE_OPERAND (expr, 0);
      else
        expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);

      template = build_component_ref (expr, NULL_TREE, fields);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
                             build_component_ref (expr, NULL_TREE,
                                                  TREE_CHAIN (fields)));
    }
  else
    /* Otherwise, build the constructor for the template.  */
    template = build_template (template_type, TREE_TYPE (etype), expr);

  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);

  /* The result is a CONSTRUCTOR for the fat pointer.  */
  return
    gnat_build_constructor (type,
                            tree_cons (TYPE_FIELDS (type), expr,
                                       tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
                                                  template_addr, NULL_TREE)));
}
/* Convert to a thin pointer type, TYPE.  The only thing we know how to convert
   is something that is a fat pointer, so convert to it first if EXPR
   is not already a fat pointer.  */

static tree
convert_to_thin_pointer (type, expr)
     tree type;
     tree expr;
{
  if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
    expr
      = convert_to_fat_pointer
        (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);

  /* We get the pointer to the data and use a NOP_EXPR to make it the
     proper GCC type.  */
  expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)));
  expr = build1 (NOP_EXPR, type, expr);

  return expr;
}
/* Create an expression whose value is that of EXPR,
   converted to type TYPE.  The TREE_TYPE of the value
   is always TYPE.  This function implements all reasonable
   conversions; callers should filter out those that are
   not permitted by the language being compiled.  */

tree
convert (type, expr)
     tree type, expr;
{
  enum tree_code code = TREE_CODE (type);
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  tree tem;

  /* If EXPR is already the right type, we are done.  */
  if (type == etype)
    return expr;

  /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
     new one.  */
  if (TREE_CODE (expr) == WITH_RECORD_EXPR)
    return build (WITH_RECORD_EXPR, type,
                  convert (type, TREE_OPERAND (expr, 0)),
                  TREE_OPERAND (expr, 1));

  /* If the input type has padding, remove it by doing a component reference
     to the field.  If the output type has padding, make a constructor
     to build the record.  If both input and output have padding and are
     of variable size, do this as an unchecked conversion.  */
  if (ecode == RECORD_TYPE && code == RECORD_TYPE
      && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
      && (! TREE_CONSTANT (TYPE_SIZE (type))
          || ! TREE_CONSTANT (TYPE_SIZE (etype))))
    ;
  else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
    {
      /* If we have just converted to this padded type, just get
         the inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR
          && CONSTRUCTOR_ELTS (expr) != 0
          && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
        return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
      else
        return convert (type, build_component_ref (expr, NULL_TREE,
                                                   TYPE_FIELDS (etype)));
    }
  else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
    {
      /* If we previously converted from another type and our type is
         of variable size, remove the conversion to avoid the need for
         variable-size temporaries.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
          && ! TREE_CONSTANT (TYPE_SIZE (type)))
        expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from expr, convert the original
         object if we have variable size.  That will avoid the need
         for some variable-size temporaries.  */
      if (TREE_CODE (expr) == COMPONENT_REF
          && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
          && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
          && ! TREE_CONSTANT (TYPE_SIZE (type)))
        return convert (type, TREE_OPERAND (expr, 0));

      /* If the result type is a padded type with a self-referentially-sized
         field and the expression type is a record, do this as an
         unchecked conversion.  */
      else if (TREE_CODE (DECL_SIZE (TYPE_FIELDS (type))) != INTEGER_CST
               && contains_placeholder_p (DECL_SIZE (TYPE_FIELDS (type)))
               && TREE_CODE (etype) == RECORD_TYPE)
        return unchecked_convert (type, expr);

      else
        return
          gnat_build_constructor (type,
                                  tree_cons (TYPE_FIELDS (type),
                                             convert (TREE_TYPE
                                                      (TYPE_FIELDS (type)),
                                                      expr),
                                             NULL_TREE));
    }

  /* If the input is a biased type, adjust first.  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
                                       fold (build1 (GNAT_NOP_EXPR,
                                                     TREE_TYPE (etype), expr)),
                                       TYPE_MIN_VALUE (etype))));

  /* If the input is a left-justified modular type, we need to extract
     the actual object before converting it to any other type with the
     exception of an unconstrained array.  */
  if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
      && code != UNCONSTRAINED_ARRAY_TYPE)
    return convert (type, build_component_ref (expr, NULL_TREE,
                                               TYPE_FIELDS (etype)));

  /* If converting a type that does not contain a template into one
     that does, convert to the data type and then build the template.  */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type)
      && ! (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype)))
    {
      tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));

      return
        gnat_build_constructor
          (type,
           tree_cons (TYPE_FIELDS (type),
                      build_template (TREE_TYPE (TYPE_FIELDS (type)),
                                      obj_type, NULL_TREE),
                      tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
                                 convert (obj_type, expr), NULL_TREE)));
    }

  /* There are some special cases of expressions that we process
     specially.  */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case TRANSFORM_EXPR:
    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
         conversion in gnat_expand_expr.  NULL_EXPR does not represent
         an actual value, so no conversion is needed.  */
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
    case CONSTRUCTOR:
      /* If we are converting a STRING_CST to another constrained array type,
         just make a new one in the proper type.  Likewise for a
         CONSTRUCTOR.  */
      if (code == ecode && AGGREGATE_TYPE_P (etype)
          && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
                && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
        {
          expr = copy_node (expr);
          TREE_TYPE (expr) = type;
          return expr;
        }
      break;

    case COMPONENT_REF:
      /* If we are converting between two aggregate types of the same
         kind, size, mode, and alignment, just make a new COMPONENT_REF.
         This avoids unneeded conversions that would make reference
         computations more complex.  */
      if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
          && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
          && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
          && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0))
        return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
                      TREE_OPERAND (expr, 1));

      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* Convert this to the type of the inner array by getting the address of
         the array from the template.  */
      expr = build_unary_op (INDIRECT_REF, NULL_TREE,
                             build_component_ref (TREE_OPERAND (expr, 0),
                                                  get_identifier ("P_ARRAY"),
                                                  NULL_TREE));
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
          && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
        return convert (type, TREE_OPERAND (expr, 0));
      break;

    case INDIRECT_REF:
      /* If both types are record types, just convert the pointer and
         make a new INDIRECT_REF.

         ??? Disable this for now since it causes problems with the
         code in build_binary_op for MODIFY_EXPR which wants to
         strip off conversions.  But that code really is a mess and
         we need to do this a much better way some time.  */
      if (0
          && (TREE_CODE (type) == RECORD_TYPE
              || TREE_CODE (type) == UNION_TYPE)
          && (TREE_CODE (etype) == RECORD_TYPE
              || TREE_CODE (etype) == UNION_TYPE)
          && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
        return build_unary_op (INDIRECT_REF, NULL_TREE,
                               convert (build_pointer_type (type),
                                        TREE_OPERAND (expr, 0)));
      break;

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
      || (code == INTEGER_CST && ecode == INTEGER_CST
          && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
    return fold (build1 (NOP_EXPR, type, expr));

  switch (code)
    {
    case VOID_TYPE:
      return build1 (CONVERT_EXPR, type, expr);

    case INTEGER_TYPE:
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
          && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE))
        return unchecked_convert (type, expr);
      else if (TYPE_BIASED_REPRESENTATION_P (type))
        return fold (build1 (CONVERT_EXPR, type,
                             fold (build (MINUS_EXPR, TREE_TYPE (type),
                                          convert (TREE_TYPE (type), expr),
                                          TYPE_MIN_VALUE (type)))));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
      return fold (convert_to_integer (type, expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two pointers to records denoting
         both a template and type, adjust if needed to account
         for any differing offsets, since one might be negative.  */
      if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
        {
          tree bit_diff
            = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
                           bit_position (TYPE_FIELDS (TREE_TYPE (type))));
          tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
                                       sbitsize_int (BITS_PER_UNIT));

          expr = build1 (NOP_EXPR, type, expr);
          TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
          if (integer_zerop (byte_diff))
            return expr;

          return build_binary_op (PLUS_EXPR, type, expr,
                                  fold (convert_to_pointer (type, byte_diff)));
        }

      /* If converting to a thin pointer, handle specially.  */
      if (TYPE_THIN_POINTER_P (type)
          && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
        return convert_to_thin_pointer (type, expr);

      /* If converting fat pointer to normal pointer, get the pointer to the
         array and then convert it.  */
      else if (TYPE_FAT_POINTER_P (etype))
        expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
                                    NULL_TREE);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
        return
          gnat_build_constructor
            (type, tree_cons (TYPE_FIELDS (type),
                              convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
                              NULL_TREE));

      /* ... fall through ... */

    case ARRAY_TYPE:
      /* In these cases, assume the front-end has validated the conversion.
         If the conversion is valid, it will be a bit-wise conversion, so
         it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr);

    case UNION_TYPE:
      /* Just validate that the type is indeed that of a field
         of the type.  Then make the simple conversion.  */
      for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
        if (TREE_TYPE (tem) == etype)
          return build1 (CONVERT_EXPR, type, expr);

      gigi_abort (413);

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If EXPR is a constrained array, take its address, convert it to a
         fat pointer, and then dereference it.  Likewise if EXPR is a
         record containing both a template and a constrained array.
         Note that a record representing a left justified modular type
         always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
          || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
          || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
          || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
        return
          build_unary_op
            (INDIRECT_REF, NULL_TREE,
             convert_to_fat_pointer (TREE_TYPE (type),
                                     build_unary_op (ADDR_EXPR,
                                                     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
         array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
        return
          build_unary_op (INDIRECT_REF, NULL_TREE,
                          convert (TREE_TYPE (type),
                                   build_unary_op (ADDR_EXPR,
                                                   NULL_TREE, expr)));
      else
        gigi_abort (409);

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gigi_abort (410);
    }
}
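
/* Illustrative sketch (not part of the original source): a checked
   conversion to a 32-bit signed integer type goes through `convert', which
   falls back to convert_to_integer once the special cases above have been
   handled.  */
#if 0
static tree
widen_to_32_bits_sketch (tree expr)
{
  return convert (gnat_type_for_size (32, 0), expr);
}
#endif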
/* Remove all conversions that are done in EXP.  This includes converting
   from a padded type or to a left-justified modular type.  If TRUE_ADDRESS
   is nonzero, always return the address of the containing object even if
   the address is not bit-aligned.  */

tree
remove_conversions (exp, true_address)
     tree exp;
     int true_address;
{
  switch (TREE_CODE (exp))
    {
    case CONSTRUCTOR:
      if (true_address
          && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
          && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
        return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
      break;

    case COMPONENT_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
          && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
        return remove_conversions (TREE_OPERAND (exp, 0), true_address);
      break;

    case VIEW_CONVERT_EXPR:  case NON_LVALUE_EXPR:
    case NOP_EXPR:  case CONVERT_EXPR:  case GNAT_NOP_EXPR:
      return remove_conversions (TREE_OPERAND (exp, 0), true_address);

    default:
      break;
    }

  return exp;
}
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If its type has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.  */

tree
maybe_unconstrained_array (exp)
     tree exp;
{
  enum tree_code code = TREE_CODE (exp);
  tree new;

  switch (TREE_CODE (TREE_TYPE (exp)))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
        {
          new
            = build_unary_op (INDIRECT_REF, NULL_TREE,
                              build_component_ref (TREE_OPERAND (exp, 0),
                                                   get_identifier ("P_ARRAY"),
                                                   NULL_TREE));
          TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
          return new;
        }

      else if (code == NULL_EXPR)
        return build1 (NULL_EXPR,
                       TREE_TYPE (TREE_TYPE (TYPE_FIELDS
                                             (TREE_TYPE (TREE_TYPE (exp))))),
                       TREE_OPERAND (exp, 0));

      else if (code == WITH_RECORD_EXPR
               && (TREE_OPERAND (exp, 0)
                   != (new = maybe_unconstrained_array
                       (TREE_OPERAND (exp, 0)))))
        return build (WITH_RECORD_EXPR, TREE_TYPE (new), new,
                      TREE_OPERAND (exp, 1));
      break;

    case RECORD_TYPE:
      if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
        {
          new
            = build_component_ref (exp, NULL_TREE,
                                   TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))));
          if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
              && TYPE_IS_PADDING_P (TREE_TYPE (new)))
            new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (new))), new);

          return new;
        }
      break;

    default:
      break;
    }

  return exp;
}
/* Return an expression that does an unchecked conversion of EXPR to TYPE.  */

tree
unchecked_convert (type, expr)
     tree type;
     tree expr;
{
  tree etype = TREE_TYPE (expr);

  /* If the expression is already the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
     new one.  */
  if (TREE_CODE (expr) == WITH_RECORD_EXPR)
    return build (WITH_RECORD_EXPR, type,
                  unchecked_convert (type, TREE_OPERAND (expr, 0)),
                  TREE_OPERAND (expr, 1));

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if ((((INTEGRAL_TYPE_P (type)
         && ! (TREE_CODE (type) == INTEGER_TYPE
               && TYPE_VAX_FLOATING_POINT_P (type)))
        || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
        || (TREE_CODE (type) == RECORD_TYPE
            && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
            && ! (TREE_CODE (etype) == INTEGER_TYPE
                  && TYPE_VAX_FLOATING_POINT_P (etype)))
           || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
           || (TREE_CODE (etype) == RECORD_TYPE
               && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
      || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    {
      tree rtype = type;

      if (TREE_CODE (etype) == INTEGER_TYPE
          && TYPE_BIASED_REPRESENTATION_P (etype))
        {
          tree ntype = copy_type (etype);

          TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
          TYPE_MAIN_VARIANT (ntype) = ntype;
          expr = build1 (GNAT_NOP_EXPR, ntype, expr);
        }

      if (TREE_CODE (type) == INTEGER_TYPE
          && TYPE_BIASED_REPRESENTATION_P (type))
        {
          rtype = copy_type (type);
          TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
          TYPE_MAIN_VARIANT (rtype) = rtype;
        }

      expr = convert (rtype, expr);
      if (type != rtype)
        expr = build1 (GNAT_NOP_EXPR, type, expr);
    }

  /* If we are converting TO an integral type whose precision is not the
     same as its size, first unchecked convert to a record that contains
     an object of the output type.  Then extract the field.  */
  else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
           && 0 != compare_tree_int (TYPE_RM_SIZE (type),
                                     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), type,
                                      rec_type, 1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = unchecked_convert (rec_type, expr);
      expr = build_component_ref (expr, NULL_TREE, field);
    }

  /* Similarly for integral input type whose precision is not equal to its
     size.  */
  else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
           && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
                                     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field
        = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
                             1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
      expr = unchecked_convert (type, expr);
    }

  /* We have a special case when we are converting between two
     unconstrained array types.  In that case, take the address,
     convert the fat pointer types, and dereference.  */
  else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
           && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
                           build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
                                   build_unary_op (ADDR_EXPR, NULL_TREE,
                                                   expr)));
  else
    {
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose size is not equal to
     the size of the underlying machine type, sign- or zero-extend
     the result.  We need not do this in the case where the input is
     an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned.  */
  if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
      && ! (TREE_CODE (type) == INTEGER_TYPE
            && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
                                GET_MODE_BITSIZE (TYPE_MODE (type)))
      && ! (INTEGRAL_TYPE_P (etype)
            && TREE_UNSIGNED (type) == TREE_UNSIGNED (etype)
            && operand_equal_p (TYPE_RM_SIZE (type),
                                (TYPE_RM_SIZE (etype) != 0
                                 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
                                0))
      && ! (TREE_UNSIGNED (type) && TREE_UNSIGNED (etype)))
    {
      tree base_type = gnat_type_for_mode (TYPE_MODE (type),
                                           TREE_UNSIGNED (type));
      tree shift_expr
        = convert (base_type,
                   size_binop (MINUS_EXPR,
                               bitsize_int
                               (GET_MODE_BITSIZE (TYPE_MODE (type))),
                               TYPE_RM_SIZE (type)));
      expr
        = convert (type,
                   build_binary_op (RSHIFT_EXPR, base_type,
                                    build_binary_op (LSHIFT_EXPR, base_type,
                                                     convert (base_type, expr),
                                                     shift_expr),
                                    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype), 1))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
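
/* Illustrative sketch (not part of the original source): reinterpret the
   bits of an expression as a 32-bit unsigned integer, as an Ada
   Unchecked_Conversion would; the source is assumed to have a matching
   size.  */
#if 0
static tree
reinterpret_as_unsigned_32_sketch (tree expr)
{
  return unchecked_convert (gnat_type_for_size (32, 1), expr);
}
#endif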
#include "gt-ada-utils.h"
#include "gtype-ada.h"