/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                                U T I L S                                 *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2018, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for more details.  You should have received a copy of the GNU General    *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "tree.h"
#include "stringpool.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "attribs.h"
#include "varasm.h"
#include "toplev.h"
#include "output.h"
#include "debug.h"
#include "convert.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "tree-dump.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "nlists.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"
/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;

/* True if floating-point arithmetic may use wider intermediate results.  */
bool fp_arith_may_widen = true;
/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_noinline_attribute (tree *, tree, tree, int, bool *);
static tree handle_noclone_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging in a lot of the common C front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
       affects_type_identity, handler, exclude } */
  { "const", 0, 0, true, false, false, false,
    handle_const_attribute, NULL },
  { "nothrow", 0, 0, true, false, false, false,
    handle_nothrow_attribute, NULL },
  { "pure", 0, 0, true, false, false, false,
    handle_pure_attribute, NULL },
  { "no vops", 0, 0, true, false, false, false,
    handle_novops_attribute, NULL },
  { "nonnull", 0, -1, false, true, true, false,
    handle_nonnull_attribute, NULL },
  { "sentinel", 0, 1, false, true, true, false,
    handle_sentinel_attribute, NULL },
  { "noreturn", 0, 0, true, false, false, false,
    handle_noreturn_attribute, NULL },
  { "noinline", 0, 0, true, false, false, false,
    handle_noinline_attribute, NULL },
  { "noclone", 0, 0, true, false, false, false,
    handle_noclone_attribute, NULL },
  { "leaf", 0, 0, true, false, false, false,
    handle_leaf_attribute, NULL },
  { "always_inline", 0, 0, true, false, false, false,
    handle_always_inline_attribute, NULL },
  { "malloc", 0, 0, true, false, false, false,
    handle_malloc_attribute, NULL },
  { "type generic", 0, 0, false, true, true, false,
    handle_type_generic_attribute, NULL },

  { "vector_size", 1, 1, false, true, false, false,
    handle_vector_size_attribute, NULL },
  { "vector_type", 0, 0, false, true, false, false,
    handle_vector_type_attribute, NULL },
  { "may_alias", 0, 0, false, true, false, false, NULL, NULL },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format", 3, 3, false, true, true, false, fake_attribute_handler,
    NULL },
  { "format_arg", 1, 1, false, true, true, false, fake_attribute_handler,
    NULL },

  { NULL, 0, 0, false, false, false, false, NULL, NULL }
};
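
/* Editor's note (illustration, not part of the original source): each entry
   above is an attribute_spec whose fields read, per the comment at the top
   of the table: name, minimum and maximum argument counts, whether a DECL is
   required, whether a TYPE is required, whether a function type is required,
   whether the attribute affects type identity, the handler, and the
   exclusion list.  For instance the entry

     { "nonnull", 0, -1, false, true, true, false,
       handle_nonnull_attribute, NULL },

   accepts any number of arguments (-1 meaning unbounded) and applies to
   function types rather than to declarations.  */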
/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY) \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};
/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations.  */
static GTY(()) tree global_context;

/* An array of global declarations.  */
static GTY(()) vec<tree, va_gc> *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) vec<tree, va_gc> *builtin_decls;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

/* A hash table of padded types.  It is modelled on the generic type
   hash table in tree.c, which must thus be used as a reference.  */

struct GTY((for_user)) pad_type_hash
{
  hashval_t hash;
  tree type;
};

struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
{
  static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
  static bool equal (pad_type_hash *a, pad_type_hash *b);

  static int
  keep_cache_entry (pad_type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};

static GTY ((cache)) hash_table<pad_type_hasher> *pad_type_hash_table;
static tree merge_sizes (tree, tree, tree, bool, bool);
static tree fold_bit_position (const_tree);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static unsigned int scale_by_factor_of (tree, unsigned int);
static bool potential_alignment_gap (tree, tree, tree);

/* Linked list used as a queue to defer the initialization of the DECL_CONTEXT
   of ..._DECL nodes and of the TYPE_CONTEXT of ..._TYPE nodes.  */
struct deferred_decl_context_node
{
  /* The ..._DECL node to work on.  */
  tree decl;

  /* The corresponding entity's Scope.  */
  Entity_Id gnat_scope;

  /* The value of force_global when DECL was pushed.  */
  int force_global;

  /* The list of ..._TYPE nodes to propagate the context to.  */
  vec<tree> types;

  /* The next queue item.  */
  struct deferred_decl_context_node *next;
};

static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;

/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
   feed it with the elaboration of GNAT_SCOPE.  */
static struct deferred_decl_context_node *
add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);

/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed.  */
static void add_deferred_type_context (struct deferred_decl_context_node *n,
                                       tree type);
/* Initialize data structures of the utils.c module.  */

void
init_gnat_utils (void)
{
  /* Initialize the association of GNAT nodes to GCC trees.  */
  associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the association of GNAT nodes to GCC trees as dummies.  */
  dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the hash table of padded types.  */
  pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
}

/* Destroy data structures of the utils.c module.  */

void
destroy_gnat_utils (void)
{
  /* Destroy the association of GNAT nodes to GCC trees.  */
  ggc_free (associate_gnat_to_gnu);
  associate_gnat_to_gnu = NULL;

  /* Destroy the association of GNAT nodes to GCC trees as dummies.  */
  ggc_free (dummy_node_table);
  dummy_node_table = NULL;

  /* Destroy the hash table of padded types.  */
  pad_type_hash_table->empty ();
  pad_type_hash_table = NULL;
}
/* GNAT_ENTITY is a GNAT tree node for an entity.  Associate GNU_DECL, a GCC
   tree node, with GNAT_ENTITY.  If GNU_DECL is not a ..._DECL node, abort.
   If NO_CHECK is true, the latter check is suppressed.

   If GNU_DECL is zero, reset a previous association.  */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  If that is not the case, this usually
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem.  */
  gcc_assert (!(gnu_decl
                && (PRESENT_GNU_TREE (gnat_entity)
                    || (!no_check && !DECL_P (gnu_decl)))));

  SET_GNU_TREE (gnat_entity, gnu_decl);
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Return the GCC tree node
   that was associated with it.  If there is no such tree node, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}
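
/* Editor's note (illustrative sketch, not part of the original source):
   callers elsewhere in gigi typically guard the lookup with the presence
   test, along the lines of

     tree gnu_decl;
     if (present_gnu_tree (gnat_entity))
       gnu_decl = get_gnu_tree (gnat_entity);
     else
       gnu_decl = gnat_to_gnu_entity (gnat_entity, NULL_TREE, false);

   where gnat_to_gnu_entity is the entity translator declared in gigi.h;
   the exact call sites and argument lists vary across GCC versions.  */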
/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
  tree gnu_type, debug_type;

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_equiv))
    gnat_equiv = gnat_type;

  /* If there is already a dummy type, use that one.  Else make one.  */
  if (PRESENT_DUMMY_NODE (gnat_equiv))
    return GET_DUMMY_NODE (gnat_equiv);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_equiv)
                        ? tree_code_for_record_type (gnat_equiv)
                        : ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_equiv))
    TYPE_BY_REFERENCE_P (gnu_type) = 1;

  SET_DUMMY_NODE (gnat_equiv, gnu_type);

  /* Create a debug type so that debug info consumers only see an unspecified
     type.  */
  if (Needs_Debug_Info (gnat_type))
    {
      debug_type = make_node (LANG_TYPE);
      SET_TYPE_DEBUG_TYPE (gnu_type, debug_type);

      TYPE_NAME (debug_type) = TYPE_NAME (gnu_type);
      TYPE_ARTIFICIAL (debug_type) = TYPE_ARTIFICIAL (gnu_type);
    }

  return gnu_type;
}
/* Return the dummy type that was made for GNAT_TYPE, if any.  */

tree
get_dummy_type (Entity_Id gnat_type)
{
  return GET_DUMMY_NODE (gnat_type);
}

/* Build dummy fat and thin pointer types whose designated type is specified
   by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter.  */

void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;

  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl.  */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
                             gnu_fat_type);
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
                              gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
                         gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed.  */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_REFERENCE_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}
/* Return true if we are in the global binding level.  */

bool
global_bindings_p (void)
{
  return force_global || !current_function_decl;
}
/* Enter a new binding level.  */

void
gnat_pushlevel (void)
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel = ggc_alloc<gnat_binding_level> ();

  /* Use a free BLOCK, if any; otherwise, allocate one.  */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = BLOCK_CHAIN (free_block_chain);
      BLOCK_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent.  */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  BLOCK_VARS (newlevel->block) = NULL_TREE;
  BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of active levels.  */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}
/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  set_block_for_group (current_binding_level->block);
}

/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}

/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}
/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK don't do anything.  Otherwise, if there
     are no variables free the block and merge its subblocks into those of its
     parent block.  Otherwise, add it to the list of its parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (!BLOCK_VARS (block))
    {
      BLOCK_SUBBLOCKS (level->chain->block)
        = block_chainon (BLOCK_SUBBLOCKS (block),
                         BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
/* Exit a binding level and discard the associated BLOCK.  */

void
gnat_zaplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_CHAIN (block) = free_block_chain;
  free_block_chain = block;

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
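
/* Editor's note (illustrative sketch, not part of the original source): the
   binding-level entry points above are used in matched pairs around the
   translation of a scope, roughly

     gnat_pushlevel ();
     ... translate the declarations and statements of the scope ...
     gnat_poplevel ();

   with gnat_zaplevel substituted for gnat_poplevel when the BLOCK is to be
   discarded.  gnat_poplevel keeps or recycles the BLOCK depending on whether
   it ended up holding variables; gnat_zaplevel always recycles it.  */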
/* Set the context of TYPE and its parallel types (if any) to CONTEXT.  */

static void
gnat_set_type_context (tree type, tree context)
{
  tree decl = TYPE_STUB_DECL (type);

  TYPE_CONTEXT (type) = context;

  while (decl && DECL_PARALLEL_TYPE (decl))
    {
      tree parallel_type = DECL_PARALLEL_TYPE (decl);

      /* Give a context to the parallel types and their stub decl, if any.
         Some parallel types seem to be present in multiple parallel type
         chains, so don't mess with their context if they already have one.  */
      if (!TYPE_CONTEXT (parallel_type))
        {
          if (TYPE_STUB_DECL (parallel_type))
            DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
          TYPE_CONTEXT (parallel_type) = context;
        }

      decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
    }
}
/* Return the innermost scope, starting at GNAT_NODE, that we are interested
   in for debug info purposes, or Empty if there is no such scope.  If not
   NULL, set IS_SUBPROGRAM to whether the returned entity is a subprogram.  */

Entity_Id
get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
{
  Entity_Id gnat_entity;

  if (is_subprogram)
    *is_subprogram = false;

  if (Nkind (gnat_node) == N_Defining_Identifier
      || Nkind (gnat_node) == N_Defining_Operator_Symbol)
    gnat_entity = Scope (gnat_node);
  else
    return Empty;

  while (Present (gnat_entity))
    {
      switch (Ekind (gnat_entity))
        {
        case E_Function:
        case E_Procedure:
          if (Present (Protected_Body_Subprogram (gnat_entity)))
            gnat_entity = Protected_Body_Subprogram (gnat_entity);

          /* If the scope is a subprogram, then just rely on
             current_function_decl, so that we don't have to defer
             anything.  This is needed because other places rely on the
             validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes.  */
          if (is_subprogram)
            *is_subprogram = true;
          return gnat_entity;

        case E_Record_Type:
        case E_Record_Subtype:
          return gnat_entity;

        default:
          /* By default, we are not interested in this particular scope: go to
             the outer one.  */
          break;
        }

      gnat_entity = Scope (gnat_entity);
    }

  return Empty;
}
/* If N is NULL, set TYPE's context to CONTEXT.  Defer this to the processing
   of N otherwise.  */

static void
defer_or_set_type_context (tree type, tree context,
                           struct deferred_decl_context_node *n)
{
  if (n)
    add_deferred_type_context (n, type);
  else
    gnat_set_type_context (type, context);
}

/* Return global_context, but create it first if need be.  */

static tree
get_global_context (void)
{
  if (!global_context)
    {
      global_context
        = build_translation_unit_decl (get_identifier (main_input_filename));
      debug_hooks->register_main_translation_unit (global_context);
    }

  return global_context;
}
/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  tree context = NULL_TREE;
  struct deferred_decl_context_node *deferred_decl_context = NULL;

  /* If explicitly asked to make DECL global or if it's an imported nested
     object, short-circuit the regular Scope-based context computation.  */
  if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
    {
      /* Rely on the GNAT scope, or fallback to the current_function_decl if
         the GNAT scope reached the global scope, if it reached a subprogram
         or the declaration is a subprogram or a variable (for them we skip
         intermediate context types because the subprogram body elaboration
         machinery and the inliner both expect a subprogram context).

         Falling back to current_function_decl is necessary for implicit
         subprograms created by gigi, such as the elaboration subprograms.  */
      bool context_is_subprogram = false;
      const Entity_Id gnat_scope
        = get_debug_scope (gnat_node, &context_is_subprogram);

      if (Present (gnat_scope)
          && !context_is_subprogram
          && TREE_CODE (decl) != FUNCTION_DECL
          && TREE_CODE (decl) != VAR_DECL)
        /* Always assume the scope has not been elaborated, thus defer the
           context propagation to the time its elaboration will be
           available.  */
        deferred_decl_context
          = add_deferred_decl_context (decl, gnat_scope, force_global);

      /* External declarations (when force_global > 0) may not be in a
         local context.  */
      else if (current_function_decl && force_global == 0)
        context = current_function_decl;
    }

  /* If either we are forced to be in global mode or if both the GNAT scope and
     the current_function_decl did not help in determining the context, use the
     global scope.  */
  if (!deferred_decl_context && !context)
    context = get_global_context ();

  /* Functions imported in another function are not really nested.
     For really nested functions mark them initially as needing
     a static chain for uses of that flag before unnesting;
     lower_nested_functions will then recompute it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && !TREE_PUBLIC (decl)
      && context
      && (TREE_CODE (context) == FUNCTION_DECL
          || decl_function_context (context)))
    DECL_STATIC_CHAIN (decl) = 1;

  if (!deferred_decl_context)
    DECL_CONTEXT (decl) = context;

  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node) && !renaming_from_instantiation_p (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
        && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      /* External declarations must go to the binding level they belong to.
         This ensures that the corresponding imported entities are available
         in the debugger at the proper time.  */
      if (DECL_EXTERNAL (decl)
          && TREE_CODE (decl) == FUNCTION_DECL
          && DECL_BUILT_IN (decl))
        vec_safe_push (builtin_decls, decl);
      else if (global_bindings_p ())
        vec_safe_push (global_decls, decl);
      else
        {
          DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
          BLOCK_VARS (current_binding_level->block) = decl;
        }
    }

  /* For the declaration of a type, set its name either if it isn't already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same node, except when the names are
     both derived from source names.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      /* Array and pointer types aren't tagged types in the C sense so we need
         to generate a typedef in DWARF for them and make sure it is preserved,
         unless the type is artificial.  */
      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
          && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
              || DECL_ARTIFICIAL (decl)))
        ;
      /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
         generate the typedef in DWARF.  Also do that for fat pointer types
         because, even though they are tagged types in the C sense, they are
         still XUP types attached to the base array type at this point.  */
      else if (!DECL_ARTIFICIAL (decl)
               && (TREE_CODE (t) == ARRAY_TYPE
                   || TREE_CODE (t) == POINTER_TYPE
                   || TYPE_IS_FAT_POINTER_P (t)))
        {
          tree tt = build_variant_type_copy (t);
          TYPE_NAME (tt) = decl;
          defer_or_set_type_context (tt,
                                     DECL_CONTEXT (decl),
                                     deferred_decl_context);
          TREE_TYPE (decl) = tt;
          if (TYPE_NAME (t)
              && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
              && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
            DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
          else
            DECL_ORIGINAL_TYPE (decl) = t;
          /* Array types need to have a name so that they can be related to
             their GNAT encodings.  */
          if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
            TYPE_NAME (t) = DECL_NAME (decl);
          t = NULL_TREE;
        }
      else if (TYPE_NAME (t)
               && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
               && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
        ;
      else
        t = NULL_TREE;

      /* Propagate the name to all the variants, this is needed for the type
         qualifiers machinery to work properly (see check_qualified_type).
         Also propagate the context to them.  Note that it will be propagated
         to all parallel types too thanks to gnat_set_type_context.  */
      if (t)
        for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
          /* ??? Because of the previous kludge, we can have variants of fat
             pointer types with different names.  */
          if (!(TYPE_IS_FAT_POINTER_P (t)
                && TYPE_NAME (t)
                && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
            {
              TYPE_NAME (t) = decl;
              defer_or_set_type_context (t,
                                         DECL_CONTEXT (decl),
                                         deferred_decl_context);
            }
    }
}
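
/* Editor's note (illustrative sketch, not part of the original source): most
   decl creation helpers in this file funnel through gnat_pushdecl; e.g. a
   variable built by create_var_decl is, roughly, registered as

     tree var_decl = build_decl (input_location, VAR_DECL, name, type);
     ...
     gnat_pushdecl (var_decl, gnat_node);

   which takes care of the lexical scoping, source location and decl listing
   described above.  */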
/* Create a record type that contains a SIZE bytes long field of TYPE with a
   starting bit position so that it is aligned to ALIGN bits, and leaving at
   least ROOM bytes free before the field.  BASE_ALIGN is the alignment the
   record is guaranteed to get.  GNAT_NODE is used for the position of the
   associated TYPE_DECL.  */

tree
make_aligning_type (tree type, unsigned int align, tree size,
                    unsigned int base_align, int room, Node_Id gnat_node)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes.  We use a
     record placeholder to express record'address.  */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

                    <--------- pos ---------->
                {  +------------+-------------+-----------------+
      record  =>{  |############|     ...     | field (type)    |
                {  +------------+-------------+-----------------+
                   |<-- room -->|<- voffset ->|<---- size ----->|
                    o            o
                    |            |
                    record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record.  */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_IDENTIFIER (type);

  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS.  The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address.  */
  voffset_st = size_binop (BIT_AND_EXPR,
                           fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
                           size_int ((align / BITS_PER_UNIT) - 1));

  /* POS = (ROOM + VOFFSET) * BITS_PER_UNIT, in bitsizetype.  */
  pos = size_binop (MULT_EXPR,
                    convert (bitsizetype,
                             size_binop (PLUS_EXPR, room_st, voffset_st)),
                    bitsize_unit_node);

  /* Craft the GCC record representation.  We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield.  We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression.  */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
                             pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  SET_TYPE_ALIGN (record_type, base_align);
  TYPE_USER_ALIGN (record_type) = 1;

  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
                  size_binop (MULT_EXPR, convert (bitsizetype, size),
                              bitsize_unit_node),
                  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
                  size_int (room + align / BITS_PER_UNIT));

  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, record_type, true, false, gnat_node);

  return record_type;
}
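
/* Editor's note (worked example, not part of the original source): with
   ALIGN = 128 bits (16 bytes) and ROOM = 4, if the record were placed at
   address 0x1005, then vblock_addr = 0x1009 and

     voffset = (-0x1009) & 15 = 7

   so the field lands at byte offset room + voffset = 11, i.e. at address
   0x1010, the next 16-byte boundary at least 4 bytes past the start.  The
   record sizes are set from the maximum possible voffset, namely
   align / BITS_PER_UNIT - 1 bytes, so enough storage is always allocated
   wherever the record ends up.  */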
/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
   as the field type of a packed record if IN_RECORD is true, or as the
   component type of a packed array if IN_RECORD is false.  See if we can
   rewrite it either as a type that has non-BLKmode, which we can pack
   tighter in the packed record case, or as a smaller type with at most
   MAX_ALIGN alignment if the value is non-zero.  If so, return the new
   type; if not, return the original type.  */

tree
make_packable_type (tree type, bool in_record, unsigned int max_align)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
  unsigned HOST_WIDE_INT new_size;
  unsigned int align = TYPE_ALIGN (type);
  unsigned int new_align;

  /* No point in doing anything if the size is zero.  */
  if (size == 0)
    return type;

  tree new_type = make_node (TREE_CODE (type));

  /* Copy the name and flags from the old type to that of the new.
     Note that we rely on the pointer equality created here for
     TYPE_NAME to look through conversions in various places.  */
  TYPE_NAME (new_type) = TYPE_NAME (type);
  TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
  TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
  TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
  if (TREE_CODE (type) == RECORD_TYPE)
    TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);

  /* If we are in a record and have a small size, set the alignment to
     try for an integral mode.  Otherwise set it to try for a smaller
     type with BLKmode.  */
  if (in_record && size <= MAX_FIXED_MODE_SIZE)
    {
      new_size = ceil_pow2 (size);
      new_align = MIN (new_size, BIGGEST_ALIGNMENT);
      SET_TYPE_ALIGN (new_type, new_align);
    }
  else
    {
      /* Do not try to shrink the size if the RM size is not constant.  */
      if (TYPE_CONTAINS_TEMPLATE_P (type)
          || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
        return type;

      /* Round the RM size up to a unit boundary to get the minimal size
         for a BLKmode record.  Give up if it's already the size and we
         don't need to lower the alignment.  */
      new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
      new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
      if (new_size == size && (max_align == 0 || align <= max_align))
        return type;

      new_align = MIN (new_size & -new_size, BIGGEST_ALIGNMENT);
      if (max_align > 0 && new_align > max_align)
        new_align = max_align;
      SET_TYPE_ALIGN (new_type, MIN (align, new_align));
    }

  TYPE_USER_ALIGN (new_type) = 1;

  /* Now copy the fields, keeping the position and size as we don't want
     to change the layout by propagating the packedness downwards.  */
  tree new_field_list = NULL_TREE;
  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    {
      tree new_field_type = TREE_TYPE (field);
      tree new_field, new_size;

      if (RECORD_OR_UNION_TYPE_P (new_field_type)
          && !TYPE_FAT_POINTER_P (new_field_type)
          && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
        new_field_type = make_packable_type (new_field_type, true, max_align);

      /* However, for the last field in a not already packed record type
         that is of an aggregate type, we need to use the RM size in the
         packable version of the record type, see finish_record_type.  */
      if (!DECL_CHAIN (field)
          && !TYPE_PACKED (type)
          && RECORD_OR_UNION_TYPE_P (new_field_type)
          && !TYPE_FAT_POINTER_P (new_field_type)
          && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
          && TYPE_ADA_SIZE (new_field_type))
        new_size = TYPE_ADA_SIZE (new_field_type);
      else
        new_size = DECL_SIZE (field);

      new_field
        = create_field_decl (DECL_NAME (field), new_field_type, new_type,
                             new_size, bit_position (field),
                             TYPE_PACKED (type),
                             !DECL_NONADDRESSABLE_P (field));

      DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (field);
      SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, field);
      if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
        DECL_QUALIFIER (new_field) = DECL_QUALIFIER (field);

      DECL_CHAIN (new_field) = new_field_list;
      new_field_list = new_field;
    }

  /* If this is a padding record, we never want to make the size smaller
     than what was specified.  For QUAL_UNION_TYPE, also copy the size.  */
  if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      TYPE_SIZE (new_type) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
      new_size = size;
    }
  else
    {
      TYPE_SIZE (new_type) = bitsize_int (new_size);
      TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);
    }

  if (!TYPE_CONTAINS_TEMPLATE_P (type))
    SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));

  finish_record_type (new_type, nreverse (new_field_list), 2, false);
  relate_alias_sets (new_type, type, ALIAS_SET_COPY);
  if (TYPE_STUB_DECL (type))
    SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
                            DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));

  /* Try harder to get a packable type if necessary, for example
     in case the record itself contains a BLKmode field.  */
  if (in_record && TYPE_MODE (new_type) == BLKmode)
    SET_TYPE_MODE (new_type,
                   mode_for_size_tree (TYPE_SIZE (new_type),
                                       MODE_INT, 1).else_blk ());

  /* If neither mode nor size nor alignment shrunk, return the old type.  */
  if (TYPE_MODE (new_type) == BLKmode && new_size >= size && max_align == 0)
    return type;

  return new_type;
}
/* Return true if TYPE has an unsigned representation.  This needs to be used
   when the representation of types whose precision is not equal to their size
   is manipulated based on the RM size.  */

static inline bool
type_unsigned_for_rm (tree type)
{
  /* This is the common case.  */
  if (TYPE_UNSIGNED (type))
    return true;

  /* See the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.  */
  if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
      && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
    return true;

  return false;
}
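
/* Editor's note (illustrative example, not part of the original source): an
   Ada subtype declared as

     subtype Small is Integer range 0 .. 100;

   is a subtype of a signed base type, so TYPE_UNSIGNED is false on its GCC
   type, yet its lower bound is a non-negative constant; the second test
   above therefore lets its RM-size-based representation be treated as
   unsigned.  */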
/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
   If TYPE is the best type, return it.  Otherwise, make a new type.  We
   only support new integral and pointer types.  FOR_BIASED is true if
   we are making a biased type.  */

tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error.  Likewise if it's too large to represent.  */
  if (!size_tree || !tree_fits_uhwi_p (size_tree))
    return type;

  size = tree_to_uhwi (size_tree);

  switch (TREE_CODE (type))
    {
    case BOOLEAN_TYPE:
      /* Do not mess with boolean types that have foreign convention.  */
      if (TYPE_PRECISION (type) == 1 && TYPE_SIZE (type) == size_tree)
        break;

      /* ... fall through ... */

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
                  && TYPE_BIASED_REPRESENTATION_P (type));

      /* Integer types with precision 0 are forbidden.  */
      if (size == 0)
        size = 1;

      /* Only do something if the type isn't a packed array type and doesn't
         already have the proper size and the size isn't too large.  */
      if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
          || (TYPE_PRECISION (type) == size && biased_p == for_biased)
          || size > LONG_LONG_TYPE_SIZE)
        break;

      biased_p |= for_biased;

      /* The type should be an unsigned type if the original type is unsigned
         or if the lower bound is constant and non-negative or if the type is
         biased, see the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.  */
      if (type_unsigned_for_rm (type) || biased_p)
        new_type = make_unsigned_type (size);
      else
        new_type = make_signed_type (size);
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
      /* Copy the name to show that it's essentially the same type and
         not a subrange type.  */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Do something if this is a fat pointer, in which case we
         may need to return the thin pointer.  */
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
        {
          scalar_int_mode p_mode;
          if (!int_mode_for_size (size, 0).exists (&p_mode)
              || !targetm.valid_pointer_mode (p_mode))
            p_mode = ptr_mode;
          return
            build_pointer_type_for_mode
              (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
               p_mode, 0);
        }
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
         may need to return the fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
        return
          build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  return type;
}
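
/* Editor's note (illustrative call, not part of the original source): for a
   representation clause that squeezes an integral component into 10 bits,
   gigi would end up requesting something like

     tree gnu_type
       = make_type_from_size (gnu_int_type, bitsize_int (10), false);

   yielding a 10-bit integral type whose RM size is 10; with for_biased set
   to true, the range would instead be mapped onto an unsigned type starting
   at 0.  The gnu_int_type variable here is a hypothetical stand-in for the
   original integral type.  */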
/* Return true iff the padded types are equivalent.  */

bool
pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
{
  tree type1, type2;

  if (t1->hash != t2->hash)
    return 0;

  type1 = t1->type;
  type2 = t2->type;

  /* We consider that the padded types are equivalent if they pad the same
     type and have the same size, alignment, RM size and storage order.
     Taking the mode into account is redundant since it is determined by
     the others.  */
  return
    TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
    && TYPE_SIZE (type1) == TYPE_SIZE (type2)
    && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
    && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
    && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
}

/* Compute the hash value for the padded TYPE.  */

static hashval_t
hash_pad_type (tree type)
{
  hashval_t hashcode;

  hashcode
    = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
  hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
  hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
  hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);

  return hashcode;
}
/* Look up the padded TYPE in the hash table and return its canonical version
   if it exists; otherwise, insert it into the hash table.  */

static tree
canonicalize_pad_type (tree type)
{
  const hashval_t hashcode = hash_pad_type (type);
  struct pad_type_hash in, *h, **slot;

  in.hash = hashcode;
  in.type = type;
  slot = pad_type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
  h = *slot;
  if (!h)
    {
      h = ggc_alloc<pad_type_hash> ();
      h->hash = hashcode;
      h->type = type;
      *slot = h;
    }

  return h->type;
}
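
/* Editor's note (illustrative sketch, not part of the original source): this
   is classic hash-consing.  A caller that has just built a padded record can
   replace it with the canonical instance in one step:

     record = canonicalize_pad_type (record);

   If an equivalent padded type was already entered, the fresh node is simply
   dropped and will be garbage-collected, which keeps pointer equality usable
   as an identity test for padded types of constant size, as maybe_pad_type
   and pad_type_has_rm_size below rely on.  */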
/* Ensure that TYPE has SIZE and ALIGN.  Make and return a new padded type
   if needed.  We have already verified that SIZE and ALIGN are large enough.
   GNAT_ENTITY is used to name the resulting record and to issue a warning.
   IS_COMPONENT_TYPE is true if this is being done for the component type of
   an array.  IS_USER_TYPE is true if the original type needs to be completed.
   DEFINITION is true if this type is being defined.  SET_RM_SIZE is true if
   the RM size of the resulting type is to be set to SIZE too; in this case,
   the padded type is canonicalized before being returned.  */

tree
maybe_pad_type (tree type, tree size, unsigned int align,
                Entity_Id gnat_entity, bool is_component_type,
                bool is_user_type, bool definition, bool set_rm_size)
{
  tree orig_size = TYPE_SIZE (type);
  unsigned int orig_align = TYPE_ALIGN (type);
  tree record, field;

  /* If TYPE is a padded type, see if it agrees with any size and alignment
     we were given.  If so, return the original type.  Otherwise, strip
     off the padding, since we will either be returning the inner type
     or repadding it.  If no size or alignment is specified, use that of
     the original padded type.  */
  if (TYPE_IS_PADDING_P (type))
    {
      if ((!size
           || operand_equal_p (round_up (size, orig_align), orig_size, 0))
          && (align == 0 || align == orig_align))
        return type;

      if (!size)
        size = orig_size;
      if (align == 0)
        align = orig_align;

      type = TREE_TYPE (TYPE_FIELDS (type));
      orig_size = TYPE_SIZE (type);
      orig_align = TYPE_ALIGN (type);
    }

  /* If the size is either not being changed or is being made smaller (which
     is not done here and is only valid for bitfields anyway), show the size
     isn't changing.  Likewise, clear the alignment if it isn't being
     changed.  Then return if we aren't doing anything.  */
  if (size
      && (operand_equal_p (size, orig_size, 0)
          || (TREE_CODE (orig_size) == INTEGER_CST
              && tree_int_cst_lt (size, orig_size))))
    size = NULL_TREE;

  if (align == orig_align)
    align = 0;

  if (align == 0 && !size)
    return type;

  /* If requested, complete the original type and give it a name.  */
  if (is_user_type)
    create_type_decl (get_entity_name (gnat_entity), type,
                      !Comes_From_Source (gnat_entity),
                      !(TYPE_NAME (type)
                        && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
                        && DECL_IGNORED_P (TYPE_NAME (type))),
                      gnat_entity);

  /* We used to modify the record in place in some cases, but that could
     generate incorrect debugging information.  So make a new record
     type and name.  */
  record = make_node (RECORD_TYPE);
  TYPE_PADDING_P (record) = 1;

  /* ??? Padding types around packed array implementation types will be
     considered as root types in the array descriptor language hook (see
     gnat_get_array_descr_info).  Give them the original packed array type
     name so that the one coming from sources appears in the debugging
     information.  */
  if (TYPE_IMPL_PACKED_ARRAY_P (type)
      && TYPE_ORIGINAL_PACKED_ARRAY (type)
      && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    TYPE_NAME (record) = TYPE_NAME (TYPE_ORIGINAL_PACKED_ARRAY (type));
  else if (Present (gnat_entity))
    TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");

  SET_TYPE_ALIGN (record, align ? align : orig_align);
  TYPE_SIZE (record) = size ? size : orig_size;
  TYPE_SIZE_UNIT (record)
    = convert (sizetype,
               size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
                           bitsize_unit_node));

  /* If we are changing the alignment and the input type is a record with
     BLKmode and a small constant size, try to make a form that has an
     integral mode.  This might allow the padding record to also have an
     integral mode, which will be much more efficient.  There is no point
     in doing so if a size is specified unless it is also a small constant
     size and it is incorrect to do so if we cannot guarantee that the mode
     will be naturally aligned since the field must always be addressable.

     ??? This might not always be a win when done for a stand-alone object:
     since the nominal and the effective type of the object will now have
     different modes, a VIEW_CONVERT_EXPR will be required for converting
     between them and it might be hard to overcome afterwards, including
     at the RTL level when the stand-alone object is accessed as a whole.  */
  if (align != 0
      && RECORD_OR_UNION_TYPE_P (type)
      && TYPE_MODE (type) == BLKmode
      && !TYPE_BY_REFERENCE_P (type)
      && TREE_CODE (orig_size) == INTEGER_CST
      && !TREE_OVERFLOW (orig_size)
      && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
      && (!size
          || (TREE_CODE (size) == INTEGER_CST
              && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
    {
      tree packable_type = make_packable_type (type, true);
      if (TYPE_MODE (packable_type) != BLKmode
          && align >= TYPE_ALIGN (packable_type))
        type = packable_type;
    }

  /* Now create the field with the original size.  */
  field = create_field_decl (get_identifier ("F"), type, record, orig_size,
                             bitsize_zero_node, 0, 1);
  DECL_INTERNAL_P (field) = 1;

  /* We will output additional debug info manually below.  */
  finish_record_type (record, field, 1, false);

  /* Set the RM size if requested.  */
  if (set_rm_size)
    {
      SET_TYPE_ADA_SIZE (record, size ? size : orig_size);

      /* If the padded type is complete and has constant size, we canonicalize
         it by means of the hash table.  This is consistent with the language
         semantics and ensures that gigi and the middle-end have a common view
         of these padded types.  */
      if (TREE_CONSTANT (TYPE_SIZE (record)))
        {
          tree canonical = canonicalize_pad_type (record);
          if (canonical != record)
            {
              record = canonical;
              goto built;
            }
        }
    }

  if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    SET_TYPE_DEBUG_TYPE (record, type);

  /* Unless debugging information isn't being written for the input type,
     write a record that shows what we are a subtype of and also make a
     variable that indicates our size, if still variable.  */
  if (TREE_CODE (orig_size) != INTEGER_CST
      && TYPE_NAME (record)
      && TYPE_NAME (type)
      && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
           && DECL_IGNORED_P (TYPE_NAME (type))))
    {
      tree name = TYPE_IDENTIFIER (record);
      tree size_unit = TYPE_SIZE_UNIT (record);

      /* A variable that holds the size is required even with no encoding since
         it will be referenced by debugging information attributes.  At global
         level, we need a single variable across all translation units.  */
      if (size
          && TREE_CODE (size) != INTEGER_CST
          && (definition || global_bindings_p ()))
        {
          /* Whether or not gnat_entity comes from source, this XVZ variable
             is a compilation artifact.  */
          size_unit
            = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
                               size_unit, true, global_bindings_p (),
                               !definition && global_bindings_p (), false,
                               false, true, true, NULL, gnat_entity);
          TYPE_SIZE_UNIT (record) = size_unit;
        }

      /* There is no need to show what we are a subtype of when outputting as
         few encodings as possible: regular debugging information makes this
         redundant.  */
      if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
        {
          tree marker = make_node (RECORD_TYPE);
          tree orig_name = TYPE_IDENTIFIER (type);

          TYPE_NAME (marker) = concat_name (name, "XVS");
          finish_record_type (marker,
                              create_field_decl (orig_name,
                                                 build_reference_type (type),
                                                 marker, NULL_TREE, NULL_TREE,
                                                 0, 0),
                              0, true);
          TYPE_SIZE_UNIT (marker) = size_unit;

          add_parallel_type (record, marker);
        }
    }

 built:
  /* If a simple size was explicitly given, maybe issue a warning.  */
  if (!size
      || TREE_CODE (size) == COND_EXPR
      || TREE_CODE (size) == MAX_EXPR
      || No (gnat_entity))
    return record;

  /* But don't do it if we are just annotating types and the type is tagged or
     concurrent, since these types aren't fully laid out in this mode.  */
  if (type_annotate_only)
    {
      Entity_Id gnat_type
        = is_component_type
          ? Component_Type (gnat_entity) : Etype (gnat_entity);

      if (Is_Tagged_Type (gnat_type) || Is_Concurrent_Type (gnat_type))
        return record;
    }

  /* Take the original size as the maximum size of the input if there was an
     unconstrained record involved and round it up to the specified alignment,
     if one was specified, but only for aggregate types.  */
  if (CONTAINS_PLACEHOLDER_P (orig_size))
    orig_size = max_size (orig_size, true);

  if (align && AGGREGATE_TYPE_P (type))
    orig_size = round_up (orig_size, align);

  if (!operand_equal_p (size, orig_size, 0)
      && !(TREE_CODE (size) == INTEGER_CST
           && TREE_CODE (orig_size) == INTEGER_CST
           && (TREE_OVERFLOW (size)
               || TREE_OVERFLOW (orig_size)
               || tree_int_cst_lt (size, orig_size))))
    {
      Node_Id gnat_error_node;

      /* For a packed array, post the message on the original array type.  */
      if (Is_Packed_Array_Impl_Type (gnat_entity))
        gnat_entity = Original_Array_Type (gnat_entity);

      if ((Ekind (gnat_entity) == E_Component
           || Ekind (gnat_entity) == E_Discriminant)
          && Present (Component_Clause (gnat_entity)))
        gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
      else if (Has_Size_Clause (gnat_entity))
        gnat_error_node = Expression (Size_Clause (gnat_entity));
      else if (Has_Object_Size_Clause (gnat_entity))
        gnat_error_node = Expression (Object_Size_Clause (gnat_entity));
      else
        gnat_error_node = Empty;

      /* Generate message only for entities that come from source, since
         if we have an entity created by expansion, the message will be
         generated for some other corresponding source entity.  */
      if (Comes_From_Source (gnat_entity))
        {
          if (Present (gnat_error_node))
            post_error_ne_tree ("{^ }bits of & unused?",
                                gnat_error_node, gnat_entity,
                                size_diffop (size, orig_size));
          else if (is_component_type)
            post_error_ne_tree ("component of& padded{ by ^ bits}?",
                                gnat_entity, gnat_entity,
                                size_diffop (size, orig_size));
        }
    }

  return record;
}
/* Return true if padded TYPE was built with an RM size.  */

bool
pad_type_has_rm_size (tree type)
{
  /* This is required for the lookup.  */
  if (!TREE_CONSTANT (TYPE_SIZE (type)))
    return false;

  const hashval_t hashcode = hash_pad_type (type);
  struct pad_type_hash in, *h;

  in.hash = hashcode;
  in.type = type;
  h = pad_type_hash_table->find_with_hash (&in, hashcode);

  /* The types built with an RM size are the canonicalized ones.  */
  return h && h->type == type;
}
/* Return a copy of the padded TYPE but with reverse storage order.  */

tree
set_reverse_storage_order_on_pad_type (tree type)
{
  if (flag_checking)
    {
      /* If the inner type is not scalar then the function does nothing.  */
      tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
      gcc_assert (!AGGREGATE_TYPE_P (inner_type)
                  && !VECTOR_TYPE_P (inner_type));
    }

  /* This is required for the canonicalization.  */
  gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));

  tree field = copy_node (TYPE_FIELDS (type));
  type = copy_type (type);
  DECL_CONTEXT (field) = type;
  TYPE_FIELDS (type) = field;
  TYPE_REVERSE_STORAGE_ORDER (type) = 1;
  return canonicalize_pad_type (type);
}
1588 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1589 If this is a multi-dimensional array type, do this recursively.
1591 OP may be
1592 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1593 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1594 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1596 void
1597 relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1599 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1600 of a one-dimensional array, since the padding has the same alias set
1601 as the field type, but if it's a multi-dimensional array, we need to
1602 see the inner types. */
1603 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1604 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1605 || TYPE_PADDING_P (gnu_old_type)))
1606 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1608 /* Unconstrained array types are deemed incomplete and would thus be given
1609 alias set 0. Retrieve the underlying array type. */
1610 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1611 gnu_old_type
1612 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1613 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1614 gnu_new_type
1615 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
1617 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1618 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1619 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1620 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1622 switch (op)
1624 case ALIAS_SET_COPY:
1625 /* The alias set shouldn't be copied between array types with different
1626 aliasing settings because this can break the aliasing relationship
1627 between the array type and its element type. */
1628 if (flag_checking || flag_strict_aliasing)
1629 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1630 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1631 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1632 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1634 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1635 break;
1637 case ALIAS_SET_SUBSET:
1638 case ALIAS_SET_SUPERSET:
1640 alias_set_type old_set = get_alias_set (gnu_old_type);
1641 alias_set_type new_set = get_alias_set (gnu_new_type);
1643 /* Do nothing if the alias sets conflict. This ensures that we
1644 never call record_alias_subset several times for the same pair
1645 or at all for alias set 0. */
1646 if (!alias_sets_conflict_p (old_set, new_set))
1648 if (op == ALIAS_SET_SUBSET)
1649 record_alias_subset (old_set, new_set);
1650 else
1651 record_alias_subset (new_set, old_set);
1654 break;
1656 default:
1657 gcc_unreachable ();
1660 record_component_aliases (gnu_new_type);
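/* Usage sketch (hypothetical, not from the original source): a caller
   cloning a type verbatim would copy the alias set, while a caller
   building a view that must conflict with the original would record a
   superset:

     relate_alias_sets (gnu_clone_type, gnu_orig_type, ALIAS_SET_COPY);
     relate_alias_sets (gnu_view_type, gnu_orig_type, ALIAS_SET_SUPERSET);

   gnu_clone_type, gnu_orig_type and gnu_view_type stand for GCC types
   already built by the caller. */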
1663 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1664 ARTIFICIAL_P is true if the type was generated by the compiler. */
1666 void
1667 record_builtin_type (const char *name, tree type, bool artificial_p)
1669 tree type_decl = build_decl (input_location,
1670 TYPE_DECL, get_identifier (name), type);
1671 DECL_ARTIFICIAL (type_decl) = artificial_p;
1672 TYPE_ARTIFICIAL (type) = artificial_p;
1673 gnat_pushdecl (type_decl, Empty);
1675 if (debug_hooks->type_decl)
1676 debug_hooks->type_decl (type_decl, false);
1679 /* Finish constructing the character type CHAR_TYPE.
1681 In Ada, character types are enumeration types and, as a consequence, are
1682 represented in the front-end by integral types holding the positions of
1683 the enumeration values as defined by the language, which means that the
1684 integral types are unsigned.
1686 Unfortunately the signedness of 'char' in C is implementation-defined
1687 and GCC even has the option -f[un]signed-char to toggle it per compilation.
1688 Since GNAT's philosophy is to be compatible with C by default, to wit
1689 Interfaces.C.char is defined as a mere copy of Character, we may need
1690 to declare character types as signed types in GENERIC and generate the
1691 necessary adjustments to make them behave as unsigned types.
1693 The overall strategy is as follows: if 'char' is unsigned, do nothing;
1694 if 'char' is signed, translate character types of CHAR_TYPE_SIZE and
1695 character subtypes with RM_Size = Esize = CHAR_TYPE_SIZE into signed
1696 types. The idea is to ensure that the bit pattern contained in the
1697 Esize'd objects is not changed, even though the numerical value will
1698 be interpreted differently depending on the signedness. */
1700 void
1701 finish_character_type (tree char_type)
1703 if (TYPE_UNSIGNED (char_type))
1704 return;
1706 /* Make a copy of a generic unsigned version since we'll modify it. */
1707 tree unsigned_char_type
1708 = (char_type == char_type_node
1709 ? unsigned_char_type_node
1710 : copy_type (gnat_unsigned_type_for (char_type)));
1712 /* Create an unsigned version of the type and set it as debug type. */
1713 TYPE_NAME (unsigned_char_type) = TYPE_NAME (char_type);
1714 TYPE_STRING_FLAG (unsigned_char_type) = TYPE_STRING_FLAG (char_type);
1715 TYPE_ARTIFICIAL (unsigned_char_type) = TYPE_ARTIFICIAL (char_type);
1716 SET_TYPE_DEBUG_TYPE (char_type, unsigned_char_type);
1718 /* If this is a subtype, make the debug type a subtype of the debug type
1719 of the base type and convert literal RM bounds to unsigned. */
1720 if (TREE_TYPE (char_type))
1722 tree base_unsigned_char_type = TYPE_DEBUG_TYPE (TREE_TYPE (char_type));
1723 tree min_value = TYPE_RM_MIN_VALUE (char_type);
1724 tree max_value = TYPE_RM_MAX_VALUE (char_type);
1726 if (TREE_CODE (min_value) == INTEGER_CST)
1727 min_value = fold_convert (base_unsigned_char_type, min_value);
1728 if (TREE_CODE (max_value) == INTEGER_CST)
1729 max_value = fold_convert (base_unsigned_char_type, max_value);
1731 TREE_TYPE (unsigned_char_type) = base_unsigned_char_type;
1732 SET_TYPE_RM_MIN_VALUE (unsigned_char_type, min_value);
1733 SET_TYPE_RM_MAX_VALUE (unsigned_char_type, max_value);
1736 /* Adjust the RM bounds of the original type to unsigned; that's especially
1737 important for types, since their RM bounds are implicit in this case. */
1738 SET_TYPE_RM_MIN_VALUE (char_type, TYPE_MIN_VALUE (unsigned_char_type));
1739 SET_TYPE_RM_MAX_VALUE (char_type, TYPE_MAX_VALUE (unsigned_char_type));
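/* Illustration (assumed example, not from the original source):

     type Char_Copy is new Character; -- positions 0 .. 255 in Ada

   With -fsigned-char, the GENERIC type for Char_Copy is made signed, so
   the byte 16#FF# reads back as -1 arithmetically; the unsigned debug
   type and the unsigned RM bounds set above still present the value as
   255 within bounds 0 .. 255. */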
1742 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1743 finish constructing the record type as a fat pointer type. */
1745 void
1746 finish_fat_pointer_type (tree record_type, tree field_list)
1748 /* Make sure we can put it into a register. */
1749 if (STRICT_ALIGNMENT)
1750 SET_TYPE_ALIGN (record_type, MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE));
1752 /* Show what it really is. */
1753 TYPE_FAT_POINTER_P (record_type) = 1;
1755 /* Do not emit debug info for it since the types of its fields may still be
1756 incomplete at this point. */
1757 finish_record_type (record_type, field_list, 0, false);
1759 /* Force type_contains_placeholder_p to return true on it. Although the
1760 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1761 type but the representation of the unconstrained array. */
1762 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
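/* For reference, a sketch of the usual GNAT fat pointer shape (field
   names as described in exp_dbug.ads, not code from this file):

     struct fat_pointer
     {
       char *p_array; -- points to the array data
       struct bounds *p_bounds; -- points to the bounds template
     };

   Capping the alignment to 2 * POINTER_SIZE above lets this two-pointer
   record be handled like a register pair on strict-alignment targets. */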
1765 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1766 finish constructing the record or union type. If REP_LEVEL is zero, this
1767 record has no representation clause and so will be entirely laid out here.
1768 If REP_LEVEL is one, this record has a representation clause and has been
1769 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1770 this record is derived from a parent record and thus inherits its layout;
1771 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1772 additional debug info needs to be output for this type. */
1774 void
1775 finish_record_type (tree record_type, tree field_list, int rep_level,
1776 bool debug_info_p)
1778 enum tree_code code = TREE_CODE (record_type);
1779 tree name = TYPE_IDENTIFIER (record_type);
1780 tree ada_size = bitsize_zero_node;
1781 tree size = bitsize_zero_node;
1782 bool had_size = TYPE_SIZE (record_type) != 0;
1783 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1784 bool had_align = TYPE_ALIGN (record_type) != 0;
1785 tree field;
1787 TYPE_FIELDS (record_type) = field_list;
1789 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1790 generate debug info and have a parallel type. */
1791 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
1793 /* Globally initialize the record first. If this is a rep'ed record,
1794 that just means some initializations; otherwise, layout the record. */
1795 if (rep_level > 0)
1797 SET_TYPE_ALIGN (record_type, MAX (BITS_PER_UNIT,
1798 TYPE_ALIGN (record_type)));
1800 if (!had_size_unit)
1801 TYPE_SIZE_UNIT (record_type) = size_zero_node;
1803 if (!had_size)
1804 TYPE_SIZE (record_type) = bitsize_zero_node;
1806 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1807 out just like a UNION_TYPE, since the size will be fixed. */
1808 else if (code == QUAL_UNION_TYPE)
1809 code = UNION_TYPE;
1811 else
1813 /* Ensure there isn't a size already set. One can linger from an error
1814 case where there is a rep clause but all fields have errors and
1815 no longer have a position. */
1816 TYPE_SIZE (record_type) = 0;
1818 /* Ensure we use the traditional GCC layout for bitfields when we need
1819 to pack the record type or have a representation clause. The other
1820 possible layout (Microsoft C compiler), if available, would prevent
1821 efficient packing in almost all cases. */
1822 #ifdef TARGET_MS_BITFIELD_LAYOUT
1823 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1824 decl_attributes (&record_type,
1825 tree_cons (get_identifier ("gcc_struct"),
1826 NULL_TREE, NULL_TREE),
1827 ATTR_FLAG_TYPE_IN_PLACE);
1828 #endif
1830 layout_type (record_type);
1833 /* At this point, the position and size of each field is known. It was
1834 either set before entry by a rep clause, or by laying out the type above.
1836 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1837 to compute the Ada size; the GCC size and alignment (for rep'ed records
1838 that are not padding types); and the mode (for rep'ed records). We also
1839 clear the DECL_BIT_FIELD indication for the cases we know have not been
1840 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1842 if (code == QUAL_UNION_TYPE)
1843 field_list = nreverse (field_list);
1845 for (field = field_list; field; field = DECL_CHAIN (field))
1847 tree type = TREE_TYPE (field);
1848 tree pos = bit_position (field);
1849 tree this_size = DECL_SIZE (field);
1850 tree this_ada_size;
1852 if (RECORD_OR_UNION_TYPE_P (type)
1853 && !TYPE_FAT_POINTER_P (type)
1854 && !TYPE_CONTAINS_TEMPLATE_P (type)
1855 && TYPE_ADA_SIZE (type))
1856 this_ada_size = TYPE_ADA_SIZE (type);
1857 else
1858 this_ada_size = this_size;
1860 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1861 if (DECL_BIT_FIELD (field)
1862 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1864 unsigned int align = TYPE_ALIGN (type);
1866 /* In the general case, type alignment is required. */
1867 if (value_factor_p (pos, align))
1869 /* The enclosing record type must be sufficiently aligned.
1870 Otherwise, if no alignment was specified for it and it
1871 has been laid out already, bump its alignment to the
1872 desired one if this is compatible with its size and
1873 maximum alignment, if any. */
1874 if (TYPE_ALIGN (record_type) >= align)
1876 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
1877 DECL_BIT_FIELD (field) = 0;
1879 else if (!had_align
1880 && rep_level == 0
1881 && value_factor_p (TYPE_SIZE (record_type), align)
1882 && (!TYPE_MAX_ALIGN (record_type)
1883 || TYPE_MAX_ALIGN (record_type) >= align))
1885 SET_TYPE_ALIGN (record_type, align);
1886 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
1887 DECL_BIT_FIELD (field) = 0;
1891 /* In the non-strict alignment case, only byte alignment is required. */
1892 if (!STRICT_ALIGNMENT
1893 && DECL_BIT_FIELD (field)
1894 && value_factor_p (pos, BITS_PER_UNIT))
1895 DECL_BIT_FIELD (field) = 0;
1898 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1899 field is technically not addressable. Except that it can actually
1900 be addressed if it is BLKmode and happens to be properly aligned. */
1901 if (DECL_BIT_FIELD (field)
1902 && !(DECL_MODE (field) == BLKmode
1903 && value_factor_p (pos, BITS_PER_UNIT)))
1904 DECL_NONADDRESSABLE_P (field) = 1;
1906 /* A type must be as aligned as its most aligned field that is not
1907 a bit-field. But this is already enforced by layout_type. */
1908 if (rep_level > 0 && !DECL_BIT_FIELD (field))
1909 SET_TYPE_ALIGN (record_type,
1910 MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field)));
1912 switch (code)
1914 case UNION_TYPE:
1915 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1916 size = size_binop (MAX_EXPR, size, this_size);
1917 break;
1919 case QUAL_UNION_TYPE:
1920 ada_size
1921 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1922 this_ada_size, ada_size);
1923 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1924 this_size, size);
1925 break;
1927 case RECORD_TYPE:
1928 /* Since we know here that all fields are sorted in order of
1929 increasing bit position, the size of the record is one
1930 higher than the ending bit of the last field processed
1931 unless we have a rep clause, since in that case we might
1932 have a field outside a QUAL_UNION_TYPE that has a higher ending
1933 position. So use a MAX in that case. Also, if this field is a
1934 QUAL_UNION_TYPE, we need to take into account the previous size in
1935 the case of empty variants. */
1936 ada_size
1937 = merge_sizes (ada_size, pos, this_ada_size,
1938 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1939 size
1940 = merge_sizes (size, pos, this_size,
1941 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1942 break;
1944 default:
1945 gcc_unreachable ();
1949 if (code == QUAL_UNION_TYPE)
1950 nreverse (field_list);
1952 /* We need to set the regular sizes if REP_LEVEL is one. */
1953 if (rep_level == 1)
1955 /* If this is a padding record, we never want to make the size smaller
1956 than what was specified in it, if any. */
1957 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
1958 size = TYPE_SIZE (record_type);
1960 tree size_unit = had_size_unit
1961 ? TYPE_SIZE_UNIT (record_type)
1962 : convert (sizetype,
1963 size_binop (CEIL_DIV_EXPR, size,
1964 bitsize_unit_node));
1965 const unsigned int align = TYPE_ALIGN (record_type);
1967 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1968 TYPE_SIZE_UNIT (record_type)
1969 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1972 /* We need to set the Ada size if REP_LEVEL is zero or one. */
1973 if (rep_level < 2)
1975 /* Now set any of the values we've just computed that apply. */
1976 if (!TYPE_FAT_POINTER_P (record_type)
1977 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1978 SET_TYPE_ADA_SIZE (record_type, ada_size);
1981 /* We need to set the mode if REP_LEVEL is one or two. */
1982 if (rep_level > 0)
1984 compute_record_mode (record_type);
1985 finish_bitfield_layout (record_type);
1988 /* Reset the TYPE_MAX_ALIGN field since it's private to gigi. */
1989 TYPE_MAX_ALIGN (record_type) = 0;
1991 if (debug_info_p)
1992 rest_of_record_type_compilation (record_type);
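/* Typical invocations (hedged sketch, not from the original source):

     finish_record_type (gnu_type, gnu_field_list, 0, debug_info_p);
     -- no rep clause: the record is entirely laid out here

     finish_record_type (gnu_type, gnu_field_list, 1, debug_info_p);
     -- rep clause: fields already positioned; only sizes and alignment
     -- are set

   with gnu_type a RECORD_TYPE and gnu_field_list its chain of
   FIELD_DECLs built by the caller. */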
1995 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
1996 PARALLEL_TYPE has no context and its computation is not deferred yet, also
1997 propagate TYPE's context to PARALLEL_TYPE's or defer its propagation to the
1998 moment TYPE will get a context. */
2000 void
2001 add_parallel_type (tree type, tree parallel_type)
2003 tree decl = TYPE_STUB_DECL (type);
2005 while (DECL_PARALLEL_TYPE (decl))
2006 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
2008 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
2010 /* If PARALLEL_TYPE already has a context, we are done. */
2011 if (TYPE_CONTEXT (parallel_type))
2012 return;
2014 /* Otherwise, try to get one from TYPE's context. If so, simply propagate
2015 it to PARALLEL_TYPE. */
2016 if (TYPE_CONTEXT (type))
2017 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
2019 /* Otherwise TYPE has no context yet. We know it will have one thanks to
2020 gnat_pushdecl and then its context will be propagated to PARALLEL_TYPE,
2021 so we have nothing to do in this case. */
2024 /* Return true if TYPE has a parallel type. */
2026 static bool
2027 has_parallel_type (tree type)
2029 tree decl = TYPE_STUB_DECL (type);
2031 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
2034 /* Wrap up compilation of RECORD_TYPE, i.e. output additional debug info
2035 associated with it. It need not be invoked directly in most cases as
2036 finish_record_type takes care of doing so. */
2038 void
2039 rest_of_record_type_compilation (tree record_type)
2041 bool var_size = false;
2042 tree field;
2044 /* If this is a padded type, the bulk of the debug info has already been
2045 generated for the field's type. */
2046 if (TYPE_IS_PADDING_P (record_type))
2047 return;
2049 /* If the type already has a parallel type (XVS type), then we're done. */
2050 if (has_parallel_type (record_type))
2051 return;
2053 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
2055 /* We need to make an XVE/XVU record if any field has variable size,
2056 whether or not the record does. For example, if we have a union,
2057 it may be that all fields, rounded up to the alignment, have the
2058 same size, in which case we'll use that size. But the debug
2059 output routines (except Dwarf2) won't be able to output the fields,
2060 so we need to make the special record. */
2061 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
2062 /* If a field has a non-constant qualifier, the record will have
2063 variable size too. */
2064 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
2065 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
2067 var_size = true;
2068 break;
2072 /* If this record type is of variable size, make a parallel record type that
2073 will tell the debugger how the former is laid out (see exp_dbug.ads). */
2074 if (var_size && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
2076 tree new_record_type
2077 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
2078 ? UNION_TYPE : TREE_CODE (record_type));
2079 tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
2080 tree last_pos = bitsize_zero_node;
2081 tree old_field, prev_old_field = NULL_TREE;
2083 new_name
2084 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
2085 ? "XVU" : "XVE");
2086 TYPE_NAME (new_record_type) = new_name;
2087 SET_TYPE_ALIGN (new_record_type, BIGGEST_ALIGNMENT);
2088 TYPE_STUB_DECL (new_record_type)
2089 = create_type_stub_decl (new_name, new_record_type);
2090 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
2091 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
2092 gnat_pushdecl (TYPE_STUB_DECL (new_record_type), Empty);
2093 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
2094 TYPE_SIZE_UNIT (new_record_type)
2095 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
2097 /* Now scan all the fields, replacing each field with a new field
2098 corresponding to the new encoding. */
2099 for (old_field = TYPE_FIELDS (record_type); old_field;
2100 old_field = DECL_CHAIN (old_field))
2102 tree field_type = TREE_TYPE (old_field);
2103 tree field_name = DECL_NAME (old_field);
2104 tree curpos = fold_bit_position (old_field);
2105 tree pos, new_field;
2106 bool var = false;
2107 unsigned int align = 0;
2109 /* See how the position was modified from the last position.
2111 There are two basic cases we support: a value was added
2112 to the last position or the last position was rounded to
2113 a boundary and then something was added. Check for the
2114 first case first. If not, see if there is any evidence
2115 of rounding. If so, round the last position and retry.
2117 If this is a union, the position can be taken as zero. */
2118 if (TREE_CODE (new_record_type) == UNION_TYPE)
2119 pos = bitsize_zero_node;
2120 else
2121 pos = compute_related_constant (curpos, last_pos);
2123 if (!pos
2124 && TREE_CODE (curpos) == MULT_EXPR
2125 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
2127 tree offset = TREE_OPERAND (curpos, 0);
2128 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
2129 align = scale_by_factor_of (offset, align);
2130 last_pos = round_up (last_pos, align);
2131 pos = compute_related_constant (curpos, last_pos);
2133 else if (!pos
2134 && TREE_CODE (curpos) == PLUS_EXPR
2135 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
2136 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
2137 && tree_fits_uhwi_p
2138 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
2140 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
2141 unsigned HOST_WIDE_INT addend
2142 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
2143 align
2144 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
2145 align = scale_by_factor_of (offset, align);
2146 align = MIN (align, addend & -addend);
2147 last_pos = round_up (last_pos, align);
2148 pos = compute_related_constant (curpos, last_pos);
2150 else if (potential_alignment_gap (prev_old_field, old_field, pos))
2152 align = TYPE_ALIGN (field_type);
2153 last_pos = round_up (last_pos, align);
2154 pos = compute_related_constant (curpos, last_pos);
2157 /* If we can't compute a position, set it to zero.
2159 ??? We really should abort here, but it's too much work
2160 to get this correct for all cases. */
2161 if (!pos)
2162 pos = bitsize_zero_node;
2164 /* See if this type is variable-sized and make a pointer type
2165 and indicate the indirection if so. Beware that the debug
2166 back-end may adjust the position computed above according
2167 to the alignment of the field type, i.e. the pointer type
2168 in this case, if we don't preventively counter that. */
2169 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
2171 field_type = build_pointer_type (field_type);
2172 if (align != 0 && TYPE_ALIGN (field_type) > align)
2174 field_type = copy_type (field_type);
2175 SET_TYPE_ALIGN (field_type, align);
2177 var = true;
2180 /* Make a new field name, if necessary. */
2181 if (var || align != 0)
2183 char suffix[16];
2185 if (align != 0)
2186 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
2187 align / BITS_PER_UNIT);
2188 else
2189 strcpy (suffix, "XVL");
2191 field_name = concat_name (field_name, suffix);
2194 new_field
2195 = create_field_decl (field_name, field_type, new_record_type,
2196 DECL_SIZE (old_field), pos, 0, 0);
2197 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
2198 TYPE_FIELDS (new_record_type) = new_field;
2200 /* If old_field is a QUAL_UNION_TYPE, take its size as being
2201 zero. The only time it's not the last field of the record
2202 is when there are other components at fixed positions after
2203 it (meaning there was a rep clause for every field) and we
2204 want to be able to encode them. */
2205 last_pos = size_binop (PLUS_EXPR, curpos,
2206 (TREE_CODE (TREE_TYPE (old_field))
2207 == QUAL_UNION_TYPE)
2208 ? bitsize_zero_node
2209 : DECL_SIZE (old_field));
2210 prev_old_field = old_field;
2213 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
2215 add_parallel_type (record_type, new_record_type);
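/* Encoding example (assumed illustration; see exp_dbug.ads for the
   normative description): for a record R with a variable-sized
   component S, the loop above builds a parallel type R___XVE in which S
   becomes a pointer-typed field S___XVL; a fixed-size field whose
   offset was rounded up to a 4-byte boundary would instead be renamed
   with an XVA4 suffix. */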
2219 /* Utility function of above to merge LAST_SIZE, the previous size of a record
2220 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2221 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2222 replace a value of zero with the old size. If HAS_REP is true, we take the
2223 MAX of the end position of this field with LAST_SIZE. In all other cases,
2224 we use FIRST_BIT plus SIZE. Return an expression for the size. */
2226 static tree
2227 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
2228 bool has_rep)
2230 tree type = TREE_TYPE (last_size);
2231 tree new_size;
2233 if (!special || TREE_CODE (size) != COND_EXPR)
2235 new_size = size_binop (PLUS_EXPR, first_bit, size);
2236 if (has_rep)
2237 new_size = size_binop (MAX_EXPR, last_size, new_size);
2240 else
2241 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
2242 integer_zerop (TREE_OPERAND (size, 1))
2243 ? last_size : merge_sizes (last_size, first_bit,
2244 TREE_OPERAND (size, 1),
2245 1, has_rep),
2246 integer_zerop (TREE_OPERAND (size, 2))
2247 ? last_size : merge_sizes (last_size, first_bit,
2248 TREE_OPERAND (size, 2),
2249 1, has_rep));
2251 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
2252 when fed through substitute_in_expr) into thinking that a constant
2253 size is not constant. */
2254 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
2255 new_size = TREE_OPERAND (new_size, 0);
2257 return new_size;
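/* Worked example (not from the original source): merging a field with
   FIRST_BIT = 32 and SIZE = 16 into LAST_SIZE = 64 yields

     without rep clause: 32 + 16 = 48
     with rep clause: MAX (64, 32 + 16) = 64

   so with a rep clause a field placed before the current end of the
   record can never shrink it. */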
2260 /* Return the bit position of FIELD, in bits from the start of the record,
2261 and fold it as much as possible. This is a tree of type bitsizetype. */
2263 static tree
2264 fold_bit_position (const_tree field)
2266 tree offset = DECL_FIELD_OFFSET (field);
2267 if (TREE_CODE (offset) == MULT_EXPR || TREE_CODE (offset) == PLUS_EXPR)
2268 offset = size_binop (TREE_CODE (offset),
2269 fold_convert (bitsizetype, TREE_OPERAND (offset, 0)),
2270 fold_convert (bitsizetype, TREE_OPERAND (offset, 1)));
2271 else
2272 offset = fold_convert (bitsizetype, offset);
2273 return size_binop (PLUS_EXPR, DECL_FIELD_BIT_OFFSET (field),
2274 size_binop (MULT_EXPR, offset, bitsize_unit_node));
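/* Example (not from the original source): a field with
   DECL_FIELD_OFFSET = 4 (bytes) and DECL_FIELD_BIT_OFFSET = 3 folds to
   3 + 4 * 8 = 35 bits on a target with 8-bit storage units. */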
2277 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2278 related by the addition of a constant. Return that constant if so. */
2280 static tree
2281 compute_related_constant (tree op0, tree op1)
2283 tree factor, op0_var, op1_var, op0_cst, op1_cst, result;
2285 if (TREE_CODE (op0) == MULT_EXPR
2286 && TREE_CODE (op1) == MULT_EXPR
2287 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
2288 && TREE_OPERAND (op1, 1) == TREE_OPERAND (op0, 1))
2290 factor = TREE_OPERAND (op0, 1);
2291 op0 = TREE_OPERAND (op0, 0);
2292 op1 = TREE_OPERAND (op1, 0);
2294 else
2295 factor = NULL_TREE;
2297 op0_cst = split_plus (op0, &op0_var);
2298 op1_cst = split_plus (op1, &op1_var);
2299 result = size_binop (MINUS_EXPR, op0_cst, op1_cst);
2301 if (operand_equal_p (op0_var, op1_var, 0))
2302 return factor ? size_binop (MULT_EXPR, factor, result) : result;
2304 return NULL_TREE;
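/* Examples (not from the original source): for OP0 = X + 12 and
   OP1 = X + 4, the variable parts match and the function returns 8;
   for OP0 = 8 * (X + 2) and OP1 = 8 * X, the common factor 8 is peeled
   off first and the result is 8 * 2 = 16. */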
2307 /* Utility function of above to split a tree OP which may be a sum, into a
2308 constant part, which is returned, and a variable part, which is stored
2309 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2310 bitsizetype. */
2312 static tree
2313 split_plus (tree in, tree *pvar)
2315 /* Strip conversions in order to ease the tree traversal and maximize the
2316 potential for constant or plus/minus discovery. We need to be careful
2317 to always return and set *pvar to bitsizetype trees, but it's worth
2318 the effort. */
2319 in = remove_conversions (in, false);
2321 *pvar = convert (bitsizetype, in);
2323 if (TREE_CODE (in) == INTEGER_CST)
2325 *pvar = bitsize_zero_node;
2326 return convert (bitsizetype, in);
2328 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2330 tree lhs_var, rhs_var;
2331 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2332 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2334 if (lhs_var == TREE_OPERAND (in, 0)
2335 && rhs_var == TREE_OPERAND (in, 1))
2336 return bitsize_zero_node;
2338 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2339 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2341 else
2342 return bitsize_zero_node;
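/* Examples (not from the original source): for IN = (X + 3) - Y, the
   constant 3 is returned and *PVAR is set to X - Y; for IN = 7, the
   constant 7 is returned and *PVAR is bitsize_zero_node. */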
2345 /* Return a copy of TYPE but safe to modify in any way. */
2347 tree
2348 copy_type (tree type)
2350 tree new_type = copy_node (type);
2352 /* Unshare the language-specific data. */
2353 if (TYPE_LANG_SPECIFIC (type))
2355 TYPE_LANG_SPECIFIC (new_type) = NULL;
2356 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2359 /* And the contents of the language-specific slot if needed. */
2360 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2361 && TYPE_RM_VALUES (type))
2363 TYPE_RM_VALUES (new_type) = NULL_TREE;
2364 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2365 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2366 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2369 /* copy_node clears this field instead of copying it, because it is
2370 aliased with TREE_CHAIN. */
2371 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
2373 TYPE_POINTER_TO (new_type) = NULL_TREE;
2374 TYPE_REFERENCE_TO (new_type) = NULL_TREE;
2375 TYPE_MAIN_VARIANT (new_type) = new_type;
2376 TYPE_NEXT_VARIANT (new_type) = NULL_TREE;
2377 TYPE_CANONICAL (new_type) = new_type;
2379 return new_type;
2382 /* Return a subtype of sizetype with range MIN to MAX and whose
2383 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2384 of the associated TYPE_DECL. */
2386 tree
2387 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2389 /* First build a type for the desired range. */
2390 tree type = build_nonshared_range_type (sizetype, min, max);
2392 /* Then set the index type. */
2393 SET_TYPE_INDEX_TYPE (type, index);
2394 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2396 return type;
2399 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2400 sizetype is used. */
2402 tree
2403 create_range_type (tree type, tree min, tree max)
2405 tree range_type;
2407 if (!type)
2408 type = sizetype;
2410 /* First build a type with the base range. */
2411 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2412 TYPE_MAX_VALUE (type));
2414 /* Then set the actual range. */
2415 SET_TYPE_RM_MIN_VALUE (range_type, min);
2416 SET_TYPE_RM_MAX_VALUE (range_type, max);
2418 return range_type;
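/* Usage sketch (not from the original source): building the range
   1 .. 10 over sizetype:

     tree gnu_range = create_range_type (NULL_TREE, size_int (1),
                                         size_int (10));

   The base range of the result is still that of sizetype; only the RM
   bounds carry the 1 .. 10 constraint. */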
2421 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
2422 NAME gives the name of the type to be used in the declaration. */
2424 tree
2425 create_type_stub_decl (tree name, tree type)
2427 tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
2428 DECL_ARTIFICIAL (type_decl) = 1;
2429 TYPE_ARTIFICIAL (type) = 1;
2430 return type_decl;
2433 /* Return a TYPE_DECL node for TYPE. NAME gives the name of the type to be
2434 used in the declaration. ARTIFICIAL_P is true if the declaration was
2435 generated by the compiler. DEBUG_INFO_P is true if we need to write
2436 debug information about this type. GNAT_NODE is used for the position
2437 of the decl. */
2439 tree
2440 create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
2441 Node_Id gnat_node)
2443 enum tree_code code = TREE_CODE (type);
2444 bool is_named
2445 = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2446 tree type_decl;
2448 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2449 gcc_assert (!TYPE_IS_DUMMY_P (type));
2451 /* If the type hasn't been named yet, we're naming it; preserve an existing
2452 TYPE_STUB_DECL that has been attached to it for some purpose. */
2453 if (!is_named && TYPE_STUB_DECL (type))
2455 type_decl = TYPE_STUB_DECL (type);
2456 DECL_NAME (type_decl) = name;
2458 else
2459 type_decl = build_decl (input_location, TYPE_DECL, name, type);
2461 DECL_ARTIFICIAL (type_decl) = artificial_p;
2462 TYPE_ARTIFICIAL (type) = artificial_p;
2464 /* Add this decl to the current binding level. */
2465 gnat_pushdecl (type_decl, gnat_node);
2467 /* If we're naming the type, equate the TYPE_STUB_DECL to the name. This
2468 causes the name to be also viewed as a "tag" by the debug back-end, with
2469 the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
2470 types in DWARF.
2472 Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
2473 from multiple contexts, and "type_decl" references a copy of it: in such a
2474 case, do not mess with TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
2475 with the mechanism above. */
2476 if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
2477 TYPE_STUB_DECL (type) = type_decl;
2479 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2480 back-end doesn't support, and for others if we don't need to. */
2481 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2482 DECL_IGNORED_P (type_decl) = 1;
2484 return type_decl;
2487 /* Return a VAR_DECL or CONST_DECL node.
2489 NAME gives the name of the variable. ASM_NAME is its assembler name
2490 (if provided). TYPE is its data type (a GCC ..._TYPE node). INIT is
2491 the GCC tree for an optional initial expression; NULL_TREE if none.
2493 CONST_FLAG is true if this variable is constant, in which case we might
2494 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2496 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2497 definition to be made visible outside of the current compilation unit, for
2498 instance variable definitions in a package specification.
2500 EXTERN_FLAG is true when processing an external variable declaration (as
2501 opposed to a definition: no storage is to be allocated for the variable).
2503 STATIC_FLAG is only relevant when not at top level and indicates whether
2504 to always allocate storage to the variable.
2506 VOLATILE_FLAG is true if this variable is declared as volatile.
2508 ARTIFICIAL_P is true if the variable was generated by the compiler.
2510 DEBUG_INFO_P is true if we need to write debug information for it.
2512 ATTR_LIST is the list of attributes to be attached to the variable.
2514 GNAT_NODE is used for the position of the decl. */
2516 tree
2517 create_var_decl (tree name, tree asm_name, tree type, tree init,
2518 bool const_flag, bool public_flag, bool extern_flag,
2519 bool static_flag, bool volatile_flag, bool artificial_p,
2520 bool debug_info_p, struct attrib *attr_list,
2521 Node_Id gnat_node, bool const_decl_allowed_p)
2523 /* Whether the object has static storage duration, either explicitly or by
2524 virtue of being declared at the global level. */
2525 const bool static_storage = static_flag || global_bindings_p ();
2527 /* Whether the initializer is constant: for an external object or an object
2528 with static storage duration, we check that the initializer is a valid
2529 constant expression for initializing a static variable; otherwise, we
2530 only check that it is constant. */
2531 const bool init_const
2532 = (init
2533 && gnat_types_compatible_p (type, TREE_TYPE (init))
2534 && (extern_flag || static_storage
2535 ? initializer_constant_valid_p (init, TREE_TYPE (init))
2536 != NULL_TREE
2537 : TREE_CONSTANT (init)));
2539 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2540 case the initializer may be used in lieu of the DECL node (as done in
2541 Identifier_to_gnu). This is useful to prevent the need for elaboration
2542 code when an identifier for which such a DECL is made is in turn used
2543 as an initializer. We used to rely on CONST_DECL vs VAR_DECL for this,
2544 but extra constraints apply to this choice (see below) and they are not
2545 relevant to the distinction we wish to make. */
2546 const bool constant_p = const_flag && init_const;
2548 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2549 and may be used for scalars in general but not for aggregates. */
2550 tree var_decl
2551 = build_decl (input_location,
2552 (constant_p
2553 && const_decl_allowed_p
2554 && !AGGREGATE_TYPE_P (type) ? CONST_DECL : VAR_DECL),
2555 name, type);
2557 /* Detect constants created by the front-end to hold 'reference to function
2558 calls for stabilization purposes. This is needed for renaming. */
2559 if (const_flag && init && POINTER_TYPE_P (type))
2561 tree inner = init;
2562 if (TREE_CODE (inner) == COMPOUND_EXPR)
2563 inner = TREE_OPERAND (inner, 1);
2564 inner = remove_conversions (inner, true);
2565 if (TREE_CODE (inner) == ADDR_EXPR
2566 && ((TREE_CODE (TREE_OPERAND (inner, 0)) == CALL_EXPR
2567 && !call_is_atomic_load (TREE_OPERAND (inner, 0)))
2568 || (TREE_CODE (TREE_OPERAND (inner, 0)) == VAR_DECL
2569 && DECL_RETURN_VALUE_P (TREE_OPERAND (inner, 0)))))
2570 DECL_RETURN_VALUE_P (var_decl) = 1;
2573 /* If this is external, throw away any initializations (they will be done
2574 elsewhere) unless this is a constant for which we would like to remain
2575 able to get the initializer. If we are defining a global here, leave a
2576 constant initialization and save any variable elaborations for the
2577 elaboration routine. If we are just annotating types, throw away the
2578 initialization if it isn't a constant. */
2579 if ((extern_flag && !constant_p)
2580 || (type_annotate_only && init && !TREE_CONSTANT (init)))
2581 init = NULL_TREE;
2583 /* At the global level, a non-constant initializer generates elaboration
2584 statements. Check that such statements are allowed, that is to say,
2585 not violating a No_Elaboration_Code restriction. */
2586 if (init && !init_const && global_bindings_p ())
2587 Check_Elaboration_Code_Allowed (gnat_node);
2589 /* Attach the initializer, if any. */
2590 DECL_INITIAL (var_decl) = init;
2592 /* Directly set some flags. */
2593 DECL_ARTIFICIAL (var_decl) = artificial_p;
2594 DECL_EXTERNAL (var_decl) = extern_flag;
2596 TREE_CONSTANT (var_decl) = constant_p;
2597 TREE_READONLY (var_decl) = const_flag;
2599 /* The object is public if it is external or if it is declared public
2600 and has static storage duration. */
2601 TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);
2603 /* We need to allocate static storage for an object with static storage
2604 duration if it isn't external. */
2605 TREE_STATIC (var_decl) = !extern_flag && static_storage;
2607 TREE_SIDE_EFFECTS (var_decl)
2608 = TREE_THIS_VOLATILE (var_decl)
2609 = TYPE_VOLATILE (type) | volatile_flag;
2611 if (TREE_SIDE_EFFECTS (var_decl))
2612 TREE_ADDRESSABLE (var_decl) = 1;
2614 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2615 try to fiddle with DECL_COMMON. However, on platforms that don't
2616 support global BSS sections, uninitialized global variables would
2617 go in DATA instead, thus increasing the size of the executable. */
2618 if (!flag_no_common
2619 && TREE_CODE (var_decl) == VAR_DECL
2620 && TREE_PUBLIC (var_decl)
2621 && !have_global_bss_p ())
2622 DECL_COMMON (var_decl) = 1;
2624 /* Do not emit debug info for a CONST_DECL if optimization isn't enabled,
2625 since we will create an associated variable. Likewise for an external
2626 constant whose initializer is not absolute, because this would mean a
2627 global relocation in a read-only section which runs afoul of the PE-COFF
2628 run-time relocation mechanism. */
2629 if (!debug_info_p
2630 || (TREE_CODE (var_decl) == CONST_DECL && !optimize)
2631 || (extern_flag
2632 && constant_p
2633 && init
2634 && initializer_constant_valid_p (init, TREE_TYPE (init))
2635 != null_pointer_node))
2636 DECL_IGNORED_P (var_decl) = 1;
2638 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2639 if (TREE_CODE (var_decl) == VAR_DECL)
2640 process_attributes (&var_decl, &attr_list, true, gnat_node);
2642 /* Add this decl to the current binding level. */
2643 gnat_pushdecl (var_decl, gnat_node);
2645 if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
2647 /* Let the target mangle the name if this isn't a verbatim asm. */
2648 if (*IDENTIFIER_POINTER (asm_name) != '*')
2649 asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);
2651 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
2654 return var_decl;
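/* Invocation sketch (hypothetical, not from the original source): a
   public, zero-initialized library-level variable could be created as

     tree gnu_var
       = create_var_decl (get_identifier ("counter"), NULL_TREE,
                          integer_type_node, integer_zero_node,
                          false, true, false, false, false, false,
                          true, NULL, gnat_node, true);

   i.e. neither constant, external, volatile nor artificial, but public
   and with debug info; TREE_STATIC gets set because global_bindings_p ()
   holds at library level. */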
2657 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2659 static bool
2660 aggregate_type_contains_array_p (tree type)
2662 switch (TREE_CODE (type))
2664 case RECORD_TYPE:
2665 case UNION_TYPE:
2666 case QUAL_UNION_TYPE:
2668 tree field;
2669 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2670 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2671 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2672 return true;
2673 return false;
2676 case ARRAY_TYPE:
2677 return true;
2679 default:
2680 gcc_unreachable ();
2684 /* Return a FIELD_DECL node. NAME is the field's name, TYPE is its type and
2685 RECORD_TYPE is the type of the enclosing record. If SIZE is nonzero, it
2686 is the specified size of the field. If POS is nonzero, it is the bit
2687 position. PACKED is 1 if the enclosing record is packed, -1 if it has
2688 Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2689 means we are allowed to take the address of the field; if it is negative,
2690 we should not make a bitfield, which is used by make_aligning_type. */
2692 tree
2693 create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
2694 int packed, int addressable)
2696 tree field_decl = build_decl (input_location, FIELD_DECL, name, type);
2698 DECL_CONTEXT (field_decl) = record_type;
2699 TREE_READONLY (field_decl) = TYPE_READONLY (type);
2701 /* If TYPE is BLKmode, we must ensure the field is aligned to at least a
2702 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2703 Likewise for an aggregate without specified position that contains an
2704 array, because in this case slices of variable length of this array
2705 must be handled by GCC and variable-sized objects need to be aligned
2706 to at least a byte boundary. */
2707 if (packed && (TYPE_MODE (type) == BLKmode
2708 || (!pos
2709 && AGGREGATE_TYPE_P (type)
2710 && aggregate_type_contains_array_p (type))))
2711 SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);
2713 /* If a size is specified, use it. Otherwise, if the record type is packed
2714 compute a size to use, which may differ from the object's natural size.
2715 We always set a size in this case to trigger the checks for bitfield
2716 creation below, which is typically required when no position has been
2717 specified. */
2718 if (size)
2719 size = convert (bitsizetype, size);
2720 else if (packed == 1)
2722 size = rm_size (type);
2723 if (TYPE_MODE (type) == BLKmode)
2724 size = round_up (size, BITS_PER_UNIT);
2727 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2728 specified for two reasons: first if the size differs from the natural
2729 size. Second, if the alignment is insufficient. There are a number of
2730 ways the latter can be true.
2732 We never make a bitfield if the type of the field has a nonconstant size,
2733 because no such entity requiring bitfield operations should reach here.
2735 We do *preventively* make a bitfield when there might be the need for it
2736 but we don't have all the necessary information to decide, as in the case
2737 of a field with no specified position in a packed record.
2739 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2740 in layout_decl or finish_record_type to clear the bit_field indication if
2741 it is in fact not needed. */
2742 if (addressable >= 0
2743 && size
2744 && TREE_CODE (size) == INTEGER_CST
2745 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2746 && (!tree_int_cst_equal (size, TYPE_SIZE (type))
2747 || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
2748 || packed
2749 || (TYPE_ALIGN (record_type) != 0
2750 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
2752 DECL_BIT_FIELD (field_decl) = 1;
2753 DECL_SIZE (field_decl) = size;
2754 if (!packed && !pos)
2756 if (TYPE_ALIGN (record_type) != 0
2757 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
2758 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type));
2759 else
2760 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
2764 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2766 /* Bump the alignment if need be, either for bitfield/packing purposes or
2767 to satisfy the type requirements if no such consideration applies. When
2768 we get the alignment from the type, indicate if this is from an explicit
2769 user request, which prevents stor-layout from lowering it later on. */
2771 unsigned int bit_align
2772 = (DECL_BIT_FIELD (field_decl) ? 1
2773 : packed && TYPE_MODE (type) != BLKmode ? BITS_PER_UNIT : 0);
2775 if (bit_align > DECL_ALIGN (field_decl))
2776 SET_DECL_ALIGN (field_decl, bit_align);
2777 else if (!bit_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
2779 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
2780 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
2784 if (pos)
2786 /* We need to pass in the alignment the DECL is known to have.
2787 This is the lowest-order bit set in POS, but no more than
2788 the alignment of the record, if one is specified. Note
2789 that an alignment of 0 is taken as infinite. */
2790 unsigned int known_align;
2792 if (tree_fits_uhwi_p (pos))
2793 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
2794 else
2795 known_align = BITS_PER_UNIT;
2797 if (TYPE_ALIGN (record_type)
2798 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2799 known_align = TYPE_ALIGN (record_type);
2801 layout_decl (field_decl, known_align);
2802 SET_DECL_OFFSET_ALIGN (field_decl,
2803 tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
2804 : BITS_PER_UNIT);
2805 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2806 &DECL_FIELD_BIT_OFFSET (field_decl),
2807 DECL_OFFSET_ALIGN (field_decl), pos);
2810 /* In addition to what our caller says, claim the field is addressable if we
2811 know that its type is not suitable.
2813 The field may also be "technically" nonaddressable, meaning that even if
2814 we attempt to take the field's address we will actually get the address
2815 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2816 value we have at this point is not accurate enough, so we don't account
2817 for this here and let finish_record_type decide. */
2818 if (!addressable && !type_for_nonaliased_component_p (type))
2819 addressable = 1;
2821 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2823 return field_decl;
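/* Decision example (not from the original source): a field given 24
   bits of a 32-bit integer type gets DECL_BIT_FIELD set above because
   its size differs from TYPE_SIZE; a full 32-bit field placed at bit 8
   gets it too, since 8 is not a multiple of the type alignment. The
   flag may be cleared again by finish_record_type once the final layout
   proves it unnecessary. */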
2826 /* Return a PARM_DECL node with NAME and TYPE. */
2828 tree
2829 create_param_decl (tree name, tree type)
2831 tree param_decl = build_decl (input_location, PARM_DECL, name, type);
2833 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2834 can lead to various ABI violations. */
2835 if (targetm.calls.promote_prototypes (NULL_TREE)
2836 && INTEGRAL_TYPE_P (type)
2837 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
2839 /* We have to be careful about biased types here. Make a subtype
2840 of integer_type_node with the proper biasing. */
2841 if (TREE_CODE (type) == INTEGER_TYPE
2842 && TYPE_BIASED_REPRESENTATION_P (type))
2844 tree subtype
2845 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2846 TREE_TYPE (subtype) = integer_type_node;
2847 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2848 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
2849 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
2850 type = subtype;
2852 else
2853 type = integer_type_node;
2856 DECL_ARG_TYPE (param_decl) = type;
2857 return param_decl;
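/* Example of the biased case (assumed illustration): the Ada type

     type Small is range 100 .. 103;
     for Small'Size use 2;

   is represented biased in 2 bits. Promoting such a parameter straight
   to integer_type_node would lose the bias, so the code above builds an
   integer-precision subtype that keeps TYPE_BIASED_REPRESENTATION_P and
   the original bounds. */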
2860 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2861 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2862 changed. GNAT_NODE is used for the position of error messages. */
2864 void
2865 process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2866 Node_Id gnat_node)
2868 struct attrib *attr;
2870 for (attr = *attr_list; attr; attr = attr->next)
2871 switch (attr->type)
2873 case ATTR_MACHINE_ATTRIBUTE:
2874 Sloc_to_locus (Sloc (gnat_node), &input_location);
2875 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2876 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
2877 break;
2879 case ATTR_LINK_ALIAS:
2880 if (!DECL_EXTERNAL (*node))
2882 TREE_STATIC (*node) = 1;
2883 assemble_alias (*node, attr->name);
2885 break;
2887 case ATTR_WEAK_EXTERNAL:
2888 if (SUPPORTS_WEAK)
2889 declare_weak (*node);
2890 else
2891 post_error ("?weak declarations not supported on this target",
2892 attr->error_point);
2893 break;
2895 case ATTR_LINK_SECTION:
2896 if (targetm_common.have_named_sections)
2898 set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
2899 DECL_COMMON (*node) = 0;
2901 else
2902 post_error ("?section attributes are not supported for this target",
2903 attr->error_point);
2904 break;
2906 case ATTR_LINK_CONSTRUCTOR:
2907 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2908 TREE_USED (*node) = 1;
2909 break;
2911 case ATTR_LINK_DESTRUCTOR:
2912 DECL_STATIC_DESTRUCTOR (*node) = 1;
2913 TREE_USED (*node) = 1;
2914 break;
2916 case ATTR_THREAD_LOCAL_STORAGE:
2917 set_decl_tls_model (*node, decl_default_tls_model (*node));
2918 DECL_COMMON (*node) = 0;
2919 break;
2922 *attr_list = NULL;
2925 /* Return true if VALUE is known to be a multiple of FACTOR, which must be
2926 a power of 2. */
2928 bool
2929 value_factor_p (tree value, HOST_WIDE_INT factor)
2931 if (tree_fits_uhwi_p (value))
2932 return tree_to_uhwi (value) % factor == 0;
2934 if (TREE_CODE (value) == MULT_EXPR)
2935 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2936 || value_factor_p (TREE_OPERAND (value, 1), factor));
2938 return false;
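/* Examples (not from the original source): with FACTOR = 8, the
   function returns true for the constant 24 and for a product such as
   8 * N, but false for a lone variable N, about which nothing can be
   proved. */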
2941 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
2942 feed it with the elaboration of GNAT_SCOPE. */
2944 static struct deferred_decl_context_node *
2945 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2947 struct deferred_decl_context_node *new_node;
2949 new_node
2950 = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
2951 new_node->decl = decl;
2952 new_node->gnat_scope = gnat_scope;
2953 new_node->force_global = force_global;
2954 new_node->types.create (1);
2955 new_node->next = deferred_decl_context_queue;
2956 deferred_decl_context_queue = new_node;
2957 return new_node;
2960 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
2961 feed it with the DECL_CONTEXT computed as part of N as soon as it is
2962 computed. */
2964 static void
2965 add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
2967 n->types.safe_push (type);
2970 /* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
2971 NULL_TREE if it is not available. */
2973 static tree
2974 compute_deferred_decl_context (Entity_Id gnat_scope)
2976 tree context;
2978 if (present_gnu_tree (gnat_scope))
2979 context = get_gnu_tree (gnat_scope);
2980 else
2981 return NULL_TREE;
2983 if (TREE_CODE (context) == TYPE_DECL)
2985 const tree context_type = TREE_TYPE (context);
2987 /* Skip dummy types: only the final ones can appear in the context
2988 chain. */
2989 if (TYPE_DUMMY_P (context_type))
2990 return NULL_TREE;
2992 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
2993 chain. */
2994 else
2995 context = context_type;
2998 return context;
3001 /* Try to process all deferred nodes in the queue. Keep in the queue the ones
3002 that cannot be processed yet, remove the other ones. If FORCE is true,
3003 force the processing for all nodes, use the global context when nodes don't
3004 have a GNU translation. */
3006 void
3007 process_deferred_decl_context (bool force)
3009 struct deferred_decl_context_node **it = &deferred_decl_context_queue;
3010 struct deferred_decl_context_node *node;
3012 while (*it)
3014 bool processed = false;
3015 tree context = NULL_TREE;
3016 Entity_Id gnat_scope;
3018 node = *it;
3020 /* If FORCE, get the innermost elaborated scope. Otherwise, just try to
3021 get the first scope. */
3022 gnat_scope = node->gnat_scope;
3023 while (Present (gnat_scope))
3025 context = compute_deferred_decl_context (gnat_scope);
3026 if (!force || context)
3027 break;
3028 gnat_scope = get_debug_scope (gnat_scope, NULL);
3031 /* Imported declarations must not be in a local context (i.e. not inside
3032 a function). */
3033 if (context && node->force_global > 0)
3035 tree ctx = context;
3037 while (ctx)
3039 gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
3040 ctx = DECL_P (ctx) ? DECL_CONTEXT (ctx) : TYPE_CONTEXT (ctx);
3044 /* If FORCE, we want to get rid of all nodes in the queue: in case there
3045 was no elaborated scope, use the global context. */
3046 if (force && !context)
3047 context = get_global_context ();
3049 if (context)
3051 tree t;
3052 int i;
3054 DECL_CONTEXT (node->decl) = context;
3056 /* Propagate it to the TYPE_CONTEXT attributes of the requested
3057 ..._TYPE nodes. */
3058 FOR_EACH_VEC_ELT (node->types, i, t)
3060 gnat_set_type_context (t, context);
3062 processed = true;
3065 /* If this node has been successfully processed, remove it from the
3066 queue. Then move to the next node. */
3067 if (processed)
3069 *it = node->next;
3070 node->types.release ();
3071 free (node);
3073 else
3074 it = &node->next;
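/* Timeline sketch (assumed illustration): a type declared in package P
   may be translated before P itself has a GENERIC context; its TYPE_DECL
   is queued by add_deferred_decl_context, a later call to
   process_deferred_decl_context (false) fills in DECL_CONTEXT once P is
   elaborated, and process_deferred_decl_context (true) at the end of
   the translation flushes anything left over to the global context. */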
3078 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
3080 static unsigned int
3081 scale_by_factor_of (tree expr, unsigned int value)
3083 unsigned HOST_WIDE_INT addend = 0;
3084 unsigned HOST_WIDE_INT factor = 1;
3086 /* Peel conversions around EXPR and try to extract bodies from function
3087 calls: it is possible to get the scale factor from size functions. */
3088 expr = remove_conversions (expr, true);
3089 if (TREE_CODE (expr) == CALL_EXPR)
3090 expr = maybe_inline_call_in_expr (expr);
3092 /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
3093 multiple of the scale factor we are looking for. */
3094 if (TREE_CODE (expr) == PLUS_EXPR
3095 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3096 && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
3098 addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3099 expr = TREE_OPERAND (expr, 0);
3102 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
3103 corresponding to the number of trailing zeros of the mask. */
3104 if (TREE_CODE (expr) == BIT_AND_EXPR
3105 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
3107 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3108 unsigned int i = 0;
3110 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
3112 mask >>= 1;
3113 factor *= 2;
3114 i++;
3118 /* If the addend is not a multiple of the factor we found, give up. In
3119 theory we could find a smaller common factor but it's useless for our
3120 needs. This situation arises when dealing with a field F1 with no
3121 alignment requirement but that is following a field F2 with such
3122 requirements. As long as we have F2's offset, we don't need alignment
3123 information to compute F1's. */
3124 if (addend % factor != 0)
3125 factor = 1;
3127 return factor * value;
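/* Worked example (not from the original source): for
   EXPR = (N & ~7) + 16 and VALUE = 1, the mask contributes a factor of
   8 from its three trailing zero bits and the addend 16 is a multiple
   of 8, so the function returns 8 * 1 = 8. With an addend of 4, the
   factor would be dropped and the result would be 1. */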
3130 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
3131 unless we can prove these two fields are laid out in such a way that no gap
3132 exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
3133 is the distance in bits between the end of PREV_FIELD and the starting
3134 position of CURR_FIELD. It is ignored if null. */
3136 static bool
3137 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
3139 /* If this is the first field of the record, there cannot be any gap. */
3140 if (!prev_field)
3141 return false;
3143 /* If the previous field is a union type, then return false: The only
3144 time when such a field is not the last field of the record is when
3145 there are other components at fixed positions after it (meaning there
3146 was a rep clause for every field), in which case we don't want the
3147 alignment constraint to override them. */
3148 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
3149 return false;
3151 /* If the distance between the end of prev_field and the beginning of
3152 curr_field is constant, then there is a gap if the value of this
3153 constant is not null. */
3154 if (offset && tree_fits_uhwi_p (offset))
3155 return !integer_zerop (offset);
3157 /* If the size and position of the previous field are constant,
3158 then check the sum of this size and position. There will be a gap
3159 iff it is not a multiple of the current field alignment. */
3160 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
3161 && tree_fits_uhwi_p (bit_position (prev_field)))
3162 return ((tree_to_uhwi (bit_position (prev_field))
3163 + tree_to_uhwi (DECL_SIZE (prev_field)))
3164 % DECL_ALIGN (curr_field) != 0);
3166 /* If both the position and size of the previous field are multiples
3167 of the current field alignment, there cannot be any gap. */
3168 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
3169 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
3170 return false;
3172 /* Fallback: return that there may be a potential gap. */
3173 return true;
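/* Examples (not from the original source): a previous field covering
   bits 0 .. 23 followed by a field requiring 32-bit alignment gives
   24 % 32 != 0, so true is returned; a 24-bit field at bit 8 followed
   by a byte-aligned field gives (8 + 24) % 8 == 0, so false is
   returned. */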
3176 /* Return a LABEL_DECL with NAME. GNAT_NODE is used for the position of
3177 the decl. */
3179 tree
3180 create_label_decl (tree name, Node_Id gnat_node)
3182 tree label_decl
3183 = build_decl (input_location, LABEL_DECL, name, void_type_node);
3185 SET_DECL_MODE (label_decl, VOIDmode);
3187 /* Add this decl to the current binding level. */
3188 gnat_pushdecl (label_decl, gnat_node);
3190 return label_decl;
3193 /* Return a FUNCTION_DECL node. NAME is the name of the subprogram, ASM_NAME
3194 its assembler name, TYPE its type (a FUNCTION_TYPE node), PARAM_DECL_LIST
3195 the list of its parameters (a list of PARM_DECL nodes chained through the
3196 DECL_CHAIN field).
3198 INLINE_STATUS describes the inline flags to be set on the FUNCTION_DECL.
3200 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
3201 definition to be made visible outside of the current compilation unit.
3203 EXTERN_FLAG is true when processing an external subprogram declaration.
3205 ARTIFICIAL_P is true if the subprogram was generated by the compiler.
3207 DEBUG_INFO_P is true if we need to write debug information for it.
3209 DEFINITION is true if the subprogram is to be considered as a definition.
3211 ATTR_LIST is the list of attributes to be attached to the subprogram.
3213 GNAT_NODE is used for the position of the decl. */
3215 tree
3216 create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
3217 enum inline_status_t inline_status, bool public_flag,
3218 bool extern_flag, bool artificial_p, bool debug_info_p,
3219 bool definition, struct attrib *attr_list,
3220 Node_Id gnat_node)
3222 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
3223 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
3225 DECL_ARTIFICIAL (subprog_decl) = artificial_p;
3226 DECL_EXTERNAL (subprog_decl) = extern_flag;
3227 TREE_PUBLIC (subprog_decl) = public_flag;
3229 if (!debug_info_p)
3230 DECL_IGNORED_P (subprog_decl) = 1;
3231 if (definition)
3232 DECL_FUNCTION_IS_DEF (subprog_decl) = 1;
3234 switch (inline_status)
3236 case is_suppressed:
3237 DECL_UNINLINABLE (subprog_decl) = 1;
3238 break;
3240 case is_disabled:
3241 break;
3243 case is_required:
3244 if (Back_End_Inlining)
3246 decl_attributes (&subprog_decl,
3247 tree_cons (get_identifier ("always_inline"),
3248 NULL_TREE, NULL_TREE),
3249 ATTR_FLAG_TYPE_IN_PLACE);
3251 /* Inline_Always guarantees that every direct call is inlined and
3252 that there is no indirect reference to the subprogram, so the
3253 instance in the original package (as well as its clones in the
3254 client packages created for inter-unit inlining) can be made
3255 private, which causes the out-of-line body to be eliminated. */
3256 TREE_PUBLIC (subprog_decl) = 0;
3259 /* ... fall through ... */
3261 case is_enabled:
3262 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
3263 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
3264 break;
3266 default:
3267 gcc_unreachable ();
3270 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
3272 /* Once everything is processed, finish the subprogram declaration. */
3273 finish_subprog_decl (subprog_decl, asm_name, type);
3275 /* Add this decl to the current binding level. */
3276 gnat_pushdecl (subprog_decl, gnat_node);
3278 /* Output the assembler code and/or RTL for the declaration. */
3279 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
3281 return subprog_decl;
3284 /* Given a subprogram declaration DECL, its assembler name and its type,
3285 finish constructing the subprogram declaration from ASM_NAME and TYPE. */
3287 void
3288 finish_subprog_decl (tree decl, tree asm_name, tree type)
3290 tree result_decl
3291 = build_decl (DECL_SOURCE_LOCATION (decl), RESULT_DECL, NULL_TREE,
3292 TREE_TYPE (type));
3294 DECL_ARTIFICIAL (result_decl) = 1;
3295 DECL_IGNORED_P (result_decl) = 1;
3296 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
3297 DECL_RESULT (decl) = result_decl;
3299 /* Propagate the "const" property. */
3300 TREE_READONLY (decl) = TYPE_READONLY (type);
3302 /* Propagate the "pure" property. */
3303 DECL_PURE_P (decl) = TYPE_RESTRICT (type);
3305 /* Propagate the "noreturn" property. */
3306 TREE_THIS_VOLATILE (decl) = TYPE_VOLATILE (type);
3308 if (asm_name)
3310 /* Let the target mangle the name if this isn't a verbatim asm. */
3311 if (*IDENTIFIER_POINTER (asm_name) != '*')
3312 asm_name = targetm.mangle_decl_assembler_name (decl, asm_name);
3314 SET_DECL_ASSEMBLER_NAME (decl, asm_name);
3316 /* The expand_main_function circuitry expects "main_identifier_node" to
3317 designate the DECL_NAME of the 'main' entry point, in turn expected
3318 to be declared as the "main" function literally by default. Ada
3319 program entry points are typically declared with a different name
3320 within the binder-generated file, exported as 'main' to satisfy the
3321 system expectations. Force main_identifier_node in this case. */
3322 if (asm_name == main_identifier_node)
3323 DECL_NAME (decl) = main_identifier_node;
3327 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3328 body. This routine needs to be invoked before processing the declarations
3329 appearing in the subprogram. */
3331 void
3332 begin_subprog_body (tree subprog_decl)
3334 tree param_decl;
3336 announce_function (subprog_decl);
3338 /* This function is being defined. */
3339 TREE_STATIC (subprog_decl) = 1;
3341 /* If this assertion fails, it is likely because of a wrong context for
3342 the subprogram body, e.g. another procedure for a procedure declared
3343 at library level. */
3344 gcc_assert (current_function_decl == decl_function_context (subprog_decl));
3346 current_function_decl = subprog_decl;
3348 /* Enter a new binding level and show that all the parameters belong to
3349 this function. */
3350 gnat_pushlevel ();
3352 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
3353 param_decl = DECL_CHAIN (param_decl))
3354 DECL_CONTEXT (param_decl) = subprog_decl;
3356 make_decl_rtl (subprog_decl);
3359 /* Finish translating the current subprogram and set its BODY. */
3361 void
3362 end_subprog_body (tree body)
3364 tree fndecl = current_function_decl;
3366 /* Attach the BLOCK for this level to the function and pop the level. */
3367 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
3368 DECL_INITIAL (fndecl) = current_binding_level->block;
3369 gnat_poplevel ();
3371 /* Mark the RESULT_DECL as being in this subprogram. */
3372 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
3374 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
3375 if (TREE_CODE (body) == BIND_EXPR)
3377 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
3378 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
3381 DECL_SAVED_TREE (fndecl) = body;
3383 current_function_decl = decl_function_context (fndecl);
3386 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
3388 void
3389 rest_of_subprog_body_compilation (tree subprog_decl)
3391 /* We cannot track the location of errors past this point. */
3392 error_gnat_node = Empty;
3394 /* If we're only annotating types, don't actually compile this function. */
3395 if (type_annotate_only)
3396 return;
3398 /* Dump functions before gimplification. */
3399 dump_function (TDI_original, subprog_decl);
3401 if (!decl_function_context (subprog_decl))
3402 cgraph_node::finalize_function (subprog_decl, false);
3403 else
3404 /* Register this function with cgraph just far enough to get it
3405 added to our parent's nested function list. */
3406 (void) cgraph_node::get_create (subprog_decl);
3409 tree
3410 gnat_builtin_function (tree decl)
3412 gnat_pushdecl (decl, Empty);
3413 return decl;
3416 /* Return an integer type with the number of bits of precision given by
3417 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3418 it is a signed type. */
3420 tree
3421 gnat_type_for_size (unsigned precision, int unsignedp)
3423 tree t;
3424 char type_name[20];
3426 if (precision <= 2 * MAX_BITS_PER_WORD
3427 && signed_and_unsigned_types[precision][unsignedp])
3428 return signed_and_unsigned_types[precision][unsignedp];
3430 if (unsignedp)
3431 t = make_unsigned_type (precision);
3432 else
3433 t = make_signed_type (precision);
3434 TYPE_ARTIFICIAL (t) = 1;
3436 if (precision <= 2 * MAX_BITS_PER_WORD)
3437 signed_and_unsigned_types[precision][unsignedp] = t;
3439 if (!TYPE_NAME (t))
3441 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
3442 TYPE_NAME (t) = get_identifier (type_name);
3445 return t;
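/* The function above is a memoization pattern: results are cached by
   (precision, signedness) so repeated queries return the identical type
   node.  A minimal standalone sketch of the same pattern, with opaque
   pointers instead of trees and a hypothetical make_type callback.  Not
   part of the compiler.  */

#define SKETCH_MAX_PREC 128

static void *sketch_type_cache[SKETCH_MAX_PREC + 1][2];

static void *
sketch_type_for_size (unsigned precision, int unsignedp,
                      void *(*make_type) (unsigned, int))
{
  if (precision <= SKETCH_MAX_PREC && sketch_type_cache[precision][unsignedp])
    return sketch_type_cache[precision][unsignedp];

  void *t = make_type (precision, unsignedp);

  if (precision <= SKETCH_MAX_PREC)
    sketch_type_cache[precision][unsignedp] = t;

  return t;
}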
3448 /* Likewise for floating-point types. */
3450 static tree
3451 float_type_for_precision (int precision, machine_mode mode)
3453 tree t;
3454 char type_name[20];
3456 if (float_types[(int) mode])
3457 return float_types[(int) mode];
3459 float_types[(int) mode] = t = make_node (REAL_TYPE);
3460 TYPE_PRECISION (t) = precision;
3461 layout_type (t);
3463 gcc_assert (TYPE_MODE (t) == mode);
3464 if (!TYPE_NAME (t))
3466 sprintf (type_name, "FLOAT_%d", precision);
3467 TYPE_NAME (t) = get_identifier (type_name);
3470 return t;
3473 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
3474 an unsigned type; otherwise a signed type is returned. */
3476 tree
3477 gnat_type_for_mode (machine_mode mode, int unsignedp)
3479 if (mode == BLKmode)
3480 return NULL_TREE;
3482 if (mode == VOIDmode)
3483 return void_type_node;
3485 if (COMPLEX_MODE_P (mode))
3486 return NULL_TREE;
3488 scalar_float_mode float_mode;
3489 if (is_a <scalar_float_mode> (mode, &float_mode))
3490 return float_type_for_precision (GET_MODE_PRECISION (float_mode),
3491 float_mode);
3493 scalar_int_mode int_mode;
3494 if (is_a <scalar_int_mode> (mode, &int_mode))
3495 return gnat_type_for_size (GET_MODE_BITSIZE (int_mode), unsignedp);
3497 if (VECTOR_MODE_P (mode))
3499 machine_mode inner_mode = GET_MODE_INNER (mode);
3500 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3501 if (inner_type)
3502 return build_vector_type_for_mode (inner_type, mode);
3505 return NULL_TREE;
3508 /* Return the signed or unsigned version of TYPE_NODE, a scalar type, the
3509 signedness being specified by UNSIGNEDP. */
3511 tree
3512 gnat_signed_or_unsigned_type_for (int unsignedp, tree type_node)
3514 if (type_node == char_type_node)
3515 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3517 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), unsignedp);
3519 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3521 type = copy_type (type);
3522 TREE_TYPE (type) = type_node;
3524 else if (TREE_TYPE (type_node)
3525 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3526 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3528 type = copy_type (type);
3529 TREE_TYPE (type) = TREE_TYPE (type_node);
3532 return type;
3535 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
3536 transparently converted to each other. */
3538 int
3539 gnat_types_compatible_p (tree t1, tree t2)
3541 enum tree_code code;
3543 /* This is the default criterion. */
3544 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
3545 return 1;
3547 /* We only check structural equivalence here. */
3548 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
3549 return 0;
3551 /* Vector types are also compatible if they have the same number of subparts
3552 and the same form of (scalar) element type. */
3553 if (code == VECTOR_TYPE
3554 && known_eq (TYPE_VECTOR_SUBPARTS (t1), TYPE_VECTOR_SUBPARTS (t2))
3555 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3556 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
3557 return 1;
3559 /* Array types are also compatible if they are constrained and have the same
3560 domain(s), the same component type and the same scalar storage order. */
3561 if (code == ARRAY_TYPE
3562 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
3563 || (TYPE_DOMAIN (t1)
3564 && TYPE_DOMAIN (t2)
3565 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
3566 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
3567 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
3568 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
3569 && (TREE_TYPE (t1) == TREE_TYPE (t2)
3570 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
3571 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
3572 && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
3573 return 1;
3575 return 0;
3578 /* Return true if EXPR is a useless type conversion. */
3580 bool
3581 gnat_useless_type_conversion (tree expr)
3583 if (CONVERT_EXPR_P (expr)
3584 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3585 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3586 return gnat_types_compatible_p (TREE_TYPE (expr),
3587 TREE_TYPE (TREE_OPERAND (expr, 0)));
3589 return false;
3592 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3594 bool
3595 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3596 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3598 return TYPE_CI_CO_LIST (t) == cico_list
3599 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3600 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3601 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3604 /* EXP is an expression for the size of an object. If this size contains
3605 discriminant references, replace them with the maximum (if MAX_P) or
3606 minimum (if !MAX_P) possible value of the discriminant. */
3608 tree
3609 max_size (tree exp, bool max_p)
3611 enum tree_code code = TREE_CODE (exp);
3612 tree type = TREE_TYPE (exp);
3613 tree op0, op1, op2;
3615 switch (TREE_CODE_CLASS (code))
3617 case tcc_declaration:
3618 case tcc_constant:
3619 return exp;
3621 case tcc_exceptional:
3622 gcc_assert (code == SSA_NAME);
3623 return exp;
3625 case tcc_vl_exp:
3626 if (code == CALL_EXPR)
3628 tree t, *argarray;
3629 int n, i;
3631 t = maybe_inline_call_in_expr (exp);
3632 if (t)
3633 return max_size (t, max_p);
3635 n = call_expr_nargs (exp);
3636 gcc_assert (n > 0);
3637 argarray = XALLOCAVEC (tree, n);
3638 for (i = 0; i < n; i++)
3639 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3640 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3642 break;
3644 case tcc_reference:
3645 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3646 modify. Otherwise, we treat it like a variable. */
3647 if (CONTAINS_PLACEHOLDER_P (exp))
3649 tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
3650 tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
3651 return
3652 convert (type,
3653 max_size (convert (get_base_type (val_type), val), true));
3656 return exp;
3658 case tcc_comparison:
3659 return build_int_cst (type, max_p ? 1 : 0);
3661 case tcc_unary:
3662 if (code == NON_LVALUE_EXPR)
3663 return max_size (TREE_OPERAND (exp, 0), max_p);
3665 op0 = max_size (TREE_OPERAND (exp, 0),
3666 code == NEGATE_EXPR ? !max_p : max_p);
3668 if (op0 == TREE_OPERAND (exp, 0))
3669 return exp;
3671 return fold_build1 (code, type, op0);
3673 case tcc_binary:
3675 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
3676 tree rhs = max_size (TREE_OPERAND (exp, 1),
3677 code == MINUS_EXPR ? !max_p : max_p);
3679 /* Special-case wanting the maximum value of a MIN_EXPR.
3680 In that case, if one side overflows, return the other. */
3681 if (max_p && code == MIN_EXPR)
3683 if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
3684 return lhs;
3686 if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
3687 return rhs;
3690 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3691 overflowing and the RHS a variable. */
3692 if ((code == MINUS_EXPR || code == PLUS_EXPR)
3693 && TREE_CODE (lhs) == INTEGER_CST
3694 && TREE_OVERFLOW (lhs)
3695 && TREE_CODE (rhs) != INTEGER_CST)
3696 return lhs;
3698 /* If we are going to subtract a "negative" value in an unsigned type,
3699 do the operation as an addition of the negated value, in order to
3700 avoid creating a spurious overflow below. */
3701 if (code == MINUS_EXPR
3702 && TYPE_UNSIGNED (type)
3703 && TREE_CODE (rhs) == INTEGER_CST
3704 && !TREE_OVERFLOW (rhs)
3705 && tree_int_cst_sign_bit (rhs) != 0)
3707 rhs = fold_build1 (NEGATE_EXPR, type, rhs);
3708 code = PLUS_EXPR;
3711 if (lhs == TREE_OPERAND (exp, 0) && rhs == TREE_OPERAND (exp, 1))
3712 return exp;
3714 /* We need to detect overflows so we call size_binop here. */
3715 return size_binop (code, lhs, rhs);
3718 case tcc_expression:
3719 switch (TREE_CODE_LENGTH (code))
3721 case 1:
3722 if (code == SAVE_EXPR)
3723 return exp;
3725 op0 = max_size (TREE_OPERAND (exp, 0),
3726 code == TRUTH_NOT_EXPR ? !max_p : max_p);
3728 if (op0 == TREE_OPERAND (exp, 0))
3729 return exp;
3731 return fold_build1 (code, type, op0);
3733 case 2:
3734 if (code == COMPOUND_EXPR)
3735 return max_size (TREE_OPERAND (exp, 1), max_p);
3737 op0 = max_size (TREE_OPERAND (exp, 0), max_p);
3738 op1 = max_size (TREE_OPERAND (exp, 1), max_p);
3740 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3741 return exp;
3743 return fold_build2 (code, type, op0, op1);
3745 case 3:
3746 if (code == COND_EXPR)
3748 op1 = TREE_OPERAND (exp, 1);
3749 op2 = TREE_OPERAND (exp, 2);
3751 if (!op1 || !op2)
3752 return exp;
3754 return
3755 fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
3756 max_size (op1, max_p), max_size (op2, max_p));
3758 break;
3760 default:
3761 break;
3764 /* Other tree classes cannot happen. */
3765 default:
3766 break;
3769 gcc_unreachable ();
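/* An illustrative sketch of the MINUS_EXPR rewrite in the tcc_binary case
   above, with plain unsigned arithmetic (assuming a 32-bit unsigned int):
   subtracting a "negative" constant in an unsigned type is the same as
   adding its negation, and the latter form does not manufacture a
   spurious overflow when the bounds are folded.  Not part of the
   compiler.  */

static unsigned int
sub_negative_constant (unsigned int x)
{
  return x - 0xFFFFFFFFu;       /* subtract "-1" ...               */
}

static unsigned int
add_negated_constant (unsigned int x)
{
  return x + 1u;                /* ... or equivalently add +1      */
}

/* Both functions return the same value for every X, but only the second
   form makes it obvious that no spurious overflow is involved.  */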
3772 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3773 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3774 Return a constructor for the template. */
3776 tree
3777 build_template (tree template_type, tree array_type, tree expr)
3779 vec<constructor_elt, va_gc> *template_elts = NULL;
3780 tree bound_list = NULL_TREE;
3781 tree field;
3783 while (TREE_CODE (array_type) == RECORD_TYPE
3784 && (TYPE_PADDING_P (array_type)
3785 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3786 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3788 if (TREE_CODE (array_type) == ARRAY_TYPE
3789 || (TREE_CODE (array_type) == INTEGER_TYPE
3790 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3791 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3793 /* First make the list for a CONSTRUCTOR for the template. Go down the
3794 field list of the template instead of the type chain because this
3795 array might be an Ada array of arrays and we can't tell where the
3796 nested arrays stop being the underlying object. */
3798 for (field = TYPE_FIELDS (template_type); field;
3799 (bound_list
3800 ? (bound_list = TREE_CHAIN (bound_list))
3801 : (array_type = TREE_TYPE (array_type))),
3802 field = DECL_CHAIN (DECL_CHAIN (field)))
3804 tree bounds, min, max;
3806 /* If we have a bound list, get the bounds from there. Likewise
3807 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3808 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3809 This will give us a maximum range. */
3810 if (bound_list)
3811 bounds = TREE_VALUE (bound_list);
3812 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3813 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3814 else if (expr && TREE_CODE (expr) == PARM_DECL
3815 && DECL_BY_COMPONENT_PTR_P (expr))
3816 bounds = TREE_TYPE (field);
3817 else
3818 gcc_unreachable ();
3820 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
3821 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
3823 /* If either MIN or MAX involves a PLACEHOLDER_EXPR, we must
3824 substitute it from EXPR. */
3825 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3826 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3828 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
3829 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
3832 return gnat_build_constructor (template_type, template_elts);
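/* An illustrative sketch of the template being built above, in plain C
   with hypothetical field names: the template is simply one (min, max)
   pair of fields per array dimension, filled in field order exactly like
   the CONSTRUCTOR elements appended above.  Not part of the compiler.  */

struct sketch_bounds_2d
{
  long min1, max1;      /* bounds of the first dimension   */
  long min2, max2;      /* bounds of the second dimension  */
};

static struct sketch_bounds_2d
sketch_make_template_2d (long lo1, long hi1, long lo2, long hi2)
{
  struct sketch_bounds_2d t;

  t.min1 = lo1, t.max1 = hi1;
  t.min2 = lo2, t.max2 = hi2;

  return t;
}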
3835 /* Return true if TYPE is suitable for the element type of a vector. */
3837 static bool
3838 type_for_vector_element_p (tree type)
3840 machine_mode mode;
3842 if (!INTEGRAL_TYPE_P (type)
3843 && !SCALAR_FLOAT_TYPE_P (type)
3844 && !FIXED_POINT_TYPE_P (type))
3845 return false;
3847 mode = TYPE_MODE (type);
3848 if (GET_MODE_CLASS (mode) != MODE_INT
3849 && !SCALAR_FLOAT_MODE_P (mode)
3850 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3851 return false;
3853 return true;
3856 /* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3857 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3858 attribute declaration and want to issue error messages on failure. */
3860 static tree
3861 build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3863 unsigned HOST_WIDE_INT size_int, inner_size_int;
3864 int nunits;
3866 /* Silently punt on variable sizes. We can't make vector types for them,
3867 we need to ignore them on front-end-generated subtypes of unconstrained
3868 base types, and this attribute is for binding implementors, not end
3869 users, so we should never get there from legitimate explicit uses. */
3870 if (!tree_fits_uhwi_p (size))
3871 return NULL_TREE;
3872 size_int = tree_to_uhwi (size);
3874 if (!type_for_vector_element_p (inner_type))
3876 if (attribute)
3877 error ("invalid element type for attribute %qs",
3878 IDENTIFIER_POINTER (attribute));
3879 return NULL_TREE;
3881 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3883 if (size_int % inner_size_int)
3885 if (attribute)
3886 error ("vector size not an integral multiple of component size");
3887 return NULL_TREE;
3890 if (size_int == 0)
3892 if (attribute)
3893 error ("zero vector size");
3894 return NULL_TREE;
3897 nunits = size_int / inner_size_int;
3898 if (nunits & (nunits - 1))
3900 if (attribute)
3901 error ("number of components of vector not a power of two");
3902 return NULL_TREE;
3905 return build_vector_type (inner_type, nunits);
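/* An illustrative sketch of the validation above with plain integers:
   the total size must be a nonzero multiple of the element size and the
   resulting number of elements must be a power of two, which the
   classical N & (N - 1) test checks.  Not part of the compiler.  */

static int
sketch_valid_vector_nunits (unsigned long long size,
                            unsigned long long elem_size,
                            unsigned long long *nunits)
{
  if (size == 0 || size % elem_size != 0)
    return 0;

  *nunits = size / elem_size;

  /* A power of two has a single bit set, so clearing its lowest set bit
     must yield zero.  */
  return (*nunits & (*nunits - 1)) == 0;
}

/* sketch_valid_vector_nunits (16, 4, &n) is 1 with n == 4, whereas a
   12-byte vector of 4-byte elements (3 units) is rejected.  */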
3908 /* Return a vector type whose representative array type is ARRAY_TYPE, or
3909 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3910 processing the attribute and want to issue error messages on failure. */
3912 static tree
3913 build_vector_type_for_array (tree array_type, tree attribute)
3915 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3916 TYPE_SIZE_UNIT (array_type),
3917 attribute);
3918 if (!vector_type)
3919 return NULL_TREE;
3921 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3922 return vector_type;
3925 /* Build a type to be used to represent an aliased object whose nominal type
3926 is an unconstrained array. This consists of a RECORD_TYPE containing a
3927 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3928 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3929 an arbitrary unconstrained object. Use NAME as the name of the record.
3930 DEBUG_INFO_P is true if we need to write debug information for the type. */
3932 tree
3933 build_unc_object_type (tree template_type, tree object_type, tree name,
3934 bool debug_info_p)
3936 tree decl;
3937 tree type = make_node (RECORD_TYPE);
3938 tree template_field
3939 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3940 NULL_TREE, NULL_TREE, 0, 1);
3941 tree array_field
3942 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3943 NULL_TREE, NULL_TREE, 0, 1);
3945 TYPE_NAME (type) = name;
3946 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3947 DECL_CHAIN (template_field) = array_field;
3948 finish_record_type (type, template_field, 0, true);
3950 /* Declare it now since it will never be declared otherwise. This is
3951 necessary to ensure that its subtrees are properly marked. */
3952 decl = create_type_decl (name, type, true, debug_info_p, Empty);
3954 /* TEMPLATE_TYPE will not be used anywhere other than here, so to keep the
3955 debug info clean and to avoid scoping issues, make DECL its
3956 context. */
3957 gnat_set_type_context (template_type, decl);
3959 return type;
3962 /* Same, taking a thin or fat pointer type instead of a template type. */
3964 tree
3965 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3966 tree name, bool debug_info_p)
3968 tree template_type;
3970 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3972 template_type
3973 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3974 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3975 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3977 return
3978 build_unc_object_type (template_type, object_type, name, debug_info_p);
3981 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3982 In the normal case this is just two adjustments, but we have more to
3983 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3985 void
3986 update_pointer_to (tree old_type, tree new_type)
3988 tree ptr = TYPE_POINTER_TO (old_type);
3989 tree ref = TYPE_REFERENCE_TO (old_type);
3990 tree t;
3992 /* If this is the main variant, process all the other variants first. */
3993 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3994 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
3995 update_pointer_to (t, new_type);
3997 /* If no pointers and no references, we are done. */
3998 if (!ptr && !ref)
3999 return;
4001 /* Merge the old type qualifiers in the new type.
4003 Each old variant has qualifiers for specific reasons, and the new
4004 designated type as well. Each set of qualifiers represents useful
4005 information grabbed at some point, and merging the two simply unifies
4006 these inputs into the final type description.
4008 Consider for instance a volatile type frozen after an access to constant
4009 type designating it; after the designated type's freeze, we get here with
4010 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
4011 when the access type was processed. We will make a volatile and readonly
4012 designated type, because that's what it really is.
4014 We might also get here for a non-dummy OLD_TYPE variant with different
4015 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
4016 to private record type elaboration (see the comments around the call to
4017 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
4018 the qualifiers in those cases too, to avoid accidentally discarding the
4019 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
4020 new_type
4021 = build_qualified_type (new_type,
4022 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
4024 /* If old type and new type are identical, there is nothing to do. */
4025 if (old_type == new_type)
4026 return;
4028 /* Otherwise, first handle the simple case. */
4029 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
4031 tree new_ptr, new_ref;
4033 /* If pointer or reference already points to new type, nothing to do.
4034 This can happen as update_pointer_to can be invoked multiple times
4035 on the same pair of types because of the type variants. */
4036 if ((ptr && TREE_TYPE (ptr) == new_type)
4037 || (ref && TREE_TYPE (ref) == new_type))
4038 return;
4040 /* Chain PTR and its variants at the end. */
4041 new_ptr = TYPE_POINTER_TO (new_type);
4042 if (new_ptr)
4044 while (TYPE_NEXT_PTR_TO (new_ptr))
4045 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
4046 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
4048 else
4049 TYPE_POINTER_TO (new_type) = ptr;
4051 /* Now adjust them. */
4052 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
4053 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
4055 TREE_TYPE (t) = new_type;
4056 if (TYPE_NULL_BOUNDS (t))
4057 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
4060 /* Chain REF and its variants at the end. */
4061 new_ref = TYPE_REFERENCE_TO (new_type);
4062 if (new_ref)
4064 while (TYPE_NEXT_REF_TO (new_ref))
4065 new_ref = TYPE_NEXT_REF_TO (new_ref);
4066 TYPE_NEXT_REF_TO (new_ref) = ref;
4068 else
4069 TYPE_REFERENCE_TO (new_type) = ref;
4071 /* Now adjust them. */
4072 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
4073 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
4074 TREE_TYPE (t) = new_type;
4076 TYPE_POINTER_TO (old_type) = NULL_TREE;
4077 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
4080 /* Now deal with the unconstrained array case. In this case the pointer
4081 is actually a record where both fields are pointers to dummy nodes.
4082 Turn them into pointers to the correct types using update_pointer_to.
4083 Likewise for the pointer to the object record (thin pointer). */
4084 else
4086 tree new_ptr = TYPE_POINTER_TO (new_type);
4088 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
4090 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
4091 since update_pointer_to can be invoked multiple times on the same
4092 pair of types because of the type variants. */
4093 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
4094 return;
4096 update_pointer_to
4097 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
4098 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
4100 update_pointer_to
4101 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
4102 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
4104 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
4105 TYPE_OBJECT_RECORD_TYPE (new_type));
4107 TYPE_POINTER_TO (old_type) = NULL_TREE;
4108 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
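/* An illustrative sketch of the splice-and-retarget step above for the
   simple (non-fat) case, using a plain singly-linked list: the old
   type's pointer chain is appended at the tail of the new type's chain
   and every element is then retargeted.  Not part of the compiler.  */

struct sketch_ptr_node
{
  struct sketch_ptr_node *next;   /* plays the role of TYPE_NEXT_PTR_TO  */
  void *pointee;                  /* plays the role of TREE_TYPE         */
};

static struct sketch_ptr_node *
sketch_splice_and_retarget (struct sketch_ptr_node *new_chain,
                            struct sketch_ptr_node *old_chain,
                            void *new_pointee)
{
  struct sketch_ptr_node *p;

  if (new_chain)
    {
      for (p = new_chain; p->next; p = p->next)
        ;
      p->next = old_chain;
    }
  else
    new_chain = old_chain;

  for (p = old_chain; p; p = p->next)
    p->pointee = new_pointee;

  return new_chain;
}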
4112 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
4113 unconstrained one. This involves making or finding a template. */
4115 static tree
4116 convert_to_fat_pointer (tree type, tree expr)
4118 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
4119 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
4120 tree etype = TREE_TYPE (expr);
4121 tree template_addr;
4122 vec<constructor_elt, va_gc> *v;
4123 vec_alloc (v, 2);
4125 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4126 array (compare_fat_pointers ensures that this is the full discriminant)
4127 and a valid pointer to the bounds. This latter property is necessary
4128 since the compiler can hoist the load of the bounds done through it. */
4129 if (integer_zerop (expr))
4131 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4132 tree null_bounds, t;
4134 if (TYPE_NULL_BOUNDS (ptr_template_type))
4135 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
4136 else
4138 /* The template type can still be dummy at this point so we build an
4139 empty constructor. The middle-end will fill it in with zeros. */
4140 t = build_constructor (template_type, NULL);
4141 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
4142 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
4143 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
4146 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4147 fold_convert (p_array_type, null_pointer_node));
4148 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
4149 t = build_constructor (type, v);
4150 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4151 TREE_CONSTANT (t) = 0;
4152 TREE_STATIC (t) = 1;
4154 return t;
4157 /* If EXPR is a thin pointer, make template and data from the record. */
4158 if (TYPE_IS_THIN_POINTER_P (etype))
4160 tree field = TYPE_FIELDS (TREE_TYPE (etype));
4162 expr = gnat_protect_expr (expr);
4164 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4165 the thin pointer value has been shifted so we shift it back to get
4166 the template address. */
4167 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
4169 template_addr
4170 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
4171 fold_build1 (NEGATE_EXPR, sizetype,
4172 byte_position
4173 (DECL_CHAIN (field))));
4174 template_addr
4175 = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
4176 template_addr);
4179 /* Otherwise we explicitly take the address of the fields. */
4180 else
4182 expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
4183 template_addr
4184 = build_unary_op (ADDR_EXPR, NULL_TREE,
4185 build_component_ref (expr, field, false));
4186 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
4187 build_component_ref (expr, DECL_CHAIN (field),
4188 false));
4192 /* Otherwise, build the constructor for the template. */
4193 else
4194 template_addr
4195 = build_unary_op (ADDR_EXPR, NULL_TREE,
4196 build_template (template_type, TREE_TYPE (etype),
4197 expr));
4199 /* The final result is a constructor for the fat pointer.
4201 If EXPR is an argument of a foreign convention subprogram, the type it
4202 points to is directly the component type. In this case, the expression
4203 type may not match the corresponding FIELD_DECL type at this point, so we
4204 call "convert" here to fix that up if necessary. This type consistency is
4205 required, for instance because it ensures that possible later folding of
4206 COMPONENT_REFs against this constructor always yields something of the
4207 same type as the initial reference.
4209 Note that the call to "build_template" above is still fine because it
4210 will only refer to the provided TEMPLATE_TYPE in this case. */
4211 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
4212 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
4213 return gnat_build_constructor (type, v);
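/* An illustrative sketch of the fat-pointer layout assembled above, in
   plain C with hypothetical types: a fat pointer pairs the data pointer
   with a pointer to a bounds template, and even the null fat pointer
   carries valid bounds so that loads of the bounds through it can be
   hoisted safely.  Not part of the compiler.  */

struct sketch_bounds
{
  long first, last;
};

struct sketch_fat_pointer
{
  int *p_array;                    /* pointer to the data            */
  struct sketch_bounds *p_bounds;  /* pointer to the bounds template */
};

/* Plays the role of TYPE_NULL_BOUNDS: a static all-zero template.  */
static struct sketch_bounds sketch_null_bounds;

static struct sketch_fat_pointer
sketch_null_fat_pointer (void)
{
  struct sketch_fat_pointer fp = { 0, &sketch_null_bounds };
  return fp;
}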
4216 /* Create an expression whose value is that of EXPR,
4217 converted to type TYPE. The TREE_TYPE of the value
4218 is always TYPE. This function implements all reasonable
4219 conversions; callers should filter out those that are
4220 not permitted by the language being compiled. */
4222 tree
4223 convert (tree type, tree expr)
4225 tree etype = TREE_TYPE (expr);
4226 enum tree_code ecode = TREE_CODE (etype);
4227 enum tree_code code = TREE_CODE (type);
4229 /* If the expression is already of the right type, we are done. */
4230 if (etype == type)
4231 return expr;
4233 /* If both input and output have padding and are of variable size, do this
4234 as an unchecked conversion. Likewise if one is a mere variant of the
4235 other, so we avoid a pointless unpad/repad sequence. */
4236 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
4237 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4238 && (!TREE_CONSTANT (TYPE_SIZE (type))
4239 || !TREE_CONSTANT (TYPE_SIZE (etype))
4240 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4241 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4242 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4245 /* If the output type has padding, convert to the inner type and make a
4246 constructor to build the record, unless a variable size is involved. */
4247 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
4249 /* If we previously converted from another type and our type is
4250 of variable size, remove the conversion to avoid the need for
4251 variable-sized temporaries. Likewise for a conversion between
4252 original and packable version. */
4253 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4254 && (!TREE_CONSTANT (TYPE_SIZE (type))
4255 || (ecode == RECORD_TYPE
4256 && TYPE_NAME (etype)
4257 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4258 expr = TREE_OPERAND (expr, 0);
4260 /* If we are just removing the padding from expr, convert the original
4261 object if we have variable size in order to avoid the need for some
4262 variable-sized temporaries. Likewise if the padding is a variant
4263 of the other, so we avoid a pointless unpad/repad sequence. */
4264 if (TREE_CODE (expr) == COMPONENT_REF
4265 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4266 && (!TREE_CONSTANT (TYPE_SIZE (type))
4267 || TYPE_MAIN_VARIANT (type)
4268 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
4269 || (ecode == RECORD_TYPE
4270 && TYPE_NAME (etype)
4271 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4272 return convert (type, TREE_OPERAND (expr, 0));
4274 /* If the inner type is of self-referential size and the expression type
4275 is a record, do this as an unchecked conversion unless both types are
4276 essentially the same. But first pad the expression if possible to
4277 have the same size on both sides. */
4278 if (ecode == RECORD_TYPE
4279 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type)))
4280 && TYPE_MAIN_VARIANT (etype)
4281 != TYPE_MAIN_VARIANT (TREE_TYPE (TYPE_FIELDS (type))))
4283 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
4284 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4285 false, false, false, true),
4286 expr);
4287 return unchecked_convert (type, expr, false);
4290 /* If we are converting between array types with variable size, do the
4291 final conversion as an unchecked conversion, again to avoid the need
4292 for some variable-sized temporaries. If valid, this conversion is
4293 very likely purely technical and without real effects. */
4294 if (ecode == ARRAY_TYPE
4295 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4296 && !TREE_CONSTANT (TYPE_SIZE (etype))
4297 && !TREE_CONSTANT (TYPE_SIZE (type)))
4298 return unchecked_convert (type,
4299 convert (TREE_TYPE (TYPE_FIELDS (type)),
4300 expr),
4301 false);
4303 tree t = convert (TREE_TYPE (TYPE_FIELDS (type)), expr);
4305 /* If converting to the inner type has already created a CONSTRUCTOR with
4306 the right size, then reuse it instead of creating another one. This
4307 can happen for the padding type built to overalign local variables. */
4308 if (TREE_CODE (t) == VIEW_CONVERT_EXPR
4309 && TREE_CODE (TREE_OPERAND (t, 0)) == CONSTRUCTOR
4310 && TREE_CONSTANT (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0))))
4311 && tree_int_cst_equal (TYPE_SIZE (type),
4312 TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0)))))
4313 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (t, 0));
4315 vec<constructor_elt, va_gc> *v;
4316 vec_alloc (v, 1);
4317 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), t);
4318 return gnat_build_constructor (type, v);
4321 /* If the input type has padding, remove it and convert to the output type.
4322 The ordering of the conditions is arranged to ensure that the output type
4323 is not a padding type here, as it is not clear whether the conversion
4324 would always be correct if this were to happen. */
4325 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4327 tree unpadded;
4329 /* If we have just converted to this padded type, just get the
4330 inner expression. */
4331 if (TREE_CODE (expr) == CONSTRUCTOR)
4332 unpadded = CONSTRUCTOR_ELT (expr, 0)->value;
4334 /* Otherwise, build an explicit component reference. */
4335 else
4336 unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);
4338 return convert (type, unpadded);
4341 /* If the input is a biased type, convert first to the base type and add
4342 the bias. Note that the bias must go through a full conversion to the
4343 base type, lest it be itself a biased value; this happens for subtypes
4344 of biased types. */
4345 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4346 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4347 fold_convert (TREE_TYPE (etype), expr),
4348 convert (TREE_TYPE (etype),
4349 TYPE_MIN_VALUE (etype))));
4351 /* If the input is a justified modular type, we need to extract the actual
4352 object before converting it to any other type, with the exceptions of an
4353 unconstrained array and a mere type variant. It is useful to avoid the
4354 extraction and conversion in the type variant case because it could end
4355 up replacing a VAR_DECL expr by a constructor and we might be about to
4356 take the address of the result. */
4357 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4358 && code != UNCONSTRAINED_ARRAY_TYPE
4359 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4360 return
4361 convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));
4363 /* If converting to a type that contains a template, convert to the data
4364 type and then build the template. */
4365 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4367 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4368 vec<constructor_elt, va_gc> *v;
4369 vec_alloc (v, 2);
4371 /* If the source already has a template, get a reference to the
4372 associated array only, as we are going to rebuild a template
4373 for the target type anyway. */
4374 expr = maybe_unconstrained_array (expr);
4376 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4377 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4378 obj_type, NULL_TREE));
4379 if (expr)
4380 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4381 convert (obj_type, expr));
4382 return gnat_build_constructor (type, v);
4385 /* There are some cases of expressions that we process specially. */
4386 switch (TREE_CODE (expr))
4388 case ERROR_MARK:
4389 return expr;
4391 case NULL_EXPR:
4392 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4393 conversion in gnat_expand_expr. NULL_EXPR does not represent
4394 an actual value, so no conversion is needed. */
4395 expr = copy_node (expr);
4396 TREE_TYPE (expr) = type;
4397 return expr;
4399 case STRING_CST:
4400 /* If we are converting a STRING_CST to another constrained array type,
4401 just make a new one in the proper type. */
4402 if (code == ecode && AGGREGATE_TYPE_P (etype)
4403 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4404 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4406 expr = copy_node (expr);
4407 TREE_TYPE (expr) = type;
4408 return expr;
4410 break;
4412 case VECTOR_CST:
4413 /* If we are converting a VECTOR_CST to a mere type variant, just make
4414 a new one in the proper type. */
4415 if (code == ecode && gnat_types_compatible_p (type, etype))
4417 expr = copy_node (expr);
4418 TREE_TYPE (expr) = type;
4419 return expr;
4421 break;
4423 case CONSTRUCTOR:
4424 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4425 another padding type around the same type, just make a new one in
4426 the proper type. */
4427 if (code == ecode
4428 && (gnat_types_compatible_p (type, etype)
4429 || (code == RECORD_TYPE
4430 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4431 && TREE_TYPE (TYPE_FIELDS (type))
4432 == TREE_TYPE (TYPE_FIELDS (etype)))))
4434 expr = copy_node (expr);
4435 TREE_TYPE (expr) = type;
4436 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4437 return expr;
4440 /* Likewise for a conversion between original and packable version, or
4441 conversion between types of the same size and with the same list of
4442 fields, but we have to work harder to preserve type consistency. */
4443 if (code == ecode
4444 && code == RECORD_TYPE
4445 && (TYPE_NAME (type) == TYPE_NAME (etype)
4446 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4449 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4450 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4451 vec<constructor_elt, va_gc> *v;
4452 vec_alloc (v, len);
4453 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4454 unsigned HOST_WIDE_INT idx;
4455 tree index, value;
4457 /* Whether we need to clear TREE_CONSTANT et al. on the output
4458 constructor when we convert in place. */
4459 bool clear_constant = false;
4461 FOR_EACH_CONSTRUCTOR_ELT (e, idx, index, value)
4463 /* Skip the missing fields in the CONSTRUCTOR. */
4464 while (efield && field && !SAME_FIELD_P (efield, index))
4466 efield = DECL_CHAIN (efield);
4467 field = DECL_CHAIN (field);
4469 /* The field must be the same. */
4470 if (!(efield && field && SAME_FIELD_P (efield, field)))
4471 break;
4472 constructor_elt elt
4473 = {field, convert (TREE_TYPE (field), value)};
4474 v->quick_push (elt);
4476 /* If packing has made this field a bitfield and the input
4477 value couldn't be emitted statically any more, we need to
4478 clear TREE_CONSTANT on our output. */
4479 if (!clear_constant
4480 && TREE_CONSTANT (expr)
4481 && !CONSTRUCTOR_BITFIELD_P (efield)
4482 && CONSTRUCTOR_BITFIELD_P (field)
4483 && !initializer_constant_valid_for_bitfield_p (value))
4484 clear_constant = true;
4486 efield = DECL_CHAIN (efield);
4487 field = DECL_CHAIN (field);
4490 /* If we have been able to match and convert all the input fields
4491 to their output type, convert in place now. We'll fall back to a
4492 view conversion downstream otherwise. */
4493 if (idx == len)
4495 expr = copy_node (expr);
4496 TREE_TYPE (expr) = type;
4497 CONSTRUCTOR_ELTS (expr) = v;
4498 if (clear_constant)
4499 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4500 return expr;
4504 /* Likewise for a conversion between array type and vector type with a
4505 compatible representative array. */
4506 else if (code == VECTOR_TYPE
4507 && ecode == ARRAY_TYPE
4508 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4509 etype))
4511 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4512 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4513 vec<constructor_elt, va_gc> *v;
4514 unsigned HOST_WIDE_INT ix;
4515 tree value;
4517 /* Build a VECTOR_CST from a *constant* array constructor. */
4518 if (TREE_CONSTANT (expr))
4520 bool constant_p = true;
4522 /* Iterate through elements and check if all constructor
4523 elements are *_CSTs. */
4524 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4525 if (!CONSTANT_CLASS_P (value))
4527 constant_p = false;
4528 break;
4531 if (constant_p)
4532 return build_vector_from_ctor (type,
4533 CONSTRUCTOR_ELTS (expr));
4536 /* Otherwise, build a regular vector constructor. */
4537 vec_alloc (v, len);
4538 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4540 constructor_elt elt = {NULL_TREE, value};
4541 v->quick_push (elt);
4543 expr = copy_node (expr);
4544 TREE_TYPE (expr) = type;
4545 CONSTRUCTOR_ELTS (expr) = v;
4546 return expr;
4548 break;
4550 case UNCONSTRAINED_ARRAY_REF:
4551 /* First retrieve the underlying array. */
4552 expr = maybe_unconstrained_array (expr);
4553 etype = TREE_TYPE (expr);
4554 ecode = TREE_CODE (etype);
4555 break;
4557 case VIEW_CONVERT_EXPR:
4559 /* GCC 4.x is very sensitive to type consistency overall, and view
4560 conversions thus are very frequent. Even though just "convert"ing
4561 the inner operand to the output type is fine in most cases, it
4562 might expose unexpected input/output type mismatches in special
4563 circumstances so we avoid such recursive calls when we can. */
4564 tree op0 = TREE_OPERAND (expr, 0);
4566 /* If we are converting back to the original type, we can just
4567 lift the input conversion. This is a common occurrence with
4568 switches back-and-forth amongst type variants. */
4569 if (type == TREE_TYPE (op0))
4570 return op0;
4572 /* Otherwise, if we're converting between two aggregate or vector
4573 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4574 target type in place or to just convert the inner expression. */
4575 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4576 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4578 /* If we are converting between mere variants, we can just
4579 substitute the VIEW_CONVERT_EXPR in place. */
4580 if (gnat_types_compatible_p (type, etype))
4581 return build1 (VIEW_CONVERT_EXPR, type, op0);
4583 /* Otherwise, we may just bypass the input view conversion unless
4584 one of the types is a fat pointer, which is handled by
4585 specialized code below which relies on exact type matching. */
4586 else if (!TYPE_IS_FAT_POINTER_P (type)
4587 && !TYPE_IS_FAT_POINTER_P (etype))
4588 return convert (type, op0);
4591 break;
4594 default:
4595 break;
4598 /* Check for converting to a pointer to an unconstrained array. */
4599 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4600 return convert_to_fat_pointer (type, expr);
4602 /* If we are converting between two aggregate or vector types that are mere
4603 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4604 to a vector type from its representative array type. */
4605 else if ((code == ecode
4606 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4607 && gnat_types_compatible_p (type, etype))
4608 || (code == VECTOR_TYPE
4609 && ecode == ARRAY_TYPE
4610 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4611 etype)))
4612 return build1 (VIEW_CONVERT_EXPR, type, expr);
4614 /* If we are converting between tagged types, try to upcast properly.
4615 But don't do it if we are just annotating types since tagged types
4616 aren't fully laid out in this mode. */
4617 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4618 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type)
4619 && !type_annotate_only)
4621 tree child_etype = etype;
4622 do {
4623 tree field = TYPE_FIELDS (child_etype);
4624 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4625 return build_component_ref (expr, field, false);
4626 child_etype = TREE_TYPE (field);
4627 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4630 /* If we are converting from a smaller form of record type back to it, just
4631 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4632 size on both sides. */
4633 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4634 && smaller_form_type_p (etype, type))
4636 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4637 false, false, false, true),
4638 expr);
4639 return build1 (VIEW_CONVERT_EXPR, type, expr);
4642 /* In all other cases of related types, make a NOP_EXPR. */
4643 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4644 return fold_convert (type, expr);
4646 switch (code)
4648 case VOID_TYPE:
4649 return fold_build1 (CONVERT_EXPR, type, expr);
4651 case INTEGER_TYPE:
4652 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4653 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4654 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4655 return unchecked_convert (type, expr, false);
4657 /* If the output is a biased type, convert first to the base type and
4658 subtract the bias. Note that the bias itself must go through a full
4659 conversion to the base type, lest it be a biased value; this happens
4660 for subtypes of biased types. */
4661 if (TYPE_BIASED_REPRESENTATION_P (type))
4662 return fold_convert (type,
4663 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4664 convert (TREE_TYPE (type), expr),
4665 convert (TREE_TYPE (type),
4666 TYPE_MIN_VALUE (type))));
4668 /* ... fall through ... */
4670 case ENUMERAL_TYPE:
4671 case BOOLEAN_TYPE:
4672 /* If we are converting an additive expression to an integer type
4673 with lower precision, be wary of the optimization that can be
4674 applied by convert_to_integer. There are 2 problematic cases:
4675 - if the first operand was originally of a biased type,
4676 because we could be recursively called to convert it
4677 to an intermediate type and thus rematerialize the
4678 additive operator endlessly,
4679 - if the expression contains a placeholder, because an
4680 intermediate conversion that changes the sign could
4681 be inserted and thus introduce an artificial overflow
4682 at compile time when the placeholder is substituted. */
4683 if (code == INTEGER_TYPE
4684 && ecode == INTEGER_TYPE
4685 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4686 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4688 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4690 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4691 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4692 || CONTAINS_PLACEHOLDER_P (expr))
4693 return build1 (NOP_EXPR, type, expr);
4696 return fold (convert_to_integer (type, expr));
4698 case POINTER_TYPE:
4699 case REFERENCE_TYPE:
4700 /* If converting between two thin pointers, adjust if needed to account
4701 for differing offsets from the base pointer, depending on whether
4702 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4703 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4705 tree etype_pos
4706 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype))
4707 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4708 : size_zero_node;
4709 tree type_pos
4710 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))
4711 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4712 : size_zero_node;
4713 tree byte_diff = size_diffop (type_pos, etype_pos);
4715 expr = build1 (NOP_EXPR, type, expr);
4716 if (integer_zerop (byte_diff))
4717 return expr;
4719 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4720 fold_convert (sizetype, byte_diff));
4723 /* If converting fat pointer to normal or thin pointer, get the pointer
4724 to the array and then convert it. */
4725 if (TYPE_IS_FAT_POINTER_P (etype))
4726 expr = build_component_ref (expr, TYPE_FIELDS (etype), false);
4728 return fold (convert_to_pointer (type, expr));
4730 case REAL_TYPE:
4731 return fold (convert_to_real (type, expr));
4733 case RECORD_TYPE:
4734 /* Do a normal conversion between scalar and justified modular type. */
4735 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4737 vec<constructor_elt, va_gc> *v;
4738 vec_alloc (v, 1);
4740 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4741 convert (TREE_TYPE (TYPE_FIELDS (type)),
4742 expr));
4743 return gnat_build_constructor (type, v);
4746 /* In these cases, assume the front-end has validated the conversion.
4747 If the conversion is valid, it will be a bit-wise conversion, so
4748 it can be viewed as an unchecked conversion. */
4749 return unchecked_convert (type, expr, false);
4751 case ARRAY_TYPE:
4752 /* Do a normal conversion between unconstrained and constrained array
4753 type, assuming the latter is a constrained version of the former. */
4754 if (TREE_CODE (expr) == INDIRECT_REF
4755 && ecode == ARRAY_TYPE
4756 && TREE_TYPE (etype) == TREE_TYPE (type))
4758 tree ptr_type = build_pointer_type (type);
4759 tree t = build_unary_op (INDIRECT_REF, NULL_TREE,
4760 fold_convert (ptr_type,
4761 TREE_OPERAND (expr, 0)));
4762 TREE_READONLY (t) = TREE_READONLY (expr);
4763 TREE_THIS_NOTRAP (t) = TREE_THIS_NOTRAP (expr);
4764 return t;
4767 /* In these cases, assume the front-end has validated the conversion.
4768 If the conversion is valid, it will be a bit-wise conversion, so
4769 it can be viewed as an unchecked conversion. */
4770 return unchecked_convert (type, expr, false);
4772 case UNION_TYPE:
4773 /* This is either a conversion between a tagged type and some
4774 subtype, which we have to mark as a UNION_TYPE because of
4775 overlapping fields, or a conversion of an Unchecked_Union. */
4776 return unchecked_convert (type, expr, false);
4778 case UNCONSTRAINED_ARRAY_TYPE:
4779 /* If the input is a VECTOR_TYPE, convert to the representative
4780 array type first. */
4781 if (ecode == VECTOR_TYPE)
4783 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4784 etype = TREE_TYPE (expr);
4785 ecode = TREE_CODE (etype);
4788 /* If EXPR is a constrained array, take its address, convert it to a
4789 fat pointer, and then dereference it. Likewise if EXPR is a
4790 record containing both a template and a constrained array.
4791 Note that a record representing a justified modular type
4792 always represents a packed constrained array. */
4793 if (ecode == ARRAY_TYPE
4794 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4795 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4796 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4797 return
4798 build_unary_op
4799 (INDIRECT_REF, NULL_TREE,
4800 convert_to_fat_pointer (TREE_TYPE (type),
4801 build_unary_op (ADDR_EXPR,
4802 NULL_TREE, expr)));
4804 /* Do something very similar for converting one unconstrained
4805 array to another. */
4806 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4807 return
4808 build_unary_op (INDIRECT_REF, NULL_TREE,
4809 convert (TREE_TYPE (type),
4810 build_unary_op (ADDR_EXPR,
4811 NULL_TREE, expr)));
4812 else
4813 gcc_unreachable ();
4815 case COMPLEX_TYPE:
4816 return fold (convert_to_complex (type, expr));
4818 default:
4819 gcc_unreachable ();
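/* An illustrative sketch of the thin-pointer adjustment in the
   POINTER_TYPE case of convert above, with char * arithmetic and a
   hypothetical record layout: a thin pointer may designate either the
   record holding bounds and data or the data field itself, and moving
   between the two conventions is a constant byte offset, like the
   byte_diff computed from byte_position.  Not part of the compiler.  */

struct sketch_unc_record
{
  long first, last;     /* the bounds (the template)  */
  int data[1];          /* the array proper           */
};

static int *
sketch_record_to_data (struct sketch_unc_record *p)
{
  return p->data;       /* shift forward by the data field's offset  */
}

static struct sketch_unc_record *
sketch_data_to_record (int *data)
{
  /* Shift backward by the same offset.  */
  return (struct sketch_unc_record *)
         ((char *) data
          - __builtin_offsetof (struct sketch_unc_record, data));
}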
4823 /* Create an expression whose value is that of EXPR converted to the common
4824 index type, which is sizetype. EXPR is supposed to be in the base type
4825 of the GNAT index type. Calling it is equivalent to doing
4827 convert (sizetype, expr)
4829 but we try to distribute the type conversion with the knowledge that EXPR
4830 cannot overflow in its type. This is a best-effort approach and we fall
4831 back to the above expression as soon as difficulties are encountered.
4833 This is necessary to overcome issues that arise when the GNAT base index
4834 type and the GCC common index type (sizetype) don't have the same size,
4835 which is quite frequent on 64-bit architectures. In this case, and if
4836 the GNAT base index type is signed but the iteration type of the loop has
4837 been forced to unsigned, the loop scalar evolution engine cannot compute
4838 a simple evolution for the general induction variables associated with the
4839 array indices, because it will preserve the wrap-around semantics in the
4840 unsigned type of their "inner" part. As a result, many loop optimizations
4841 are blocked.
4843 The solution is to use a special (basic) induction variable that is at
4844 least as large as sizetype, and to express the aforementioned general
4845 induction variables in terms of this induction variable, eliminating
4846 the problematic intermediate truncation to the GNAT base index type.
4847 This is possible as long as the original expression doesn't overflow
4848 and if the middle-end hasn't introduced artificial overflows in the
4849 course of the various simplifications it can make to the expression. */
4851 tree
4852 convert_to_index_type (tree expr)
4854 enum tree_code code = TREE_CODE (expr);
4855 tree type = TREE_TYPE (expr);
4857 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4858 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4859 if (TYPE_UNSIGNED (type) || !optimize)
4860 return convert (sizetype, expr);
4862 switch (code)
4864 case VAR_DECL:
4865 /* The main effect of the function: replace a loop parameter with its
4866 associated special induction variable. */
4867 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
4868 expr = DECL_INDUCTION_VAR (expr);
4869 break;
4871 CASE_CONVERT:
4873 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
4874 /* Bail out as soon as we suspect some sort of type frobbing. */
4875 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
4876 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
4877 break;
4880 /* ... fall through ... */
4882 case NON_LVALUE_EXPR:
4883 return fold_build1 (code, sizetype,
4884 convert_to_index_type (TREE_OPERAND (expr, 0)));
4886 case PLUS_EXPR:
4887 case MINUS_EXPR:
4888 case MULT_EXPR:
4889 return fold_build2 (code, sizetype,
4890 convert_to_index_type (TREE_OPERAND (expr, 0)),
4891 convert_to_index_type (TREE_OPERAND (expr, 1)));
4893 case COMPOUND_EXPR:
4894 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
4895 convert_to_index_type (TREE_OPERAND (expr, 1)));
4897 case COND_EXPR:
4898 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
4899 convert_to_index_type (TREE_OPERAND (expr, 1)),
4900 convert_to_index_type (TREE_OPERAND (expr, 2)));
4902 default:
4903 break;
4906 return convert (sizetype, expr);
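/* Illustrative sketch in plain C of the distribution performed above,
   assuming the 32-bit signed addition A + B cannot overflow: widening
   each operand before the arithmetic yields the same value as widening
   the result, but hands the optimizers an expression already in the
   wide type.

     #include <assert.h>
     #include <stddef.h>

     static size_t widen_after  (int a, int b) { return (size_t) (a + b); }
     static size_t widen_before (int a, int b) { return (size_t) a + (size_t) b; }

     int main (void)
     {
       assert (widen_after (1000, 2345) == widen_before (1000, 2345));
       return 0;
     }
*/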
4909 /* Remove all conversions that are done in EXP. This includes converting
4910 from a padded type or to a justified modular type. If TRUE_ADDRESS
4911 is true, always return the address of the containing object even if
4912 the address is not bit-aligned. */
4914 tree
4915 remove_conversions (tree exp, bool true_address)
4917 switch (TREE_CODE (exp))
4919 case CONSTRUCTOR:
4920 if (true_address
4921 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4922 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4923 return
4924 remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
4925 break;
4927 case COMPONENT_REF:
4928 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4929 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4930 break;
4932 CASE_CONVERT:
4933 case VIEW_CONVERT_EXPR:
4934 case NON_LVALUE_EXPR:
4935 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4937 default:
4938 break;
4941 return exp;
4944 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4945 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4946 likewise return an expression pointing to the underlying array. */
4948 tree
4949 maybe_unconstrained_array (tree exp)
4951 enum tree_code code = TREE_CODE (exp);
4952 tree type = TREE_TYPE (exp);
4954 switch (TREE_CODE (type))
4956 case UNCONSTRAINED_ARRAY_TYPE:
4957 if (code == UNCONSTRAINED_ARRAY_REF)
4959 const bool read_only = TREE_READONLY (exp);
4960 const bool no_trap = TREE_THIS_NOTRAP (exp);
4962 exp = TREE_OPERAND (exp, 0);
4963 type = TREE_TYPE (exp);
4965 if (TREE_CODE (exp) == COND_EXPR)
4967 tree op1
4968 = build_unary_op (INDIRECT_REF, NULL_TREE,
4969 build_component_ref (TREE_OPERAND (exp, 1),
4970 TYPE_FIELDS (type),
4971 false));
4972 tree op2
4973 = build_unary_op (INDIRECT_REF, NULL_TREE,
4974 build_component_ref (TREE_OPERAND (exp, 2),
4975 TYPE_FIELDS (type),
4976 false));
4978 exp = build3 (COND_EXPR,
4979 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
4980 TREE_OPERAND (exp, 0), op1, op2);
4982 else
4984 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
4985 build_component_ref (exp,
4986 TYPE_FIELDS (type),
4987 false));
4988 TREE_READONLY (exp) = read_only;
4989 TREE_THIS_NOTRAP (exp) = no_trap;
4993 else if (code == NULL_EXPR)
4994 exp = build1 (NULL_EXPR,
4995 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
4996 TREE_OPERAND (exp, 0));
4997 break;
4999 case RECORD_TYPE:
5000 /* If this is a padded type and it contains a template, convert to the
5001 unpadded type first. */
5002 if (TYPE_PADDING_P (type)
5003 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
5004 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
5006 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5007 code = TREE_CODE (exp);
5008 type = TREE_TYPE (exp);
5011 if (TYPE_CONTAINS_TEMPLATE_P (type))
5013 /* If the array initializer is a box, return NULL_TREE. */
5014 if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
5015 return NULL_TREE;
5017 exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
5018 false);
5019 type = TREE_TYPE (exp);
5021 /* If the array type is padded, convert to the unpadded type. */
5022 if (TYPE_IS_PADDING_P (type))
5023 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5025 break;
5027 default:
5028 break;
5031 return exp;
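/* A hypothetical C picture of the representation peeled away above: a
   GNAT fat pointer pairs a pointer to the array data with a pointer to
   its bounds (the "template"), and the code fetches the data through
   the first field. The names below are illustrative, not the exact
   tree layout.

     struct bounds  { long first, last; };
     struct fat_ptr { char *p_array; struct bounds *p_bounds; };

     static long length (struct fat_ptr fp)
     {
       return fp.p_bounds->last - fp.p_bounds->first + 1;
     }
*/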
5034 /* Return true if EXPR is an expression that can be folded as an operand
5035 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
5037 static bool
5038 can_fold_for_view_convert_p (tree expr)
5040 tree t1, t2;
5042 /* The folder will fold NOP_EXPRs between integral types with the same
5043 precision (in the middle-end's sense). We cannot allow it if the
5044 types don't have the same precision in the Ada sense as well. */
5045 if (TREE_CODE (expr) != NOP_EXPR)
5046 return true;
5048 t1 = TREE_TYPE (expr);
5049 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
5051 /* Defer to the folder for non-integral conversions. */
5052 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
5053 return true;
5055 /* Only fold conversions that preserve both precisions. */
5056 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
5057 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
5058 return true;
5060 return false;
5063 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
5064 If NOTRUNC_P is true, truncation operations should be suppressed.
5066 Special care is required with (source or target) integral types whose
5067 precision is not equal to their size, to make sure we fetch or assign
5068 the value bits whose location might depend on the endianness, e.g.
5070 Rmsize : constant := 8;
5071 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5073 type Bit_Array is array (1 .. Rmsize) of Boolean;
5074 pragma Pack (Bit_Array);
5076 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5078 Value : Int := 2#1000_0001#;
5079 Vbits : Bit_Array := To_Bit_Array (Value);
5081 we expect the 8 bits at Vbits'Address to always contain Value, while
5082 their original location depends on the endianness, at Value'Address
5083 on a little-endian architecture but not on a big-endian one.
5085 One pitfall is that we cannot use TYPE_UNSIGNED directly to decide how
5086 the bits between the precision and the size are filled, because of the
5087 trick used in the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.
5088 So we use the special predicate type_unsigned_for_rm above. */
5090 tree
5091 unchecked_convert (tree type, tree expr, bool notrunc_p)
5093 tree etype = TREE_TYPE (expr);
5094 enum tree_code ecode = TREE_CODE (etype);
5095 enum tree_code code = TREE_CODE (type);
5096 tree tem;
5097 int c;
5099 /* If the expression is already of the right type, we are done. */
5100 if (etype == type)
5101 return expr;
5103 /* If both types are integral just do a normal conversion.
5104 Likewise for a conversion to an unconstrained array. */
5105 if (((INTEGRAL_TYPE_P (type)
5106 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
5107 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
5108 && (INTEGRAL_TYPE_P (etype)
5109 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
5110 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
5111 || code == UNCONSTRAINED_ARRAY_TYPE)
5113 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
5115 tree ntype = copy_type (etype);
5116 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
5117 TYPE_MAIN_VARIANT (ntype) = ntype;
5118 expr = build1 (NOP_EXPR, ntype, expr);
5121 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
5123 tree rtype = copy_type (type);
5124 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
5125 TYPE_MAIN_VARIANT (rtype) = rtype;
5126 expr = convert (rtype, expr);
5127 expr = build1 (NOP_EXPR, type, expr);
5129 else
5130 expr = convert (type, expr);
5133 /* If we are converting to an integral type whose precision is not equal
5134 to its size, first unchecked convert to a record type that contains a
5135 field of the given precision. Then extract the result from the field.
5137 There is a subtlety if the source type is an aggregate type with reverse
5138 storage order because its representation is not contiguous in the native
5139 storage order, i.e. a direct unchecked conversion to an integral type
5140 with N bits of precision cannot read the first N bits of the aggregate
5141 type. To overcome it, we do an unchecked conversion to an integral type
5142 with reverse storage order and return the resulting value. This also
5143 ensures that the result of the unchecked conversion doesn't depend on
5144 the endianness of the target machine, but only on the storage order of
5145 the aggregate type.
5147 Finally, for the sake of consistency, we do the unchecked conversion
5148 to an integral type with reverse storage order as soon as the source
5149 type is an aggregate type with reverse storage order, even if there
5150 are no considerations of precision or size involved. */
5151 else if (INTEGRAL_TYPE_P (type)
5152 && TYPE_RM_SIZE (type)
5153 && (tree_int_cst_compare (TYPE_RM_SIZE (type),
5154 TYPE_SIZE (type)) < 0
5155 || (AGGREGATE_TYPE_P (etype)
5156 && TYPE_REVERSE_STORAGE_ORDER (etype))))
5158 tree rec_type = make_node (RECORD_TYPE);
5159 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
5160 tree field_type, field;
5162 if (AGGREGATE_TYPE_P (etype))
5163 TYPE_REVERSE_STORAGE_ORDER (rec_type)
5164 = TYPE_REVERSE_STORAGE_ORDER (etype);
5166 if (type_unsigned_for_rm (type))
5167 field_type = make_unsigned_type (prec);
5168 else
5169 field_type = make_signed_type (prec);
5170 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
5172 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5173 NULL_TREE, bitsize_zero_node, 1, 0);
5175 finish_record_type (rec_type, field, 1, false);
5177 expr = unchecked_convert (rec_type, expr, notrunc_p);
5178 expr = build_component_ref (expr, field, false);
5179 expr = fold_build1 (NOP_EXPR, type, expr);
5182 /* Similarly if we are converting from an integral type whose precision is
5183 not equal to its size, first copy into a field of the given precision
5184 and unchecked convert the record type.
5186 The same considerations as above apply if the target type is an aggregate
5187 type with reverse storage order and we also proceed similarly. */
5188 else if (INTEGRAL_TYPE_P (etype)
5189 && TYPE_RM_SIZE (etype)
5190 && (tree_int_cst_compare (TYPE_RM_SIZE (etype),
5191 TYPE_SIZE (etype)) < 0
5192 || (AGGREGATE_TYPE_P (type)
5193 && TYPE_REVERSE_STORAGE_ORDER (type))))
5195 tree rec_type = make_node (RECORD_TYPE);
5196 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
5197 vec<constructor_elt, va_gc> *v;
5198 vec_alloc (v, 1);
5199 tree field_type, field;
5201 if (AGGREGATE_TYPE_P (type))
5202 TYPE_REVERSE_STORAGE_ORDER (rec_type)
5203 = TYPE_REVERSE_STORAGE_ORDER (type);
5205 if (type_unsigned_for_rm (etype))
5206 field_type = make_unsigned_type (prec);
5207 else
5208 field_type = make_signed_type (prec);
5209 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
5211 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5212 NULL_TREE, bitsize_zero_node, 1, 0);
5214 finish_record_type (rec_type, field, 1, false);
5216 expr = fold_build1 (NOP_EXPR, field_type, expr);
5217 CONSTRUCTOR_APPEND_ELT (v, field, expr);
5218 expr = gnat_build_constructor (rec_type, v);
5219 expr = unchecked_convert (type, expr, notrunc_p);
5222 /* If we are converting from a scalar type to a type with a different size,
5223 we need to pad to have the same size on both sides.
5225 ??? We cannot do it unconditionally because unchecked conversions are
5226 used liberally by the front-end to implement polymorphism, e.g. in:
5228 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5229 return p___size__4 (p__object!(S191s.all));
5231 so we skip all expressions that are references. */
5232 else if (!REFERENCE_CLASS_P (expr)
5233 && !AGGREGATE_TYPE_P (etype)
5234 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5235 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
5237 if (c < 0)
5239 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5240 false, false, false, true),
5241 expr);
5242 expr = unchecked_convert (type, expr, notrunc_p);
5244 else
5246 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5247 false, false, false, true);
5248 expr = unchecked_convert (rec_type, expr, notrunc_p);
5249 expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
5253 /* We have a special case when we are converting between two unconstrained
5254 array types. In that case, take the address, convert the fat pointer
5255 types, and dereference. */
5256 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
5257 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5258 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5259 build_unary_op (ADDR_EXPR, NULL_TREE,
5260 expr)));
5262 /* Another special case is when we are converting to a vector type from its
5263 representative array type; this is a regular conversion. */
5264 else if (code == VECTOR_TYPE
5265 && ecode == ARRAY_TYPE
5266 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5267 etype))
5268 expr = convert (type, expr);
5270 /* And, if the array type is not the representative, we try to build an
5271 intermediate vector type of which the array type is the representative
5272 and to do the unchecked conversion between the vector types, in order
5273 to enable further simplifications in the middle-end. */
5274 else if (code == VECTOR_TYPE
5275 && ecode == ARRAY_TYPE
5276 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
5278 expr = convert (tem, expr);
5279 return unchecked_convert (type, expr, notrunc_p);
5282 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
5283 the alignment of the CONSTRUCTOR to speed up the copy operation. */
5284 else if (TREE_CODE (expr) == CONSTRUCTOR
5285 && code == RECORD_TYPE
5286 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
5288 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
5289 Empty, false, false, false, true),
5290 expr);
5291 return unchecked_convert (type, expr, notrunc_p);
5294 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
5295 else
5297 expr = maybe_unconstrained_array (expr);
5298 etype = TREE_TYPE (expr);
5299 ecode = TREE_CODE (etype);
5300 if (can_fold_for_view_convert_p (expr))
5301 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5302 else
5303 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
5306 /* If the result is a non-biased integral type whose precision is not equal
5307 to its size, sign- or zero-extend the result. But we need not do this
5308 if the input is also an integral type and both are unsigned or both are
5309 signed and have the same precision. */
5310 if (!notrunc_p
5311 && INTEGRAL_TYPE_P (type)
5312 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
5313 && TYPE_RM_SIZE (type)
5314 && tree_int_cst_compare (TYPE_RM_SIZE (type), TYPE_SIZE (type)) < 0
5315 && !(INTEGRAL_TYPE_P (etype)
5316 && type_unsigned_for_rm (type) == type_unsigned_for_rm (etype)
5317 && (type_unsigned_for_rm (type)
5318 || tree_int_cst_compare (TYPE_RM_SIZE (type),
5319 TYPE_RM_SIZE (etype)
5320 ? TYPE_RM_SIZE (etype)
5321 : TYPE_SIZE (etype)) == 0)))
5323 if (integer_zerop (TYPE_RM_SIZE (type)))
5324 expr = build_int_cst (type, 0);
5325 else
5327 tree base_type
5328 = gnat_type_for_size (TREE_INT_CST_LOW (TYPE_SIZE (type)),
5329 type_unsigned_for_rm (type));
5330 tree shift_expr
5331 = convert (base_type,
5332 size_binop (MINUS_EXPR,
5333 TYPE_SIZE (type), TYPE_RM_SIZE (type)));
5334 expr
5335 = convert (type,
5336 build_binary_op (RSHIFT_EXPR, base_type,
5337 build_binary_op (LSHIFT_EXPR, base_type,
5338 convert (base_type,
5339 expr),
5340 shift_expr),
5341 shift_expr));
5345 /* An unchecked conversion should never raise Constraint_Error. The code
5346 below assumes that GCC's conversion routines overflow the same way that
5347 the underlying hardware does. This is probably true. In the rare case
5348 when it is false, we can rely on the fact that such conversions are
5349 erroneous anyway. */
5350 if (TREE_CODE (expr) == INTEGER_CST)
5351 TREE_OVERFLOW (expr) = 0;
5353 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
5354 mark the expression as no longer constant. */
5355 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5356 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5357 OEP_ONLY_CONST))
5358 TREE_CONSTANT (expr) = 0;
5360 return expr;
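/* Plain C counterpart of the final extension step above, as a sketch:
   a signed integral type with 8 bits of precision (RM size) stored in
   a 32-bit container is sign-extended by shifting left and then right
   by TYPE_SIZE - TYPE_RM_SIZE bits, assuming the usual arithmetic
   right shift on signed types.

     #include <assert.h>
     #include <stdint.h>

     static int32_t extend_signed_rm8 (int32_t container)
     {
       const int shift = 32 - 8;  // TYPE_SIZE - TYPE_RM_SIZE
       return (int32_t) ((uint32_t) container << shift) >> shift;
     }

     int main (void)
     {
       assert (extend_signed_rm8 (0x81) == -127);  // 2#1000_0001#
       assert (extend_signed_rm8 (0x7f) == 127);
       return 0;
     }
*/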
5363 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5364 the latter being a record type as predicated by Is_Record_Type. */
5366 enum tree_code
5367 tree_code_for_record_type (Entity_Id gnat_type)
5369 Node_Id component_list, component;
5371 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5372 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5373 if (!Is_Unchecked_Union (gnat_type))
5374 return RECORD_TYPE;
5376 gnat_type = Implementation_Base_Type (gnat_type);
5377 component_list
5378 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5380 for (component = First_Non_Pragma (Component_Items (component_list));
5381 Present (component);
5382 component = Next_Non_Pragma (component))
5383 if (Ekind (Defining_Entity (component)) == E_Component)
5384 return RECORD_TYPE;
5386 return UNION_TYPE;
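/* What the decision above means in C terms (hypothetical layouts): an
   Unchecked_Union whose components all live in the variant part maps
   onto a bare union, while any component outside the variant part
   forces a struct wrapping the union.

     union all_in_variant {           // UNION_TYPE case
       int   as_int;
       float as_float;
     };

     struct has_fixed_component {     // RECORD_TYPE case
       char common;                   // component outside the variant part
       union { int i; float f; } u;   // the variant part
     };
*/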
5389 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5390 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5391 according to the presence of an alignment clause on the type or, if it
5392 is an array, on the component type. */
5394 bool
5395 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5397 gnat_type = Underlying_Type (gnat_type);
5399 *align_clause = Present (Alignment_Clause (gnat_type));
5401 if (Is_Array_Type (gnat_type))
5403 gnat_type = Underlying_Type (Component_Type (gnat_type));
5404 if (Present (Alignment_Clause (gnat_type)))
5405 *align_clause = true;
5408 if (!Is_Floating_Point_Type (gnat_type))
5409 return false;
5411 if (UI_To_Int (Esize (gnat_type)) != 64)
5412 return false;
5414 return true;
5417 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5418 size is greater or equal to 64 bits, or an array of such a type. Set
5419 ALIGN_CLAUSE according to the presence of an alignment clause on the
5420 type or, if it is an array, on the component type. */
5422 bool
5423 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5425 gnat_type = Underlying_Type (gnat_type);
5427 *align_clause = Present (Alignment_Clause (gnat_type));
5429 if (Is_Array_Type (gnat_type))
5431 gnat_type = Underlying_Type (Component_Type (gnat_type));
5432 if (Present (Alignment_Clause (gnat_type)))
5433 *align_clause = true;
5436 if (!Is_Scalar_Type (gnat_type))
5437 return false;
5439 if (UI_To_Int (Esize (gnat_type)) < 64)
5440 return false;
5442 return true;
5445 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5446 component of an aggregate type. */
5448 bool
5449 type_for_nonaliased_component_p (tree gnu_type)
5451 /* If the type is passed by reference, we may have pointers to the
5452 component so it cannot be made non-aliased. */
5453 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5454 return false;
5456 /* We used to say that any component of aggregate type is aliased
5457 because the front-end may take 'Reference of it. The front-end
5458 has been enhanced in the meantime so as to use a renaming instead
5459 in most cases, but the back-end can probably take the address of
5460 such a component too so we go for the conservative stance.
5462 For instance, we might need the address of any array type, even
5463 if normally passed by copy, to construct a fat pointer if the
5464 component is used as an actual for an unconstrained formal.
5466 Likewise for record types: even if a specific record subtype is
5467 passed by copy, the parent type might be passed by ref (e.g. if
5468 it's of variable size) and we might take the address of a child
5469 component to pass to a parent formal. We have no way to check
5470 for such conditions here. */
5471 if (AGGREGATE_TYPE_P (gnu_type))
5472 return false;
5474 return true;
5477 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5479 bool
5480 smaller_form_type_p (tree type, tree orig_type)
5482 tree size, osize;
5484 /* We're not interested in variants here. */
5485 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5486 return false;
5488 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5489 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5490 return false;
5492 size = TYPE_SIZE (type);
5493 osize = TYPE_SIZE (orig_type);
5495 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5496 return false;
5498 return tree_int_cst_lt (size, osize) != 0;
5501 /* Return whether EXPR, which is the renamed object in an object renaming
5502 declaration, can be materialized as a reference (with a REFERENCE_TYPE).
5503 This should be synchronized with Exp_Dbug.Debug_Renaming_Declaration. */
5505 bool
5506 can_materialize_object_renaming_p (Node_Id expr)
5508 while (true)
5510 expr = Original_Node (expr);
5512 switch (Nkind (expr))
5514 case N_Identifier:
5515 case N_Expanded_Name:
5516 if (!Present (Renamed_Object (Entity (expr))))
5517 return true;
5518 expr = Renamed_Object (Entity (expr));
5519 break;
5521 case N_Selected_Component:
5523 if (Is_Packed (Underlying_Type (Etype (Prefix (expr)))))
5524 return false;
5526 const Uint bitpos
5527 = Normalized_First_Bit (Entity (Selector_Name (expr)));
5528 if (!UI_Is_In_Int_Range (bitpos)
5529 || (bitpos != UI_No_Uint && bitpos != UI_From_Int (0)))
5530 return false;
5532 expr = Prefix (expr);
5533 break;
5536 case N_Indexed_Component:
5537 case N_Slice:
5539 const Entity_Id t = Underlying_Type (Etype (Prefix (expr)));
5541 if (Is_Array_Type (t) && Present (Packed_Array_Impl_Type (t)))
5542 return false;
5544 expr = Prefix (expr);
5545 break;
5548 case N_Explicit_Dereference:
5549 expr = Prefix (expr);
5550 break;
5552 default:
5553 return true;
5558 /* Perform final processing on global declarations. */
5560 static GTY (()) tree dummy_global;
5562 void
5563 gnat_write_global_declarations (void)
5565 unsigned int i;
5566 tree iter;
5568 /* If we have declared types as used at the global level, insert them in
5569 the global hash table. We use a dummy variable for this purpose, but
5570 we need to build it unconditionally to avoid -fcompare-debug issues. */
5571 if (first_global_object_name)
5573 struct varpool_node *node;
5574 char *label;
5576 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
5577 dummy_global
5578 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5579 void_type_node);
5580 DECL_HARD_REGISTER (dummy_global) = 1;
5581 TREE_STATIC (dummy_global) = 1;
5582 node = varpool_node::get_create (dummy_global);
5583 node->definition = 1;
5584 node->force_output = 1;
5586 if (types_used_by_cur_var_decl)
5587 while (!types_used_by_cur_var_decl->is_empty ())
5589 tree t = types_used_by_cur_var_decl->pop ();
5590 types_used_by_var_decl_insert (t, dummy_global);
5594 /* Output debug information for all global type declarations first. This
5595 ensures that global types whose compilation hasn't been finalized yet,
5596 for example pointers to Taft amendment types, have their compilation
5597 finalized in the right context. */
5598 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5599 if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
5600 debug_hooks->type_decl (iter, false);
5602 /* Output imported functions. */
5603 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5604 if (TREE_CODE (iter) == FUNCTION_DECL
5605 && DECL_EXTERNAL (iter)
5606 && DECL_INITIAL (iter) == NULL
5607 && !DECL_IGNORED_P (iter)
5608 && DECL_FUNCTION_IS_DEF (iter))
5609 debug_hooks->early_global_decl (iter);
5611 /* Then output the global variables. We need to do that after the debug
5612 information for global types is emitted so that they are finalized. Skip
5613 external global variables, unless we need to emit debug info for them:
5614 this is useful for imported variables, for instance. */
5615 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5616 if (TREE_CODE (iter) == VAR_DECL
5617 && (!DECL_EXTERNAL (iter) || !DECL_IGNORED_P (iter)))
5618 rest_of_decl_compilation (iter, true, 0);
5620 /* Output the imported modules/declarations. In GNAT, these only
5621 materialize subprograms. */
5622 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5623 if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
5624 debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
5625 DECL_CONTEXT (iter), false, false);
5628 /* ************************************************************************
5629 * * GCC builtins support *
5630 * ************************************************************************ */
5632 /* The general scheme is fairly simple:
5634 For each builtin function/type to be declared, gnat_install_builtins calls
5635 internal facilities which eventually get to gnat_pushdecl, which in turn
5636 tracks the builtin function decls so declared in the 'builtin_decls' global
5637 data structure. When an Intrinsic subprogram declaration is processed, we
5638 search this global data structure to retrieve the associated BUILT_IN DECL
5639 node. */
5641 /* Search the chain of currently available builtin declarations for a node
5642 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5643 found, if any, or NULL_TREE otherwise. */
5644 tree
5645 builtin_decl_for (tree name)
5647 unsigned i;
5648 tree decl;
5650 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5651 if (DECL_NAME (decl) == name)
5652 return decl;
5654 return NULL_TREE;
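/* Hypothetical usage sketch of the search above, resolving an Intrinsic
   import by identifier; get_identifier and DECL_BUILT_IN_CLASS are the
   regular GCC facilities, and the builtin name is just an example.

     tree fn = builtin_decl_for (get_identifier ("__builtin_memcpy"));
     if (fn)
       gcc_assert (DECL_BUILT_IN_CLASS (fn) != NOT_BUILT_IN);
*/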
5657 /* The code below eventually exposes gnat_install_builtins, which declares
5658 the builtin types and functions we might need, either internally or as
5659 user accessible facilities.
5661 ??? This is a first implementation shot, still in rough shape. It is
5662 heavily inspired by the "C" family implementation, with chunks copied
5663 verbatim from there.
5665 Two obvious improvement candidates are:
5666 o Use a more efficient name/decl mapping scheme
5667 o Devise a middle-end infrastructure to avoid having to copy
5668 pieces between front-ends. */
5670 /* ----------------------------------------------------------------------- *
5671 * BUILTIN ELEMENTARY TYPES *
5672 * ----------------------------------------------------------------------- */
5674 /* Standard data types to be used in builtin argument declarations. */
5676 enum c_tree_index
5678 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5679 CTI_STRING_TYPE,
5680 CTI_CONST_STRING_TYPE,
5682 CTI_MAX
5685 static tree c_global_trees[CTI_MAX];
5687 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5688 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5689 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5691 /* ??? In addition to some attribute handlers, we currently don't support
5692 a (small) number of builtin types, which in turn inhibits support for a
5693 number of builtin functions. */
5694 #define wint_type_node void_type_node
5695 #define intmax_type_node void_type_node
5696 #define uintmax_type_node void_type_node
5698 /* Used to help initialize the builtin-types.def table. When a type of
5699 the correct size doesn't exist, use error_mark_node instead of NULL.
5700 The latter results in segfaults even when a decl using the type doesn't
5701 get invoked. */
5703 static tree
5704 builtin_type_for_size (int size, bool unsignedp)
5706 tree type = gnat_type_for_size (size, unsignedp);
5707 return type ? type : error_mark_node;
5710 /* Build/push the elementary type decls that builtin functions/types
5711 will need. */
5713 static void
5714 install_builtin_elementary_types (void)
5716 signed_size_type_node = gnat_signed_type_for (size_type_node);
5717 pid_type_node = integer_type_node;
5719 string_type_node = build_pointer_type (char_type_node);
5720 const_string_type_node
5721 = build_pointer_type (build_qualified_type
5722 (char_type_node, TYPE_QUAL_CONST));
5725 /* ----------------------------------------------------------------------- *
5726 * BUILTIN FUNCTION TYPES *
5727 * ----------------------------------------------------------------------- */
5729 /* Now, builtin function types per se. */
5731 enum c_builtin_type
5733 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5734 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5735 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5736 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5737 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5738 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5739 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5740 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5741 ARG6) NAME,
5742 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5743 ARG6, ARG7) NAME,
5744 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5745 ARG6, ARG7, ARG8) NAME,
5746 #define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5747 ARG6, ARG7, ARG8, ARG9) NAME,
5748 #define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5749 ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
5750 #define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5751 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
5752 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5753 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5754 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5755 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5756 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5757 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5758 NAME,
5759 #define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5760 ARG6) NAME,
5761 #define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5762 ARG6, ARG7) NAME,
5763 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5764 #include "builtin-types.def"
5765 #undef DEF_PRIMITIVE_TYPE
5766 #undef DEF_FUNCTION_TYPE_0
5767 #undef DEF_FUNCTION_TYPE_1
5768 #undef DEF_FUNCTION_TYPE_2
5769 #undef DEF_FUNCTION_TYPE_3
5770 #undef DEF_FUNCTION_TYPE_4
5771 #undef DEF_FUNCTION_TYPE_5
5772 #undef DEF_FUNCTION_TYPE_6
5773 #undef DEF_FUNCTION_TYPE_7
5774 #undef DEF_FUNCTION_TYPE_8
5775 #undef DEF_FUNCTION_TYPE_9
5776 #undef DEF_FUNCTION_TYPE_10
5777 #undef DEF_FUNCTION_TYPE_11
5778 #undef DEF_FUNCTION_TYPE_VAR_0
5779 #undef DEF_FUNCTION_TYPE_VAR_1
5780 #undef DEF_FUNCTION_TYPE_VAR_2
5781 #undef DEF_FUNCTION_TYPE_VAR_3
5782 #undef DEF_FUNCTION_TYPE_VAR_4
5783 #undef DEF_FUNCTION_TYPE_VAR_5
5784 #undef DEF_FUNCTION_TYPE_VAR_6
5785 #undef DEF_FUNCTION_TYPE_VAR_7
5786 #undef DEF_POINTER_TYPE
5787 BT_LAST
5790 typedef enum c_builtin_type builtin_type;
5792 /* A temporary array used in communication with def_fn_type. */
5793 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5795 /* A helper function for install_builtin_types. Build function type
5796 for DEF with return type RET and N arguments. If VAR is true, then the
5797 function should be variadic after those N arguments.
5799 Takes special care not to ICE if any of the types involved are
5800 error_mark_node, which indicates that said type is not in fact available
5801 (see builtin_type_for_size), in which case the function type as a whole
5802 should be error_mark_node. */
5804 static void
5805 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5807 tree t;
5808 tree *args = XALLOCAVEC (tree, n);
5809 va_list list;
5810 int i;
5812 va_start (list, n);
5813 for (i = 0; i < n; ++i)
5815 builtin_type a = (builtin_type) va_arg (list, int);
5816 t = builtin_types[a];
5817 if (t == error_mark_node)
5818 goto egress;
5819 args[i] = t;
5822 t = builtin_types[ret];
5823 if (t == error_mark_node)
5824 goto egress;
5825 if (var)
5826 t = build_varargs_function_type_array (t, n, args);
5827 else
5828 t = build_function_type_array (t, n, args);
5830 egress:
5831 builtin_types[def] = t;
5832 va_end (list);
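/* Sketch of how a representative builtin-types.def entry flows through
   def_fn_type; the entry shown describes the function type "int (int)".

     DEF_FUNCTION_TYPE_1 (BT_FN_INT_INT, BT_INT, BT_INT)

   expands in install_builtin_function_types below to

     def_fn_type (BT_FN_INT_INT, BT_INT, 0, 1, BT_INT);

   storing the resulting non-variadic FUNCTION_TYPE in
   builtin_types[BT_FN_INT_INT]. */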
5835 /* Build the builtin function types and install them in the builtin_types
5836 array for later use in builtin function decls. */
5838 static void
5839 install_builtin_function_types (void)
5841 tree va_list_ref_type_node;
5842 tree va_list_arg_type_node;
5844 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5846 va_list_arg_type_node = va_list_ref_type_node =
5847 build_pointer_type (TREE_TYPE (va_list_type_node));
5849 else
5851 va_list_arg_type_node = va_list_type_node;
5852 va_list_ref_type_node = build_reference_type (va_list_type_node);
5855 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5856 builtin_types[ENUM] = VALUE;
5857 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5858 def_fn_type (ENUM, RETURN, 0, 0);
5859 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5860 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5861 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5862 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5863 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5864 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5865 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5866 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5867 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5868 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5869 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5870 ARG6) \
5871 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5872 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5873 ARG6, ARG7) \
5874 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5875 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5876 ARG6, ARG7, ARG8) \
5877 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5878 ARG7, ARG8);
5879 #define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5880 ARG6, ARG7, ARG8, ARG9) \
5881 def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5882 ARG7, ARG8, ARG9);
5883 #define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5884 ARG6, ARG7, ARG8, ARG9, ARG10) \
5885 def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5886 ARG7, ARG8, ARG9, ARG10);
5887 #define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5888 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
5889 def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5890 ARG7, ARG8, ARG9, ARG10, ARG11);
5891 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5892 def_fn_type (ENUM, RETURN, 1, 0);
5893 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5894 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5895 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5896 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5897 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5898 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5899 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5900 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5901 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5902 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5903 #define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5904 ARG6) \
5905 def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5906 #define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5907 ARG6, ARG7) \
5908 def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5909 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5910 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5912 #include "builtin-types.def"
5914 #undef DEF_PRIMITIVE_TYPE
5915 #undef DEF_FUNCTION_TYPE_0
5916 #undef DEF_FUNCTION_TYPE_1
5917 #undef DEF_FUNCTION_TYPE_2
5918 #undef DEF_FUNCTION_TYPE_3
5919 #undef DEF_FUNCTION_TYPE_4
5920 #undef DEF_FUNCTION_TYPE_5
5921 #undef DEF_FUNCTION_TYPE_6
5922 #undef DEF_FUNCTION_TYPE_7
5923 #undef DEF_FUNCTION_TYPE_8
5924 #undef DEF_FUNCTION_TYPE_9
5925 #undef DEF_FUNCTION_TYPE_10
5926 #undef DEF_FUNCTION_TYPE_11
5927 #undef DEF_FUNCTION_TYPE_VAR_0
5928 #undef DEF_FUNCTION_TYPE_VAR_1
5929 #undef DEF_FUNCTION_TYPE_VAR_2
5930 #undef DEF_FUNCTION_TYPE_VAR_3
5931 #undef DEF_FUNCTION_TYPE_VAR_4
5932 #undef DEF_FUNCTION_TYPE_VAR_5
5933 #undef DEF_FUNCTION_TYPE_VAR_6
5934 #undef DEF_FUNCTION_TYPE_VAR_7
5935 #undef DEF_POINTER_TYPE
5936 builtin_types[(int) BT_LAST] = NULL_TREE;
5939 /* ----------------------------------------------------------------------- *
5940 * BUILTIN ATTRIBUTES *
5941 * ----------------------------------------------------------------------- */
5943 enum built_in_attribute
5945 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5946 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5947 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
5948 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5949 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5950 #include "builtin-attrs.def"
5951 #undef DEF_ATTR_NULL_TREE
5952 #undef DEF_ATTR_INT
5953 #undef DEF_ATTR_STRING
5954 #undef DEF_ATTR_IDENT
5955 #undef DEF_ATTR_TREE_LIST
5956 ATTR_LAST
5959 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5961 static void
5962 install_builtin_attributes (void)
5964 /* Fill in the built_in_attributes array. */
5965 #define DEF_ATTR_NULL_TREE(ENUM) \
5966 built_in_attributes[(int) ENUM] = NULL_TREE;
5967 #define DEF_ATTR_INT(ENUM, VALUE) \
5968 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5969 #define DEF_ATTR_STRING(ENUM, VALUE) \
5970 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
5971 #define DEF_ATTR_IDENT(ENUM, STRING) \
5972 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5973 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5974 built_in_attributes[(int) ENUM] \
5975 = tree_cons (built_in_attributes[(int) PURPOSE], \
5976 built_in_attributes[(int) VALUE], \
5977 built_in_attributes[(int) CHAIN]);
5978 #include "builtin-attrs.def"
5979 #undef DEF_ATTR_NULL_TREE
5980 #undef DEF_ATTR_INT
5981 #undef DEF_ATTR_STRING
5982 #undef DEF_ATTR_IDENT
5983 #undef DEF_ATTR_TREE_LIST
5986 /* Handle a "const" attribute; arguments as in
5987 struct attribute_spec.handler. */
5989 static tree
5990 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5991 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5992 bool *no_add_attrs)
5994 if (TREE_CODE (*node) == FUNCTION_DECL)
5995 TREE_READONLY (*node) = 1;
5996 else
5997 *no_add_attrs = true;
5999 return NULL_TREE;
6002 /* Handle a "nothrow" attribute; arguments as in
6003 struct attribute_spec.handler. */
6005 static tree
6006 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
6007 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6008 bool *no_add_attrs)
6010 if (TREE_CODE (*node) == FUNCTION_DECL)
6011 TREE_NOTHROW (*node) = 1;
6012 else
6013 *no_add_attrs = true;
6015 return NULL_TREE;
6018 /* Handle a "pure" attribute; arguments as in
6019 struct attribute_spec.handler. */
6021 static tree
6022 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6023 int ARG_UNUSED (flags), bool *no_add_attrs)
6025 if (TREE_CODE (*node) == FUNCTION_DECL)
6026 DECL_PURE_P (*node) = 1;
6027 /* TODO: support types. */
6028 else
6030 warning (OPT_Wattributes, "%qs attribute ignored",
6031 IDENTIFIER_POINTER (name));
6032 *no_add_attrs = true;
6035 return NULL_TREE;
6038 /* Handle a "no vops" attribute; arguments as in
6039 struct attribute_spec.handler. */
6041 static tree
6042 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
6043 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6044 bool *ARG_UNUSED (no_add_attrs))
6046 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
6047 DECL_IS_NOVOPS (*node) = 1;
6048 return NULL_TREE;
6051 /* Helper for nonnull attribute handling; fetch the operand number
6052 from the attribute argument list. */
6054 static bool
6055 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
6057 /* Verify the arg number is a constant. */
6058 if (!tree_fits_uhwi_p (arg_num_expr))
6059 return false;
6061 *valp = TREE_INT_CST_LOW (arg_num_expr);
6062 return true;
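/* The attribute validated below, as written at the C level (GNU
   extension); operand numbers are 1-based and must designate pointer
   parameters of a prototyped function:

     extern void copy (void *dst, const void *src, unsigned n)
       __attribute__ ((nonnull (1, 2)));
*/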
6065 /* Handle the "nonnull" attribute. */
6066 static tree
6067 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
6068 tree args, int ARG_UNUSED (flags),
6069 bool *no_add_attrs)
6071 tree type = *node;
6072 unsigned HOST_WIDE_INT attr_arg_num;
6074 /* If no arguments are specified, all pointer arguments should be
6075 non-null. Verify a full prototype is given so that the arguments
6076 will have the correct types when we actually check them later.
6077 Avoid diagnosing type-generic built-ins since those have no
6078 prototype. */
6079 if (!args)
6081 if (!prototype_p (type)
6082 && (!TYPE_ATTRIBUTES (type)
6083 || !lookup_attribute ("type generic", TYPE_ATTRIBUTES (type))))
6085 error ("nonnull attribute without arguments on a non-prototype");
6086 *no_add_attrs = true;
6088 return NULL_TREE;
6091 /* Argument list specified. Verify that each argument number references
6092 a pointer argument. */
6093 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
6095 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
6097 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
6099 error ("nonnull argument has invalid operand number (argument %lu)",
6100 (unsigned long) attr_arg_num);
6101 *no_add_attrs = true;
6102 return NULL_TREE;
6105 if (prototype_p (type))
6107 function_args_iterator iter;
6108 tree argument;
6110 function_args_iter_init (&iter, type);
6111 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
6113 argument = function_args_iter_cond (&iter);
6114 if (!argument || ck_num == arg_num)
6115 break;
6118 if (!argument
6119 || TREE_CODE (argument) == VOID_TYPE)
6121 error ("nonnull argument with out-of-range operand number "
6122 "(argument %lu, operand %lu)",
6123 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6124 *no_add_attrs = true;
6125 return NULL_TREE;
6128 if (TREE_CODE (argument) != POINTER_TYPE)
6130 error ("nonnull argument references non-pointer operand "
6131 "(argument %lu, operand %lu)",
6132 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6133 *no_add_attrs = true;
6134 return NULL_TREE;
6139 return NULL_TREE;
6142 /* Handle a "sentinel" attribute. */
6144 static tree
6145 handle_sentinel_attribute (tree *node, tree name, tree args,
6146 int ARG_UNUSED (flags), bool *no_add_attrs)
6148 if (!prototype_p (*node))
6150 warning (OPT_Wattributes,
6151 "%qs attribute requires prototypes with named arguments",
6152 IDENTIFIER_POINTER (name));
6153 *no_add_attrs = true;
6155 else
6157 if (!stdarg_p (*node))
6159 warning (OPT_Wattributes,
6160 "%qs attribute only applies to variadic functions",
6161 IDENTIFIER_POINTER (name));
6162 *no_add_attrs = true;
6166 if (args)
6168 tree position = TREE_VALUE (args);
6170 if (TREE_CODE (position) != INTEGER_CST)
6172 warning (0, "requested position is not an integer constant");
6173 *no_add_attrs = true;
6175 else
6177 if (tree_int_cst_lt (position, integer_zero_node))
6179 warning (0, "requested position is less than zero");
6180 *no_add_attrs = true;
6185 return NULL_TREE;
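/* C-level form of the attribute checked above, on a hypothetical
   declaration: the variadic argument list must end with a null pointer,
   the optional position counting from the end and defaulting to 0:

     extern char *concat_strings (const char *first, ...)
       __attribute__ ((sentinel));  // same as sentinel (0)
*/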
6188 /* Handle a "noreturn" attribute; arguments as in
6189 struct attribute_spec.handler. */
6191 static tree
6192 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6193 int ARG_UNUSED (flags), bool *no_add_attrs)
6195 tree type = TREE_TYPE (*node);
6197 /* See FIXME comment in c_common_attribute_table. */
6198 if (TREE_CODE (*node) == FUNCTION_DECL)
6199 TREE_THIS_VOLATILE (*node) = 1;
6200 else if (TREE_CODE (type) == POINTER_TYPE
6201 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
6202 TREE_TYPE (*node)
6203 = build_pointer_type
6204 (change_qualified_type (TREE_TYPE (type), TYPE_QUAL_VOLATILE));
6205 else
6207 warning (OPT_Wattributes, "%qs attribute ignored",
6208 IDENTIFIER_POINTER (name));
6209 *no_add_attrs = true;
6212 return NULL_TREE;
6215 /* Handle a "noinline" attribute; arguments as in
6216 struct attribute_spec.handler. */
6218 static tree
6219 handle_noinline_attribute (tree *node, tree name,
6220 tree ARG_UNUSED (args),
6221 int ARG_UNUSED (flags), bool *no_add_attrs)
6223 if (TREE_CODE (*node) == FUNCTION_DECL)
6225 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
6227 warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6228 "with attribute %qs", name, "always_inline");
6229 *no_add_attrs = true;
6231 else
6232 DECL_UNINLINABLE (*node) = 1;
6234 else
6236 warning (OPT_Wattributes, "%qE attribute ignored", name);
6237 *no_add_attrs = true;
6240 return NULL_TREE;
6243 /* Handle a "noclone" attribute; arguments as in
6244 struct attribute_spec.handler. */
6246 static tree
6247 handle_noclone_attribute (tree *node, tree name,
6248 tree ARG_UNUSED (args),
6249 int ARG_UNUSED (flags), bool *no_add_attrs)
6251 if (TREE_CODE (*node) != FUNCTION_DECL)
6253 warning (OPT_Wattributes, "%qE attribute ignored", name);
6254 *no_add_attrs = true;
6257 return NULL_TREE;
6260 /* Handle a "leaf" attribute; arguments as in
6261 struct attribute_spec.handler. */
6263 static tree
6264 handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6265 int ARG_UNUSED (flags), bool *no_add_attrs)
6267 if (TREE_CODE (*node) != FUNCTION_DECL)
6269 warning (OPT_Wattributes, "%qE attribute ignored", name);
6270 *no_add_attrs = true;
6272 if (!TREE_PUBLIC (*node))
6274 warning (OPT_Wattributes, "%qE attribute has no effect", name);
6275 *no_add_attrs = true;
6278 return NULL_TREE;
6281 /* Handle an "always_inline" attribute; arguments as in
6282 struct attribute_spec.handler. */
6284 static tree
6285 handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6286 int ARG_UNUSED (flags), bool *no_add_attrs)
6288 if (TREE_CODE (*node) == FUNCTION_DECL)
6290 /* Set the attribute and mark it for disregarding inline limits. */
6291 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
6293 else
6295 warning (OPT_Wattributes, "%qE attribute ignored", name);
6296 *no_add_attrs = true;
6299 return NULL_TREE;
6302 /* Handle a "malloc" attribute; arguments as in
6303 struct attribute_spec.handler. */
6305 static tree
6306 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6307 int ARG_UNUSED (flags), bool *no_add_attrs)
6309 if (TREE_CODE (*node) == FUNCTION_DECL
6310 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6311 DECL_IS_MALLOC (*node) = 1;
6312 else
6314 warning (OPT_Wattributes, "%qs attribute ignored",
6315 IDENTIFIER_POINTER (name));
6316 *no_add_attrs = true;
6319 return NULL_TREE;
6322 /* Fake handler for attributes we don't properly support. */
6324 tree
6325 fake_attribute_handler (tree * ARG_UNUSED (node),
6326 tree ARG_UNUSED (name),
6327 tree ARG_UNUSED (args),
6328 int ARG_UNUSED (flags),
6329 bool * ARG_UNUSED (no_add_attrs))
6331 return NULL_TREE;
6334 /* Handle a "type_generic" attribute. */
6336 static tree
6337 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6338 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6339 bool * ARG_UNUSED (no_add_attrs))
6341 /* Ensure we have a function type. */
6342 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
6344 /* Ensure we have a variadic function. */
6345 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
6347 return NULL_TREE;
6350 /* Handle a "vector_size" attribute; arguments as in
6351 struct attribute_spec.handler. */
6353 static tree
6354 handle_vector_size_attribute (tree *node, tree name, tree args,
6355 int ARG_UNUSED (flags), bool *no_add_attrs)
6357 tree type = *node;
6358 tree vector_type;
6360 *no_add_attrs = true;
6362 /* We need to provide for vector pointers, vector arrays, and
6363 functions returning vectors. For example:
6365 __attribute__((vector_size(16))) short *foo;
6367 In this case, the mode is SI, but the type being modified is
6368 HI, so we need to look further. */
6369 while (POINTER_TYPE_P (type)
6370 || TREE_CODE (type) == FUNCTION_TYPE
6371 || TREE_CODE (type) == ARRAY_TYPE)
6372 type = TREE_TYPE (type);
6374 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
6375 if (!vector_type)
6376 return NULL_TREE;
6378 /* Build back pointers if needed. */
6379 *node = reconstruct_complex_type (*node, vector_type);
6381 return NULL_TREE;
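/* C-level form of the attribute handled above: a 16-byte vector of
   four 32-bit integers.

     typedef int v4si __attribute__ ((vector_size (16)));
*/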
6384 /* Handle a "vector_type" attribute; arguments as in
6385 struct attribute_spec.handler. */
6387 static tree
6388 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6389 int ARG_UNUSED (flags), bool *no_add_attrs)
6391 tree type = *node;
6392 tree vector_type;
6394 *no_add_attrs = true;
6396 if (TREE_CODE (type) != ARRAY_TYPE)
6398 error ("attribute %qs applies to array types only",
6399 IDENTIFIER_POINTER (name));
6400 return NULL_TREE;
6403 vector_type = build_vector_type_for_array (type, name);
6404 if (!vector_type)
6405 return NULL_TREE;
6407 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
6408 *node = vector_type;
6410 return NULL_TREE;
6413 /* ----------------------------------------------------------------------- *
6414 * BUILTIN FUNCTIONS *
6415 * ----------------------------------------------------------------------- */
6417 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6418 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
6419 if nonansi_p and flag_no_nonansi_builtin. */
6421 static void
6422 def_builtin_1 (enum built_in_function fncode,
6423 const char *name,
6424 enum built_in_class fnclass,
6425 tree fntype, tree libtype,
6426 bool both_p, bool fallback_p,
6427 bool nonansi_p ATTRIBUTE_UNUSED,
6428 tree fnattrs, bool implicit_p)
6430 tree decl;
6431 const char *libname;
6433 /* Preserve an already installed decl. It most likely was set up in advance
6434 (e.g. as part of the internal builtins) for specific reasons. */
6435 if (builtin_decl_explicit (fncode))
6436 return;
6438 if (fntype == error_mark_node)
6439 return;
6441 gcc_assert ((!both_p && !fallback_p)
6442 || !strncmp (name, "__builtin_",
6443 strlen ("__builtin_")));
6445 libname = name + strlen ("__builtin_");
6446 decl = add_builtin_function (name, fntype, fncode, fnclass,
6447 (fallback_p ? libname : NULL),
6448 fnattrs);
6449 if (both_p)
6450 /* ??? This is normally further controlled by command-line options
6451 like -fno-builtin, but we don't have them for Ada. */
6452 add_builtin_function (libname, libtype, fncode, fnclass,
6453 NULL, fnattrs);
6455 set_builtin_decl (fncode, decl, implicit_p);
6458 static int flag_isoc94 = 0;
6459 static int flag_isoc99 = 0;
6460 static int flag_isoc11 = 0;
6462 /* Install what the common builtins.def offers. */
6464 static void
6465 install_builtin_functions (void)
6467 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6468 NONANSI_P, ATTRS, IMPLICIT, COND) \
6469 if (NAME && COND) \
6470 def_builtin_1 (ENUM, NAME, CLASS, \
6471 builtin_types[(int) TYPE], \
6472 builtin_types[(int) LIBTYPE], \
6473 BOTH_P, FALLBACK_P, NONANSI_P, \
6474 built_in_attributes[(int) ATTRS], IMPLICIT);
6475 #include "builtins.def"
6478 /* ----------------------------------------------------------------------- *
6479 * BUILTIN FUNCTIONS *
6480 * ----------------------------------------------------------------------- */
6482 /* Install the builtin functions we might need. */
6484 void
6485 gnat_install_builtins (void)
6487 install_builtin_elementary_types ();
6488 install_builtin_function_types ();
6489 install_builtin_attributes ();
6491 /* Install builtins used by generic middle-end pieces first. Some of these
6492 know about internal specificities and control attributes accordingly, for
6493 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
6494 the generic definition from builtins.def. */
6495 build_common_builtin_nodes ();
6497 /* Now, install the target specific builtins, such as the AltiVec family on
6498 ppc, and the common set as exposed by builtins.def. */
6499 targetm.init_builtins ();
6500 install_builtin_functions ();
6503 #include "gt-ada-utils.h"
6504 #include "gtype-ada.h"