/* PR ada/62235 — gcc/ada/gcc-interface/utils.c
   (extracted from official-gcc.git, blob 1c83a08d5bdd63885500a9f0c908aff5a8e6260c)  */

/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                                U T I L S                                 *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2017, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have received a copy of the GNU General   *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "tree.h"
#include "stringpool.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "attribs.h"
#include "varasm.h"
#include "toplev.h"
#include "output.h"
#include "debug.h"
#include "convert.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "tree-dump.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "nlists.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"
60 /* If nonzero, pretend we are allocating at global level. */
61 int force_global;
63 /* The default alignment of "double" floating-point types, i.e. floating
64 point types whose size is equal to 64 bits, or 0 if this alignment is
65 not specifically capped. */
66 int double_float_alignment;
68 /* The default alignment of "double" or larger scalar types, i.e. scalar
69 types whose size is greater or equal to 64 bits, or 0 if this alignment
70 is not specifically capped. */
71 int double_scalar_alignment;
73 /* True if floating-point arithmetics may use wider intermediate results. */
74 bool fp_arith_may_widen = true;
76 /* Tree nodes for the various types and decls we create. */
77 tree gnat_std_decls[(int) ADT_LAST];
79 /* Functions to call for each of the possible raise reasons. */
80 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
82 /* Likewise, but with extra info for each of the possible raise reasons. */
83 tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
85 /* Forward declarations for handlers of attributes. */
86 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_noinline_attribute (tree *, tree, tree, int, bool *);
94 static tree handle_noclone_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
97 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
98 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
99 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
100 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
102 /* Fake handler for attributes we don't properly support, typically because
103 they'd require dragging a lot of the common-c front-end circuitry. */
104 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
106 /* Table of machine-independent internal attributes for Ada. We support
107 this minimal set of attributes to accommodate the needs of builtins. */
108 const struct attribute_spec gnat_internal_attribute_table[] =
110 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
111 affects_type_identity } */
112 { "const", 0, 0, true, false, false, handle_const_attribute,
113 false },
114 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute,
115 false },
116 { "pure", 0, 0, true, false, false, handle_pure_attribute,
117 false },
118 { "no vops", 0, 0, true, false, false, handle_novops_attribute,
119 false },
120 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute,
121 false },
122 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute,
123 false },
124 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute,
125 false },
126 { "noinline", 0, 0, true, false, false, handle_noinline_attribute,
127 false },
128 { "noclone", 0, 0, true, false, false, handle_noclone_attribute,
129 false },
130 { "leaf", 0, 0, true, false, false, handle_leaf_attribute,
131 false },
132 { "always_inline",0, 0, true, false, false, handle_always_inline_attribute,
133 false },
134 { "malloc", 0, 0, true, false, false, handle_malloc_attribute,
135 false },
136 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute,
137 false },
139 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute,
140 false },
141 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute,
142 false },
143 { "may_alias", 0, 0, false, true, false, NULL, false },
145 /* ??? format and format_arg are heavy and not supported, which actually
146 prevents support for stdio builtins, which we however declare as part
147 of the common builtins.def contents. */
148 { "format", 3, 3, false, true, true, fake_attribute_handler, false },
149 { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },
151 { NULL, 0, 0, false, false, false, NULL, false }
154 /* Associates a GNAT tree node to a GCC tree node. It is used in
155 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
156 of `save_gnu_tree' for more info. */
157 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
159 #define GET_GNU_TREE(GNAT_ENTITY) \
160 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
162 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
163 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
165 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
166 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
168 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
169 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
171 #define GET_DUMMY_NODE(GNAT_ENTITY) \
172 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
174 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
175 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
177 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
178 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
180 /* This variable keeps a table for types for each precision so that we only
181 allocate each of them once. Signed and unsigned types are kept separate.
183 Note that these types are only used when fold-const requests something
184 special. Perhaps we should NOT share these types; we'll see how it
185 goes later. */
186 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
188 /* Likewise for float types, but record these by mode. */
189 static GTY(()) tree float_types[NUM_MACHINE_MODES];
191 /* For each binding contour we allocate a binding_level structure to indicate
192 the binding depth. */
194 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
195 /* The binding level containing this one (the enclosing binding level). */
196 struct gnat_binding_level *chain;
197 /* The BLOCK node for this level. */
198 tree block;
199 /* If nonzero, the setjmp buffer that needs to be updated for any
200 variable-sized definition within this context. */
201 tree jmpbuf_decl;
204 /* The binding level currently in effect. */
205 static GTY(()) struct gnat_binding_level *current_binding_level;
207 /* A chain of gnat_binding_level structures awaiting reuse. */
208 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
210 /* The context to be used for global declarations. */
211 static GTY(()) tree global_context;
213 /* An array of global declarations. */
214 static GTY(()) vec<tree, va_gc> *global_decls;
216 /* An array of builtin function declarations. */
217 static GTY(()) vec<tree, va_gc> *builtin_decls;
219 /* A chain of unused BLOCK nodes. */
220 static GTY((deletable)) tree free_block_chain;
222 /* A hash table of padded types. It is modelled on the generic type
223 hash table in tree.c, which must thus be used as a reference. */
225 struct GTY((for_user)) pad_type_hash {
226 unsigned long hash;
227 tree type;
230 struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
232 static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
233 static bool equal (pad_type_hash *a, pad_type_hash *b);
235 static int
236 keep_cache_entry (pad_type_hash *&t)
238 return ggc_marked_p (t->type);
242 static GTY ((cache)) hash_table<pad_type_hasher> *pad_type_hash_table;
244 static tree merge_sizes (tree, tree, tree, bool, bool);
245 static tree fold_bit_position (const_tree);
246 static tree compute_related_constant (tree, tree);
247 static tree split_plus (tree, tree *);
248 static tree float_type_for_precision (int, machine_mode);
249 static tree convert_to_fat_pointer (tree, tree);
250 static unsigned int scale_by_factor_of (tree, unsigned int);
251 static bool potential_alignment_gap (tree, tree, tree);
253 /* Linked list used as a queue to defer the initialization of the DECL_CONTEXT
254 of ..._DECL nodes and of the TYPE_CONTEXT of ..._TYPE nodes. */
255 struct deferred_decl_context_node
257 /* The ..._DECL node to work on. */
258 tree decl;
260 /* The corresponding entity's Scope. */
261 Entity_Id gnat_scope;
263 /* The value of force_global when DECL was pushed. */
264 int force_global;
266 /* The list of ..._TYPE nodes to propagate the context to. */
267 vec<tree> types;
269 /* The next queue item. */
270 struct deferred_decl_context_node *next;
273 static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;
275 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
276 feed it with the elaboration of GNAT_SCOPE. */
277 static struct deferred_decl_context_node *
278 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);
280 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
281 feed it with the DECL_CONTEXT computed as part of N as soon as it is
282 computed. */
283 static void add_deferred_type_context (struct deferred_decl_context_node *n,
284 tree type);
286 /* Initialize data structures of the utils.c module. */
288 void
289 init_gnat_utils (void)
291 /* Initialize the association of GNAT nodes to GCC trees. */
292 associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
294 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
295 dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
297 /* Initialize the hash table of padded types. */
298 pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
301 /* Destroy data structures of the utils.c module. */
303 void
304 destroy_gnat_utils (void)
306 /* Destroy the association of GNAT nodes to GCC trees. */
307 ggc_free (associate_gnat_to_gnu);
308 associate_gnat_to_gnu = NULL;
310 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
311 ggc_free (dummy_node_table);
312 dummy_node_table = NULL;
314 /* Destroy the hash table of padded types. */
315 pad_type_hash_table->empty ();
316 pad_type_hash_table = NULL;
319 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
320 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
321 If NO_CHECK is true, the latter check is suppressed.
323 If GNU_DECL is zero, reset a previous association. */
325 void
326 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
328 /* Check that GNAT_ENTITY is not already defined and that it is being set
329 to something which is a decl. If that is not the case, this usually
330 means GNAT_ENTITY is defined twice, but occasionally is due to some
331 Gigi problem. */
332 gcc_assert (!(gnu_decl
333 && (PRESENT_GNU_TREE (gnat_entity)
334 || (!no_check && !DECL_P (gnu_decl)))));
336 SET_GNU_TREE (gnat_entity, gnu_decl);
339 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
340 that was associated with it. If there is no such tree node, abort.
342 In some cases, such as delayed elaboration or expressions that need to
343 be elaborated only once, GNAT_ENTITY is really not an entity. */
345 tree
346 get_gnu_tree (Entity_Id gnat_entity)
348 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
349 return GET_GNU_TREE (gnat_entity);
352 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
354 bool
355 present_gnu_tree (Entity_Id gnat_entity)
357 return PRESENT_GNU_TREE (gnat_entity);
360 /* Make a dummy type corresponding to GNAT_TYPE. */
362 tree
363 make_dummy_type (Entity_Id gnat_type)
365 Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
366 tree gnu_type, debug_type;
368 /* If there was no equivalent type (can only happen when just annotating
369 types) or underlying type, go back to the original type. */
370 if (No (gnat_equiv))
371 gnat_equiv = gnat_type;
373 /* If it there already a dummy type, use that one. Else make one. */
374 if (PRESENT_DUMMY_NODE (gnat_equiv))
375 return GET_DUMMY_NODE (gnat_equiv);
377 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
378 an ENUMERAL_TYPE. */
379 gnu_type = make_node (Is_Record_Type (gnat_equiv)
380 ? tree_code_for_record_type (gnat_equiv)
381 : ENUMERAL_TYPE);
382 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
383 TYPE_DUMMY_P (gnu_type) = 1;
384 TYPE_STUB_DECL (gnu_type)
385 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
386 if (Is_By_Reference_Type (gnat_equiv))
387 TYPE_BY_REFERENCE_P (gnu_type) = 1;
389 SET_DUMMY_NODE (gnat_equiv, gnu_type);
391 /* Create a debug type so that debug info consumers only see an unspecified
392 type. */
393 if (Needs_Debug_Info (gnat_type))
395 debug_type = make_node (LANG_TYPE);
396 SET_TYPE_DEBUG_TYPE (gnu_type, debug_type);
398 TYPE_NAME (debug_type) = TYPE_NAME (gnu_type);
399 TYPE_ARTIFICIAL (debug_type) = TYPE_ARTIFICIAL (gnu_type);
402 return gnu_type;
405 /* Return the dummy type that was made for GNAT_TYPE, if any. */
407 tree
408 get_dummy_type (Entity_Id gnat_type)
410 return GET_DUMMY_NODE (gnat_type);
413 /* Build dummy fat and thin pointer types whose designated type is specified
414 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
416 void
417 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
419 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
420 tree gnu_fat_type, fields, gnu_object_type;
422 gnu_template_type = make_node (RECORD_TYPE);
423 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
424 TYPE_DUMMY_P (gnu_template_type) = 1;
425 gnu_ptr_template = build_pointer_type (gnu_template_type);
427 gnu_array_type = make_node (ENUMERAL_TYPE);
428 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
429 TYPE_DUMMY_P (gnu_array_type) = 1;
430 gnu_ptr_array = build_pointer_type (gnu_array_type);
432 gnu_fat_type = make_node (RECORD_TYPE);
433 /* Build a stub DECL to trigger the special processing for fat pointer types
434 in gnat_pushdecl. */
435 TYPE_NAME (gnu_fat_type)
436 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
437 gnu_fat_type);
438 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
439 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
440 DECL_CHAIN (fields)
441 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
442 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
443 finish_fat_pointer_type (gnu_fat_type, fields);
444 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
445 /* Suppress debug info until after the type is completed. */
446 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
448 gnu_object_type = make_node (RECORD_TYPE);
449 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
450 TYPE_DUMMY_P (gnu_object_type) = 1;
452 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
453 TYPE_REFERENCE_TO (gnu_desig_type) = gnu_fat_type;
454 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
457 /* Return true if we are in the global binding level. */
459 bool
460 global_bindings_p (void)
462 return force_global || !current_function_decl;
465 /* Enter a new binding level. */
467 void
468 gnat_pushlevel (void)
470 struct gnat_binding_level *newlevel = NULL;
472 /* Reuse a struct for this binding level, if there is one. */
473 if (free_binding_level)
475 newlevel = free_binding_level;
476 free_binding_level = free_binding_level->chain;
478 else
479 newlevel = ggc_alloc<gnat_binding_level> ();
481 /* Use a free BLOCK, if any; otherwise, allocate one. */
482 if (free_block_chain)
484 newlevel->block = free_block_chain;
485 free_block_chain = BLOCK_CHAIN (free_block_chain);
486 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
488 else
489 newlevel->block = make_node (BLOCK);
491 /* Point the BLOCK we just made to its parent. */
492 if (current_binding_level)
493 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
495 BLOCK_VARS (newlevel->block) = NULL_TREE;
496 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
497 TREE_USED (newlevel->block) = 1;
499 /* Add this level to the front of the chain (stack) of active levels. */
500 newlevel->chain = current_binding_level;
501 newlevel->jmpbuf_decl = NULL_TREE;
502 current_binding_level = newlevel;
505 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
506 and point FNDECL to this BLOCK. */
508 void
509 set_current_block_context (tree fndecl)
511 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
512 DECL_INITIAL (fndecl) = current_binding_level->block;
513 set_block_for_group (current_binding_level->block);
516 /* Set the jmpbuf_decl for the current binding level to DECL. */
518 void
519 set_block_jmpbuf_decl (tree decl)
521 current_binding_level->jmpbuf_decl = decl;
524 /* Get the jmpbuf_decl, if any, for the current binding level. */
526 tree
527 get_block_jmpbuf_decl (void)
529 return current_binding_level->jmpbuf_decl;
532 /* Exit a binding level. Set any BLOCK into the current code group. */
534 void
535 gnat_poplevel (void)
537 struct gnat_binding_level *level = current_binding_level;
538 tree block = level->block;
540 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
541 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
543 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
544 are no variables free the block and merge its subblocks into those of its
545 parent block. Otherwise, add it to the list of its parent. */
546 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
548 else if (!BLOCK_VARS (block))
550 BLOCK_SUBBLOCKS (level->chain->block)
551 = block_chainon (BLOCK_SUBBLOCKS (block),
552 BLOCK_SUBBLOCKS (level->chain->block));
553 BLOCK_CHAIN (block) = free_block_chain;
554 free_block_chain = block;
556 else
558 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
559 BLOCK_SUBBLOCKS (level->chain->block) = block;
560 TREE_USED (block) = 1;
561 set_block_for_group (block);
564 /* Free this binding structure. */
565 current_binding_level = level->chain;
566 level->chain = free_binding_level;
567 free_binding_level = level;
570 /* Exit a binding level and discard the associated BLOCK. */
572 void
573 gnat_zaplevel (void)
575 struct gnat_binding_level *level = current_binding_level;
576 tree block = level->block;
578 BLOCK_CHAIN (block) = free_block_chain;
579 free_block_chain = block;
581 /* Free this binding structure. */
582 current_binding_level = level->chain;
583 level->chain = free_binding_level;
584 free_binding_level = level;
587 /* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
589 static void
590 gnat_set_type_context (tree type, tree context)
592 tree decl = TYPE_STUB_DECL (type);
594 TYPE_CONTEXT (type) = context;
596 while (decl && DECL_PARALLEL_TYPE (decl))
598 tree parallel_type = DECL_PARALLEL_TYPE (decl);
600 /* Give a context to the parallel types and their stub decl, if any.
601 Some parallel types seems to be present in multiple parallel type
602 chains, so don't mess with their context if they already have one. */
603 if (!TYPE_CONTEXT (parallel_type))
605 if (TYPE_STUB_DECL (parallel_type))
606 DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
607 TYPE_CONTEXT (parallel_type) = context;
610 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
614 /* Return the innermost scope, starting at GNAT_NODE, we are be interested in
615 the debug info, or Empty if there is no such scope. If not NULL, set
616 IS_SUBPROGRAM to whether the returned entity is a subprogram. */
618 Entity_Id
619 get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
621 Entity_Id gnat_entity;
623 if (is_subprogram)
624 *is_subprogram = false;
626 if (Nkind (gnat_node) == N_Defining_Identifier
627 || Nkind (gnat_node) == N_Defining_Operator_Symbol)
628 gnat_entity = Scope (gnat_node);
629 else
630 return Empty;
632 while (Present (gnat_entity))
634 switch (Ekind (gnat_entity))
636 case E_Function:
637 case E_Procedure:
638 if (Present (Protected_Body_Subprogram (gnat_entity)))
639 gnat_entity = Protected_Body_Subprogram (gnat_entity);
641 /* If the scope is a subprogram, then just rely on
642 current_function_decl, so that we don't have to defer
643 anything. This is needed because other places rely on the
644 validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes. */
645 if (is_subprogram)
646 *is_subprogram = true;
647 return gnat_entity;
649 case E_Record_Type:
650 case E_Record_Subtype:
651 return gnat_entity;
653 default:
654 /* By default, we are not interested in this particular scope: go to
655 the outer one. */
656 break;
659 gnat_entity = Scope (gnat_entity);
662 return Empty;
665 /* If N is NULL, set TYPE's context to CONTEXT. Defer this to the processing
666 of N otherwise. */
668 static void
669 defer_or_set_type_context (tree type, tree context,
670 struct deferred_decl_context_node *n)
672 if (n)
673 add_deferred_type_context (n, type);
674 else
675 gnat_set_type_context (type, context);
678 /* Return global_context, but create it first if need be. */
680 static tree
681 get_global_context (void)
683 if (!global_context)
685 global_context
686 = build_translation_unit_decl (get_identifier (main_input_filename));
687 debug_hooks->register_main_translation_unit (global_context);
690 return global_context;
693 /* Record DECL as belonging to the current lexical scope and use GNAT_NODE
694 for location information and flag propagation. */
696 void
697 gnat_pushdecl (tree decl, Node_Id gnat_node)
699 tree context = NULL_TREE;
700 struct deferred_decl_context_node *deferred_decl_context = NULL;
702 /* If explicitely asked to make DECL global or if it's an imported nested
703 object, short-circuit the regular Scope-based context computation. */
704 if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
706 /* Rely on the GNAT scope, or fallback to the current_function_decl if
707 the GNAT scope reached the global scope, if it reached a subprogram
708 or the declaration is a subprogram or a variable (for them we skip
709 intermediate context types because the subprogram body elaboration
710 machinery and the inliner both expect a subprogram context).
712 Falling back to current_function_decl is necessary for implicit
713 subprograms created by gigi, such as the elaboration subprograms. */
714 bool context_is_subprogram = false;
715 const Entity_Id gnat_scope
716 = get_debug_scope (gnat_node, &context_is_subprogram);
718 if (Present (gnat_scope)
719 && !context_is_subprogram
720 && TREE_CODE (decl) != FUNCTION_DECL
721 && TREE_CODE (decl) != VAR_DECL)
722 /* Always assume the scope has not been elaborated, thus defer the
723 context propagation to the time its elaboration will be
724 available. */
725 deferred_decl_context
726 = add_deferred_decl_context (decl, gnat_scope, force_global);
728 /* External declarations (when force_global > 0) may not be in a
729 local context. */
730 else if (current_function_decl && force_global == 0)
731 context = current_function_decl;
734 /* If either we are forced to be in global mode or if both the GNAT scope and
735 the current_function_decl did not help in determining the context, use the
736 global scope. */
737 if (!deferred_decl_context && !context)
738 context = get_global_context ();
740 /* Functions imported in another function are not really nested.
741 For really nested functions mark them initially as needing
742 a static chain for uses of that flag before unnesting;
743 lower_nested_functions will then recompute it. */
744 if (TREE_CODE (decl) == FUNCTION_DECL
745 && !TREE_PUBLIC (decl)
746 && context
747 && (TREE_CODE (context) == FUNCTION_DECL
748 || decl_function_context (context)))
749 DECL_STATIC_CHAIN (decl) = 1;
751 if (!deferred_decl_context)
752 DECL_CONTEXT (decl) = context;
754 TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));
756 /* Set the location of DECL and emit a declaration for it. */
757 if (Present (gnat_node) && !renaming_from_instantiation_p (gnat_node))
758 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
760 add_decl_expr (decl, gnat_node);
762 /* Put the declaration on the list. The list of declarations is in reverse
763 order. The list will be reversed later. Put global declarations in the
764 globals list and local ones in the current block. But skip TYPE_DECLs
765 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
766 with the debugger and aren't needed anyway. */
767 if (!(TREE_CODE (decl) == TYPE_DECL
768 && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
770 /* External declarations must go to the binding level they belong to.
771 This will make corresponding imported entities are available in the
772 debugger at the proper time. */
773 if (DECL_EXTERNAL (decl)
774 && TREE_CODE (decl) == FUNCTION_DECL
775 && DECL_BUILT_IN (decl))
776 vec_safe_push (builtin_decls, decl);
777 else if (global_bindings_p ())
778 vec_safe_push (global_decls, decl);
779 else
781 DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
782 BLOCK_VARS (current_binding_level->block) = decl;
786 /* For the declaration of a type, set its name either if it isn't already
787 set or if the previous type name was not derived from a source name.
788 We'd rather have the type named with a real name and all the pointer
789 types to the same object have the same node, except when the names are
790 both derived from source names. */
791 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
793 tree t = TREE_TYPE (decl);
795 /* Array and pointer types aren't tagged types in the C sense so we need
796 to generate a typedef in DWARF for them and make sure it is preserved,
797 unless the type is artificial. */
798 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
799 && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
800 || DECL_ARTIFICIAL (decl)))
802 /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
803 generate the typedef in DWARF. Also do that for fat pointer types
804 because, even though they are tagged types in the C sense, they are
805 still XUP types attached to the base array type at this point. */
806 else if (!DECL_ARTIFICIAL (decl)
807 && (TREE_CODE (t) == ARRAY_TYPE
808 || TREE_CODE (t) == POINTER_TYPE
809 || TYPE_IS_FAT_POINTER_P (t)))
811 tree tt = build_variant_type_copy (t);
812 TYPE_NAME (tt) = decl;
813 defer_or_set_type_context (tt,
814 DECL_CONTEXT (decl),
815 deferred_decl_context);
816 TREE_TYPE (decl) = tt;
817 if (TYPE_NAME (t)
818 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
819 && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
820 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
821 else
822 DECL_ORIGINAL_TYPE (decl) = t;
823 /* Array types need to have a name so that they can be related to
824 their GNAT encodings. */
825 if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
826 TYPE_NAME (t) = DECL_NAME (decl);
827 t = NULL_TREE;
829 else if (TYPE_NAME (t)
830 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
831 && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
833 else
834 t = NULL_TREE;
836 /* Propagate the name to all the variants, this is needed for the type
837 qualifiers machinery to work properly (see check_qualified_type).
838 Also propagate the context to them. Note that it will be propagated
839 to all parallel types too thanks to gnat_set_type_context. */
840 if (t)
841 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
842 /* ??? Because of the previous kludge, we can have variants of fat
843 pointer types with different names. */
844 if (!(TYPE_IS_FAT_POINTER_P (t)
845 && TYPE_NAME (t)
846 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
848 TYPE_NAME (t) = decl;
849 defer_or_set_type_context (t,
850 DECL_CONTEXT (decl),
851 deferred_decl_context);
856 /* Create a record type that contains a SIZE bytes long field of TYPE with a
857 starting bit position so that it is aligned to ALIGN bits, and leaving at
858 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
859 record is guaranteed to get. GNAT_NODE is used for the position of the
860 associated TYPE_DECL. */
862 tree
863 make_aligning_type (tree type, unsigned int align, tree size,
864 unsigned int base_align, int room, Node_Id gnat_node)
866 /* We will be crafting a record type with one field at a position set to be
867 the next multiple of ALIGN past record'address + room bytes. We use a
868 record placeholder to express record'address. */
869 tree record_type = make_node (RECORD_TYPE);
870 tree record = build0 (PLACEHOLDER_EXPR, record_type);
872 tree record_addr_st
873 = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));
875 /* The diagram below summarizes the shape of what we manipulate:
877 <--------- pos ---------->
878 { +------------+-------------+-----------------+
879 record =>{ |############| ... | field (type) |
880 { +------------+-------------+-----------------+
881 |<-- room -->|<- voffset ->|<---- size ----->|
884 record_addr vblock_addr
886 Every length is in sizetype bytes there, except "pos" which has to be
887 set as a bit position in the GCC tree for the record. */
888 tree room_st = size_int (room);
889 tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
890 tree voffset_st, pos, field;
892 tree name = TYPE_IDENTIFIER (type);
894 name = concat_name (name, "ALIGN");
895 TYPE_NAME (record_type) = name;
897 /* Compute VOFFSET and then POS. The next byte position multiple of some
898 alignment after some address is obtained by "and"ing the alignment minus
899 1 with the two's complement of the address. */
/* NOTE(review): the mask below assumes ALIGN is a nonzero multiple of
   BITS_PER_UNIT -- confirm that all callers guarantee this.  */
900 voffset_st = size_binop (BIT_AND_EXPR,
901 fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
902 size_int ((align / BITS_PER_UNIT) - 1));
904 /* POS = (ROOM + VOFFSET) * BITS_PER_UNIT, in bitsizetype. */
905 pos = size_binop (MULT_EXPR,
906 convert (bitsizetype,
907 size_binop (PLUS_EXPR, room_st, voffset_st)),
908 bitsize_unit_node);
910 /* Craft the GCC record representation. We exceptionally do everything
911 manually here because 1) our generic circuitry is not quite ready to
912 handle the complex position/size expressions we are setting up, 2) we
913 have a strong simplifying factor at hand: we know the maximum possible
914 value of voffset, and 3) we have to set/reset at least the sizes in
915 accordance with this maximum value anyway, as we need them to convey
916 what should be "alloc"ated for this type.
918 Use -1 as the 'addressable' indication for the field to prevent the
919 creation of a bitfield. We don't need one, it would have damaging
920 consequences on the alignment computation, and create_field_decl would
921 make one without this special argument, for instance because of the
922 complex position expression. */
923 field = create_field_decl (get_identifier ("F"), type, record_type, size,
924 pos, 1, -1);
925 TYPE_FIELDS (record_type) = field;
/* BASE_ALIGN is what the allocator guarantees for the record itself;
   ALIGN is only reached internally through the dynamic offset above.  */
927 SET_TYPE_ALIGN (record_type, base_align);
928 TYPE_USER_ALIGN (record_type) = 1;
/* The declared sizes must cover the worst case: ROOM bytes, a full
   ALIGN-bit alignment step for VOFFSET, plus the field itself.  */
930 TYPE_SIZE (record_type)
931 = size_binop (PLUS_EXPR,
932 size_binop (MULT_EXPR, convert (bitsizetype, size),
933 bitsize_unit_node),
934 bitsize_int (align + room * BITS_PER_UNIT));
935 TYPE_SIZE_UNIT (record_type)
936 = size_binop (PLUS_EXPR, size,
937 size_int (room + align / BITS_PER_UNIT));
939 SET_TYPE_MODE (record_type, BLKmode);
940 relate_alias_sets (record_type, type, ALIAS_SET_COPY);
942 /* Declare it now since it will never be declared otherwise. This is
943 necessary to ensure that its subtrees are properly marked. */
944 create_type_decl (name, record_type, true, false, gnat_node);
946 return record_type;
949 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
950 as the field type of a packed record if IN_RECORD is true, or as the
951 component type of a packed array if IN_RECORD is false. See if we can
952 rewrite it either as a type that has non-BLKmode, which we can pack
953 tighter in the packed record case, or as a smaller type with at most
954 MAX_ALIGN alignment if the value is non-zero. If so, return the new
955 type; if not, return the original type. */
957 tree
958 make_packable_type (tree type, bool in_record, unsigned int max_align)
/* NOTE(review): callers must ensure TYPE_SIZE fits in a uhwi here, since
   tree_to_uhwi asserts on its argument -- confirm at the call sites.  */
960 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
961 unsigned HOST_WIDE_INT new_size;
962 unsigned int align = TYPE_ALIGN (type);
963 unsigned int new_align;
965 /* No point in doing anything if the size is zero. */
966 if (size == 0)
967 return type;
969 tree new_type = make_node (TREE_CODE (type));
971 /* Copy the name and flags from the old type to that of the new.
972 Note that we rely on the pointer equality created here for
973 TYPE_NAME to look through conversions in various places. */
974 TYPE_NAME (new_type) = TYPE_NAME (type);
975 TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
976 TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
977 TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
978 if (TREE_CODE (type) == RECORD_TYPE)
979 TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
981 /* If we are in a record and have a small size, set the alignment to
982 try for an integral mode. Otherwise set it to try for a smaller
983 type with BLKmode. */
984 if (in_record && size <= MAX_FIXED_MODE_SIZE)
986 new_size = ceil_pow2 (size);
987 new_align = MIN (new_size, BIGGEST_ALIGNMENT);
988 SET_TYPE_ALIGN (new_type, new_align);
990 else
992 /* Do not try to shrink the size if the RM size is not constant. */
993 if (TYPE_CONTAINS_TEMPLATE_P (type)
994 || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
995 return type;
997 /* Round the RM size up to a unit boundary to get the minimal size
998 for a BLKmode record. Give up if it's already the size and we
999 don't need to lower the alignment. */
1000 new_size = tree_to_uhwi (TYPE_ADA_SIZE (type))
1001 new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
1002 if (new_size == size && (max_align == 0 || align <= max_align))
1003 return type;
1005 new_align = MIN (new_size & -new_size, BIGGEST_ALIGNMENT);
1006 if (max_align > 0 && new_align > max_align)
1007 new_align = max_align;
1008 SET_TYPE_ALIGN (new_type, MIN (align, new_align));
1011 TYPE_USER_ALIGN (new_type) = 1;
1013 /* Now copy the fields, keeping the position and size as we don't want
1014 to change the layout by propagating the packedness downwards. */
1015 tree new_field_list = NULL_TREE;
1016 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1018 tree new_field_type = TREE_TYPE (field);
/* Beware: this NEW_SIZE deliberately shadows the function-level variable
   of the same name; it holds the size of the individual field only.  */
1019 tree new_field, new_size;
1021 if (RECORD_OR_UNION_TYPE_P (new_field_type)
1022 && !TYPE_FAT_POINTER_P (new_field_type)
1023 && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
1024 new_field_type = make_packable_type (new_field_type, true, max_align);
1026 /* However, for the last field in a not already packed record type
1027 that is of an aggregate type, we need to use the RM size in the
1028 packable version of the record type, see finish_record_type. */
1029 if (!DECL_CHAIN (field)
1030 && !TYPE_PACKED (type)
1031 && RECORD_OR_UNION_TYPE_P (new_field_type)
1032 && !TYPE_FAT_POINTER_P (new_field_type)
1033 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
1034 && TYPE_ADA_SIZE (new_field_type))
1035 new_size = TYPE_ADA_SIZE (new_field_type);
1036 else
1037 new_size = DECL_SIZE (field);
1039 new_field
1040 = create_field_decl (DECL_NAME (field), new_field_type, new_type,
1041 new_size, bit_position (field),
1042 TYPE_PACKED (type),
1043 !DECL_NONADDRESSABLE_P (field));
1045 DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (field);
1046 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, field);
1047 if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
1048 DECL_QUALIFIER (new_field) = DECL_QUALIFIER (field);
/* Fields are chained in reverse here; nreverse below restores order.  */
1050 DECL_CHAIN (new_field) = new_field_list;
1051 new_field_list = new_field;
1054 finish_record_type (new_type, nreverse (new_field_list), 2, false);
1055 relate_alias_sets (new_type, type, ALIAS_SET_COPY);
1056 if (TYPE_STUB_DECL (type))
1057 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
1058 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
1060 /* If this is a padding record, we never want to make the size smaller
1061 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
1062 if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
1064 TYPE_SIZE (new_type) = TYPE_SIZE (type);
1065 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
1066 new_size = size;
1068 else
1070 TYPE_SIZE (new_type) = bitsize_int (new_size);
1071 TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);
1074 if (!TYPE_CONTAINS_TEMPLATE_P (type))
1075 SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
1077 compute_record_mode (new_type);
1079 /* Try harder to get a packable type if necessary, for example
1080 in case the record itself contains a BLKmode field. */
1081 if (in_record && TYPE_MODE (new_type) == BLKmode)
1082 SET_TYPE_MODE (new_type,
1083 mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1))
1085 /* If neither mode nor size nor alignment shrunk, return the old type. */
1086 if (TYPE_MODE (new_type) == BLKmode && new_size >= size && max_align == 0)
1087 return type;
1089 return new_type;
1092 /* Return true if TYPE has an unsigned representation. This needs to be used
1093 when the representation of types whose precision is not equal to their size
1094 is manipulated based on the RM size. */
1096 static inline bool
1097 type_unsigned_for_rm (tree type)
1099 /* This is the common case. */
1100 if (TYPE_UNSIGNED (type))
1101 return true;
1103 /* See the E_Signed_Integer_Subtype case of gnat_to_gnu_entity. */
/* A signed type still behaves as unsigned for RM purposes when its
   constant lower bound shows the range contains no negative value.  */
1104 if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
1105 && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
1106 return true;
1108 return false;
1111 /* Given a type TYPE, return a new type whose size is appropriate for SIZE.
1112 If TYPE is the best type, return it. Otherwise, make a new type. We
1113 only support new integral and pointer types. FOR_BIASED is true if
1114 we are making a biased type. */
1116 tree
1117 make_type_from_size (tree type, tree size_tree, bool for_biased)
1119 unsigned HOST_WIDE_INT size;
1120 bool biased_p;
1121 tree new_type;
1123 /* If size indicates an error, just return TYPE to avoid propagating
1124 the error. Likewise if it's too large to represent. */
1125 if (!size_tree || !tree_fits_uhwi_p (size_tree))
1126 return type;
1128 size = tree_to_uhwi (size_tree);
1130 switch (TREE_CODE (type))
1132 case INTEGER_TYPE:
1133 case ENUMERAL_TYPE:
1134 case BOOLEAN_TYPE:
1135 biased_p = (TREE_CODE (type) == INTEGER_TYPE
1136 && TYPE_BIASED_REPRESENTATION_P (type));
1138 /* Integer types with precision 0 are forbidden. */
/* Bump a zero size to 1 bit rather than failing.  */
1139 if (size == 0)
1140 size = 1;
1142 /* Only do something if the type isn't a packed array type and doesn't
1143 already have the proper size and the size isn't too large. */
1144 if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
1145 || (TYPE_PRECISION (type) == size && biased_p == for_biased)
1146 || size > LONG_LONG_TYPE_SIZE)
1147 break;
1149 biased_p |= for_biased;
1151 /* The type should be an unsigned type if the original type is unsigned
1152 or if the lower bound is constant and non-negative or if the type is
1153 biased, see E_Signed_Integer_Subtype case of gnat_to_gnu_entity. */
1154 if (type_unsigned_for_rm (type) || biased_p)
1155 new_type = make_unsigned_type (size);
1156 else
1157 new_type = make_signed_type (size);
1158 TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
1159 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
1160 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
1161 /* Copy the name to show that it's essentially the same type and
1162 not a subrange type. */
1163 TYPE_NAME (new_type) = TYPE_NAME (type);
1164 TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
1165 SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
1166 return new_type;
1168 case RECORD_TYPE:
1169 /* Do something if this is a fat pointer, in which case we
1170 may need to return the thin pointer. */
1171 if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
1173 scalar_int_mode p_mode;
/* Fall back to the default pointer mode if SIZE has no valid
   integer mode on this target.  */
1174 if (!int_mode_for_size (size, 0).exists (&p_mode)
1175 || !targetm.valid_pointer_mode (p_mode))
1176 p_mode = ptr_mode;
1177 return
1178 build_pointer_type_for_mode
1179 (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
1180 p_mode, 0);
1182 break;
1184 case POINTER_TYPE:
1185 /* Only do something if this is a thin pointer, in which case we
1186 may need to return the fat pointer. */
1187 if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
1188 return
1189 build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
1190 break;
1192 default:
1193 break;
/* No adjustment was possible or needed: hand back the input type.  */
1196 return type;
1199 /* Return true iff the padded types are equivalent. */
1201 bool
1202 pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
1204 tree type1, type2;
/* Fast path: different hashes can never be equal entries.  */
1206 if (t1->hash != t2->hash)
1207 return 0;
1209 type1 = t1->type;
1210 type2 = t2->type;
1212 /* We consider that the padded types are equivalent if they pad the same type
1213 and have the same size, alignment, RM size and storage order. Taking the
1214 mode into account is redundant since it is determined by the others. */
/* NOTE(review): the size and RM-size comparisons use tree pointer
   identity, which presumably relies on size trees being shared for the
   types entered in this table -- confirm against lookup users.  */
1215 return
1216 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1217 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1218 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1219 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
1220 && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
1223 /* Look up the padded TYPE in the hash table and return its canonical version
1224 if it exists; otherwise, insert it into the hash table. */
1226 static tree
1227 lookup_and_insert_pad_type (tree type)
1229 hashval_t hashcode;
1230 struct pad_type_hash in, *h;
/* Hash the same components that pad_type_hasher::equal compares: inner
   type, size, alignment and RM size (storage order is left to equal).  */
1232 hashcode
1233 = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
1234 hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
1235 hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
1236 hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);
1238 in.hash = hashcode;
1239 in.type = type;
1240 h = pad_type_hash_table->find_with_hash (&in, hashcode);
1241 if (h)
1242 return h->type;
/* Not found: record TYPE as the canonical version and tell the caller
   by returning NULL_TREE that its own type is now the canonical one.  */
1244 h = ggc_alloc<pad_type_hash> ();
1245 h->hash = hashcode;
1246 h->type = type;
1247 *pad_type_hash_table->find_slot_with_hash (h, hashcode, INSERT) = h;
1248 return NULL_TREE;
1251 /* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
1252 if needed. We have already verified that SIZE and ALIGN are large enough.
1253 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1254 IS_COMPONENT_TYPE is true if this is being done for the component type of
1255 an array. IS_USER_TYPE is true if the original type needs to be completed.
1256 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
1257 the RM size of the resulting type is to be set to SIZE too; in this case,
1258 the padded type is canonicalized before being returned. */
1260 tree
1261 maybe_pad_type (tree type, tree size, unsigned int align,
1262 Entity_Id gnat_entity, bool is_component_type,
1263 bool is_user_type, bool definition, bool set_rm_size)
1265 tree orig_size = TYPE_SIZE (type);
1266 unsigned int orig_align = TYPE_ALIGN (type);
1267 tree record, field;
1269 /* If TYPE is a padded type, see if it agrees with any size and alignment
1270 we were given. If so, return the original type. Otherwise, strip
1271 off the padding, since we will either be returning the inner type
1272 or repadding it. If no size or alignment is specified, use that of
1273 the original padded type. */
1274 if (TYPE_IS_PADDING_P (type))
1276 if ((!size
1277 || operand_equal_p (round_up (size, orig_align), orig_size, 0))
1278 && (align == 0 || align == orig_align))
1279 return type;
1281 if (!size)
1282 size = orig_size;
1283 if (align == 0)
1284 align = orig_align;
/* From here on, work on the type under the padding.  */
1286 type = TREE_TYPE (TYPE_FIELDS (type));
1287 orig_size = TYPE_SIZE (type);
1288 orig_align = TYPE_ALIGN (type);
1291 /* If the size is either not being changed or is being made smaller (which
1292 is not done here and is only valid for bitfields anyway), show the size
1293 isn't changing. Likewise, clear the alignment if it isn't being
1294 changed. Then return if we aren't doing anything. */
1295 if (size
1296 && (operand_equal_p (size, orig_size, 0)
1297 || (TREE_CODE (orig_size) == INTEGER_CST
1298 && tree_int_cst_lt (size, orig_size))))
1299 size = NULL_TREE;
1301 if (align == orig_align)
1302 align = 0;
1304 if (align == 0 && !size)
1305 return type;
1307 /* If requested, complete the original type and give it a name. */
1308 if (is_user_type)
1309 create_type_decl (get_entity_name (gnat_entity), type,
1310 !Comes_From_Source (gnat_entity),
1311 !(TYPE_NAME (type)
1312 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1313 && DECL_IGNORED_P (TYPE_NAME (type))),
1314 gnat_entity);
1316 /* We used to modify the record in place in some cases, but that could
1317 generate incorrect debugging information. So make a new record
1318 type and name. */
1319 record = make_node (RECORD_TYPE);
1320 TYPE_PADDING_P (record) = 1;
1322 /* ??? Padding types around packed array implementation types will be
1323 considered as root types in the array descriptor language hook (see
1324 gnat_get_array_descr_info). Give them the original packed array type
1325 name so that the one coming from sources appears in the debugging
1326 information. */
1327 if (TYPE_IMPL_PACKED_ARRAY_P (type)
1328 && TYPE_ORIGINAL_PACKED_ARRAY (type)
1329 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
1330 TYPE_NAME (record) = TYPE_NAME (TYPE_ORIGINAL_PACKED_ARRAY (type));
1331 else if (Present (gnat_entity))
1332 TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");
1334 SET_TYPE_ALIGN (record, align ? align : orig_align);
1335 TYPE_SIZE (record) = size ? size : orig_size;
1336 TYPE_SIZE_UNIT (record)
1337 = convert (sizetype,
1338 size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
1339 bitsize_unit_node));
1341 /* If we are changing the alignment and the input type is a record with
1342 BLKmode and a small constant size, try to make a form that has an
1343 integral mode. This might allow the padding record to also have an
1344 integral mode, which will be much more efficient. There is no point
1345 in doing so if a size is specified unless it is also a small constant
1346 size and it is incorrect to do so if we cannot guarantee that the mode
1347 will be naturally aligned since the field must always be addressable.
1349 ??? This might not always be a win when done for a stand-alone object:
1350 since the nominal and the effective type of the object will now have
1351 different modes, a VIEW_CONVERT_EXPR will be required for converting
1352 between them and it might be hard to overcome afterwards, including
1353 at the RTL level when the stand-alone object is accessed as a whole. */
1354 if (align != 0
1355 && RECORD_OR_UNION_TYPE_P (type)
1356 && TYPE_MODE (type) == BLKmode
1357 && !TYPE_BY_REFERENCE_P (type)
1358 && TREE_CODE (orig_size) == INTEGER_CST
1359 && !TREE_OVERFLOW (orig_size)
1360 && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
1361 && (!size
1362 || (TREE_CODE (size) == INTEGER_CST
1363 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
/* NOTE(review): this call relies on a default for make_packable_type's
   MAX_ALIGN parameter (not visible here) -- confirm in gigi.h.  */
1365 tree packable_type = make_packable_type (type, true);
1366 if (TYPE_MODE (packable_type) != BLKmode
1367 && align >= TYPE_ALIGN (packable_type))
1368 type = packable_type;
1371 /* Now create the field with the original size. */
1372 field = create_field_decl (get_identifier ("F"), type, record, orig_size,
1373 bitsize_zero_node, 0, 1);
1374 DECL_INTERNAL_P (field) = 1;
1376 /* We will output additional debug info manually below. */
1377 finish_record_type (record, field, 1, false);
1379 if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
1380 SET_TYPE_DEBUG_TYPE (record, type);
1382 /* Set the RM size if requested. */
1383 if (set_rm_size)
1385 tree canonical_pad_type;
1387 SET_TYPE_ADA_SIZE (record, size ? size : orig_size);
1389 /* If the padded type is complete and has constant size, we canonicalize
1390 it by means of the hash table. This is consistent with the language
1391 semantics and ensures that gigi and the middle-end have a common view
1392 of these padded types. */
1393 if (TREE_CONSTANT (TYPE_SIZE (record))
1394 && (canonical_pad_type = lookup_and_insert_pad_type (record)))
1396 record = canonical_pad_type;
1397 goto built;
1401 /* Unless debugging information isn't being written for the input type,
1402 write a record that shows what we are a subtype of and also make a
1403 variable that indicates our size, if still variable. */
1404 if (TREE_CODE (orig_size) != INTEGER_CST
1405 && TYPE_NAME (record)
1406 && TYPE_NAME (type)
1407 && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1408 && DECL_IGNORED_P (TYPE_NAME (type))))
1410 tree name = TYPE_IDENTIFIER (record);
1411 tree size_unit = TYPE_SIZE_UNIT (record);
1413 /* A variable that holds the size is required even with no encoding since
1414 it will be referenced by debugging information attributes. At global
1415 level, we need a single variable across all translation units. */
1416 if (size
1417 && TREE_CODE (size) != INTEGER_CST
1418 && (definition || global_bindings_p ()))
1420 /* Whether or not gnat_entity comes from source, this XVZ variable
1421 is a compilation artifact. */
1422 size_unit
1423 = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
1424 size_unit, true, global_bindings_p (),
1425 !definition && global_bindings_p (), false,
1426 false, true, true, NULL, gnat_entity);
1427 TYPE_SIZE_UNIT (record) = size_unit;
1430 /* There is no need to show what we are a subtype of when outputting as
1431 few encodings as possible: regular debugging information makes this
1432 redundant. */
1433 if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
1435 tree marker = make_node (RECORD_TYPE);
1436 tree orig_name = TYPE_IDENTIFIER (type);
1438 TYPE_NAME (marker) = concat_name (name, "XVS");
1439 finish_record_type (marker,
1440 create_field_decl (orig_name,
1441 build_reference_type (type),
1442 marker, NULL_TREE, NULL_TREE,
1443 0, 0),
1444 0, true);
1445 TYPE_SIZE_UNIT (marker) = size_unit;
1447 add_parallel_type (record, marker);
1451 built:
1452 /* If a simple size was explicitly given, maybe issue a warning. */
1453 if (!size
1454 || TREE_CODE (size) == COND_EXPR
1455 || TREE_CODE (size) == MAX_EXPR
1456 || No (gnat_entity))
1457 return record;
1459 /* But don't do it if we are just annotating types and the type is tagged or
1460 concurrent, since these types aren't fully laid out in this mode. */
1461 if (type_annotate_only)
1463 Entity_Id gnat_type
1464 = is_component_type
1465 ? Component_Type (gnat_entity) : Etype (gnat_entity);
1467 if (Is_Tagged_Type (gnat_type) || Is_Concurrent_Type (gnat_type))
1468 return record;
1471 /* Take the original size as the maximum size of the input if there was an
1472 unconstrained record involved and round it up to the specified alignment,
1473 if one was specified, but only for aggregate types. */
1474 if (CONTAINS_PLACEHOLDER_P (orig_size))
1475 orig_size = max_size (orig_size, true);
1477 if (align && AGGREGATE_TYPE_P (type))
1478 orig_size = round_up (orig_size, align);
/* SIZE is known nonnull here: the null case returned RECORD above.
   Warn only when SIZE genuinely exceeds the original size.  */
1480 if (!operand_equal_p (size, orig_size, 0)
1481 && !(TREE_CODE (size) == INTEGER_CST
1482 && TREE_CODE (orig_size) == INTEGER_CST
1483 && (TREE_OVERFLOW (size)
1484 || TREE_OVERFLOW (orig_size)
1485 || tree_int_cst_lt (size, orig_size))))
1487 Node_Id gnat_error_node = Empty;
1489 /* For a packed array, post the message on the original array type. */
1490 if (Is_Packed_Array_Impl_Type (gnat_entity))
1491 gnat_entity = Original_Array_Type (gnat_entity);
1493 if ((Ekind (gnat_entity) == E_Component
1494 || Ekind (gnat_entity) == E_Discriminant)
1495 && Present (Component_Clause (gnat_entity)))
1496 gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
1497 else if (Present (Size_Clause (gnat_entity)))
1498 gnat_error_node = Expression (Size_Clause (gnat_entity));
1500 /* Generate message only for entities that come from source, since
1501 if we have an entity created by expansion, the message will be
1502 generated for some other corresponding source entity. */
1503 if (Comes_From_Source (gnat_entity))
1505 if (Present (gnat_error_node))
1506 post_error_ne_tree ("{^ }bits of & unused?",
1507 gnat_error_node, gnat_entity,
1508 size_diffop (size, orig_size));
1509 else if (is_component_type)
1510 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1511 gnat_entity, gnat_entity,
1512 size_diffop (size, orig_size));
1516 return record;
1519 /* Return a copy of the padded TYPE but with reverse storage order. */
1521 tree
1522 set_reverse_storage_order_on_pad_type (tree type)
1524 tree field, canonical_pad_type;
1526 if (flag_checking)
1528 /* If the inner type is not scalar then the function does nothing. */
1529 tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
1530 gcc_assert (!AGGREGATE_TYPE_P (inner_type)
1531 && !VECTOR_TYPE_P (inner_type));
1534 /* This is required for the canonicalization. */
1535 gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));
/* Copy both the single field and the type so the original padded type
   is left untouched, then flip the storage order on the copy.  */
1537 field = copy_node (TYPE_FIELDS (type));
1538 type = copy_type (type);
1539 DECL_CONTEXT (field) = type;
1540 TYPE_FIELDS (type) = field;
1541 TYPE_REVERSE_STORAGE_ORDER (type) = 1;
/* Reuse an existing canonical version if one was already recorded.  */
1542 canonical_pad_type = lookup_and_insert_pad_type (type);
1543 return canonical_pad_type ? canonical_pad_type : type;
1546 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1547 If this is a multi-dimensional array type, do this recursively.
1549 OP may be
1550 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1551 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1552 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1554 void
1555 relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1557 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1558 of a one-dimensional array, since the padding has the same alias set
1559 as the field type, but if it's a multi-dimensional array, we need to
1560 see the inner types. */
1561 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1562 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1563 || TYPE_PADDING_P (gnu_old_type)))
1564 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1566 /* Unconstrained array types are deemed incomplete and would thus be given
1567 alias set 0. Retrieve the underlying array type. */
1568 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1569 gnu_old_type
1570 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1571 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1572 gnu_new_type
1573 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
/* Recurse on the element types of multi-dimensional arrays so that all
   inner dimensions get related as well.  */
1575 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1576 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1577 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1578 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1580 switch (op)
1582 case ALIAS_SET_COPY:
1583 /* The alias set shouldn't be copied between array types with different
1584 aliasing settings because this can break the aliasing relationship
1585 between the array type and its element type. */
1586 if (flag_checking || flag_strict_aliasing)
1587 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1588 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1589 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1590 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1592 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1593 break;
1595 case ALIAS_SET_SUBSET:
1596 case ALIAS_SET_SUPERSET:
1598 alias_set_type old_set = get_alias_set (gnu_old_type);
1599 alias_set_type new_set = get_alias_set (gnu_new_type);
1601 /* Do nothing if the alias sets conflict. This ensures that we
1602 never call record_alias_subset several times for the same pair
1603 or at all for alias set 0. */
1604 if (!alias_sets_conflict_p (old_set, new_set))
1606 if (op == ALIAS_SET_SUBSET)
1607 record_alias_subset (old_set, new_set);
1608 else
1609 record_alias_subset (new_set, old_set);
1612 break;
1614 default:
1615 gcc_unreachable ();
/* In all cases, record the component relationships of the new type.  */
1618 record_component_aliases (gnu_new_type);
1621 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1622 ARTIFICIAL_P is true if the type was generated by the compiler. */
1624 void
1625 record_builtin_type (const char *name, tree type, bool artificial_p)
1627 tree type_decl = build_decl (input_location,
1628 TYPE_DECL, get_identifier (name), type);
1629 DECL_ARTIFICIAL (type_decl) = artificial_p;
1630 TYPE_ARTIFICIAL (type) = artificial_p;
/* Push the declaration with no associated GNAT entity.  */
1631 gnat_pushdecl (type_decl, Empty);
/* Notify the debug back-end of the new type declaration, if needed.  */
1633 if (debug_hooks->type_decl)
1634 debug_hooks->type_decl (type_decl, false);
1637 /* Finish constructing the character type CHAR_TYPE.
1639 In Ada character types are enumeration types and, as a consequence, are
1640 represented in the front-end by integral types holding the positions of
1641 the enumeration values as defined by the language, which means that the
1642 integral types are unsigned.
1644 Unfortunately the signedness of 'char' in C is implementation-defined
1645 and GCC even has the option -fsigned-char to toggle it at run time.
1646 Since GNAT's philosophy is to be compatible with C by default, to wit
1647 Interfaces.C.char is defined as a mere copy of Character, we may need
1648 to declare character types as signed types in GENERIC and generate the
1649 necessary adjustments to make them behave as unsigned types.
1651 The overall strategy is as follows: if 'char' is unsigned, do nothing;
1652 if 'char' is signed, translate character types of CHAR_TYPE_SIZE and
1653 character subtypes with RM_Size = Esize = CHAR_TYPE_SIZE into signed
1654 types. The idea is to ensure that the bit pattern contained in the
1655 Esize'd objects is not changed, even though the numerical value will
1656 be interpreted differently depending on the signedness. */
1658 void
1659 finish_character_type (tree char_type)
/* Nothing to adjust when the type is already unsigned.  */
1661 if (TYPE_UNSIGNED (char_type))
1662 return;
1664 /* Make a copy of a generic unsigned version since we'll modify it. */
1665 tree unsigned_char_type
1666 = (char_type == char_type_node
1667 ? unsigned_char_type_node
1668 : copy_type (gnat_unsigned_type_for (char_type)));
1670 /* Create an unsigned version of the type and set it as debug type. */
1671 TYPE_NAME (unsigned_char_type) = TYPE_NAME (char_type);
1672 TYPE_STRING_FLAG (unsigned_char_type) = TYPE_STRING_FLAG (char_type);
1673 TYPE_ARTIFICIAL (unsigned_char_type) = TYPE_ARTIFICIAL (char_type);
1674 SET_TYPE_DEBUG_TYPE (char_type, unsigned_char_type);
1676 /* If this is a subtype, make the debug type a subtype of the debug type
1677 of the base type and convert literal RM bounds to unsigned. */
1678 if (TREE_TYPE (char_type))
1680 tree base_unsigned_char_type = TYPE_DEBUG_TYPE (TREE_TYPE (char_type));
1681 tree min_value = TYPE_RM_MIN_VALUE (char_type);
1682 tree max_value = TYPE_RM_MAX_VALUE (char_type);
/* Only literal bounds can be reinterpreted as unsigned constants;
   non-constant bounds are left untouched.  */
1684 if (TREE_CODE (min_value) == INTEGER_CST)
1685 min_value = fold_convert (base_unsigned_char_type, min_value);
1686 if (TREE_CODE (max_value) == INTEGER_CST)
1687 max_value = fold_convert (base_unsigned_char_type, max_value);
1689 TREE_TYPE (unsigned_char_type) = base_unsigned_char_type;
1690 SET_TYPE_RM_MIN_VALUE (unsigned_char_type, min_value);
1691 SET_TYPE_RM_MAX_VALUE (unsigned_char_type, max_value);
1694 /* Adjust the RM bounds of the original type to unsigned; that's especially
1695 important for types since they are implicit in this case. */
1696 SET_TYPE_RM_MIN_VALUE (char_type, TYPE_MIN_VALUE (unsigned_char_type));
1697 SET_TYPE_RM_MAX_VALUE (char_type, TYPE_MAX_VALUE (unsigned_char_type));
1700 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1701 finish constructing the record type as a fat pointer type. */
1703 void
1704 finish_fat_pointer_type (tree record_type, tree field_list)
1706 /* Make sure we can put it into a register. */
/* Cap the alignment at twice the pointer size: a fat pointer is a pair
   of pointers and must not require stricter alignment than that.  */
1707 if (STRICT_ALIGNMENT)
1708 SET_TYPE_ALIGN (record_type, MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE));
1710 /* Show what it really is. */
1711 TYPE_FAT_POINTER_P (record_type) = 1;
1713 /* Do not emit debug info for it since the types of its fields may still be
1714 incomplete at this point. */
1715 finish_record_type (record_type, field_list, 0, false);
1717 /* Force type_contains_placeholder_p to return true on it. Although the
1718 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1719 type but the representation of the unconstrained array. */
1720 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1723 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1724 finish constructing the record or union type. If REP_LEVEL is zero, this
1725 record has no representation clause and so will be entirely laid out here.
1726 If REP_LEVEL is one, this record has a representation clause and has been
1727 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1728 this record is derived from a parent record and thus inherits its layout;
1729 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1730 additional debug info needs to be output for this type. */
1732 void
1733 finish_record_type (tree record_type, tree field_list, int rep_level,
1734 bool debug_info_p)
1736 enum tree_code code = TREE_CODE (record_type);
1737 tree name = TYPE_IDENTIFIER (record_type);
1738 tree ada_size = bitsize_zero_node;
1739 tree size = bitsize_zero_node;
/* Remember whether size/alignment were already set on entry, e.g. by a
   representation clause, before we start modifying the type below.  */
1740 bool had_size = TYPE_SIZE (record_type) != 0;
1741 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1742 bool had_align = TYPE_ALIGN (record_type) != 0;
1743 tree field;
1745 TYPE_FIELDS (record_type) = field_list;
1747 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1748 generate debug info and have a parallel type. */
1749 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
1751 /* Globally initialize the record first. If this is a rep'ed record,
1752 that just means some initializations; otherwise, layout the record. */
1753 if (rep_level > 0)
1755 SET_TYPE_ALIGN (record_type, MAX (BITS_PER_UNIT,
1756 TYPE_ALIGN (record_type)));
1758 if (!had_size_unit)
1759 TYPE_SIZE_UNIT (record_type) = size_zero_node;
1761 if (!had_size)
1762 TYPE_SIZE (record_type) = bitsize_zero_node;
1764 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1765 out just like a UNION_TYPE, since the size will be fixed. */
1766 else if (code == QUAL_UNION_TYPE)
1767 code = UNION_TYPE;
1769 else
1771 /* Ensure there isn't a size already set. There can be in an error
1772 case where there is a rep clause but all fields have errors and
1773 no longer have a position. */
1774 TYPE_SIZE (record_type) = 0;
1776 /* Ensure we use the traditional GCC layout for bitfields when we need
1777 to pack the record type or have a representation clause. The other
1778 possible layout (Microsoft C compiler), if available, would prevent
1779 efficient packing in almost all cases. */
1780 #ifdef TARGET_MS_BITFIELD_LAYOUT
1781 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1782 decl_attributes (&record_type,
1783 tree_cons (get_identifier ("gcc_struct"),
1784 NULL_TREE, NULL_TREE),
1785 ATTR_FLAG_TYPE_IN_PLACE);
1786 #endif
1788 layout_type (record_type);
1791 /* At this point, the position and size of each field is known. It was
1792 either set before entry by a rep clause, or by laying out the type above.
1794 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1795 to compute the Ada size; the GCC size and alignment (for rep'ed records
1796 that are not padding types); and the mode (for rep'ed records). We also
1797 clear the DECL_BIT_FIELD indication for the cases we know have not been
1798 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1800 if (code == QUAL_UNION_TYPE)
1801 field_list = nreverse (field_list);
1803 for (field = field_list; field; field = DECL_CHAIN (field))
1805 tree type = TREE_TYPE (field);
1806 tree pos = bit_position (field);
1807 tree this_size = DECL_SIZE (field);
1808 tree this_ada_size;
/* Use the Ada size of the field's type where one was recorded (records and
   unions that are neither fat pointers nor contain a template); otherwise
   fall back to the GCC size.  */
1810 if (RECORD_OR_UNION_TYPE_P (type)
1811 && !TYPE_FAT_POINTER_P (type)
1812 && !TYPE_CONTAINS_TEMPLATE_P (type)
1813 && TYPE_ADA_SIZE (type))
1814 this_ada_size = TYPE_ADA_SIZE (type);
1815 else
1816 this_ada_size = this_size;
1818 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1819 if (DECL_BIT_FIELD (field)
1820 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1822 unsigned int align = TYPE_ALIGN (type);
1824 /* In the general case, type alignment is required. */
1825 if (value_factor_p (pos, align))
1827 /* The enclosing record type must be sufficiently aligned.
1828 Otherwise, if no alignment was specified for it and it
1829 has been laid out already, bump its alignment to the
1830 desired one if this is compatible with its size and
1831 maximum alignment, if any. */
1832 if (TYPE_ALIGN (record_type) >= align)
1834 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
1835 DECL_BIT_FIELD (field) = 0;
1837 else if (!had_align
1838 && rep_level == 0
1839 && value_factor_p (TYPE_SIZE (record_type), align)
1840 && (!TYPE_MAX_ALIGN (record_type)
1841 || TYPE_MAX_ALIGN (record_type) >= align))
1843 SET_TYPE_ALIGN (record_type, align);
1844 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
1845 DECL_BIT_FIELD (field) = 0;
1849 /* In the non-strict alignment case, only byte alignment is required. */
1850 if (!STRICT_ALIGNMENT
1851 && DECL_BIT_FIELD (field)
1852 && value_factor_p (pos, BITS_PER_UNIT))
1853 DECL_BIT_FIELD (field) = 0;
1856 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1857 field is technically not addressable. Except that it can actually
1858 be addressed if it is BLKmode and happens to be properly aligned. */
1859 if (DECL_BIT_FIELD (field)
1860 && !(DECL_MODE (field) == BLKmode
1861 && value_factor_p (pos, BITS_PER_UNIT)))
1862 DECL_NONADDRESSABLE_P (field) = 1;
1864 /* A type must be as aligned as its most aligned field that is not
1865 a bit-field. But this is already enforced by layout_type. */
1866 if (rep_level > 0 && !DECL_BIT_FIELD (field))
1867 SET_TYPE_ALIGN (record_type,
1868 MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field)));
1870 switch (code)
1872 case UNION_TYPE:
1873 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1874 size = size_binop (MAX_EXPR, size, this_size);
1875 break;
1877 case QUAL_UNION_TYPE:
1878 ada_size
1879 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1880 this_ada_size, ada_size);
1881 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1882 this_size, size);
1883 break;
1885 case RECORD_TYPE:
1886 /* Since we know here that all fields are sorted in order of
1887 increasing bit position, the size of the record is one
1888 higher than the ending bit of the last field processed
1889 unless we have a rep clause, since in that case we might
1890 have a field outside a QUAL_UNION_TYPE that has a higher ending
1891 position. So use a MAX in that case. Also, if this field is a
1892 QUAL_UNION_TYPE, we need to take into account the previous size in
1893 the case of empty variants. */
1894 ada_size
1895 = merge_sizes (ada_size, pos, this_ada_size,
1896 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1897 size
1898 = merge_sizes (size, pos, this_size,
1899 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1900 break;
1902 default:
1903 gcc_unreachable ();
/* Undo the reversal done before the pass so the field list is restored.  */
1907 if (code == QUAL_UNION_TYPE)
1908 nreverse (field_list);
1910 if (rep_level < 2)
1912 /* If this is a padding record, we never want to make the size smaller
1913 than what was specified in it, if any. */
1914 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
1915 size = TYPE_SIZE (record_type);
1917 /* Now set any of the values we've just computed that apply. */
1918 if (!TYPE_FAT_POINTER_P (record_type)
1919 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1920 SET_TYPE_ADA_SIZE (record_type, ada_size);
/* For rep'ed records, round the sizes up to the alignment and set the
   machine mode ourselves since layout_type was not run on them.  */
1922 if (rep_level > 0)
1924 tree size_unit = had_size_unit
1925 ? TYPE_SIZE_UNIT (record_type)
1926 : convert (sizetype,
1927 size_binop (CEIL_DIV_EXPR, size,
1928 bitsize_unit_node));
1929 unsigned int align = TYPE_ALIGN (record_type);
1931 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1932 TYPE_SIZE_UNIT (record_type)
1933 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1935 compute_record_mode (record_type);
1939 /* Reset the TYPE_MAX_ALIGN field since it's private to gigi. */
1940 TYPE_MAX_ALIGN (record_type) = 0;
1942 if (debug_info_p)
1943 rest_of_record_type_compilation (record_type);
1946 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
1947 PARRALEL_TYPE has no context and its computation is not deferred yet, also
1948 propagate TYPE's context to PARALLEL_TYPE's or defer its propagation to the
1949 moment TYPE will get a context. */
1951 void
1952 add_parallel_type (tree type, tree parallel_type)
1954 tree decl = TYPE_STUB_DECL (type);
1956 while (DECL_PARALLEL_TYPE (decl))
1957 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1959 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1961 /* If PARALLEL_TYPE already has a context, we are done. */
1962 if (TYPE_CONTEXT (parallel_type))
1963 return;
1965 /* Otherwise, try to get one from TYPE's context. If so, simply propagate
1966 it to PARALLEL_TYPE. */
1967 if (TYPE_CONTEXT (type))
1968 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
1970 /* Otherwise TYPE has not context yet. We know it will have one thanks to
1971 gnat_pushdecl and then its context will be propagated to PARALLEL_TYPE,
1972 so we have nothing to do in this case. */
1975 /* Return true if TYPE has a parallel type. */
1977 static bool
1978 has_parallel_type (tree type)
1980 tree decl = TYPE_STUB_DECL (type);
1982 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1985 /* Wrap up compilation of RECORD_TYPE, i.e. output additional debug info
1986 associated with it. It need not be invoked directly in most cases as
1987 finish_record_type takes care of doing so. */
1989 void
1990 rest_of_record_type_compilation (tree record_type)
1992 bool var_size = false;
1993 tree field;
1995 /* If this is a padded type, the bulk of the debug info has already been
1996 generated for the field's type. */
1997 if (TYPE_IS_PADDING_P (record_type))
1998 return;
2000 /* If the type already has a parallel type (XVS type), then we're done. */
2001 if (has_parallel_type (record_type))
2002 return;
2004 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
2006 /* We need to make an XVE/XVU record if any field has variable size,
2007 whether or not the record does. For example, if we have a union,
2008 it may be that all fields, rounded up to the alignment, have the
2009 same size, in which case we'll use that size. But the debug
2010 output routines (except Dwarf2) won't be able to output the fields,
2011 so we need to make the special record. */
2012 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
2013 /* If a field has a non-constant qualifier, the record will have
2014 variable size too. */
2015 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
2016 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
2018 var_size = true;
2019 break;
2023 /* If this record type is of variable size, make a parallel record type that
2024 will tell the debugger how the former is laid out (see exp_dbug.ads). */
2025 if (var_size && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
2027 tree new_record_type
2028 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
2029 ? UNION_TYPE : TREE_CODE (record_type));
2030 tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
2031 tree last_pos = bitsize_zero_node;
2032 tree old_field, prev_old_field = NULL_TREE;
/* The parallel type is named after the original with an XVU (variant part)
   or XVE suffix, per the encodings described in exp_dbug.ads.  */
2034 new_name
2035 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
2036 ? "XVU" : "XVE");
2037 TYPE_NAME (new_record_type) = new_name;
2038 SET_TYPE_ALIGN (new_record_type, BIGGEST_ALIGNMENT);
2039 TYPE_STUB_DECL (new_record_type)
2040 = create_type_stub_decl (new_name, new_record_type);
2041 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
2042 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
2043 gnat_pushdecl (TYPE_STUB_DECL (new_record_type), Empty);
2044 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
2045 TYPE_SIZE_UNIT (new_record_type)
2046 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
2048 /* Now scan all the fields, replacing each field with a new field
2049 corresponding to the new encoding. */
2050 for (old_field = TYPE_FIELDS (record_type); old_field;
2051 old_field = DECL_CHAIN (old_field))
2053 tree field_type = TREE_TYPE (old_field);
2054 tree field_name = DECL_NAME (old_field);
2055 tree curpos = fold_bit_position (old_field);
2056 tree pos, new_field;
2057 bool var = false;
2058 unsigned int align = 0;
2060 /* See how the position was modified from the last position.
2062 There are two basic cases we support: a value was added
2063 to the last position or the last position was rounded to
2064 a boundary and then something was added. Check for the
2065 first case first. If not, see if there is any evidence
2066 of rounding. If so, round the last position and retry.
2068 If this is a union, the position can be taken as zero. */
2069 if (TREE_CODE (new_record_type) == UNION_TYPE)
2070 pos = bitsize_zero_node;
2071 else
2072 pos = compute_related_constant (curpos, last_pos)
2074 if (!pos
2075 && TREE_CODE (curpos) == MULT_EXPR
2076 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
2078 tree offset = TREE_OPERAND (curpos, 0);
2079 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
2080 align = scale_by_factor_of (offset, align);
2081 last_pos = round_up (last_pos, align);
2082 pos = compute_related_constant (curpos, last_pos);
/* CURPOS is a sum whose right operand is constant: take the alignment from
   the multiplication on the left and cap it by the alignment implied by the
   low-order bits of the constant addend.  */
2084 else if (!pos
2085 && TREE_CODE (curpos) == PLUS_EXPR
2086 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
2087 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
2088 && tree_fits_uhwi_p
2089 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
2091 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
2092 unsigned HOST_WIDE_INT addend
2093 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
2094 align
2095 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
2096 align = scale_by_factor_of (offset, align);
2097 align = MIN (align, addend & -addend);
2098 last_pos = round_up (last_pos, align);
2099 pos = compute_related_constant (curpos, last_pos);
2101 else if (potential_alignment_gap (prev_old_field, old_field, pos))
2103 align = TYPE_ALIGN (field_type);
2104 last_pos = round_up (last_pos, align);
2105 pos = compute_related_constant (curpos, last_pos);
2108 /* If we can't compute a position, set it to zero.
2110 ??? We really should abort here, but it's too much work
2111 to get this correct for all cases. */
2112 if (!pos)
2113 pos = bitsize_zero_node;
2115 /* See if this type is variable-sized and make a pointer type
2116 and indicate the indirection if so. Beware that the debug
2117 back-end may adjust the position computed above according
2118 to the alignment of the field type, i.e. the pointer type
2119 in this case, if we don't preventively counter that. */
2120 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
2122 field_type = build_pointer_type (field_type);
2123 if (align != 0 && TYPE_ALIGN (field_type) > align)
2125 field_type = copy_type (field_type);
2126 SET_TYPE_ALIGN (field_type, align);
2128 var = true;
2131 /* Make a new field name, if necessary. */
2132 if (var || align != 0)
2134 char suffix[16];
/* Suffix encoding: XVAnn = field realigned to nn bytes, XVLnn/XVL = field
   accessed through a pointer because its size is variable.  */
2136 if (align != 0)
2137 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
2138 align / BITS_PER_UNIT);
2139 else
2140 strcpy (suffix, "XVL");
2142 field_name = concat_name (field_name, suffix);
2145 new_field
2146 = create_field_decl (field_name, field_type, new_record_type,
2147 DECL_SIZE (old_field), pos, 0, 0);
2148 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
2149 TYPE_FIELDS (new_record_type) = new_field;
2151 /* If old_field is a QUAL_UNION_TYPE, take its size as being
2152 zero. The only time it's not the last field of the record
2153 is when there are other components at fixed positions after
2154 it (meaning there was a rep clause for every field) and we
2155 want to be able to encode them. */
2156 last_pos = size_binop (PLUS_EXPR, curpos,
2157 (TREE_CODE (TREE_TYPE (old_field))
2158 == QUAL_UNION_TYPE)
2159 ? bitsize_zero_node
2160 : DECL_SIZE (old_field));
2161 prev_old_field = old_field;
/* The fields were chained in reverse order above; restore source order.  */
2164 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
2166 add_parallel_type (record_type, new_record_type);
2170 /* Utility function of above to merge LAST_SIZE, the previous size of a record
2171 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2172 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2173 replace a value of zero with the old size. If HAS_REP is true, we take the
2174 MAX of the end position of this field with LAST_SIZE. In all other cases,
2175 we use FIRST_BIT plus SIZE. Return an expression for the size. */
2177 static tree
2178 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
2179 bool has_rep)
2181 tree type = TREE_TYPE (last_size);
2182 tree new_size;
2184 if (!special || TREE_CODE (size) != COND_EXPR)
2186 new_size = size_binop (PLUS_EXPR, first_bit, size);
2187 if (has_rep)
2188 new_size = size_binop (MAX_EXPR, last_size, new_size);
2191 else
2192 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
2193 integer_zerop (TREE_OPERAND (size, 1))
2194 ? last_size : merge_sizes (last_size, first_bit,
2195 TREE_OPERAND (size, 1),
2196 1, has_rep),
2197 integer_zerop (TREE_OPERAND (size, 2))
2198 ? last_size : merge_sizes (last_size, first_bit,
2199 TREE_OPERAND (size, 2),
2200 1, has_rep));
2202 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
2203 when fed through substitute_in_expr) into thinking that a constant
2204 size is not constant. */
2205 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
2206 new_size = TREE_OPERAND (new_size, 0);
2208 return new_size;
2211 /* Return the bit position of FIELD, in bits from the start of the record,
2212 and fold it as much as possible. This is a tree of type bitsizetype. */
2214 static tree
2215 fold_bit_position (const_tree field)
2217 tree offset = DECL_FIELD_OFFSET (field);
2218 if (TREE_CODE (offset) == MULT_EXPR || TREE_CODE (offset) == PLUS_EXPR)
2219 offset = size_binop (TREE_CODE (offset),
2220 fold_convert (bitsizetype, TREE_OPERAND (offset, 0)),
2221 fold_convert (bitsizetype, TREE_OPERAND (offset, 1)));
2222 else
2223 offset = fold_convert (bitsizetype, offset);
2224 return size_binop (PLUS_EXPR, DECL_FIELD_BIT_OFFSET (field),
2225 size_binop (MULT_EXPR, offset, bitsize_unit_node));
2228 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2229 related by the addition of a constant. Return that constant if so. */
2231 static tree
2232 compute_related_constant (tree op0, tree op1)
2234 tree factor, op0_var, op1_var, op0_cst, op1_cst, result;
2236 if (TREE_CODE (op0) == MULT_EXPR
2237 && TREE_CODE (op1) == MULT_EXPR
2238 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
2239 && TREE_OPERAND (op1, 1) == TREE_OPERAND (op0, 1))
2241 factor = TREE_OPERAND (op0, 1);
2242 op0 = TREE_OPERAND (op0, 0);
2243 op1 = TREE_OPERAND (op1, 0);
2245 else
2246 factor = NULL_TREE;
2248 op0_cst = split_plus (op0, &op0_var);
2249 op1_cst = split_plus (op1, &op1_var);
2250 result = size_binop (MINUS_EXPR, op0_cst, op1_cst);
2252 if (operand_equal_p (op0_var, op1_var, 0))
2253 return factor ? size_binop (MULT_EXPR, factor, result) : result;
2255 return NULL_TREE;
2258 /* Utility function of above to split a tree OP which may be a sum, into a
2259 constant part, which is returned, and a variable part, which is stored
2260 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2261 bitsizetype. */
2263 static tree
2264 split_plus (tree in, tree *pvar)
2266 /* Strip conversions in order to ease the tree traversal and maximize the
2267 potential for constant or plus/minus discovery. We need to be careful
2268 to always return and set *pvar to bitsizetype trees, but it's worth
2269 the effort. */
2270 in = remove_conversions (in, false);
2272 *pvar = convert (bitsizetype, in);
2274 if (TREE_CODE (in) == INTEGER_CST)
2276 *pvar = bitsize_zero_node;
2277 return convert (bitsizetype, in);
2279 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2281 tree lhs_var, rhs_var;
2282 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2283 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2285 if (lhs_var == TREE_OPERAND (in, 0)
2286 && rhs_var == TREE_OPERAND (in, 1))
2287 return bitsize_zero_node;
2289 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2290 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2292 else
2293 return bitsize_zero_node;
2296 /* Return a copy of TYPE but safe to modify in any way. */
2298 tree
2299 copy_type (tree type)
2301 tree new_type = copy_node (type);
2303 /* Unshare the language-specific data. */
2304 if (TYPE_LANG_SPECIFIC (type))
2306 TYPE_LANG_SPECIFIC (new_type) = NULL;
2307 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2310 /* And the contents of the language-specific slot if needed. */
2311 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2312 && TYPE_RM_VALUES (type))
2314 TYPE_RM_VALUES (new_type) = NULL_TREE;
2315 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2316 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2317 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2320 /* copy_node clears this field instead of copying it, because it is
2321 aliased with TREE_CHAIN. */
2322 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
2324 TYPE_POINTER_TO (new_type) = NULL_TREE;
2325 TYPE_REFERENCE_TO (new_type) = NULL_TREE;
2326 TYPE_MAIN_VARIANT (new_type) = new_type;
2327 TYPE_NEXT_VARIANT (new_type) = NULL_TREE;
2328 TYPE_CANONICAL (new_type) = new_type;
2330 return new_type;
2333 /* Return a subtype of sizetype with range MIN to MAX and whose
2334 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2335 of the associated TYPE_DECL. */
2337 tree
2338 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2340 /* First build a type for the desired range. */
2341 tree type = build_nonshared_range_type (sizetype, min, max);
2343 /* Then set the index type. */
2344 SET_TYPE_INDEX_TYPE (type, index);
2345 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2347 return type;
2350 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2351 sizetype is used. */
2353 tree
2354 create_range_type (tree type, tree min, tree max)
2356 tree range_type;
2358 if (!type)
2359 type = sizetype;
2361 /* First build a type with the base range. */
2362 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2363 TYPE_MAX_VALUE (type));
2365 /* Then set the actual range. */
2366 SET_TYPE_RM_MIN_VALUE (range_type, min);
2367 SET_TYPE_RM_MAX_VALUE (range_type, max);
2369 return range_type;
2372 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
2373 NAME gives the name of the type to be used in the declaration. */
2375 tree
2376 create_type_stub_decl (tree name, tree type)
2378 tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
2379 DECL_ARTIFICIAL (type_decl) = 1;
2380 TYPE_ARTIFICIAL (type) = 1;
2381 return type_decl;
2384 /* Return a TYPE_DECL node for TYPE. NAME gives the name of the type to be
2385 used in the declaration. ARTIFICIAL_P is true if the declaration was
2386 generated by the compiler. DEBUG_INFO_P is true if we need to write
2387 debug information about this type. GNAT_NODE is used for the position
2388 of the decl. */
2390 tree
2391 create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
2392 Node_Id gnat_node)
2394 enum tree_code code = TREE_CODE (type);
2395 bool is_named
2396 = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2397 tree type_decl;
2399 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2400 gcc_assert (!TYPE_IS_DUMMY_P (type));
2402 /* If the type hasn't been named yet, we're naming it; preserve an existing
2403 TYPE_STUB_DECL that has been attached to it for some purpose. */
2404 if (!is_named && TYPE_STUB_DECL (type))
2406 type_decl = TYPE_STUB_DECL (type);
2407 DECL_NAME (type_decl) = name;
2409 else
2410 type_decl = build_decl (input_location, TYPE_DECL, name, type);
2412 DECL_ARTIFICIAL (type_decl) = artificial_p;
2413 TYPE_ARTIFICIAL (type) = artificial_p;
2415 /* Add this decl to the current binding level. */
2416 gnat_pushdecl (type_decl, gnat_node);
2418 /* If we're naming the type, equate the TYPE_STUB_DECL to the name. This
2419 causes the name to be also viewed as a "tag" by the debug back-end, with
2420 the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
2421 types in DWARF.
2423 Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
2424 from multiple contexts, and "type_decl" references a copy of it: in such a
2425 case, do not mess TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
2426 with the mechanism above. */
2427 if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
2428 TYPE_STUB_DECL (type) = type_decl;
2430 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2431 back-end doesn't support, and for others if we don't need to. */
2432 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2433 DECL_IGNORED_P (type_decl) = 1;
2435 return type_decl;
2438 /* Return a VAR_DECL or CONST_DECL node.
2440 NAME gives the name of the variable. ASM_NAME is its assembler name
2441 (if provided). TYPE is its data type (a GCC ..._TYPE node). INIT is
2442 the GCC tree for an optional initial expression; NULL_TREE if none.
2444 CONST_FLAG is true if this variable is constant, in which case we might
2445 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2447 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2448 definition to be made visible outside of the current compilation unit, for
2449 instance variable definitions in a package specification.
2451 EXTERN_FLAG is true when processing an external variable declaration (as
2452 opposed to a definition: no storage is to be allocated for the variable).
2454 STATIC_FLAG is only relevant when not at top level and indicates whether
2455 to always allocate storage to the variable.
2457 VOLATILE_FLAG is true if this variable is declared as volatile.
2459 ARTIFICIAL_P is true if the variable was generated by the compiler.
2461 DEBUG_INFO_P is true if we need to write debug information for it.
2463 ATTR_LIST is the list of attributes to be attached to the variable.
2465 GNAT_NODE is used for the position of the decl. */
2467 tree
2468 create_var_decl (tree name, tree asm_name, tree type, tree init,
2469 bool const_flag, bool public_flag, bool extern_flag,
2470 bool static_flag, bool volatile_flag, bool artificial_p,
2471 bool debug_info_p, struct attrib *attr_list,
2472 Node_Id gnat_node, bool const_decl_allowed_p)
2474 /* Whether the object has static storage duration, either explicitly or by
2475 virtue of being declared at the global level. */
2476 const bool static_storage = static_flag || global_bindings_p ();
2478 /* Whether the initializer is constant: for an external object or an object
2479 with static storage duration, we check that the initializer is a valid
2480 constant expression for initializing a static variable; otherwise, we
2481 only check that it is constant. */
2482 const bool init_const
2483 = (init
2484 && gnat_types_compatible_p (type, TREE_TYPE (init))
2485 && (extern_flag || static_storage
2486 ? initializer_constant_valid_p (init, TREE_TYPE (init))
2487 != NULL_TREE
2488 : TREE_CONSTANT (init)));
2490 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2491 case the initializer may be used in lieu of the DECL node (as done in
2492 Identifier_to_gnu). This is useful to prevent the need of elaboration
2493 code when an identifier for which such a DECL is made is in turn used
2494 as an initializer. We used to rely on CONST_DECL vs VAR_DECL for this,
2495 but extra constraints apply to this choice (see below) and they are not
2496 relevant to the distinction we wish to make. */
2497 const bool constant_p = const_flag && init_const;
2499 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2500 and may be used for scalars in general but not for aggregates. */
2501 tree var_decl
2502 = build_decl (input_location,
2503 (constant_p
2504 && const_decl_allowed_p
2505 && !AGGREGATE_TYPE_P (type) ? CONST_DECL : VAR_DECL),
2506 name, type);
2508 /* Detect constants created by the front-end to hold 'reference to function
2509 calls for stabilization purposes. This is needed for renaming. */
2510 if (const_flag && init && POINTER_TYPE_P (type))
2512 tree inner = init;
2513 if (TREE_CODE (inner) == COMPOUND_EXPR)
2514 inner = TREE_OPERAND (inner, 1);
2515 inner = remove_conversions (inner, true);
2516 if (TREE_CODE (inner) == ADDR_EXPR
2517 && ((TREE_CODE (TREE_OPERAND (inner, 0)) == CALL_EXPR
2518 && !call_is_atomic_load (TREE_OPERAND (inner, 0)))
2519 || (TREE_CODE (TREE_OPERAND (inner, 0)) == VAR_DECL
2520 && DECL_RETURN_VALUE_P (TREE_OPERAND (inner, 0)))))
2521 DECL_RETURN_VALUE_P (var_decl) = 1;
2524 /* If this is external, throw away any initializations (they will be done
2525 elsewhere) unless this is a constant for which we would like to remain
2526 able to get the initializer. If we are defining a global here, leave a
2527 constant initialization and save any variable elaborations for the
2528 elaboration routine. If we are just annotating types, throw away the
2529 initialization if it isn't a constant. */
2530 if ((extern_flag && !constant_p)
2531 || (type_annotate_only && init && !TREE_CONSTANT (init)))
2532 init = NULL_TREE;
2534 /* At the global level, a non-constant initializer generates elaboration
2535 statements. Check that such statements are allowed, that is to say,
2536 not violating a No_Elaboration_Code restriction. */
2537 if (init && !init_const && global_bindings_p ())
2538 Check_Elaboration_Code_Allowed (gnat_node);
2540 /* Attach the initializer, if any. */
2541 DECL_INITIAL (var_decl) = init;
2543 /* Directly set some flags. */
2544 DECL_ARTIFICIAL (var_decl) = artificial_p;
2545 DECL_EXTERNAL (var_decl) = extern_flag;
2547 TREE_CONSTANT (var_decl) = constant_p;
2548 TREE_READONLY (var_decl) = const_flag;
2550 /* The object is public if it is external or if it is declared public
2551 and has static storage duration. */
2552 TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);
2554 /* We need to allocate static storage for an object with static storage
2555 duration if it isn't external. */
2556 TREE_STATIC (var_decl) = !extern_flag && static_storage;
/* A volatile object is deemed to have side effects, and is then also made
   addressable so that accesses to it are not optimized away. */
2558 TREE_SIDE_EFFECTS (var_decl)
2559 = TREE_THIS_VOLATILE (var_decl)
2560 = TYPE_VOLATILE (type) | volatile_flag;
2562 if (TREE_SIDE_EFFECTS (var_decl))
2563 TREE_ADDRESSABLE (var_decl) = 1;
2565 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2566 try to fiddle with DECL_COMMON. However, on platforms that don't
2567 support global BSS sections, uninitialized global variables would
2568 go in DATA instead, thus increasing the size of the executable. */
2569 if (!flag_no_common
2570 && TREE_CODE (var_decl) == VAR_DECL
2571 && TREE_PUBLIC (var_decl)
2572 && !have_global_bss_p ())
2573 DECL_COMMON (var_decl) = 1;
2575 /* Do not emit debug info for a CONST_DECL if optimization isn't enabled,
2576 since we will create an associated variable. Likewise for an external
2577 constant whose initializer is not absolute, because this would mean a
2578 global relocation in a read-only section which runs afoul of the PE-COFF
2579 run-time relocation mechanism. */
2580 if (!debug_info_p
2581 || (TREE_CODE (var_decl) == CONST_DECL && !optimize)
2582 || (extern_flag
2583 && constant_p
2584 && init
2585 && initializer_constant_valid_p (init, TREE_TYPE (init))
2586 != null_pointer_node))
2587 DECL_IGNORED_P (var_decl) = 1;
2589 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2590 if (TREE_CODE (var_decl) == VAR_DECL)
2591 process_attributes (&var_decl, &attr_list, true, gnat_node);
2593 /* Add this decl to the current binding level. */
2594 gnat_pushdecl (var_decl, gnat_node);
/* Finally attach the assembler name, if one was supplied. */
2596 if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
2598 /* Let the target mangle the name if this isn't a verbatim asm. */
2599 if (*IDENTIFIER_POINTER (asm_name) != '*')
2600 asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);
2602 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
2605 return var_decl;
2608 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2610 static bool
2611 aggregate_type_contains_array_p (tree type)
2613 switch (TREE_CODE (type))
2615 case RECORD_TYPE:
2616 case UNION_TYPE:
2617 case QUAL_UNION_TYPE:
2619 tree field;
2620 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2621 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2622 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2623 return true;
2624 return false;
2627 case ARRAY_TYPE:
2628 return true;
2630 default:
2631 gcc_unreachable ();
2635 /* Return a FIELD_DECL node. NAME is the field's name, TYPE is its type and
2636 RECORD_TYPE is the type of the enclosing record. If SIZE is nonzero, it
2637 is the specified size of the field. If POS is nonzero, it is the bit
2638 position. PACKED is 1 if the enclosing record is packed, -1 if it has
2639 Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2640 means we are allowed to take the address of the field; if it is negative,
2641 we should not make a bitfield, which is used by make_aligning_type. */
2643 tree
2644 create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
2645 int packed, int addressable)
2647 tree field_decl = build_decl (input_location, FIELD_DECL, name, type);
/* The new field belongs to RECORD_TYPE and is read-only iff its type is.  */
2649 DECL_CONTEXT (field_decl) = record_type;
2650 TREE_READONLY (field_decl) = TYPE_READONLY (type);
2652 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2653 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2654 Likewise for an aggregate without specified position that contains an
2655 array, because in this case slices of variable length of this array
2656 must be handled by GCC and variable-sized objects need to be aligned
2657 to at least a byte boundary. */
2658 if (packed && (TYPE_MODE (type) == BLKmode
2659 || (!pos
2660 && AGGREGATE_TYPE_P (type)
2661 && aggregate_type_contains_array_p (type))))
2662 SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);
2664 /* If a size is specified, use it. Otherwise, if the record type is packed
2665 compute a size to use, which may differ from the object's natural size.
2666 We always set a size in this case to trigger the checks for bitfield
2667 creation below, which is typically required when no position has been
2668 specified. */
2669 if (size)
2670 size = convert (bitsizetype, size);
2671 else if (packed == 1)
2673 size = rm_size (type);
2674 if (TYPE_MODE (type) == BLKmode)
2675 size = round_up (size, BITS_PER_UNIT);
2678 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2679 specified for two reasons: first if the size differs from the natural
2680 size. Second, if the alignment is insufficient. There are a number of
2681 ways the latter can be true.
2683 We never make a bitfield if the type of the field has a nonconstant size,
2684 because no such entity requiring bitfield operations should reach here.
2686 We do *preventively* make a bitfield when there might be the need for it
2687 but we don't have all the necessary information to decide, as is the case
2688 of a field with no specified position in a packed record.
2690 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2691 in layout_decl or finish_record_type to clear the bit_field indication if
2692 it is in fact not needed. */
2693 if (addressable >= 0
2694 && size
2695 && TREE_CODE (size) == INTEGER_CST
2696 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2697 && (!tree_int_cst_equal (size, TYPE_SIZE (type))
2698 || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
2699 || packed
2700 || (TYPE_ALIGN (record_type) != 0
2701 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
2703 DECL_BIT_FIELD (field_decl) = 1;
2704 DECL_SIZE (field_decl) = size;
2705 if (!packed && !pos)
2707 if (TYPE_ALIGN (record_type) != 0
2708 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
2709 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type));
2710 else
2711 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
/* A field at a fixed position is only considered packed if it was made a
   bitfield above; otherwise propagate the record's packedness.  */
2715 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2717 /* Bump the alignment if need be, either for bitfield/packing purposes or
2718 to satisfy the type requirements if no such consideration applies. When
2719 we get the alignment from the type, indicate if this is from an explicit
2720 user request, which prevents stor-layout from lowering it later on. */
2722 unsigned int bit_align
2723 = (DECL_BIT_FIELD (field_decl) ? 1
2724 : packed && TYPE_MODE (type) != BLKmode ? BITS_PER_UNIT : 0);
2726 if (bit_align > DECL_ALIGN (field_decl))
2727 SET_DECL_ALIGN (field_decl, bit_align);
2728 else if (!bit_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
2730 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
2731 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
2735 if (pos)
2737 /* We need to pass in the alignment the DECL is known to have.
2738 This is the lowest-order bit set in POS, but no more than
2739 the alignment of the record, if one is specified. Note
2740 that an alignment of 0 is taken as infinite. */
2741 unsigned int known_align;
2743 if (tree_fits_uhwi_p (pos))
2744 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
2745 else
2746 known_align = BITS_PER_UNIT;
2748 if (TYPE_ALIGN (record_type)
2749 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2750 known_align = TYPE_ALIGN (record_type);
2752 layout_decl (field_decl, known_align);
2753 SET_DECL_OFFSET_ALIGN (field_decl,
2754 tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
2755 : BITS_PER_UNIT);
2756 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2757 &DECL_FIELD_BIT_OFFSET (field_decl),
2758 DECL_OFFSET_ALIGN (field_decl), pos);
2761 /* In addition to what our caller says, claim the field is addressable if we
2762 know that its type is not suitable.
2764 The field may also be "technically" nonaddressable, meaning that even if
2765 we attempt to take the field's address we will actually get the address
2766 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2767 value we have at this point is not accurate enough, so we don't account
2768 for this here and let finish_record_type decide. */
2769 if (!addressable && !type_for_nonaliased_component_p (type))
2770 addressable = 1;
2772 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2774 return field_decl;
2777 /* Return a PARM_DECL node with NAME and TYPE. */
2779 tree
2780 create_param_decl (tree name, tree type)
2782 tree param_decl = build_decl (input_location, PARM_DECL, name, type);
2784 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2785 can lead to various ABI violations. */
2786 if (targetm.calls.promote_prototypes (NULL_TREE)
2787 && INTEGRAL_TYPE_P (type)
2788 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
2790 /* We have to be careful about biased types here. Make a subtype
2791 of integer_type_node with the proper biasing. */
2792 if (TREE_CODE (type) == INTEGER_TYPE
2793 && TYPE_BIASED_REPRESENTATION_P (type))
2795 tree subtype
2796 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2797 TREE_TYPE (subtype) = integer_type_node;
2798 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2799 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
2800 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
2801 type = subtype;
2803 else
2804 type = integer_type_node;
2807 DECL_ARG_TYPE (param_decl) = type;
2808 return param_decl;
2811 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2812 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2813 changed. GNAT_NODE is used for the position of error messages. */
2815 void
2816 process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2817 Node_Id gnat_node)
2819 struct attrib *attr;
2821 for (attr = *attr_list; attr; attr = attr->next)
2822 switch (attr->type)
/* Forward a GCC machine attribute to decl_attributes, using the Ada
   source location for any diagnostics it emits.  */
2824 case ATTR_MACHINE_ATTRIBUTE:
2825 Sloc_to_locus (Sloc (gnat_node), &input_location);
2826 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2827 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
2828 break;
/* Emit an assembler alias, but only for locally-defined entities.  */
2830 case ATTR_LINK_ALIAS:
2831 if (!DECL_EXTERNAL (*node))
2833 TREE_STATIC (*node) = 1;
2834 assemble_alias (*node, attr->name);
2836 break;
2838 case ATTR_WEAK_EXTERNAL:
2839 if (SUPPORTS_WEAK)
2840 declare_weak (*node);
2841 else
2842 post_error ("?weak declarations not supported on this target",
2843 attr->error_point);
2844 break;
/* Place the entity in a named section; this is incompatible with it
   being a common symbol, so clear DECL_COMMON.  */
2846 case ATTR_LINK_SECTION:
2847 if (targetm_common.have_named_sections)
2849 set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
2850 DECL_COMMON (*node) = 0;
2852 else
2853 post_error ("?section attributes are not supported for this target",
2854 attr->error_point);
2855 break;
2857 case ATTR_LINK_CONSTRUCTOR:
2858 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2859 TREE_USED (*node) = 1;
2860 break;
2862 case ATTR_LINK_DESTRUCTOR:
2863 DECL_STATIC_DESTRUCTOR (*node) = 1;
2864 TREE_USED (*node) = 1;
2865 break;
2867 case ATTR_THREAD_LOCAL_STORAGE:
2868 set_decl_tls_model (*node, decl_default_tls_model (*node));
2869 DECL_COMMON (*node) = 0;
2870 break;
/* The list has been fully applied; clear it so that callers do not
   process it a second time.  */
2873 *attr_list = NULL;
2876 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2877 a power of 2. */
2879 bool
2880 value_factor_p (tree value, HOST_WIDE_INT factor)
2882 if (tree_fits_uhwi_p (value))
2883 return tree_to_uhwi (value) % factor == 0;
2885 if (TREE_CODE (value) == MULT_EXPR)
2886 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2887 || value_factor_p (TREE_OPERAND (value, 1), factor));
2889 return false;
2892 /* Return whether GNAT_NODE is a defining identifier for a renaming that comes
2893 from the parameter association for the instantiation of a generic. We do
2894 not want to emit source location for them: the code generated for their
2895 initialization is likely to disturb debugging. */
2897 bool
2898 renaming_from_instantiation_p (Node_Id gnat_node)
2900 if (Nkind (gnat_node) != N_Defining_Identifier
2901 || !Is_Object (gnat_node)
2902 || Comes_From_Source (gnat_node)
2903 || !Present (Renamed_Object (gnat_node)))
2904 return false;
2906 /* Get the object declaration of the renamed object, if any and if the
2907 renamed object is a mere identifier. */
2908 gnat_node = Renamed_Object (gnat_node);
2909 if (Nkind (gnat_node) != N_Identifier)
2910 return false;
2912 gnat_node = Entity (gnat_node);
2913 if (!Present (Parent (gnat_node)))
2914 return false;
2916 gnat_node = Parent (gnat_node);
2917 return
2918 (Present (gnat_node)
2919 && Nkind (gnat_node) == N_Object_Declaration
2920 && Present (Corresponding_Generic_Association (gnat_node)));
2923 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
2924 feed it with the elaboration of GNAT_SCOPE. */
2926 static struct deferred_decl_context_node *
2927 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2929 struct deferred_decl_context_node *new_node;
2931 new_node
2932 = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
2933 new_node->decl = decl;
2934 new_node->gnat_scope = gnat_scope;
2935 new_node->force_global = force_global;
2936 new_node->types.create (1);
2937 new_node->next = deferred_decl_context_queue;
2938 deferred_decl_context_queue = new_node;
2939 return new_node;
2942 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
2943 feed it with the DECL_CONTEXT computed as part of N as soon as it is
2944 computed. */
2946 static void
2947 add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
2949 n->types.safe_push (type);
2952 /* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
2953 NULL_TREE if it is not available. */
2955 static tree
2956 compute_deferred_decl_context (Entity_Id gnat_scope)
2958 tree context;
2960 if (present_gnu_tree (gnat_scope))
2961 context = get_gnu_tree (gnat_scope);
2962 else
2963 return NULL_TREE;
2965 if (TREE_CODE (context) == TYPE_DECL)
2967 const tree context_type = TREE_TYPE (context);
2969 /* Skip dummy types: only the final ones can appear in the context
2970 chain. */
2971 if (TYPE_DUMMY_P (context_type))
2972 return NULL_TREE;
2974 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
2975 chain. */
2976 else
2977 context = context_type;
2980 return context;
2983 /* Try to process all deferred nodes in the queue. Keep in the queue the ones
2984 that cannot be processed yet, remove the other ones. If FORCE is true,
2985 force the processing for all nodes, use the global context when nodes don't
2986 have a GNU translation. */
2988 void
2989 process_deferred_decl_context (bool force)
/* IT is a pointer to the link of the current node so that nodes can be
   unlinked from the queue in place.  */
2991 struct deferred_decl_context_node **it = &deferred_decl_context_queue;
2992 struct deferred_decl_context_node *node;
2994 while (*it)
2996 bool processed = false;
2997 tree context = NULL_TREE;
2998 Entity_Id gnat_scope;
3000 node = *it;
3002 /* If FORCE, get the innermost elaborated scope. Otherwise, just try to
3003 get the first scope. */
3004 gnat_scope = node->gnat_scope;
3005 while (Present (gnat_scope))
3007 context = compute_deferred_decl_context (gnat_scope);
3008 if (!force || context)
3009 break;
3010 gnat_scope = get_debug_scope (gnat_scope, NULL)
3013 /* Imported declarations must not be in a local context (i.e. not inside
3014 a function). */
3015 if (context && node->force_global > 0)
3017 tree ctx = context;
3019 while (ctx)
3021 gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
3022 ctx = DECL_P (ctx) ? DECL_CONTEXT (ctx) : TYPE_CONTEXT (ctx);
3026 /* If FORCE, we want to get rid of all nodes in the queue: in case there
3027 was no elaborated scope, use the global context. */
3028 if (force && !context)
3029 context = get_global_context ();
3031 if (context)
3033 tree t;
3034 int i;
3036 DECL_CONTEXT (node->decl) = context;
3038 /* Propagate it to the TYPE_CONTEXT attributes of the requested
3039 ..._TYPE nodes. */
3040 FOR_EACH_VEC_ELT (node->types, i, t)
3042 gnat_set_type_context (t, context);
3044 processed = true;
3047 /* If this node has been successfully processed, remove it from the
3048 queue. Then move to the next node. */
3049 if (processed)
3051 *it = node->next;
3052 node->types.release ();
3053 free (node);
3055 else
3056 it = &node->next;
3060 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
3062 static unsigned int
3063 scale_by_factor_of (tree expr, unsigned int value)
3065 unsigned HOST_WIDE_INT addend = 0;
3066 unsigned HOST_WIDE_INT factor = 1;
3068 /* Peel conversions around EXPR and try to extract bodies from function
3069 calls: it is possible to get the scale factor from size functions. */
3070 expr = remove_conversions (expr, true);
3071 if (TREE_CODE (expr) == CALL_EXPR)
3072 expr = maybe_inline_call_in_expr (expr);
3074 /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
3075 multiple of the scale factor we are looking for. */
3076 if (TREE_CODE (expr) == PLUS_EXPR
3077 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3078 && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
3080 addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3081 expr = TREE_OPERAND (expr, 0);
3084 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
3085 corresponding to the number of trailing zeros of the mask. */
3086 if (TREE_CODE (expr) == BIT_AND_EXPR
3087 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
3089 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3090 unsigned int i = 0;
/* Count the trailing zero bits of the mask; the bound on I guards
   against an all-zero mask.  */
3092 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
3094 mask >>= 1;
3095 factor *= 2;
3096 i++;
3100 /* If the addend is not a multiple of the factor we found, give up. In
3101 theory we could find a smaller common factor but it's useless for our
3102 needs. This situation arises when dealing with a field F1 with no
3103 alignment requirement but that is following a field F2 with such
3104 requirements. As long as we have F2's offset, we don't need alignment
3105 information to compute F1's. */
3106 if (addend % factor != 0)
3107 factor = 1;
3109 return factor * value;
3112 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
3113 unless we can prove these 2 fields are laid out in such a way that no gap
3114 exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
3115 is the distance in bits between the end of PREV_FIELD and the starting
3116 position of CURR_FIELD. It is ignored if null. */
3118 static bool
3119 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
3121 /* If this is the first field of the record, there cannot be any gap */
3122 if (!prev_field)
3123 return false;
3125 /* If the previous field is a union type, then return false: The only
3126 time when such a field is not the last field of the record is when
3127 there are other components at fixed positions after it (meaning there
3128 was a rep clause for every field), in which case we don't want the
3129 alignment constraint to override them. */
3130 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
3131 return false;
3133 /* If the distance between the end of prev_field and the beginning of
3134 curr_field is constant, then there is a gap if the value of this
3135 constant is not null. */
3136 if (offset && tree_fits_uhwi_p (offset))
3137 return !integer_zerop (offset);
3139 /* If the size and position of the previous field are constant,
3140 then check the sum of this size and position. There will be a gap
3141 iff it is not multiple of the current field alignment. */
3142 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
3143 && tree_fits_uhwi_p (bit_position (prev_field)))
3144 return ((tree_to_uhwi (bit_position (prev_field))
3145 + tree_to_uhwi (DECL_SIZE (prev_field)))
3146 % DECL_ALIGN (curr_field) != 0);
3148 /* If both the position and size of the previous field are multiples
3149 of the current field alignment, there cannot be any gap. */
3150 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
3151 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
3152 return false;
3154 /* Fallback, return that there may be a potential gap */
3155 return true;
3158 /* Return a LABEL_DECL with NAME. GNAT_NODE is used for the position of
3159 the decl. */
3161 tree
3162 create_label_decl (tree name, Node_Id gnat_node)
3164 tree label_decl
3165 = build_decl (input_location, LABEL_DECL, name, void_type_node);
3167 SET_DECL_MODE (label_decl, VOIDmode);
3169 /* Add this decl to the current binding level. */
3170 gnat_pushdecl (label_decl, gnat_node);
3172 return label_decl;
3175 /* Return a FUNCTION_DECL node. NAME is the name of the subprogram, ASM_NAME
3176 its assembler name, TYPE its type (a FUNCTION_TYPE node), PARAM_DECL_LIST
3177 the list of its parameters (a list of PARM_DECL nodes chained through the
3178 DECL_CHAIN field).
3180 INLINE_STATUS describes the inline flags to be set on the FUNCTION_DECL.
3182 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
3183 definition to be made visible outside of the current compilation unit.
3185 EXTERN_FLAG is true when processing an external subprogram declaration.
3187 ARTIFICIAL_P is true if the subprogram was generated by the compiler.
3189 DEBUG_INFO_P is true if we need to write debug information for it.
3191 DEFINITION is true if the subprogram is to be considered as a definition.
3193 ATTR_LIST is the list of attributes to be attached to the subprogram.
3195 GNAT_NODE is used for the position of the decl. */
3197 tree
3198 create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
3199 enum inline_status_t inline_status, bool public_flag,
3200 bool extern_flag, bool artificial_p, bool debug_info_p,
3201 bool definition, struct attrib *attr_list,
3202 Node_Id gnat_node)
3204 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
3205 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
3207 DECL_ARTIFICIAL (subprog_decl) = artificial_p;
3208 DECL_EXTERNAL (subprog_decl) = extern_flag;
3209 TREE_PUBLIC (subprog_decl) = public_flag;
3211 if (!debug_info_p)
3212 DECL_IGNORED_P (subprog_decl) = 1;
3213 if (definition)
3214 DECL_FUNCTION_IS_DEF (subprog_decl) = 1;
3216 switch (inline_status)
/* Inlining is explicitly suppressed: forbid the inliner to touch it.  */
3218 case is_suppressed:
3219 DECL_UNINLINABLE (subprog_decl) = 1;
3220 break;
/* Inlining neither requested nor forbidden: leave the default flags.  */
3222 case is_disabled:
3223 break;
3225 case is_required:
3226 if (Back_End_Inlining)
3228 decl_attributes (&subprog_decl,
3229 tree_cons (get_identifier ("always_inline"),
3230 NULL_TREE, NULL_TREE),
3231 ATTR_FLAG_TYPE_IN_PLACE);
3233 /* Inline_Always guarantees that every direct call is inlined and
3234 that there is no indirect reference to the subprogram, so the
3235 instance in the original package (as well as its clones in the
3236 client packages created for inter-unit inlining) can be made
3237 private, which causes the out-of-line body to be eliminated. */
3238 TREE_PUBLIC (subprog_decl) = 0;
3241 /* ... fall through ... */
3243 case is_enabled:
3244 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
3245 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
3246 break;
3248 default:
3249 gcc_unreachable ();
3252 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
3254 /* Once everything is processed, finish the subprogram declaration. */
3255 finish_subprog_decl (subprog_decl, asm_name, type);
3257 /* Add this decl to the current binding level. */
3258 gnat_pushdecl (subprog_decl, gnat_node);
3260 /* Output the assembler code and/or RTL for the declaration. */
3261 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
3263 return subprog_decl;
3266 /* Given a subprogram declaration DECL, its assembler name and its type,
3267 finish constructing the subprogram declaration from ASM_NAME and TYPE. */
3269 void
3270 finish_subprog_decl (tree decl, tree asm_name, tree type)
/* Build the RESULT_DECL that will hold the function's return value.  */
3272 tree result_decl
3273 = build_decl (DECL_SOURCE_LOCATION (decl), RESULT_DECL, NULL_TREE,
3274 TREE_TYPE (type));
3276 DECL_ARTIFICIAL (result_decl) = 1;
3277 DECL_IGNORED_P (result_decl) = 1;
3278 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
3279 DECL_RESULT (decl) = result_decl;
/* Mirror the type's qualifiers onto the decl itself.  */
3281 TREE_READONLY (decl) = TYPE_READONLY (type);
3282 TREE_SIDE_EFFECTS (decl) = TREE_THIS_VOLATILE (decl) = TYPE_VOLATILE (type);
3284 if (asm_name)
3286 /* Let the target mangle the name if this isn't a verbatim asm. */
3287 if (*IDENTIFIER_POINTER (asm_name) != '*')
3288 asm_name = targetm.mangle_decl_assembler_name (decl, asm_name);
3290 SET_DECL_ASSEMBLER_NAME (decl, asm_name);
3292 /* The expand_main_function circuitry expects "main_identifier_node" to
3293 designate the DECL_NAME of the 'main' entry point, in turn expected
3294 to be declared as the "main" function literally by default. Ada
3295 program entry points are typically declared with a different name
3296 within the binder generated file, exported as 'main' to satisfy the
3297 system expectations. Force main_identifier_node in this case. */
3298 if (asm_name == main_identifier_node)
3299 DECL_NAME (decl) = main_identifier_node;
3303 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3304 body. This routine needs to be invoked before processing the declarations
3305 appearing in the subprogram. */
3307 void
3308 begin_subprog_body (tree subprog_decl)
3310 tree param_decl;
3312 announce_function (subprog_decl);
3314 /* This function is being defined. */
3315 TREE_STATIC (subprog_decl) = 1;
3317 /* The failure of this assertion will likely come from a wrong context for
3318 the subprogram body, e.g. another procedure for a procedure declared at
3319 library level. */
3320 gcc_assert (current_function_decl == decl_function_context (subprog_decl));
3322 current_function_decl = subprog_decl;
3324 /* Enter a new binding level and show that all the parameters belong to
3325 this function. */
3326 gnat_pushlevel ();
3328 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
3329 param_decl = DECL_CHAIN (param_decl))
3330 DECL_CONTEXT (param_decl) = subprog_decl;
/* Create the RTL for this declaration now that it is being defined.  */
3332 make_decl_rtl (subprog_decl);
3335 /* Finish translating the current subprogram and set its BODY. */
3337 void
3338 end_subprog_body (tree body)
3340 tree fndecl = current_function_decl;
3342 /* Attach the BLOCK for this level to the function and pop the level. */
3343 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
3344 DECL_INITIAL (fndecl) = current_binding_level->block;
3345 gnat_poplevel ();
3347 /* Mark the RESULT_DECL as being in this subprogram. */
3348 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
3350 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
3351 if (TREE_CODE (body) == BIND_EXPR)
3353 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
3354 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
3357 DECL_SAVED_TREE (fndecl) = body;
/* Restore the enclosing function, if any, as the current one.  */
3359 current_function_decl = decl_function_context (fndecl);
3362 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
3364 void
3365 rest_of_subprog_body_compilation (tree subprog_decl)
3367 /* We cannot track the location of errors past this point. */
3368 error_gnat_node = Empty;
3370 /* If we're only annotating types, don't actually compile this function. */
3371 if (type_annotate_only)
3372 return;
3374 /* Dump functions before gimplification. */
3375 dump_function (TDI_original, subprog_decl);
/* Hand top-level functions over to the callgraph for actual compilation;
   nested functions are compiled along with their parent.  */
3377 if (!decl_function_context (subprog_decl))
3378 cgraph_node::finalize_function (subprog_decl, false);
3379 else
3380 /* Register this function with cgraph just far enough to get it
3381 added to our parent's nested function list. */
3382 (void) cgraph_node::get_create (subprog_decl);
3385 tree
3386 gnat_builtin_function (tree decl)
3388 gnat_pushdecl (decl, Empty);
3389 return decl;
3392 /* Return an integer type with the number of bits of precision given by
3393 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3394 it is a signed type. */
3396 tree
3397 gnat_type_for_size (unsigned precision, int unsignedp)
3399 tree t;
3400 char type_name[20];
3402 if (precision <= 2 * MAX_BITS_PER_WORD
3403 && signed_and_unsigned_types[precision][unsignedp])
3404 return signed_and_unsigned_types[precision][unsignedp];
3406 if (unsignedp)
3407 t = make_unsigned_type (precision);
3408 else
3409 t = make_signed_type (precision);
3410 TYPE_ARTIFICIAL (t) = 1;
3412 if (precision <= 2 * MAX_BITS_PER_WORD)
3413 signed_and_unsigned_types[precision][unsignedp] = t;
3415 if (!TYPE_NAME (t))
3417 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
3418 TYPE_NAME (t) = get_identifier (type_name);
3421 return t;
3424 /* Likewise for floating-point types. */
3426 static tree
3427 float_type_for_precision (int precision, machine_mode mode)
3429 tree t;
3430 char type_name[20];
3432 if (float_types[(int) mode])
3433 return float_types[(int) mode];
3435 float_types[(int) mode] = t = make_node (REAL_TYPE);
3436 TYPE_PRECISION (t) = precision;
3437 layout_type (t);
3439 gcc_assert (TYPE_MODE (t) == mode);
3440 if (!TYPE_NAME (t))
3442 sprintf (type_name, "FLOAT_%d", precision);
3443 TYPE_NAME (t) = get_identifier (type_name);
3446 return t;
3449 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
3450 an unsigned type; otherwise a signed type is returned. */
3452 tree
3453 gnat_type_for_mode (machine_mode mode, int unsignedp)
3455 if (mode == BLKmode)
3456 return NULL_TREE;
3458 if (mode == VOIDmode)
3459 return void_type_node;
3461 if (COMPLEX_MODE_P (mode))
3462 return NULL_TREE;
3464 scalar_float_mode float_mode;
3465 if (is_a <scalar_float_mode> (mode, &float_mode))
3466 return float_type_for_precision (GET_MODE_PRECISION (float_mode),
3467 float_mode);
3469 scalar_int_mode int_mode;
3470 if (is_a <scalar_int_mode> (mode, &int_mode))
3471 return gnat_type_for_size (GET_MODE_BITSIZE (int_mode), unsignedp);
3473 if (VECTOR_MODE_P (mode))
3475 machine_mode inner_mode = GET_MODE_INNER (mode);
3476 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3477 if (inner_type)
3478 return build_vector_type_for_mode (inner_type, mode);
3481 return NULL_TREE;
3484 /* Return the signed or unsigned version of TYPE_NODE, a scalar type, the
3485 signedness being specified by UNSIGNEDP. */
3487 tree
3488 gnat_signed_or_unsigned_type_for (int unsignedp, tree type_node)
3490 if (type_node == char_type_node)
3491 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3493 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), unsignedp);
3495 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3497 type = copy_type (type);
3498 TREE_TYPE (type) = type_node;
3500 else if (TREE_TYPE (type_node)
3501 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3502 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3504 type = copy_type (type);
3505 TREE_TYPE (type) = TREE_TYPE (type_node);
3508 return type;
3511 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
3512 transparently converted to each other. */
3515 gnat_types_compatible_p (tree t1, tree t2)
3517 enum tree_code code;
3519 /* This is the default criterion. */
3520 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
3521 return 1;
3523 /* We only check structural equivalence here. */
3524 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
3525 return 0;
3527 /* Vector types are also compatible if they have the same number of subparts
3528 and the same form of (scalar) element type. */
3529 if (code == VECTOR_TYPE
3530 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
3531 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3532 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
3533 return 1;
3535 /* Array types are also compatible if they are constrained and have the same
3536 domain(s), the same component type and the same scalar storage order.
   Multi-dimensional arrays recurse on the component type.  */
3537 if (code == ARRAY_TYPE
3538 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
3539 || (TYPE_DOMAIN (t1)
3540 && TYPE_DOMAIN (t2)
3541 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
3542 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
3543 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
3544 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
3545 && (TREE_TYPE (t1) == TREE_TYPE (t2)
3546 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
3547 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
3548 && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
3549 return 1;
3551 return 0;
3554 /* Return true if EXPR is a useless type conversion. */
3556 bool
3557 gnat_useless_type_conversion (tree expr)
3559 if (CONVERT_EXPR_P (expr)
3560 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3561 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3562 return gnat_types_compatible_p (TREE_TYPE (expr),
3563 TREE_TYPE (TREE_OPERAND (expr, 0)));
3565 return false;
3568 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3570 bool
3571 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3572 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3574 return TYPE_CI_CO_LIST (t) == cico_list
3575 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3576 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3577 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);
  tree op0, op1, op2;

  switch (TREE_CODE_CLASS (code))
    {
    /* Declarations (e.g. the discriminant outside a record) and constants
       have no sub-expressions to rewrite.  */
    case tcc_declaration:
    case tcc_constant:
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
	{
	  tree t, *argarray;
	  int n, i;

	  /* Try to inline the call first: the inlined body may expose
	     discriminant references that we can then bound recursively.  */
	  t = maybe_inline_call_in_expr (exp);
	  if (t)
	    return max_size (t, max_p);

	  /* Otherwise rebuild the call with each argument bounded.  Note
	     that MAX_P is passed unchanged to the arguments; whether that
	     is conservative depends on the callee being monotonic in its
	     arguments -- presumably guaranteed for the size functions the
	     front-end generates.  */
	  n = call_expr_nargs (exp);
	  gcc_assert (n > 0);
	  argarray = XALLOCAVEC (tree, n);
	  for (i = 0; i < n; i++)
	    argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
	  return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
	}
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (CONTAINS_PLACEHOLDER_P (exp))
	{
	  /* Operand 1 here looks like the FIELD_DECL of a COMPONENT_REF
	     whose base is the placeholder -- the discriminant field.
	     Replace the whole reference by the extreme value of its type,
	     converted through the base type to strip any biasing.  */
	  tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
	  tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
	  return
	    convert (type,
		     max_size (convert (get_base_type (val_type), val), true));
	}

      return exp;

    case tcc_comparison:
      /* A comparison is bounded by its boolean extremes.  */
      return build_int_cst (type, max_p ? 1 : 0);

    case tcc_unary:
      if (code == NON_LVALUE_EXPR)
	return max_size (TREE_OPERAND (exp, 0), max_p);

      /* Negation flips min and max, so recurse with the polarity
	 inverted in that case.  */
      op0 = max_size (TREE_OPERAND (exp, 0),
		      code == NEGATE_EXPR ? !max_p : max_p);

      if (op0 == TREE_OPERAND (exp, 0))
	return exp;

      return fold_build1 (code, type, op0);

    case tcc_binary:
      {
	tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	/* Subtraction is anti-monotonic in its second operand, hence the
	   polarity flip for the RHS of a MINUS_EXPR.  */
	tree rhs = max_size (TREE_OPERAND (exp, 1),
			     code == MINUS_EXPR ? !max_p : max_p);

	/* Special-case wanting the maximum value of a MIN_EXPR.
	   In that case, if one side overflows, return the other.  */
	if (max_p && code == MIN_EXPR)
	  {
	    if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
	      return lhs;

	    if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
	      return rhs;
	  }

	/* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	   overflowing and the RHS a variable.  */
	if ((code == MINUS_EXPR || code == PLUS_EXPR)
	    && TREE_CODE (lhs) == INTEGER_CST
	    && TREE_OVERFLOW (lhs)
	    && TREE_CODE (rhs) != INTEGER_CST)
	  return lhs;

	/* If we are going to subtract a "negative" value in an unsigned type,
	   do the operation as an addition of the negated value, in order to
	   avoid creating a spurious overflow below.  */
	if (code == MINUS_EXPR
	    && TYPE_UNSIGNED (type)
	    && TREE_CODE (rhs) == INTEGER_CST
	    && !TREE_OVERFLOW (rhs)
	    && tree_int_cst_sign_bit (rhs) != 0)
	  {
	    rhs = fold_build1 (NEGATE_EXPR, type, rhs);
	    code = PLUS_EXPR;
	  }

	if (lhs == TREE_OPERAND (exp, 0) && rhs == TREE_OPERAND (exp, 1))
	  return exp;

	/* We need to detect overflows so we call size_binop here.  */
	return size_binop (code, lhs, rhs);
      }

    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  /* A SAVE_EXPR must be returned as-is to preserve sharing.  */
	  if (code == SAVE_EXPR)
	    return exp;

	  /* Logical negation flips min and max, like arithmetic negation
	     above.  */
	  op0 = max_size (TREE_OPERAND (exp, 0),
			  code == TRUTH_NOT_EXPR ? !max_p : max_p);

	  if (op0 == TREE_OPERAND (exp, 0))
	    return exp;

	  return fold_build1 (code, type, op0);

	case 2:
	  /* Only the second operand of a COMPOUND_EXPR contributes the
	     value; the first is evaluated for side effects only.  */
	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  op0 = max_size (TREE_OPERAND (exp, 0), max_p);
	  op1 = max_size (TREE_OPERAND (exp, 1), max_p);

	  if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	    return exp;

	  return fold_build2 (code, type, op0, op1);

	case 3:
	  if (code == COND_EXPR)
	    {
	      op1 = TREE_OPERAND (exp, 1);
	      op2 = TREE_OPERAND (exp, 2);

	      if (!op1 || !op2)
		return exp;

	      /* The condition may depend on a discriminant, so take the
		 extremum over both arms instead of picking one.  */
	      return
		fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
			     max_size (op1, max_p), max_size (op2, max_p));
	    }
	  break;

	default:
	  break;
	}

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  vec<constructor_elt, va_gc> *template_elts = NULL;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip padding and justified-modular wrappers to get at the actual
     array type underneath.  */
  while (TREE_CODE (array_type) == RECORD_TYPE
	 && (TYPE_PADDING_P (array_type)
	     || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  /* An INTEGER_TYPE with TYPE_HAS_ACTUAL_BOUNDS_P is apparently how a
     packed-array implementation type records its original bounds.  */
  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.  */

  /* The template has two fields (min, max) per dimension, hence the
     double DECL_CHAIN step; the secondary cursor advances either down
     the bound list or into the element type, one dimension at a time.  */
  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = DECL_CHAIN (DECL_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gcc_unreachable ();

      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
      CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
    }

  return gnat_build_constructor (template_type, template_elts);
}
3807 /* Return true if TYPE is suitable for the element type of a vector. */
3809 static bool
3810 type_for_vector_element_p (tree type)
3812 machine_mode mode;
3814 if (!INTEGRAL_TYPE_P (type)
3815 && !SCALAR_FLOAT_TYPE_P (type)
3816 && !FIXED_POINT_TYPE_P (type))
3817 return false;
3819 mode = TYPE_MODE (type);
3820 if (GET_MODE_CLASS (mode) != MODE_INT
3821 && !SCALAR_FLOAT_MODE_P (mode)
3822 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3823 return false;
3825 return true;
3828 /* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3829 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3830 attribute declaration and want to issue error messages on failure. */
3832 static tree
3833 build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3835 unsigned HOST_WIDE_INT size_int, inner_size_int;
3836 int nunits;
3838 /* Silently punt on variable sizes. We can't make vector types for them,
3839 need to ignore them on front-end generated subtypes of unconstrained
3840 base types, and this attribute is for binding implementors, not end
3841 users, so we should never get there from legitimate explicit uses. */
3842 if (!tree_fits_uhwi_p (size))
3843 return NULL_TREE;
3844 size_int = tree_to_uhwi (size);
3846 if (!type_for_vector_element_p (inner_type))
3848 if (attribute)
3849 error ("invalid element type for attribute %qs",
3850 IDENTIFIER_POINTER (attribute));
3851 return NULL_TREE;
3853 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3855 if (size_int % inner_size_int)
3857 if (attribute)
3858 error ("vector size not an integral multiple of component size");
3859 return NULL_TREE;
3862 if (size_int == 0)
3864 if (attribute)
3865 error ("zero vector size");
3866 return NULL_TREE;
3869 nunits = size_int / inner_size_int;
3870 if (nunits & (nunits - 1))
3872 if (attribute)
3873 error ("number of components of vector not a power of two");
3874 return NULL_TREE;
3877 return build_vector_type (inner_type, nunits);
3880 /* Return a vector type whose representative array type is ARRAY_TYPE, or
3881 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3882 processing the attribute and want to issue error messages on failure. */
3884 static tree
3885 build_vector_type_for_array (tree array_type, tree attribute)
3887 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3888 TYPE_SIZE_UNIT (array_type),
3889 attribute);
3890 if (!vector_type)
3891 return NULL_TREE;
3893 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3894 return vector_type;
/* Build a type to be used to represent an aliased object whose nominal type
   is an unconstrained array.  This consists of a RECORD_TYPE containing a
   field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
   If ARRAY_TYPE is that of an unconstrained array, this is used to represent
   an arbitrary unconstrained object.  Use NAME as the name of the record.
   DEBUG_INFO_P is true if we need to write debug information for the type.  */

tree
build_unc_object_type (tree template_type, tree object_type, tree name,
		       bool debug_info_p)
{
  tree decl;
  tree type = make_node (RECORD_TYPE);
  /* The bounds (template) come first, then the array data itself.  */
  tree template_field
    = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
			 NULL_TREE, NULL_TREE, 0, 1);
  tree array_field
    = create_field_decl (get_identifier ("ARRAY"), object_type, type,
			 NULL_TREE, NULL_TREE, 0, 1);

  TYPE_NAME (type) = name;
  TYPE_CONTAINS_TEMPLATE_P (type) = 1;
  DECL_CHAIN (template_field) = array_field;
  finish_record_type (type, template_field, 0, true);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  decl = create_type_decl (name, type, true, debug_info_p, Empty);

  /* template_type will not be used elsewhere than here, so to keep the debug
     info clean and in order to avoid scoping issues, make decl its
     context.  */
  gnat_set_type_context (template_type, decl);

  return type;
}
3934 /* Same, taking a thin or fat pointer type instead of a template type. */
3936 tree
3937 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3938 tree name, bool debug_info_p)
3940 tree template_type;
3942 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3944 template_type
3945 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3946 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3947 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3949 return
3950 build_unc_object_type (template_type, object_type, name, debug_info_p);
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
   In the normal case this is just two adjustments, but we have more to
   do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree t;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
      update_pointer_to (t, new_type);

  /* If no pointers and no references, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it; after the designated type's freeze, we get here with
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
     when the access type was processed.  We will make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy OLD_TYPE variant with different
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
     the qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
  new_type
    = build_qualified_type (new_type,
			    TYPE_QUALS (old_type) | TYPE_QUALS (new_type));

  /* If old type and new type are identical, there is nothing to do.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      tree new_ptr, new_ref;

      /* If pointer or reference already points to new type, nothing to do.
	 This can happen as update_pointer_to can be invoked multiple times
	 on the same couple of types because of the type variants.  */
      if ((ptr && TREE_TYPE (ptr) == new_type)
	  || (ref && TREE_TYPE (ref) == new_type))
	return;

      /* Chain PTR and its variants at the end.  */
      new_ptr = TYPE_POINTER_TO (new_type);
      if (new_ptr)
	{
	  while (TYPE_NEXT_PTR_TO (new_ptr))
	    new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
	  TYPE_NEXT_PTR_TO (new_ptr) = ptr;
	}
      else
	TYPE_POINTER_TO (new_type) = ptr;

      /* Now adjust them.  For each pointer, retarget every variant, and
	 also retarget the null-bounds constant hanging off thin pointers
	 (its ADDR_EXPR operand must designate the new type too).  */
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
	  {
	    TREE_TYPE (t) = new_type;
	    if (TYPE_NULL_BOUNDS (t))
	      TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
	  }

      /* Chain REF and its variants at the end.  */
      new_ref = TYPE_REFERENCE_TO (new_type);
      if (new_ref)
	{
	  while (TYPE_NEXT_REF_TO (new_ref))
	    new_ref = TYPE_NEXT_REF_TO (new_ref);
	  TYPE_NEXT_REF_TO (new_ref) = ref;
	}
      else
	TYPE_REFERENCE_TO (new_type) = ref;

      /* Now adjust them.  */
      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
	for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
	  TREE_TYPE (t) = new_type;

      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }

  /* Now deal with the unconstrained array case.  In this case the pointer
     is actually a record where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.
     Likewise for the pointer to the object record (thin pointer).  */
  else
    {
      tree new_ptr = TYPE_POINTER_TO (new_type);

      gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));

      /* If PTR already points to NEW_TYPE, nothing to do.  This can happen
	 since update_pointer_to can be invoked multiple times on the same
	 couple of types because of the type variants.  */
      if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
	return;

      /* Retarget the array pointer (first field of the fat pointer).  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));

      /* Retarget the template pointer (second field).  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
	 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));

      /* And do the same for the thin pointer's object record.  */
      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
			 TYPE_OBJECT_RECORD_TYPE (new_type));

      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }
}
/* Convert EXPR, a pointer to a constrained array, into a pointer to an
   unconstrained one.  This involves making or finding a template.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  /* TYPE is a fat pointer record: first field points to the array data,
     second field points to the template (bounds).  */
  tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
  tree etype = TREE_TYPE (expr);
  tree template_addr;
  vec<constructor_elt, va_gc> *v;
  vec_alloc (v, 2);

  /* If EXPR is null, make a fat pointer that contains a null pointer to the
     array (compare_fat_pointers ensures that this is the full discriminant)
     and a valid pointer to the bounds.  This latter property is necessary
     since the compiler can hoist the load of the bounds done through it.  */
  if (integer_zerop (expr))
    {
      tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      tree null_bounds, t;

      /* Reuse the cached null-bounds object if one was already made for
	 this pointer-to-template type.  */
      if (TYPE_NULL_BOUNDS (ptr_template_type))
	null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
      else
	{
	  /* The template type can still be dummy at this point so we build an
	     empty constructor.  The middle-end will fill it in with zeros.  */
	  t = build_constructor (template_type, NULL);
	  TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
	  null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
	  SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
	}

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      fold_convert (p_array_type, null_pointer_node));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
      t = build_constructor (type, v);
      /* Do not set TREE_CONSTANT so as to force T to static memory.  */
      TREE_CONSTANT (t) = 0;
      TREE_STATIC (t) = 1;

      return t;
    }

  /* If EXPR is a thin pointer, make template and data from the record.  */
  if (TYPE_IS_THIN_POINTER_P (etype))
    {
      tree field = TYPE_FIELDS (TREE_TYPE (etype));

      /* EXPR is used twice below, so protect it against double evaluation.  */
      expr = gnat_protect_expr (expr);

      /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
	 the thin pointer value has been shifted so we shift it back to get
	 the template address.  */
      if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
	{
	  template_addr
	    = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
			       fold_build1 (NEGATE_EXPR, sizetype,
					    byte_position
					    (DECL_CHAIN (field))));
	  template_addr
	    = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
			    template_addr);
	}

      /* Otherwise we explicitly take the address of the fields.  */
      else
	{
	  expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
	  template_addr
	    = build_unary_op (ADDR_EXPR, NULL_TREE,
			      build_component_ref (expr, field, false));
	  expr = build_unary_op (ADDR_EXPR, NULL_TREE,
				 build_component_ref (expr, DECL_CHAIN (field),
						      false));
	}
    }

  /* Otherwise, build the constructor for the template.  */
  else
    template_addr
      = build_unary_op (ADDR_EXPR, NULL_TREE,
			build_template (template_type, TREE_TYPE (etype),
					expr));

  /* The final result is a constructor for the fat pointer.

     If EXPR is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     COMPONENT_REFs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine because it
     will only refer to the provided TEMPLATE_TYPE in this case.  */
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
  return gnat_build_constructor (type, v);
}
4188 /* Create an expression whose value is that of EXPR,
4189 converted to type TYPE. The TREE_TYPE of the value
4190 is always TYPE. This function implements all reasonable
4191 conversions; callers should filter out those that are
4192 not permitted by the language being compiled. */
4194 tree
4195 convert (tree type, tree expr)
4197 tree etype = TREE_TYPE (expr);
4198 enum tree_code ecode = TREE_CODE (etype);
4199 enum tree_code code = TREE_CODE (type);
4201 /* If the expression is already of the right type, we are done. */
4202 if (etype == type)
4203 return expr;
4205 /* If both input and output have padding and are of variable size, do this
4206 as an unchecked conversion. Likewise if one is a mere variant of the
4207 other, so we avoid a pointless unpad/repad sequence. */
4208 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
4209 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4210 && (!TREE_CONSTANT (TYPE_SIZE (type))
4211 || !TREE_CONSTANT (TYPE_SIZE (etype))
4212 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4213 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4214 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4217 /* If the output type has padding, convert to the inner type and make a
4218 constructor to build the record, unless a variable size is involved. */
4219 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
4221 vec<constructor_elt, va_gc> *v;
4223 /* If we previously converted from another type and our type is
4224 of variable size, remove the conversion to avoid the need for
4225 variable-sized temporaries. Likewise for a conversion between
4226 original and packable version. */
4227 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4228 && (!TREE_CONSTANT (TYPE_SIZE (type))
4229 || (ecode == RECORD_TYPE
4230 && TYPE_NAME (etype)
4231 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4232 expr = TREE_OPERAND (expr, 0);
4234 /* If we are just removing the padding from expr, convert the original
4235 object if we have variable size in order to avoid the need for some
4236 variable-sized temporaries. Likewise if the padding is a variant
4237 of the other, so we avoid a pointless unpad/repad sequence. */
4238 if (TREE_CODE (expr) == COMPONENT_REF
4239 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4240 && (!TREE_CONSTANT (TYPE_SIZE (type))
4241 || TYPE_MAIN_VARIANT (type)
4242 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
4243 || (ecode == RECORD_TYPE
4244 && TYPE_NAME (etype)
4245 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4246 return convert (type, TREE_OPERAND (expr, 0));
4248 /* If the inner type is of self-referential size and the expression type
4249 is a record, do this as an unchecked conversion. But first pad the
4250 expression if possible to have the same size on both sides. */
4251 if (ecode == RECORD_TYPE
4252 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
4254 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
4255 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4256 false, false, false, true),
4257 expr);
4258 return unchecked_convert (type, expr, false);
4261 /* If we are converting between array types with variable size, do the
4262 final conversion as an unchecked conversion, again to avoid the need
4263 for some variable-sized temporaries. If valid, this conversion is
4264 very likely purely technical and without real effects. */
4265 if (ecode == ARRAY_TYPE
4266 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4267 && !TREE_CONSTANT (TYPE_SIZE (etype))
4268 && !TREE_CONSTANT (TYPE_SIZE (type)))
4269 return unchecked_convert (type,
4270 convert (TREE_TYPE (TYPE_FIELDS (type)),
4271 expr),
4272 false);
4274 vec_alloc (v, 1);
4275 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4276 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4277 return gnat_build_constructor (type, v);
4280 /* If the input type has padding, remove it and convert to the output type.
4281 The conditions ordering is arranged to ensure that the output type is not
4282 a padding type here, as it is not clear whether the conversion would
4283 always be correct if this was to happen. */
4284 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4286 tree unpadded;
4288 /* If we have just converted to this padded type, just get the
4289 inner expression. */
4290 if (TREE_CODE (expr) == CONSTRUCTOR)
4291 unpadded = CONSTRUCTOR_ELT (expr, 0)->value;
4293 /* Otherwise, build an explicit component reference. */
4294 else
4295 unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);
4297 return convert (type, unpadded);
4300 /* If the input is a biased type, convert first to the base type and add
4301 the bias. Note that the bias must go through a full conversion to the
4302 base type, lest it is itself a biased value; this happens for subtypes
4303 of biased types. */
4304 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4305 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4306 fold_convert (TREE_TYPE (etype), expr),
4307 convert (TREE_TYPE (etype),
4308 TYPE_MIN_VALUE (etype))));
4310 /* If the input is a justified modular type, we need to extract the actual
4311 object before converting it to any other type with the exceptions of an
4312 unconstrained array or of a mere type variant. It is useful to avoid the
4313 extraction and conversion in the type variant case because it could end
4314 up replacing a VAR_DECL expr by a constructor and we might be about the
4315 take the address of the result. */
4316 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4317 && code != UNCONSTRAINED_ARRAY_TYPE
4318 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4319 return
4320 convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));
4322 /* If converting to a type that contains a template, convert to the data
4323 type and then build the template. */
4324 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4326 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4327 vec<constructor_elt, va_gc> *v;
4328 vec_alloc (v, 2);
4330 /* If the source already has a template, get a reference to the
4331 associated array only, as we are going to rebuild a template
4332 for the target type anyway. */
4333 expr = maybe_unconstrained_array (expr);
4335 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4336 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4337 obj_type, NULL_TREE));
4338 if (expr)
4339 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4340 convert (obj_type, expr));
4341 return gnat_build_constructor (type, v);
4344 /* There are some cases of expressions that we process specially. */
4345 switch (TREE_CODE (expr))
4347 case ERROR_MARK:
4348 return expr;
4350 case NULL_EXPR:
4351 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4352 conversion in gnat_expand_expr. NULL_EXPR does not represent
4353 and actual value, so no conversion is needed. */
4354 expr = copy_node (expr);
4355 TREE_TYPE (expr) = type;
4356 return expr;
4358 case STRING_CST:
4359 /* If we are converting a STRING_CST to another constrained array type,
4360 just make a new one in the proper type. */
4361 if (code == ecode && AGGREGATE_TYPE_P (etype)
4362 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4363 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4365 expr = copy_node (expr);
4366 TREE_TYPE (expr) = type;
4367 return expr;
4369 break;
4371 case VECTOR_CST:
4372 /* If we are converting a VECTOR_CST to a mere type variant, just make
4373 a new one in the proper type. */
4374 if (code == ecode && gnat_types_compatible_p (type, etype))
4376 expr = copy_node (expr);
4377 TREE_TYPE (expr) = type;
4378 return expr;
4380 break;
4382 case CONSTRUCTOR:
4383 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4384 another padding type around the same type, just make a new one in
4385 the proper type. */
4386 if (code == ecode
4387 && (gnat_types_compatible_p (type, etype)
4388 || (code == RECORD_TYPE
4389 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4390 && TREE_TYPE (TYPE_FIELDS (type))
4391 == TREE_TYPE (TYPE_FIELDS (etype)))))
4393 expr = copy_node (expr);
4394 TREE_TYPE (expr) = type;
4395 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4396 return expr;
4399 /* Likewise for a conversion between original and packable version, or
4400 conversion between types of the same size and with the same list of
4401 fields, but we have to work harder to preserve type consistency. */
4402 if (code == ecode
4403 && code == RECORD_TYPE
4404 && (TYPE_NAME (type) == TYPE_NAME (etype)
4405 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4408 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4409 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4410 vec<constructor_elt, va_gc> *v;
4411 vec_alloc (v, len);
4412 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4413 unsigned HOST_WIDE_INT idx;
4414 tree index, value;
4416 /* Whether we need to clear TREE_CONSTANT et al. on the output
4417 constructor when we convert in place. */
4418 bool clear_constant = false;
4420 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4422 /* Skip the missing fields in the CONSTRUCTOR. */
4423 while (efield && field && !SAME_FIELD_P (efield, index))
4425 efield = DECL_CHAIN (efield);
4426 field = DECL_CHAIN (field);
4428 /* The field must be the same. */
4429 if (!(efield && field && SAME_FIELD_P (efield, field)))
4430 break;
4431 constructor_elt elt
4432 = {field, convert (TREE_TYPE (field), value)};
4433 v->quick_push (elt);
4435 /* If packing has made this field a bitfield and the input
4436 value couldn't be emitted statically any more, we need to
4437 clear TREE_CONSTANT on our output. */
4438 if (!clear_constant
4439 && TREE_CONSTANT (expr)
4440 && !CONSTRUCTOR_BITFIELD_P (efield)
4441 && CONSTRUCTOR_BITFIELD_P (field)
4442 && !initializer_constant_valid_for_bitfield_p (value))
4443 clear_constant = true;
4445 efield = DECL_CHAIN (efield);
4446 field = DECL_CHAIN (field);
4449 /* If we have been able to match and convert all the input fields
4450 to their output type, convert in place now. We'll fallback to a
4451 view conversion downstream otherwise. */
4452 if (idx == len)
4454 expr = copy_node (expr);
4455 TREE_TYPE (expr) = type;
4456 CONSTRUCTOR_ELTS (expr) = v;
4457 if (clear_constant)
4458 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4459 return expr;
4463 /* Likewise for a conversion between array type and vector type with a
4464 compatible representative array. */
4465 else if (code == VECTOR_TYPE
4466 && ecode == ARRAY_TYPE
4467 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4468 etype))
4470 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4471 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4472 vec<constructor_elt, va_gc> *v;
4473 unsigned HOST_WIDE_INT ix;
4474 tree value;
4476 /* Build a VECTOR_CST from a *constant* array constructor. */
4477 if (TREE_CONSTANT (expr))
4479 bool constant_p = true;
4481 /* Iterate through elements and check if all constructor
4482 elements are *_CSTs. */
4483 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4484 if (!CONSTANT_CLASS_P (value))
4486 constant_p = false;
4487 break;
4490 if (constant_p)
4491 return build_vector_from_ctor (type,
4492 CONSTRUCTOR_ELTS (expr));
4495 /* Otherwise, build a regular vector constructor. */
4496 vec_alloc (v, len);
4497 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4499 constructor_elt elt = {NULL_TREE, value};
4500 v->quick_push (elt);
4502 expr = copy_node (expr);
4503 TREE_TYPE (expr) = type;
4504 CONSTRUCTOR_ELTS (expr) = v;
4505 return expr;
4507 break;
4509 case UNCONSTRAINED_ARRAY_REF:
4510 /* First retrieve the underlying array. */
4511 expr = maybe_unconstrained_array (expr);
4512 etype = TREE_TYPE (expr);
4513 ecode = TREE_CODE (etype);
4514 break;
4516 case VIEW_CONVERT_EXPR:
4518 /* GCC 4.x is very sensitive to type consistency overall, and view
4519 conversions thus are very frequent. Even though just "convert"ing
4520 the inner operand to the output type is fine in most cases, it
4521 might expose unexpected input/output type mismatches in special
4522 circumstances so we avoid such recursive calls when we can. */
4523 tree op0 = TREE_OPERAND (expr, 0);
4525 /* If we are converting back to the original type, we can just
4526 lift the input conversion. This is a common occurrence with
4527 switches back-and-forth amongst type variants. */
4528 if (type == TREE_TYPE (op0))
4529 return op0;
4531 /* Otherwise, if we're converting between two aggregate or vector
4532 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4533 target type in place or to just convert the inner expression. */
4534 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4535 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4537 /* If we are converting between mere variants, we can just
4538 substitute the VIEW_CONVERT_EXPR in place. */
4539 if (gnat_types_compatible_p (type, etype))
4540 return build1 (VIEW_CONVERT_EXPR, type, op0);
4542 /* Otherwise, we may just bypass the input view conversion unless
4543 one of the types is a fat pointer, which is handled by
4544 specialized code below which relies on exact type matching. */
4545 else if (!TYPE_IS_FAT_POINTER_P (type)
4546 && !TYPE_IS_FAT_POINTER_P (etype))
4547 return convert (type, op0);
4550 break;
4553 default:
4554 break;
4557 /* Check for converting to a pointer to an unconstrained array. */
4558 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4559 return convert_to_fat_pointer (type, expr);
4561 /* If we are converting between two aggregate or vector types that are mere
4562 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4563 to a vector type from its representative array type. */
4564 else if ((code == ecode
4565 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4566 && gnat_types_compatible_p (type, etype))
4567 || (code == VECTOR_TYPE
4568 && ecode == ARRAY_TYPE
4569 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4570 etype)))
4571 return build1 (VIEW_CONVERT_EXPR, type, expr);
4573 /* If we are converting between tagged types, try to upcast properly. */
4574 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4575 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4577 tree child_etype = etype;
4578 do {
4579 tree field = TYPE_FIELDS (child_etype);
4580 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4581 return build_component_ref (expr, field, false);
4582 child_etype = TREE_TYPE (field);
4583 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4586 /* If we are converting from a smaller form of record type back to it, just
4587 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4588 size on both sides. */
4589 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4590 && smaller_form_type_p (etype, type))
4592 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4593 false, false, false, true),
4594 expr);
4595 return build1 (VIEW_CONVERT_EXPR, type, expr);
4598 /* In all other cases of related types, make a NOP_EXPR. */
4599 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4600 return fold_convert (type, expr);
4602 switch (code)
4604 case VOID_TYPE:
4605 return fold_build1 (CONVERT_EXPR, type, expr);
4607 case INTEGER_TYPE:
4608 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4609 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4610 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4611 return unchecked_convert (type, expr, false);
4613 /* If the output is a biased type, convert first to the base type and
4614 subtract the bias. Note that the bias itself must go through a full
4615 conversion to the base type, lest it is a biased value; this happens
4616 for subtypes of biased types. */
4617 if (TYPE_BIASED_REPRESENTATION_P (type))
4618 return fold_convert (type,
4619 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4620 convert (TREE_TYPE (type), expr),
4621 convert (TREE_TYPE (type),
4622 TYPE_MIN_VALUE (type))));
4624 /* ... fall through ... */
4626 case ENUMERAL_TYPE:
4627 case BOOLEAN_TYPE:
4628 /* If we are converting an additive expression to an integer type
4629 with lower precision, be wary of the optimization that can be
4630 applied by convert_to_integer. There are 2 problematic cases:
4631 - if the first operand was originally of a biased type,
4632 because we could be recursively called to convert it
4633 to an intermediate type and thus rematerialize the
4634 additive operator endlessly,
4635 - if the expression contains a placeholder, because an
4636 intermediate conversion that changes the sign could
4637 be inserted and thus introduce an artificial overflow
4638 at compile time when the placeholder is substituted. */
4639 if (code == INTEGER_TYPE
4640 && ecode == INTEGER_TYPE
4641 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4642 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4644 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4646 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4647 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4648 || CONTAINS_PLACEHOLDER_P (expr))
4649 return build1 (NOP_EXPR, type, expr);
4652 return fold (convert_to_integer (type, expr));
4654 case POINTER_TYPE:
4655 case REFERENCE_TYPE:
4656 /* If converting between two thin pointers, adjust if needed to account
4657 for differing offsets from the base pointer, depending on whether
4658 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4659 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4661 tree etype_pos
4662 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype))
4663 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4664 : size_zero_node;
4665 tree type_pos
4666 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))
4667 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4668 : size_zero_node;
4669 tree byte_diff = size_diffop (type_pos, etype_pos);
4671 expr = build1 (NOP_EXPR, type, expr);
4672 if (integer_zerop (byte_diff))
4673 return expr;
4675 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4676 fold_convert (sizetype, byte_diff));
4679 /* If converting fat pointer to normal or thin pointer, get the pointer
4680 to the array and then convert it. */
4681 if (TYPE_IS_FAT_POINTER_P (etype))
4682 expr = build_component_ref (expr, TYPE_FIELDS (etype), false);
4684 return fold (convert_to_pointer (type, expr));
4686 case REAL_TYPE:
4687 return fold (convert_to_real (type, expr));
4689 case RECORD_TYPE:
4690 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4692 vec<constructor_elt, va_gc> *v;
4693 vec_alloc (v, 1);
4695 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4696 convert (TREE_TYPE (TYPE_FIELDS (type)),
4697 expr));
4698 return gnat_build_constructor (type, v);
4701 /* ... fall through ... */
4703 case ARRAY_TYPE:
4704 /* In these cases, assume the front-end has validated the conversion.
4705 If the conversion is valid, it will be a bit-wise conversion, so
4706 it can be viewed as an unchecked conversion. */
4707 return unchecked_convert (type, expr, false);
4709 case UNION_TYPE:
4710 /* This is a either a conversion between a tagged type and some
4711 subtype, which we have to mark as a UNION_TYPE because of
4712 overlapping fields or a conversion of an Unchecked_Union. */
4713 return unchecked_convert (type, expr, false);
4715 case UNCONSTRAINED_ARRAY_TYPE:
4716 /* If the input is a VECTOR_TYPE, convert to the representative
4717 array type first. */
4718 if (ecode == VECTOR_TYPE)
4720 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4721 etype = TREE_TYPE (expr);
4722 ecode = TREE_CODE (etype);
4725 /* If EXPR is a constrained array, take its address, convert it to a
4726 fat pointer, and then dereference it. Likewise if EXPR is a
4727 record containing both a template and a constrained array.
4728 Note that a record representing a justified modular type
4729 always represents a packed constrained array. */
4730 if (ecode == ARRAY_TYPE
4731 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4732 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4733 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4734 return
4735 build_unary_op
4736 (INDIRECT_REF, NULL_TREE,
4737 convert_to_fat_pointer (TREE_TYPE (type),
4738 build_unary_op (ADDR_EXPR,
4739 NULL_TREE, expr)));
4741 /* Do something very similar for converting one unconstrained
4742 array to another. */
4743 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4744 return
4745 build_unary_op (INDIRECT_REF, NULL_TREE,
4746 convert (TREE_TYPE (type),
4747 build_unary_op (ADDR_EXPR,
4748 NULL_TREE, expr)));
4749 else
4750 gcc_unreachable ();
4752 case COMPLEX_TYPE:
4753 return fold (convert_to_complex (type, expr));
4755 default:
4756 gcc_unreachable ();
4760 /* Create an expression whose value is that of EXPR converted to the common
4761 index type, which is sizetype. EXPR is supposed to be in the base type
4762 of the GNAT index type. Calling it is equivalent to doing
4764 convert (sizetype, expr)
4766 but we try to distribute the type conversion with the knowledge that EXPR
4767 cannot overflow in its type. This is a best-effort approach and we fall
4768 back to the above expression as soon as difficulties are encountered.
4770 This is necessary to overcome issues that arise when the GNAT base index
4771 type and the GCC common index type (sizetype) don't have the same size,
4772 which is quite frequent on 64-bit architectures. In this case, and if
4773 the GNAT base index type is signed but the iteration type of the loop has
4774 been forced to unsigned, the loop scalar evolution engine cannot compute
4775 a simple evolution for the general induction variables associated with the
4776 array indices, because it will preserve the wrap-around semantics in the
4777 unsigned type of their "inner" part. As a result, many loop optimizations
4778 are blocked.
4780 The solution is to use a special (basic) induction variable that is at
4781 least as large as sizetype, and to express the aforementioned general
4782 induction variables in terms of this induction variable, eliminating
4783 the problematic intermediate truncation to the GNAT base index type.
4784 This is possible as long as the original expression doesn't overflow
4785 and if the middle-end hasn't introduced artificial overflows in the
4786 course of the various simplification it can make to the expression. */
4788 tree
4789 convert_to_index_type (tree expr)
4791 enum tree_code code = TREE_CODE (expr);
4792 tree type = TREE_TYPE (expr);
4794 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4795 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4796 if (TYPE_UNSIGNED (type) || !optimize)
4797 return convert (sizetype, expr);
4799 switch (code)
4801 case VAR_DECL:
4802 /* The main effect of the function: replace a loop parameter with its
4803 associated special induction variable. */
4804 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
4805 expr = DECL_INDUCTION_VAR (expr);
4806 break;
4808 CASE_CONVERT:
4810 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
4811 /* Bail out as soon as we suspect some sort of type frobbing. */
4812 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
4813 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
4814 break;
4817 /* ... fall through ... */
4819 case NON_LVALUE_EXPR:
4820 return fold_build1 (code, sizetype,
4821 convert_to_index_type (TREE_OPERAND (expr, 0)));
4823 case PLUS_EXPR:
4824 case MINUS_EXPR:
4825 case MULT_EXPR:
4826 return fold_build2 (code, sizetype,
4827 convert_to_index_type (TREE_OPERAND (expr, 0)),
4828 convert_to_index_type (TREE_OPERAND (expr, 1)));
4830 case COMPOUND_EXPR:
4831 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
4832 convert_to_index_type (TREE_OPERAND (expr, 1)));
4834 case COND_EXPR:
4835 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
4836 convert_to_index_type (TREE_OPERAND (expr, 1)),
4837 convert_to_index_type (TREE_OPERAND (expr, 2)));
4839 default:
4840 break;
4843 return convert (sizetype, expr);
4846 /* Remove all conversions that are done in EXP. This includes converting
4847 from a padded type or to a justified modular type. If TRUE_ADDRESS
4848 is true, always return the address of the containing object even if
4849 the address is not bit-aligned. */
4851 tree
4852 remove_conversions (tree exp, bool true_address)
4854 switch (TREE_CODE (exp))
4856 case CONSTRUCTOR:
4857 if (true_address
4858 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4859 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4860 return
4861 remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
4862 break;
4864 case COMPONENT_REF:
4865 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4866 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4867 break;
4869 CASE_CONVERT:
4870 case VIEW_CONVERT_EXPR:
4871 case NON_LVALUE_EXPR:
4872 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4874 default:
4875 break;
4878 return exp;
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If it has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.  */

tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE (type))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  /* Save the flags of the reference so that they can be restored
	     on the INDIRECT_REF rebuilt below.  */
	  const bool read_only = TREE_READONLY (exp);
	  const bool no_trap = TREE_THIS_NOTRAP (exp);

	  /* Strip the reference and work on its operand.  */
	  exp = TREE_OPERAND (exp, 0);
	  type = TREE_TYPE (exp);

	  if (TREE_CODE (exp) == COND_EXPR)
	    {
	      /* Distribute the operation into both arms of the COND_EXPR:
		 fetch the first field of each arm and dereference it.  */
	      tree op1
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 1),
						       TYPE_FIELDS (type),
						       false));
	      tree op2
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 2),
						       TYPE_FIELDS (type),
						       false));

	      exp = build3 (COND_EXPR,
			    TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
			    TREE_OPERAND (exp, 0), op1, op2);
	    }
	  else
	    {
	      /* Fetch the first field and dereference it, then restore the
		 flags saved above on the result.  */
	      exp = build_unary_op (INDIRECT_REF, NULL_TREE,
				    build_component_ref (exp,
							 TYPE_FIELDS (type),
							 false));
	      TREE_READONLY (exp) = read_only;
	      TREE_THIS_NOTRAP (exp) = no_trap;
	    }
	}

      /* For a NULL_EXPR, retype it to the underlying array type.  */
      else if (code == NULL_EXPR)
	exp = build1 (NULL_EXPR,
		      TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
		      TREE_OPERAND (exp, 0));
      break;

    case RECORD_TYPE:
      /* If this is a padded type and it contains a template, convert to the
	 unpadded type first.  */
      if (TYPE_PADDING_P (type)
	  && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
	  && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
	{
	  exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	  code = TREE_CODE (exp);
	  type = TREE_TYPE (exp);
	}

      if (TYPE_CONTAINS_TEMPLATE_P (type))
	{
	  /* If the array initializer is a box, return NULL_TREE.  */
	  if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
	    return NULL_TREE;

	  /* The array is the field following the template.  */
	  exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
				     false);
	  type = TREE_TYPE (exp);

	  /* If the array type is padded, convert to the unpadded type.  */
	  if (TYPE_IS_PADDING_P (type))
	    exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	}
      break;

    default:
      break;
    }

  return exp;
}
4971 /* Return true if EXPR is an expression that can be folded as an operand
4972 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4974 static bool
4975 can_fold_for_view_convert_p (tree expr)
4977 tree t1, t2;
4979 /* The folder will fold NOP_EXPRs between integral types with the same
4980 precision (in the middle-end's sense). We cannot allow it if the
4981 types don't have the same precision in the Ada sense as well. */
4982 if (TREE_CODE (expr) != NOP_EXPR)
4983 return true;
4985 t1 = TREE_TYPE (expr);
4986 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4988 /* Defer to the folder for non-integral conversions. */
4989 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4990 return true;
4992 /* Only fold conversions that preserve both precisions. */
4993 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4994 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4995 return true;
4997 return false;
/* Return an expression that does an unchecked conversion of EXPR to TYPE.
   If NOTRUNC_P is true, truncation operations should be suppressed.

   Special care is required with (source or target) integral types whose
   precision is not equal to their size, to make sure we fetch or assign
   the value bits whose location might depend on the endianness, e.g.

     Rmsize : constant := 8;
     subtype Int is Integer range 0 .. 2 ** Rmsize - 1;

     type Bit_Array is array (1 .. Rmsize) of Boolean;
     pragma Pack (Bit_Array);

     function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);

     Value : Int := 2#1000_0001#;
     Vbits : Bit_Array := To_Bit_Array (Value);

   we expect the 8 bits at Vbits'Address to always contain Value, while
   their original location depends on the endianness, at Value'Address
   on a little-endian architecture but not on a big-endian one.

   One pitfall is that we cannot use TYPE_UNSIGNED directly to decide how
   the bits between the precision and the size are filled, because of the
   trick used in the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.
   So we use the special predicate type_unsigned_for_rm above.  */

tree
unchecked_convert (tree type, tree expr, bool notrunc_p)
{
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);
  tree tem;
  int c;

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if (((INTEGRAL_TYPE_P (type)
	|| (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
	|| (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
       && (INTEGRAL_TYPE_P (etype)
	   || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
	   || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
      || code == UNCONSTRAINED_ARRAY_TYPE)
    {
      /* If the source is a biased integer type, first reinterpret the bits
	 in a copy of the type with the biased flag cleared.  */
      if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
	{
	  tree ntype = copy_type (etype);
	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (NOP_EXPR, ntype, expr);
	}

      /* Symmetrically, if the target is a biased integer type, convert to
	 a copy of it with the biased flag cleared, then reinterpret the
	 bits in the biased type itself.  */
      if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  tree rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	  expr = convert (rtype, expr);
	  expr = build1 (NOP_EXPR, type, expr);
	}
      else
	expr = convert (type, expr);
    }

  /* If we are converting to an integral type whose precision is not equal
     to its size, first unchecked convert to a record type that contains a
     field of the given precision.  Then extract the result from the field.

     There is a subtlety if the source type is an aggregate type with reverse
     storage order because its representation is not contiguous in the native
     storage order, i.e. a direct unchecked conversion to an integral type
     with N bits of precision cannot read the first N bits of the aggregate
     type.  To overcome it, we do an unchecked conversion to an integral type
     with reverse storage order and return the resulting value.  This also
     ensures that the result of the unchecked conversion doesn't depend on
     the endianness of the target machine, but only on the storage order of
     the aggregate type.

     Finally, for the sake of consistency, we do the unchecked conversion
     to an integral type with reverse storage order as soon as the source
     type is an aggregate type with reverse storage order, even if there
     are no considerations of precision or size involved.  */
  else if (INTEGRAL_TYPE_P (type)
	   && TYPE_RM_SIZE (type)
	   && (tree_int_cst_compare (TYPE_RM_SIZE (type),
				     TYPE_SIZE (type)) < 0
	       || (AGGREGATE_TYPE_P (etype)
		   && TYPE_REVERSE_STORAGE_ORDER (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
      tree field_type, field;

      /* Propagate the storage order of the source aggregate type so the
	 field is fetched with the same storage order.  */
      if (AGGREGATE_TYPE_P (etype))
	TYPE_REVERSE_STORAGE_ORDER (rec_type)
	  = TYPE_REVERSE_STORAGE_ORDER (etype);

      /* Build a field type with exactly the RM precision of TYPE, using
	 type_unsigned_for_rm to pick the right signedness.  */
      if (type_unsigned_for_rm (type))
	field_type = make_unsigned_type (prec);
      else
	field_type = make_signed_type (prec);
      SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
				 NULL_TREE, bitsize_zero_node, 1, 0);

      finish_record_type (rec_type, field, 1, false);

      /* Convert to the record, extract the field, and retype the result.  */
      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, field, false);
      expr = fold_build1 (NOP_EXPR, type, expr);
    }

  /* Similarly if we are converting from an integral type whose precision is
     not equal to its size, first copy into a field of the given precision
     and unchecked convert the record type.

     The same considerations as above apply if the target type is an aggregate
     type with reverse storage order and we also proceed similarly.  */
  else if (INTEGRAL_TYPE_P (etype)
	   && TYPE_RM_SIZE (etype)
	   && (tree_int_cst_compare (TYPE_RM_SIZE (etype),
				     TYPE_SIZE (etype)) < 0
	       || (AGGREGATE_TYPE_P (type)
		   && TYPE_REVERSE_STORAGE_ORDER (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, 1);
      tree field_type, field;

      /* Propagate the storage order of the target aggregate type.  */
      if (AGGREGATE_TYPE_P (type))
	TYPE_REVERSE_STORAGE_ORDER (rec_type)
	  = TYPE_REVERSE_STORAGE_ORDER (type);

      /* Build a field type with exactly the RM precision of ETYPE.  */
      if (type_unsigned_for_rm (etype))
	field_type = make_unsigned_type (prec);
      else
	field_type = make_signed_type (prec);
      SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
				 NULL_TREE, bitsize_zero_node, 1, 0);

      finish_record_type (rec_type, field, 1, false);

      /* Wrap the value in a one-field constructor, then convert the
	 record to the target type.  */
      expr = fold_build1 (NOP_EXPR, field_type, expr);
      CONSTRUCTOR_APPEND_ELT (v, field, expr);
      expr = gnat_build_constructor (rec_type, v);
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting from a scalar type to a type with a different size,
     we need to pad to have the same size on both sides.

     ??? We cannot do it unconditionally because unchecked conversions are
     used liberally by the front-end to implement polymorphism, e.g. in:

       S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
       return p___size__4 (p__object!(S191s.all));

     so we skip all expressions that are references.  */
  else if (!REFERENCE_CLASS_P (expr)
	   && !AGGREGATE_TYPE_P (etype)
	   && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	   && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
    {
      if (c < 0)
	{
	  /* Source smaller than target: pad the source up first.  */
	  expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
					  false, false, false, true),
			  expr);
	  expr = unchecked_convert (type, expr, notrunc_p);
	}
      else
	{
	  /* Source larger than target: convert to a padded version of the
	     target and extract the field.  */
	  tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
					  false, false, false, true);
	  expr = unchecked_convert (rec_type, expr, notrunc_p);
	  expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
	}
    }

  /* We have a special case when we are converting between two unconstrained
     array types.  In that case, take the address, convert the fat pointer
     types, and dereference.  */
  else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));

  /* Another special case is when we are converting to a vector type from its
     representative array type; this a regular conversion.  */
  else if (code == VECTOR_TYPE
	   && ecode == ARRAY_TYPE
	   && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
				       etype))
    expr = convert (type, expr);

  /* And, if the array type is not the representative, we try to build an
     intermediate vector type of which the array type is the representative
     and to do the unchecked conversion between the vector types, in order
     to enable further simplifications in the middle-end.  */
  else if (code == VECTOR_TYPE
	   && ecode == ARRAY_TYPE
	   && (tem = build_vector_type_for_array (etype, NULL_TREE)))
    {
      expr = convert (tem, expr);
      return unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
     the alignment of the CONSTRUCTOR to speed up the copy operation.  */
  else if (TREE_CODE (expr) == CONSTRUCTOR
	   && code == RECORD_TYPE
	   && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
    {
      expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
				      Empty, false, false, false, true),
		      expr);
      return unchecked_convert (type, expr, notrunc_p);
    }

  /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression.  */
  else
    {
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      if (can_fold_for_view_convert_p (expr))
	expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
      else
	expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is a non-biased integral type whose precision is not equal
     to its size, sign- or zero-extend the result.  But we need not do this
     if the input is also an integral type and both are unsigned or both are
     signed and have the same precision.  */
  if (!notrunc_p
      && INTEGRAL_TYPE_P (type)
      && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
      && TYPE_RM_SIZE (type)
      && tree_int_cst_compare (TYPE_RM_SIZE (type), TYPE_SIZE (type)) < 0
      && !(INTEGRAL_TYPE_P (etype)
	   && type_unsigned_for_rm (type) == type_unsigned_for_rm (etype)
	   && (type_unsigned_for_rm (type)
	       || tree_int_cst_compare (TYPE_RM_SIZE (type),
					TYPE_RM_SIZE (etype)
					? TYPE_RM_SIZE (etype)
					: TYPE_SIZE (etype)) == 0)))
    {
      /* A zero RM size means the value is necessarily zero.  */
      if (integer_zerop (TYPE_RM_SIZE (type)))
	expr = build_int_cst (type, 0);
      else
	{
	  /* Extend via a left shift followed by an arithmetic or logical
	     right shift in a base type of full size, per the signedness
	     computed by type_unsigned_for_rm.  */
	  tree base_type
	    = gnat_type_for_size (TREE_INT_CST_LOW (TYPE_SIZE (type)),
				  type_unsigned_for_rm (type));
	  tree shift_expr
	    = convert (base_type,
		       size_binop (MINUS_EXPR,
				   TYPE_SIZE (type), TYPE_RM_SIZE (type)));
	  expr
	    = convert (type,
		       build_binary_op (RSHIFT_EXPR, base_type,
					build_binary_op (LSHIFT_EXPR, base_type,
							 convert (base_type,
								  expr),
							 shift_expr),
					shift_expr));
	}
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
			   OEP_ONLY_CONST))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
5300 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5301 the latter being a record type as predicated by Is_Record_Type. */
5303 enum tree_code
5304 tree_code_for_record_type (Entity_Id gnat_type)
5306 Node_Id component_list, component;
5308 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5309 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5310 if (!Is_Unchecked_Union (gnat_type))
5311 return RECORD_TYPE;
5313 gnat_type = Implementation_Base_Type (gnat_type);
5314 component_list
5315 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5317 for (component = First_Non_Pragma (Component_Items (component_list));
5318 Present (component);
5319 component = Next_Non_Pragma (component))
5320 if (Ekind (Defining_Entity (component)) == E_Component)
5321 return RECORD_TYPE;
5323 return UNION_TYPE;
5326 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5327 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5328 according to the presence of an alignment clause on the type or, if it
5329 is an array, on the component type. */
5331 bool
5332 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5334 gnat_type = Underlying_Type (gnat_type);
5336 *align_clause = Present (Alignment_Clause (gnat_type));
5338 if (Is_Array_Type (gnat_type))
5340 gnat_type = Underlying_Type (Component_Type (gnat_type));
5341 if (Present (Alignment_Clause (gnat_type)))
5342 *align_clause = true;
5345 if (!Is_Floating_Point_Type (gnat_type))
5346 return false;
5348 if (UI_To_Int (Esize (gnat_type)) != 64)
5349 return false;
5351 return true;
5354 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5355 size is greater or equal to 64 bits, or an array of such a type. Set
5356 ALIGN_CLAUSE according to the presence of an alignment clause on the
5357 type or, if it is an array, on the component type. */
5359 bool
5360 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5362 gnat_type = Underlying_Type (gnat_type);
5364 *align_clause = Present (Alignment_Clause (gnat_type));
5366 if (Is_Array_Type (gnat_type))
5368 gnat_type = Underlying_Type (Component_Type (gnat_type));
5369 if (Present (Alignment_Clause (gnat_type)))
5370 *align_clause = true;
5373 if (!Is_Scalar_Type (gnat_type))
5374 return false;
5376 if (UI_To_Int (Esize (gnat_type)) < 64)
5377 return false;
5379 return true;
5382 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5383 component of an aggregate type. */
5385 bool
5386 type_for_nonaliased_component_p (tree gnu_type)
5388 /* If the type is passed by reference, we may have pointers to the
5389 component so it cannot be made non-aliased. */
5390 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5391 return false;
5393 /* We used to say that any component of aggregate type is aliased
5394 because the front-end may take 'Reference of it. The front-end
5395 has been enhanced in the meantime so as to use a renaming instead
5396 in most cases, but the back-end can probably take the address of
5397 such a component too so we go for the conservative stance.
5399 For instance, we might need the address of any array type, even
5400 if normally passed by copy, to construct a fat pointer if the
5401 component is used as an actual for an unconstrained formal.
5403 Likewise for record types: even if a specific record subtype is
5404 passed by copy, the parent type might be passed by ref (e.g. if
5405 it's of variable size) and we might take the address of a child
5406 component to pass to a parent formal. We have no way to check
5407 for such conditions here. */
5408 if (AGGREGATE_TYPE_P (gnu_type))
5409 return false;
5411 return true;
5414 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5416 bool
5417 smaller_form_type_p (tree type, tree orig_type)
5419 tree size, osize;
5421 /* We're not interested in variants here. */
5422 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5423 return false;
5425 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5426 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5427 return false;
5429 size = TYPE_SIZE (type);
5430 osize = TYPE_SIZE (orig_type);
5432 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5433 return false;
5435 return tree_int_cst_lt (size, osize) != 0;
5438 /* Return whether EXPR, which is the renamed object in an object renaming
5439 declaration, can be materialized as a reference (with a REFERENCE_TYPE).
5440 This should be synchronized with Exp_Dbug.Debug_Renaming_Declaration. */
5442 bool
5443 can_materialize_object_renaming_p (Node_Id expr)
5445 while (true)
5447 expr = Original_Node (expr);
5449 switch Nkind (expr)
5451 case N_Identifier:
5452 case N_Expanded_Name:
5453 if (!Present (Renamed_Object (Entity (expr))))
5454 return true;
5455 expr = Renamed_Object (Entity (expr));
5456 break;
5458 case N_Selected_Component:
5460 if (Is_Packed (Underlying_Type (Etype (Prefix (expr)))))
5461 return false;
5463 const Uint bitpos
5464 = Normalized_First_Bit (Entity (Selector_Name (expr)));
5465 if (!UI_Is_In_Int_Range (bitpos)
5466 || (bitpos != UI_No_Uint && bitpos != UI_From_Int (0)))
5467 return false;
5469 expr = Prefix (expr);
5470 break;
5473 case N_Indexed_Component:
5474 case N_Slice:
5476 const Entity_Id t = Underlying_Type (Etype (Prefix (expr)));
5478 if (Is_Array_Type (t) && Present (Packed_Array_Impl_Type (t)))
5479 return false;
5481 expr = Prefix (expr);
5482 break;
5485 case N_Explicit_Dereference:
5486 expr = Prefix (expr);
5487 break;
5489 default:
5490 return true;
5495 /* Perform final processing on global declarations. */
5497 static GTY (()) tree dummy_global;
5499 void
5500 gnat_write_global_declarations (void)
5502 unsigned int i;
5503 tree iter;
5505 /* If we have declared types as used at the global level, insert them in
5506 the global hash table. We use a dummy variable for this purpose, but
5507 we need to build it unconditionally to avoid -fcompare-debug issues. */
5508 if (first_global_object_name)
5510 struct varpool_node *node;
5511 char *label;
5513 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
5514 dummy_global
5515 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5516 void_type_node);
5517 DECL_HARD_REGISTER (dummy_global) = 1;
5518 TREE_STATIC (dummy_global) = 1;
5519 node = varpool_node::get_create (dummy_global);
5520 node->definition = 1;
5521 node->force_output = 1;
5523 if (types_used_by_cur_var_decl)
5524 while (!types_used_by_cur_var_decl->is_empty ())
5526 tree t = types_used_by_cur_var_decl->pop ();
5527 types_used_by_var_decl_insert (t, dummy_global);
5531 /* Output debug information for all global type declarations first. This
5532 ensures that global types whose compilation hasn't been finalized yet,
5533 for example pointers to Taft amendment types, have their compilation
5534 finalized in the right context. */
5535 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5536 if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
5537 debug_hooks->type_decl (iter, false);
5539 /* Output imported functions. */
5540 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5541 if (TREE_CODE (iter) == FUNCTION_DECL
5542 && DECL_EXTERNAL (iter)
5543 && DECL_INITIAL (iter) == NULL
5544 && !DECL_IGNORED_P (iter)
5545 && DECL_FUNCTION_IS_DEF (iter))
5546 debug_hooks->early_global_decl (iter);
5548 /* Then output the global variables. We need to do that after the debug
5549 information for global types is emitted so that they are finalized. Skip
5550 external global variables, unless we need to emit debug info for them:
5551 this is useful for imported variables, for instance. */
5552 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5553 if (TREE_CODE (iter) == VAR_DECL
5554 && (!DECL_EXTERNAL (iter) || !DECL_IGNORED_P (iter)))
5555 rest_of_decl_compilation (iter, true, 0);
5557 /* Output the imported modules/declarations. In GNAT, these are only
5558 materializing subprogram. */
5559 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5560 if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
5561 debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
5562 DECL_CONTEXT (iter), false, false);
5565 /* ************************************************************************
5566 * * GCC builtins support *
5567 * ************************************************************************ */
5569 /* The general scheme is fairly simple:
5571 For each builtin function/type to be declared, gnat_install_builtins calls
5572 internal facilities which eventually get to gnat_pushdecl, which in turn
tracks the so-declared builtin function decls in the 'builtin_decls' global
data structure.  When an Intrinsic subprogram declaration is processed, we
search this global data structure to retrieve the associated BUILT_IN DECL
node. */
5578 /* Search the chain of currently available builtin declarations for a node
5579 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5580 found, if any, or NULL_TREE otherwise. */
5581 tree
5582 builtin_decl_for (tree name)
5584 unsigned i;
5585 tree decl;
5587 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5588 if (DECL_NAME (decl) == name)
5589 return decl;
5591 return NULL_TREE;
5594 /* The code below eventually exposes gnat_install_builtins, which declares
5595 the builtin types and functions we might need, either internally or as
5596 user accessible facilities.
5598 ??? This is a first implementation shot, still in rough shape. It is
5599 heavily inspired from the "C" family implementation, with chunks copied
5600 verbatim from there.
5602 Two obvious improvement candidates are:
5603 o Use a more efficient name/decl mapping scheme
5604 o Devise a middle-end infrastructure to avoid having to copy
5605 pieces between front-ends. */
5607 /* ----------------------------------------------------------------------- *
5608 * BUILTIN ELEMENTARY TYPES *
5609 * ----------------------------------------------------------------------- */
/* Standard data types to be used in builtin argument declarations. */

enum c_tree_index
    CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
    CTI_STRING_TYPE,
    CTI_CONST_STRING_TYPE,

    CTI_MAX

/* Storage for the types indexed by the enumerators above.  */
static tree c_global_trees[CTI_MAX];

/* Convenience accessors for the c_global_trees entries.  */
#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
#define string_type_node c_global_trees[CTI_STRING_TYPE]
#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
/* ??? In addition to some attribute handlers, we currently don't support a
   (small) number of builtin-types, which in turn inhibits support for a
   number of builtin functions. */
5631 #define wint_type_node void_type_node
5632 #define intmax_type_node void_type_node
5633 #define uintmax_type_node void_type_node
5635 /* Used to help initialize the builtin-types.def table. When a type of
5636 the correct size doesn't exist, use error_mark_node instead of NULL.
   The latter results in segfaults even when a decl using the type doesn't
5638 get invoked. */
5640 static tree
5641 builtin_type_for_size (int size, bool unsignedp)
5643 tree type = gnat_type_for_size (size, unsignedp);
5644 return type ? type : error_mark_node;
/* Build/push the elementary type decls that builtin functions/types
   will need. */

static void
install_builtin_elementary_types (void)
  /* Signed counterpart of size_t, used for format checking.  */
  signed_size_type_node = gnat_signed_type_for (size_type_node);
  pid_type_node = integer_type_node;

  /* "char *" and "const char *" for string arguments of builtins.  */
  string_type_node = build_pointer_type (char_type_node);
  const_string_type_node
    = build_pointer_type (build_qualified_type
			  (char_type_node, TYPE_QUAL_CONST));
/* ----------------------------------------------------------------------- *
 *                          BUILTIN FUNCTION TYPES                         *
 * ----------------------------------------------------------------------- */

/* Now, builtin function types per se. */

enum c_builtin_type
/* Each DEF_* macro below expands every entry of builtin-types.def into
   one enumerator, so the enum names all the builtin types.  */
#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6) NAME,
#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7) NAME,
#define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7, ARG8) NAME,
#define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7, ARG8, ARG9) NAME,
#define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			     ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
#define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			     ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
				NAME,
#define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				ARG6) NAME,
#define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				ARG6, ARG7) NAME,
#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
#include "builtin-types.def"
#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_8
#undef DEF_FUNCTION_TYPE_9
#undef DEF_FUNCTION_TYPE_10
#undef DEF_FUNCTION_TYPE_11
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_FUNCTION_TYPE_VAR_6
#undef DEF_FUNCTION_TYPE_VAR_7
#undef DEF_POINTER_TYPE
  /* Sentinel marking the end of the enumeration.  */
  BT_LAST
typedef enum c_builtin_type builtin_type;

/* A temporary array used in communication with def_fn_type. */
static GTY(()) tree builtin_types[(int) BT_LAST + 1];

/* A helper function for install_builtin_types. Build function type
   for DEF with return type RET and N arguments. If VAR is true, then the
   function should be variadic after those N arguments.

   Takes special care not to ICE if any of the types involved are
   error_mark_node, which indicates that said type is not in fact available
   (see builtin_type_for_size). In which case the function type as a whole
   should be error_mark_node. */

static void
def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
  tree t;
  tree *args = XALLOCAVEC (tree, n);
  va_list list;
  int i;

  /* Collect the N argument types; bail out to record error_mark_node as
     soon as one of them is unavailable.  */
  va_start (list, n);
  for (i = 0; i < n; ++i)
      builtin_type a = (builtin_type) va_arg (list, int);
      t = builtin_types[a];
      if (t == error_mark_node)
	goto egress;
      args[i] = t;

  /* Likewise for the return type.  */
  t = builtin_types[ret];
  if (t == error_mark_node)
    goto egress;
  if (var)
    t = build_varargs_function_type_array (t, n, args);
  else
    t = build_function_type_array (t, n, args);

 egress:
  /* On the error path, T still holds the offending error_mark_node.  */
  builtin_types[def] = t;
  va_end (list);
/* Build the builtin function types and install them in the builtin_types
   array for later use in builtin function decls. */

static void
install_builtin_function_types (void)
  tree va_list_ref_type_node;
  tree va_list_arg_type_node;

  /* Compute the argument and reference forms of va_list, which depend on
     whether the target represents va_list as an array type.  */
  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
      va_list_arg_type_node = va_list_ref_type_node =
	build_pointer_type (TREE_TYPE (va_list_type_node));
  else
      va_list_arg_type_node = va_list_type_node;
      va_list_ref_type_node = build_reference_type (va_list_type_node);

  /* Each DEF_* macro below turns an entry of builtin-types.def into a call
     that records the corresponding type in the builtin_types array.  */
#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
  builtin_types[ENUM] = VALUE;
#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
  def_fn_type (ENUM, RETURN, 0, 0);
#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
  def_fn_type (ENUM, RETURN, 0, 1, ARG1);
#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
  def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
  def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
  def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6) \
  def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7) \
  def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
#define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7, ARG8) \
  def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
	       ARG7, ARG8);
#define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7, ARG8, ARG9) \
  def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
	       ARG7, ARG8, ARG9);
#define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
			     ARG6, ARG7, ARG8, ARG9, ARG10) \
  def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
	       ARG7, ARG8, ARG9, ARG10);
#define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
			     ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
  def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
	       ARG7, ARG8, ARG9, ARG10, ARG11);
#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
  def_fn_type (ENUM, RETURN, 1, 0);
#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
  def_fn_type (ENUM, RETURN, 1, 1, ARG1);
#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
  def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
  def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
  def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				ARG6) \
  def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
#define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				ARG6, ARG7) \
  def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
#define DEF_POINTER_TYPE(ENUM, TYPE) \
  builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);

#include "builtin-types.def"

#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_8
#undef DEF_FUNCTION_TYPE_9
#undef DEF_FUNCTION_TYPE_10
#undef DEF_FUNCTION_TYPE_11
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_FUNCTION_TYPE_VAR_6
#undef DEF_FUNCTION_TYPE_VAR_7
#undef DEF_POINTER_TYPE
  /* Terminate the table.  */
  builtin_types[(int) BT_LAST] = NULL_TREE;
/* ----------------------------------------------------------------------- *
 *                            BUILTIN ATTRIBUTES                           *
 * ----------------------------------------------------------------------- */

enum built_in_attribute
/* One enumerator per entry in builtin-attrs.def.  */
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
  /* Sentinel marking the end of the enumeration.  */
  ATTR_LAST
/* Attribute trees indexed by the enumerators of built_in_attribute.  */
static GTY(()) tree built_in_attributes[(int) ATTR_LAST];

static void
install_builtin_attributes (void)
  /* Fill in the built_in_attributes array. */
#define DEF_ATTR_NULL_TREE(ENUM)				\
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_STRING(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING)				\
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN)	\
  built_in_attributes[(int) ENUM]			\
    = tree_cons (built_in_attributes[(int) PURPOSE],	\
		 built_in_attributes[(int) VALUE],	\
		 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
5923 /* Handle a "const" attribute; arguments as in
5924 struct attribute_spec.handler. */
5926 static tree
5927 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5928 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5929 bool *no_add_attrs)
5931 if (TREE_CODE (*node) == FUNCTION_DECL)
5932 TREE_READONLY (*node) = 1;
5933 else
5934 *no_add_attrs = true;
5936 return NULL_TREE;
5939 /* Handle a "nothrow" attribute; arguments as in
5940 struct attribute_spec.handler. */
5942 static tree
5943 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5944 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5945 bool *no_add_attrs)
5947 if (TREE_CODE (*node) == FUNCTION_DECL)
5948 TREE_NOTHROW (*node) = 1;
5949 else
5950 *no_add_attrs = true;
5952 return NULL_TREE;
/* Handle a "pure" attribute; arguments as in
   struct attribute_spec.handler. */

static tree
handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
		       int ARG_UNUSED (flags), bool *no_add_attrs)
  if (TREE_CODE (*node) == FUNCTION_DECL)
    DECL_PURE_P (*node) = 1;
  /* TODO: support types. */
  else
      /* Anything other than a function decl: warn and drop the attribute.  */
      warning (OPT_Wattributes, "%qs attribute ignored",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;

  return NULL_TREE;
/* Handle a "no vops" attribute; arguments as in
   struct attribute_spec.handler. */

static tree
handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
			 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			 bool *ARG_UNUSED (no_add_attrs))
  /* This internal attribute is only ever applied to function decls.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
  DECL_IS_NOVOPS (*node) = 1;
  return NULL_TREE;
5988 /* Helper for nonnull attribute handling; fetch the operand number
5989 from the attribute argument list. */
5991 static bool
5992 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5994 /* Verify the arg number is a constant. */
5995 if (!tree_fits_uhwi_p (arg_num_expr))
5996 return false;
5998 *valp = TREE_INT_CST_LOW (arg_num_expr);
5999 return true;
/* Handle the "nonnull" attribute. */
static tree
handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
			  tree args, int ARG_UNUSED (flags),
			  bool *no_add_attrs)
  tree type = *node;
  unsigned HOST_WIDE_INT attr_arg_num;

  /* If no arguments are specified, all pointer arguments should be
     non-null. Verify a full prototype is given so that the arguments
     will have the correct types when we actually check them later.
     Avoid diagnosing type-generic built-ins since those have no
     prototype. */
  if (!args)
      if (!prototype_p (type)
	  && (!TYPE_ATTRIBUTES (type)
	      || !lookup_attribute ("type generic", TYPE_ATTRIBUTES (type))))
	  error ("nonnull attribute without arguments on a non-prototype");
	  *no_add_attrs = true;
      return NULL_TREE;

  /* Argument list specified. Verify that each argument number references
     a pointer argument. */
  /* NOTE(review): attr_arg_num is never advanced in this loop, so the
     diagnostics below always report "argument 1"; the C-family version
     increments it per iteration -- confirm whether this is intended.  */
  for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
      unsigned HOST_WIDE_INT arg_num = 0, ck_num;

      if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
	  error ("nonnull argument has invalid operand number (argument %lu)",
		 (unsigned long) attr_arg_num);
	  *no_add_attrs = true;
	  return NULL_TREE;

      if (prototype_p (type))
	  function_args_iterator iter;
	  tree argument;

	  /* Walk the prototype's parameter types up to position ARG_NUM.  */
	  function_args_iter_init (&iter, type);
	  for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
	      argument = function_args_iter_cond (&iter);
	      if (!argument || ck_num == arg_num)
		break;

	  if (!argument
	      || TREE_CODE (argument) == VOID_TYPE)
	      error ("nonnull argument with out-of-range operand number "
		     "(argument %lu, operand %lu)",
		     (unsigned long) attr_arg_num, (unsigned long) arg_num);
	      *no_add_attrs = true;
	      return NULL_TREE;

	  if (TREE_CODE (argument) != POINTER_TYPE)
	      error ("nonnull argument references non-pointer operand "
		     "(argument %lu, operand %lu)",
		     (unsigned long) attr_arg_num, (unsigned long) arg_num);
	      *no_add_attrs = true;
	      return NULL_TREE;

  return NULL_TREE;
/* Handle a "sentinel" attribute. */

static tree
handle_sentinel_attribute (tree *node, tree name, tree args,
			   int ARG_UNUSED (flags), bool *no_add_attrs)
  /* A sentinel only makes sense on a prototyped variadic function.  */
  if (!prototype_p (*node))
      warning (OPT_Wattributes,
	       "%qs attribute requires prototypes with named arguments",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
  else
      if (!stdarg_p (*node))
	  warning (OPT_Wattributes,
		   "%qs attribute only applies to variadic functions",
		   IDENTIFIER_POINTER (name));
	  *no_add_attrs = true;

  /* The optional argument gives the position of the sentinel, counted
     from the end; it must be a non-negative integer constant.  */
  if (args)
      tree position = TREE_VALUE (args);

      if (TREE_CODE (position) != INTEGER_CST)
	  warning (0, "requested position is not an integer constant");
	  *no_add_attrs = true;
      else
	  if (tree_int_cst_lt (position, integer_zero_node))
	      warning (0, "requested position is less than zero");
	      *no_add_attrs = true;

  return NULL_TREE;
/* Handle a "noreturn" attribute; arguments as in
   struct attribute_spec.handler. */

static tree
handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			   int ARG_UNUSED (flags), bool *no_add_attrs)
  tree type = TREE_TYPE (*node);

  /* See FIXME comment in c_common_attribute_table. */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_THIS_VOLATILE (*node) = 1;
  else if (TREE_CODE (type) == POINTER_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
    /* For a pointer-to-function, rebuild the pointed-to function type
       with the volatile qualifier set, preserving const-ness.  */
    TREE_TYPE (*node)
      = build_pointer_type
	(build_type_variant (TREE_TYPE (type),
			     TYPE_READONLY (TREE_TYPE (type)), 1));
  else
      warning (OPT_Wattributes, "%qs attribute ignored",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;

  return NULL_TREE;
/* Handle a "noinline" attribute; arguments as in
   struct attribute_spec.handler. */

static tree
handle_noinline_attribute (tree *node, tree name,
			   tree ARG_UNUSED (args),
			   int ARG_UNUSED (flags), bool *no_add_attrs)
  if (TREE_CODE (*node) == FUNCTION_DECL)
      /* "noinline" contradicts an already-present "always_inline", in
	 which case it is dropped with a warning.  */
      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
	  warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
		   "with attribute %qs", name, "always_inline");
	  *no_add_attrs = true;
      else
	DECL_UNINLINABLE (*node) = 1;
  else
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;

  return NULL_TREE;
6181 /* Handle a "noclone" attribute; arguments as in
6182 struct attribute_spec.handler. */
6184 static tree
6185 handle_noclone_attribute (tree *node, tree name,
6186 tree ARG_UNUSED (args),
6187 int ARG_UNUSED (flags), bool *no_add_attrs)
6189 if (TREE_CODE (*node) != FUNCTION_DECL)
6191 warning (OPT_Wattributes, "%qE attribute ignored", name);
6192 *no_add_attrs = true;
6195 return NULL_TREE;
/* Handle a "leaf" attribute; arguments as in
   struct attribute_spec.handler. */

static tree
handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
		       int ARG_UNUSED (flags), bool *no_add_attrs)
  /* NOTE: the two checks below are deliberately independent (not
     "else if"): both diagnostics may be issued for the same decl.  */
  if (TREE_CODE (*node) != FUNCTION_DECL)
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
  /* "leaf" only matters for externally visible functions.  */
  if (!TREE_PUBLIC (*node))
      warning (OPT_Wattributes, "%qE attribute has no effect", name);
      *no_add_attrs = true;

  return NULL_TREE;
6219 /* Handle a "always_inline" attribute; arguments as in
6220 struct attribute_spec.handler. */
6222 static tree
6223 handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6224 int ARG_UNUSED (flags), bool *no_add_attrs)
6226 if (TREE_CODE (*node) == FUNCTION_DECL)
6228 /* Set the attribute and mark it for disregarding inline limits. */
6229 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
6231 else
6233 warning (OPT_Wattributes, "%qE attribute ignored", name);
6234 *no_add_attrs = true;
6237 return NULL_TREE;
6240 /* Handle a "malloc" attribute; arguments as in
6241 struct attribute_spec.handler. */
6243 static tree
6244 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6245 int ARG_UNUSED (flags), bool *no_add_attrs)
6247 if (TREE_CODE (*node) == FUNCTION_DECL
6248 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6249 DECL_IS_MALLOC (*node) = 1;
6250 else
6252 warning (OPT_Wattributes, "%qs attribute ignored",
6253 IDENTIFIER_POINTER (name));
6254 *no_add_attrs = true;
6257 return NULL_TREE;
/* Fake handler for attributes we don't properly support.  Accepts the
   attribute without acting on it and without diagnostics. */

tree
fake_attribute_handler (tree * ARG_UNUSED (node),
			tree ARG_UNUSED (name),
			tree ARG_UNUSED (args),
			int ARG_UNUSED (flags),
			bool * ARG_UNUSED (no_add_attrs))
  return NULL_TREE;
/* Handle a "type_generic" attribute. */

static tree
handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
			       tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			       bool * ARG_UNUSED (no_add_attrs))
  /* Ensure we have a function type. */
  gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);

  /* Ensure we have a variadic function. */
  gcc_assert (!prototype_p (*node) || stdarg_p (*node));

  return NULL_TREE;
/* Handle a "vector_size" attribute; arguments as in
   struct attribute_spec.handler. */

static tree
handle_vector_size_attribute (tree *node, tree name, tree args,
			      int ARG_UNUSED (flags), bool *no_add_attrs)
  tree type = *node;
  tree vector_type;

  /* The attribute itself is never kept; the type is rewritten instead.  */
  *no_add_attrs = true;

  /* We need to provide for vector pointers, vector arrays, and
     functions returning vectors. For example:

     __attribute__((vector_size(16))) short *foo;

     In this case, the mode is SI, but the type being modified is
     HI, so we need to look further. */
  while (POINTER_TYPE_P (type)
	 || TREE_CODE (type) == FUNCTION_TYPE
	 || TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
  if (!vector_type)
    return NULL_TREE;

  /* Build back pointers if needed. */
  *node = reconstruct_complex_type (*node, vector_type);

  return NULL_TREE;
/* Handle a "vector_type" attribute; arguments as in
   struct attribute_spec.handler. */

static tree
handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			      int ARG_UNUSED (flags), bool *no_add_attrs)
  tree type = *node;
  tree vector_type;

  /* The attribute itself is never kept; the type is rewritten instead.  */
  *no_add_attrs = true;

  if (TREE_CODE (type) != ARRAY_TYPE)
      error ("attribute %qs applies to array types only",
	     IDENTIFIER_POINTER (name));
      return NULL_TREE;

  vector_type = build_vector_type_for_array (type, name);
  if (!vector_type)
    return NULL_TREE;

  /* Remember the array type the vector was derived from.  */
  TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
  *node = vector_type;

  return NULL_TREE;
6351 /* ----------------------------------------------------------------------- *
6352 * BUILTIN FUNCTIONS *
6353 * ----------------------------------------------------------------------- */
/* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
   names. Does not declare a non-__builtin_ function if flag_no_builtin, or
   if nonansi_p and flag_no_nonansi_builtin. */

static void
def_builtin_1 (enum built_in_function fncode,
	       const char *name,
	       enum built_in_class fnclass,
	       tree fntype, tree libtype,
	       bool both_p, bool fallback_p,
	       bool nonansi_p ATTRIBUTE_UNUSED,
	       tree fnattrs, bool implicit_p)
  tree decl;
  const char *libname;

  /* Preserve an already installed decl. It most likely was setup in advance
     (e.g. as part of the internal builtins) for specific reasons. */
  if (builtin_decl_explicit (fncode))
    return;

  /* BOTH_P or FALLBACK_P require NAME to carry the "__builtin_" prefix.  */
  gcc_assert ((!both_p && !fallback_p)
	      || !strncmp (name, "__builtin_",
			   strlen ("__builtin_")));

  /* Strip the "__builtin_" prefix to obtain the library name.  */
  libname = name + strlen ("__builtin_");
  decl = add_builtin_function (name, fntype, fncode, fnclass,
			       (fallback_p ? libname : NULL),
			       fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada. */
    add_builtin_function (libname, libtype, fncode, fnclass,
			  NULL, fnattrs);

  set_builtin_decl (fncode, decl, implicit_p);
/* C dialect flags tested by conditions in builtins.def; presumably kept
   off since Ada has no such dialects -- confirm against builtins.def.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
static int flag_isoc11 = 0;

/* Install what the common builtins.def offers. */

static void
install_builtin_functions (void)
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
		    NONANSI_P, ATTRS, IMPLICIT, COND)			\
  if (NAME && COND)							\
    def_builtin_1 (ENUM, NAME, CLASS,					\
		   builtin_types[(int) TYPE],				\
		   builtin_types[(int) LIBTYPE],			\
		   BOTH_P, FALLBACK_P, NONANSI_P,			\
		   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
/* ----------------------------------------------------------------------- *
 *                              BUILTIN FUNCTIONS                          *
 * ----------------------------------------------------------------------- */

/* Install the builtin functions we might need.  Entry point called by the
   front-end; the helpers above must run first to set up the type and
   attribute tables consumed by install_builtin_functions.  */

void
gnat_install_builtins (void)
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first. Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
     the generic definition from builtins.def. */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def. */
  targetm.init_builtins ();
  install_builtin_functions ();
6438 #include "gt-ada-utils.h"
6439 #include "gtype-ada.h"