2014-12-19 Andrew MacLeod <amacleod@redhat.com>
[official-gcc.git] / gcc / ada / gcc-interface / utils.c
blobb566257e9a4c75274b491d67d47ccd15f9865f9c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2014, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "tree.h"
31 #include "stringpool.h"
32 #include "stor-layout.h"
33 #include "attribs.h"
34 #include "varasm.h"
35 #include "flags.h"
36 #include "toplev.h"
37 #include "diagnostic-core.h"
38 #include "output.h"
39 #include "ggc.h"
40 #include "debug.h"
41 #include "convert.h"
42 #include "target.h"
43 #include "common/common-target.h"
44 #include "langhooks.h"
45 #include "hash-map.h"
46 #include "is-a.h"
47 #include "plugin-api.h"
48 #include "vec.h"
49 #include "hashtab.h"
50 #include "hash-set.h"
51 #include "machmode.h"
52 #include "hard-reg-set.h"
53 #include "input.h"
54 #include "function.h"
55 #include "ipa-ref.h"
56 #include "cgraph.h"
57 #include "diagnostic.h"
58 #include "timevar.h"
59 #include "tree-dump.h"
60 #include "tree-inline.h"
61 #include "tree-iterator.h"
63 #include "ada.h"
64 #include "types.h"
65 #include "atree.h"
66 #include "elists.h"
67 #include "namet.h"
68 #include "nlists.h"
69 #include "stringt.h"
70 #include "uintp.h"
71 #include "fe.h"
72 #include "sinfo.h"
73 #include "einfo.h"
74 #include "ada-tree.h"
75 #include "gigi.h"
/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;

/* True if floating-point arithmetics may use wider intermediate results.  */
bool fp_arith_may_widen = true;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging a lot of the common-c front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, decl_handler,
       type_handler, affects_type_identity } */
  { "const",        0, 0,  true,  false, false, handle_const_attribute,
    NULL, false },
  { "nothrow",      0, 0,  true,  false, false, handle_nothrow_attribute,
    NULL, false },
  { "pure",         0, 0,  true,  false, false, handle_pure_attribute,
    NULL, false },
  { "no vops",      0, 0,  true,  false, false, handle_novops_attribute,
    NULL, false },
  { "nonnull",      0, -1, false, true,  true,  NULL,
    handle_nonnull_attribute, false },
  { "sentinel",     0, 1,  false, true,  true,  NULL,
    handle_sentinel_attribute, false },
  { "noreturn",     0, 0,  true,  false, false, handle_noreturn_attribute,
    NULL, false },
  { "leaf",         0, 0,  true,  false, false, handle_leaf_attribute,
    NULL, false },
  { "always_inline",0, 0,  true,  false, false, handle_always_inline_attribute,
    NULL, false },
  { "malloc",       0, 0,  true,  false, false, handle_malloc_attribute,
    NULL, false },
  { "type generic", 0, 0,  false, true,  true,  NULL,
    handle_type_generic_attribute, false },

  { "vector_size",  1, 1,  false, true,  false, NULL,
    handle_vector_size_attribute, false },
  { "vector_type",  0, 0,  false, true,  false, NULL,
    handle_vector_type_attribute, false },
  { "may_alias",    0, 0,  false, true,  false, NULL, NULL, false },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",       3, 3,  false, true,  true,  NULL,
    fake_attribute_handler, false },
  { "format_arg",   1, 1,  false, true,  true,  NULL,
    fake_attribute_handler, false },

  /* Sentinel entry terminating the table.  */
  { NULL,           0, 0,  false, false, false, NULL, NULL, false }
};
/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

/* Accessors for the association table; GNAT node ids are offset by
   First_Node_Id so the table can be indexed from zero.  */
#define GET_GNU_TREE(GNAT_ENTITY)	\
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL)	\
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY)	\
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY)	\
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL)	\
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY)	\
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations.  */
static GTY(()) tree global_context;

/* An array of global declarations.  */
static GTY(()) vec<tree, va_gc> *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) vec<tree, va_gc> *builtin_decls;

/* An array of global renaming pointers.  */
static GTY(()) vec<tree, va_gc> *global_renaming_pointers;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

/* A hash table of padded types.  It is modelled on the generic type
   hash table in tree.c, which must thus be used as a reference.  */

struct GTY((for_user)) pad_type_hash {
  unsigned long hash;
  tree type;
};

struct pad_type_hasher : ggc_cache_hasher<pad_type_hash *>
{
  static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
  static bool equal (pad_type_hash *a, pad_type_hash *b);
  static void handle_cache_entry (pad_type_hash *&);
};

static GTY ((cache))
  hash_table<pad_type_hasher> *pad_type_hash_table;

/* Forward declarations for local helpers defined later in this file.  */
static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static unsigned int scale_by_factor_of (tree, unsigned int);
static bool potential_alignment_gap (tree, tree, tree);

/* A linked list used as a queue to defer the initialization of the
   DECL_CONTEXT attribute of ..._DECL nodes and of the TYPE_CONTEXT attribute
   of ..._TYPE nodes.  */
struct deferred_decl_context_node
{
  tree decl;		    /* The ..._DECL node to work on.  */
  Entity_Id gnat_scope;     /* The corresponding entity's Scope attribute.  */
  int force_global;	    /* force_global value when pushing DECL.  */
  vec<tree, va_heap, vl_ptr> types;  /* A list of ..._TYPE nodes to propagate
					the context to.  */
  struct deferred_decl_context_node *next;  /* The next queue item.  */
};

static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;

/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
   feed it with the elaboration of GNAT_SCOPE.  */
static struct deferred_decl_context_node *
add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);

/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed.  */
static void add_deferred_type_context (struct deferred_decl_context_node *n,
				       tree type);
290 /* Initialize data structures of the utils.c module. */
292 void
293 init_gnat_utils (void)
295 /* Initialize the association of GNAT nodes to GCC trees. */
296 associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
298 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
299 dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
301 /* Initialize the hash table of padded types. */
302 pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
305 /* Destroy data structures of the utils.c module. */
307 void
308 destroy_gnat_utils (void)
310 /* Destroy the association of GNAT nodes to GCC trees. */
311 ggc_free (associate_gnat_to_gnu);
312 associate_gnat_to_gnu = NULL;
314 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
315 ggc_free (dummy_node_table);
316 dummy_node_table = NULL;
318 /* Destroy the hash table of padded types. */
319 pad_type_hash_table->empty ();
320 pad_type_hash_table = NULL;
322 /* Invalidate the global renaming pointers. */
323 invalidate_global_renaming_pointers ();
326 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
327 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
328 If NO_CHECK is true, the latter check is suppressed.
330 If GNU_DECL is zero, reset a previous association. */
332 void
333 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
335 /* Check that GNAT_ENTITY is not already defined and that it is being set
336 to something which is a decl. If that is not the case, this usually
337 means GNAT_ENTITY is defined twice, but occasionally is due to some
338 Gigi problem. */
339 gcc_assert (!(gnu_decl
340 && (PRESENT_GNU_TREE (gnat_entity)
341 || (!no_check && !DECL_P (gnu_decl)))));
343 SET_GNU_TREE (gnat_entity, gnu_decl);
346 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
347 that was associated with it. If there is no such tree node, abort.
349 In some cases, such as delayed elaboration or expressions that need to
350 be elaborated only once, GNAT_ENTITY is really not an entity. */
352 tree
353 get_gnu_tree (Entity_Id gnat_entity)
355 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
356 return GET_GNU_TREE (gnat_entity);
359 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
361 bool
362 present_gnu_tree (Entity_Id gnat_entity)
364 return PRESENT_GNU_TREE (gnat_entity);
/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
  tree gnu_type;

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_equiv))
    gnat_equiv = gnat_type;

  /* If there is already a dummy type, use that one.  Else make one.  */
  if (PRESENT_DUMMY_NODE (gnat_equiv))
    return GET_DUMMY_NODE (gnat_equiv);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_equiv)
			? tree_code_for_record_type (gnat_equiv)
			: ENUMERAL_TYPE);
  /* Name the dummy after the original type, not the equivalent one.  */
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_equiv))
    TYPE_BY_REFERENCE_P (gnu_type) = 1;

  /* Record it so later requests for the same equivalent type reuse it.  */
  SET_DUMMY_NODE (gnat_equiv, gnu_type);

  return gnu_type;
}
401 /* Return the dummy type that was made for GNAT_TYPE, if any. */
403 tree
404 get_dummy_type (Entity_Id gnat_type)
406 return GET_DUMMY_NODE (gnat_type);
/* Build dummy fat and thin pointer types whose designated type is specified
   by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter.  */

void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;

  /* Dummy template ("XUB" = bounds) record and a pointer to it.  */
  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  /* Dummy array ("XUA") type and a pointer to it.  */
  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl.  */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
			     gnu_fat_type);
  /* A fat pointer is a pair: pointer to the array data, pointer to the
     bounds template.  */
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
			      gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
			 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed.  */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  /* Dummy object ("XUT" = template + data) record used for thin pointers.  */
  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  /* Attach both results to the designated type.  */
  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}
452 /* Return true if we are in the global binding level. */
454 bool
455 global_bindings_p (void)
457 return force_global || current_function_decl == NULL_TREE;
460 /* Enter a new binding level. */
462 void
463 gnat_pushlevel (void)
465 struct gnat_binding_level *newlevel = NULL;
467 /* Reuse a struct for this binding level, if there is one. */
468 if (free_binding_level)
470 newlevel = free_binding_level;
471 free_binding_level = free_binding_level->chain;
473 else
474 newlevel = ggc_alloc<gnat_binding_level> ();
476 /* Use a free BLOCK, if any; otherwise, allocate one. */
477 if (free_block_chain)
479 newlevel->block = free_block_chain;
480 free_block_chain = BLOCK_CHAIN (free_block_chain);
481 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
483 else
484 newlevel->block = make_node (BLOCK);
486 /* Point the BLOCK we just made to its parent. */
487 if (current_binding_level)
488 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
490 BLOCK_VARS (newlevel->block) = NULL_TREE;
491 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
492 TREE_USED (newlevel->block) = 1;
494 /* Add this level to the front of the chain (stack) of active levels. */
495 newlevel->chain = current_binding_level;
496 newlevel->jmpbuf_decl = NULL_TREE;
497 current_binding_level = newlevel;
500 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
501 and point FNDECL to this BLOCK. */
503 void
504 set_current_block_context (tree fndecl)
506 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
507 DECL_INITIAL (fndecl) = current_binding_level->block;
508 set_block_for_group (current_binding_level->block);
511 /* Set the jmpbuf_decl for the current binding level to DECL. */
513 void
514 set_block_jmpbuf_decl (tree decl)
516 current_binding_level->jmpbuf_decl = decl;
519 /* Get the jmpbuf_decl, if any, for the current binding level. */
521 tree
522 get_block_jmpbuf_decl (void)
524 return current_binding_level->jmpbuf_decl;
/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  /* Declarations were chained in reverse order; restore source order.  */
  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK don't do anything.  Otherwise, if there
     are no variables free the block and merge its subblocks into those of its
     parent block.  Otherwise, add it to the list of its parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (BLOCK_VARS (block) == NULL_TREE)
    {
      BLOCK_SUBBLOCKS (level->chain->block)
	= block_chainon (BLOCK_SUBBLOCKS (block),
			 BLOCK_SUBBLOCKS (level->chain->block));
      /* The now-empty BLOCK goes back onto the free chain for reuse by
	 gnat_pushlevel.  */
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
565 /* Exit a binding level and discard the associated BLOCK. */
567 void
568 gnat_zaplevel (void)
570 struct gnat_binding_level *level = current_binding_level;
571 tree block = level->block;
573 BLOCK_CHAIN (block) = free_block_chain;
574 free_block_chain = block;
576 /* Free this binding structure. */
577 current_binding_level = level->chain;
578 level->chain = free_binding_level;
579 free_binding_level = level;
/* Set the context of TYPE and its parallel types (if any) to CONTEXT.  */

static void
gnat_set_type_context (tree type, tree context)
{
  tree decl = TYPE_STUB_DECL (type);

  TYPE_CONTEXT (type) = context;

  /* Walk the chain of parallel types hanging off the stub decls.  */
  while (decl && DECL_PARALLEL_TYPE (decl))
    {
      tree parallel_type = DECL_PARALLEL_TYPE (decl);

      /* Give a context to the parallel types and their stub decl, if any.
	 Some parallel types seem to be present in multiple parallel type
	 chains, so don't mess with their context if they already have one.  */
      if (TYPE_CONTEXT (parallel_type) == NULL_TREE)
	{
	  if (TYPE_STUB_DECL (parallel_type) != NULL_TREE)
	    DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
	  TYPE_CONTEXT (parallel_type) = context;
	}

      decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
    }
}
/* Return the innermost scope, starting at GNAT_NODE, we are be interested in
   the debug info, or Empty if there is no such scope.  If not NULL, set
   IS_SUBPROGRAM to whether the returned entity is a subprogram.  */

static Entity_Id
get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
{
  Entity_Id gnat_entity;

  if (is_subprogram)
    *is_subprogram = false;

  /* Only defining identifiers carry a Scope we can climb.  */
  if (Nkind (gnat_node) == N_Defining_Identifier)
    gnat_entity = Scope (gnat_node);
  else
    return Empty;

  /* Climb the scope chain until an interesting entity is found.  */
  while (Present (gnat_entity))
    {
      switch (Ekind (gnat_entity))
	{
	case E_Function:
	case E_Procedure:
	  if (Present (Protected_Body_Subprogram (gnat_entity)))
	    gnat_entity = Protected_Body_Subprogram (gnat_entity);

	  /* If the scope is a subprogram, then just rely on
	     current_function_decl, so that we don't have to defer
	     anything.  This is needed because other places rely on the
	     validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes.  */
	  if (is_subprogram)
	    *is_subprogram = true;
	  return gnat_entity;

	case E_Record_Type:
	case E_Record_Subtype:
	  return gnat_entity;

	default:
	  /* By default, we are not interested in this particular scope: go to
	     the outer one.  */
	  break;
	}

      gnat_entity = Scope (gnat_entity);
    }

  return Empty;
}
657 /* If N is NULL, set TYPE's context to CONTEXT. Defer this to the processing of
658 N otherwise. */
660 static void
661 defer_or_set_type_context (tree type,
662 tree context,
663 struct deferred_decl_context_node *n)
665 if (n)
666 add_deferred_type_context (n, type);
667 else
668 gnat_set_type_context (type, context);
671 /* Return global_context. Create it if needed, first. */
673 static tree
674 get_global_context (void)
676 if (!global_context)
677 global_context = build_translation_unit_decl (NULL_TREE);
678 return global_context;
/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  tree context = NULL_TREE;
  struct deferred_decl_context_node *deferred_decl_context = NULL;

  /* If explicitely asked to make DECL global or if it's an imported nested
     object, short-circuit the regular Scope-based context computation.  */
  if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
    {
      /* Rely on the GNAT scope, or fallback to the current_function_decl if
	 the GNAT scope reached the global scope, if it reached a subprogram
	 or the declaration is a subprogram or a variable (for them we skip
	 intermediate context types because the subprogram body elaboration
	 machinery and the inliner both expect a subprogram context).

	 Falling back to current_function_decl is necessary for implicit
	 subprograms created by gigi, such as the elaboration subprograms.  */
      bool context_is_subprogram = false;
      const Entity_Id gnat_scope
	= get_debug_scope (gnat_node, &context_is_subprogram);

      if (Present (gnat_scope)
	  && !context_is_subprogram
	  && TREE_CODE (decl) != FUNCTION_DECL
	  && TREE_CODE (decl) != VAR_DECL)
	/* Always assume the scope has not been elaborated, thus defer the
	   context propagation to the time its elaboration will be
	   available.  */
	deferred_decl_context
	  = add_deferred_decl_context (decl, gnat_scope, force_global);

      /* External declarations (when force_global > 0) may not be in a
	 local context.  */
      else if (current_function_decl != NULL_TREE && force_global == 0)
	context = current_function_decl;
    }

  /* If either we are forced to be in global mode or if both the GNAT scope and
     the current_function_decl did not help determining the context, use the
     global scope.  */
  if (!deferred_decl_context && context == NULL_TREE)
    context = get_global_context ();

  /* Functions imported in another function are not really nested.
     For really nested functions mark them initially as needing
     a static chain for uses of that flag before unnesting;
     lower_nested_functions will then recompute it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && !TREE_PUBLIC (decl)
      && context != NULL_TREE
      && (TREE_CODE (context) == FUNCTION_DECL
	  || decl_function_context (context) != NULL_TREE))
    DECL_STATIC_CHAIN (decl) = 1;

  if (!deferred_decl_context)
    DECL_CONTEXT (decl) = context;

  /* Suppress warnings when the GNAT node is absent or warnings are off.  */
  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node) && !renaming_from_generic_instantiation_p (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
	&& TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      if (DECL_EXTERNAL (decl))
	{
	  if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
	    vec_safe_push (builtin_decls, decl);
	}
      else if (global_bindings_p ())
	vec_safe_push (global_decls, decl);
      else
	{
	  DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
	  BLOCK_VARS (current_binding_level->block) = decl;
	}
    }

  /* For the declaration of a type, set its name if it either is not already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in the
     equivalent function of c-decl.c makes a copy of the type node here, but
     that may cause us trouble with incomplete types.  We make an exception
     for fat pointer types because the compiler automatically builds them
     for unconstrained array types and the debugger uses them to represent
     both these and pointers to these.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	{
	  /* Array and pointer types aren't "tagged" types so we force the
	     type to be associated with its typedef in the DWARF back-end,
	     in order to make sure that the latter is always preserved.  */
	  if (!DECL_ARTIFICIAL (decl)
	      && (TREE_CODE (t) == ARRAY_TYPE
		  || TREE_CODE (t) == POINTER_TYPE))
	    {
	      tree tt = build_distinct_type_copy (t);
	      if (TREE_CODE (t) == POINTER_TYPE)
		TYPE_NEXT_PTR_TO (t) = tt;
	      TYPE_NAME (tt) = DECL_NAME (decl);
	      defer_or_set_type_context (tt,
					 DECL_CONTEXT (decl),
					 deferred_decl_context);
	      TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
	      DECL_ORIGINAL_TYPE (decl) = tt;
	    }
	}
      else if (TYPE_IS_FAT_POINTER_P (t))
	{
	  /* We need a variant for the placeholder machinery to work.  */
	  tree tt = build_variant_type_copy (t);
	  TYPE_NAME (tt) = decl;
	  defer_or_set_type_context (tt,
				     DECL_CONTEXT (decl),
				     deferred_decl_context);
	  TREE_USED (tt) = TREE_USED (t);
	  TREE_TYPE (decl) = tt;
	  if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
	    DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
	  else
	    DECL_ORIGINAL_TYPE (decl) = t;
	  DECL_ARTIFICIAL (decl) = 0;
	  t = NULL_TREE;
	}
      else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
	;
      else
	t = NULL_TREE;

      /* Propagate the name to all the anonymous variants.  This is needed
	 for the type qualifiers machinery to work properly.  Also propagate
	 the context to them.  Note that the context will be propagated to all
	 parallel types too thanks to gnat_set_type_context.  */
      if (t)
	for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
	  if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	    {
	      TYPE_NAME (t) = decl;
	      defer_or_set_type_context (t,
					 DECL_CONTEXT (decl),
					 deferred_decl_context);
	    }
    }
}
/* Create a record type that contains a SIZE bytes long field of TYPE with a
   starting bit position so that it is aligned to ALIGN bits, and leaving at
   least ROOM bytes free before the field.  BASE_ALIGN is the alignment the
   record is guaranteed to get.  GNAT_NODE is used for the position of the
   associated TYPE_DECL.  */

tree
make_aligning_type (tree type, unsigned int align, tree size,
		    unsigned int base_align, int room, Node_Id gnat_node)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes.  We use a
     record placeholder to express record'address.  */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

		    <--------- pos ---------->
		{  +------------+-------------+-----------------+
      record  =>{  |############|     ...     | field (type)    |
		{  +------------+-------------+-----------------+
		   |<-- room -->|<- voffset ->|<---- size ----->|
		   o            o
		   |            |
		   record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record.  */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_IDENTIFIER (type);

  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS.  The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address.  */
  voffset_st = size_binop (BIT_AND_EXPR,
			   fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
			   size_int ((align / BITS_PER_UNIT) - 1));

  /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype.  */
  pos = size_binop (MULT_EXPR,
		    convert (bitsizetype,
			     size_binop (PLUS_EXPR, room_st, voffset_st)),
		    bitsize_unit_node);

  /* Craft the GCC record representation.  We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield.  We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression.  */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
			     pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  TYPE_ALIGN (record_type) = base_align;
  TYPE_USER_ALIGN (record_type) = 1;

  /* Account for the worst-case padding (ALIGN bits of voffset plus ROOM
     bytes) in the overall size so allocation is always sufficient.  */
  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
		  size_binop (MULT_EXPR, convert (bitsizetype, size),
			      bitsize_unit_node),
		  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
		  size_int (room + align / BITS_PER_UNIT));

  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, record_type, true, false, gnat_node);

  return record_type;
}
936 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
937 as the field type of a packed record if IN_RECORD is true, or as the
938 component type of a packed array if IN_RECORD is false. See if we can
939 rewrite it either as a type that has a non-BLKmode, which we can pack
940 tighter in the packed record case, or as a smaller type. If so, return
941 the new type. If not, return the original type. */
943 tree
944 make_packable_type (tree type, bool in_record)
946 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
947 unsigned HOST_WIDE_INT new_size;
948 tree new_type, old_field, field_list = NULL_TREE;
949 unsigned int align;
951 /* No point in doing anything if the size is zero. */
952 if (size == 0)
953 return type;
955 new_type = make_node (TREE_CODE (type));
957 /* Copy the name and flags from the old type to that of the new.
958 Note that we rely on the pointer equality created here for
959 TYPE_NAME to look through conversions in various places. */
960 TYPE_NAME (new_type) = TYPE_NAME (type);
961 TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
962 TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
963 if (TREE_CODE (type) == RECORD_TYPE)
964 TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
966 /* If we are in a record and have a small size, set the alignment to
967 try for an integral mode. Otherwise set it to try for a smaller
968 type with BLKmode. */
969 if (in_record && size <= MAX_FIXED_MODE_SIZE)
971 align = ceil_pow2 (size);
972 TYPE_ALIGN (new_type) = align;
973 new_size = (size + align - 1) & -align;
975 else
977 unsigned HOST_WIDE_INT align;
979 /* Do not try to shrink the size if the RM size is not constant. */
980 if (TYPE_CONTAINS_TEMPLATE_P (type)
981 || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
982 return type;
984 /* Round the RM size up to a unit boundary to get the minimal size
985 for a BLKmode record. Give up if it's already the size. */
986 new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
987 new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
988 if (new_size == size)
989 return type;
991 align = new_size & -new_size;
992 TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
995 TYPE_USER_ALIGN (new_type) = 1;
997 /* Now copy the fields, keeping the position and size as we don't want
998 to change the layout by propagating the packedness downwards. */
999 for (old_field = TYPE_FIELDS (type); old_field;
1000 old_field = DECL_CHAIN (old_field))
1002 tree new_field_type = TREE_TYPE (old_field);
1003 tree new_field, new_size;
1005 if (RECORD_OR_UNION_TYPE_P (new_field_type)
1006 && !TYPE_FAT_POINTER_P (new_field_type)
1007 && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
1008 new_field_type = make_packable_type (new_field_type, true);
1010 /* However, for the last field in a not already packed record type
1011 that is of an aggregate type, we need to use the RM size in the
1012 packable version of the record type, see finish_record_type. */
1013 if (!DECL_CHAIN (old_field)
1014 && !TYPE_PACKED (type)
1015 && RECORD_OR_UNION_TYPE_P (new_field_type)
1016 && !TYPE_FAT_POINTER_P (new_field_type)
1017 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
1018 && TYPE_ADA_SIZE (new_field_type))
1019 new_size = TYPE_ADA_SIZE (new_field_type);
1020 else
1021 new_size = DECL_SIZE (old_field);
1023 new_field
1024 = create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
1025 new_size, bit_position (old_field),
1026 TYPE_PACKED (type),
1027 !DECL_NONADDRESSABLE_P (old_field));
1029 DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
1030 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
1031 if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
1032 DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);
1034 DECL_CHAIN (new_field) = field_list;
1035 field_list = new_field;
1038 finish_record_type (new_type, nreverse (field_list), 2, false);
1039 relate_alias_sets (new_type, type, ALIAS_SET_COPY);
1040 if (TYPE_STUB_DECL (type))
1041 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
1042 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
1044 /* If this is a padding record, we never want to make the size smaller
1045 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
1046 if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
1048 TYPE_SIZE (new_type) = TYPE_SIZE (type);
1049 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
1050 new_size = size;
1052 else
1054 TYPE_SIZE (new_type) = bitsize_int (new_size);
1055 TYPE_SIZE_UNIT (new_type)
1056 = size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
1059 if (!TYPE_CONTAINS_TEMPLATE_P (type))
1060 SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
1062 compute_record_mode (new_type);
1064 /* Try harder to get a packable type if necessary, for example
1065 in case the record itself contains a BLKmode field. */
1066 if (in_record && TYPE_MODE (new_type) == BLKmode)
1067 SET_TYPE_MODE (new_type,
1068 mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));
1070 /* If neither the mode nor the size has shrunk, return the old type. */
1071 if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
1072 return type;
1074 return new_type;
1077 /* Given a type TYPE, return a new type whose size is appropriate for SIZE.
1078 If TYPE is the best type, return it. Otherwise, make a new type. We
1079 only support new integral and pointer types. FOR_BIASED is true if
1080 we are making a biased type. */
tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error.  Likewise if it's too large to represent.  */
  if (!size_tree || !tree_fits_uhwi_p (size_tree))
    return type;

  size = tree_to_uhwi (size_tree);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* Only INTEGER_TYPEs can carry a biased representation.  */
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
		  && TYPE_BIASED_REPRESENTATION_P (type));

      /* Integer types with precision 0 are forbidden.  */
      if (size == 0)
	size = 1;

      /* Only do something if the type isn't a packed array type and doesn't
	 already have the proper size and the size isn't too large.  */
      if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
	  || (TYPE_PRECISION (type) == size && biased_p == for_biased)
	  || size > LONG_LONG_TYPE_SIZE)
	break;

      /* A biased type is laid out as unsigned since only offsets from the
	 bias are stored.  */
      biased_p |= for_biased;
      if (TYPE_UNSIGNED (type) || biased_p)
	new_type = make_unsigned_type (size);
      else
	new_type = make_signed_type (size);
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      /* Keep the RM bounds of the original type; only the GCC precision
	 changes.  */
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
      /* Copy the name to show that it's essentially the same type and
	 not a subrange type.  */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Do something if this is a fat pointer, in which case we
	 may need to return the thin pointer.  */
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
	{
	  /* Fall back to the default pointer mode if the target rejects
	     the mode matching the requested size.  */
	  machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
	  if (!targetm.valid_pointer_mode (p_mode))
	    p_mode = ptr_mode;
	  return
	    build_pointer_type_for_mode
	      (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
	       p_mode, 0);
	}
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
	 may need to return the fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
	return
	  build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  /* No resizing was possible or needed: hand back the original type.  */
  return type;
}
1160 /* See if the data pointed to by the hash table slot is marked. */
void
pad_type_hasher::handle_cache_entry (pad_type_hash *&t)
{
  extern void gt_ggc_mx (pad_type_hash *&);
  /* Sentinel slots carry no payload; leave them alone.  */
  if (t == HTAB_EMPTY_ENTRY || t == HTAB_DELETED_ENTRY)
    return;
  /* If the cached type survived this GC cycle, keep the entry alive by
     marking it too.  */
  else if (ggc_marked_p (t->type))
    gt_ggc_mx (t);
  /* Otherwise the type is dead: drop the entry so the table doesn't keep
     a dangling reference.  */
  else
    t = static_cast<pad_type_hash *> (HTAB_DELETED_ENTRY);
}
1174 /* Return true iff the padded types are equivalent. */
1176 bool
1177 pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
1179 tree type1, type2;
1181 if (t1->hash != t2->hash)
1182 return 0;
1184 type1 = t1->type;
1185 type2 = t2->type;
1187 /* We consider that the padded types are equivalent if they pad the same
1188 type and have the same size, alignment and RM size. Taking the mode
1189 into account is redundant since it is determined by the others. */
1190 return
1191 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1192 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1193 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1194 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2);
1197 /* Look up the padded TYPE in the hash table and return its canonical version
1198 if it exists; otherwise, insert it into the hash table. */
static tree
lookup_and_insert_pad_type (tree type)
{
  hashval_t hashcode;
  struct pad_type_hash in, *h;

  /* Hash on the inner type, size, alignment and RM size: the same
     quadruple that pad_type_hasher::equal compares.  */
  hashcode
    = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
  hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
  hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
  hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);

  /* Probe the table with a stack-allocated key first.  */
  in.hash = hashcode;
  in.type = type;
  h = pad_type_hash_table->find_with_hash (&in, hashcode);
  if (h)
    return h->type;

  /* Not found: insert a GC-allocated entry and tell the caller that TYPE
     is now the canonical representative.  */
  h = ggc_alloc<pad_type_hash> ();
  h->hash = hashcode;
  h->type = type;
  *pad_type_hash_table->find_slot_with_hash (h, hashcode, INSERT) = h;
  return NULL_TREE;
}
1225 /* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
1226 if needed. We have already verified that SIZE and ALIGN are large enough.
1227 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1228 IS_COMPONENT_TYPE is true if this is being done for the component type of
1229 an array. IS_USER_TYPE is true if the original type needs to be completed.
1230 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
1231 the RM size of the resulting type is to be set to SIZE too. */
tree
maybe_pad_type (tree type, tree size, unsigned int align,
		Entity_Id gnat_entity, bool is_component_type,
		bool is_user_type, bool definition, bool set_rm_size)
{
  tree orig_size = TYPE_SIZE (type);
  unsigned int orig_align = TYPE_ALIGN (type);
  tree record, field;

  /* If TYPE is a padded type, see if it agrees with any size and alignment
     we were given.  If so, return the original type.  Otherwise, strip
     off the padding, since we will either be returning the inner type
     or repadding it.  If no size or alignment is specified, use that of
     the original padded type.  */
  if (TYPE_IS_PADDING_P (type))
    {
      if ((!size
	   || operand_equal_p (round_up (size, orig_align), orig_size, 0))
	  && (align == 0 || align == orig_align))
	return type;

      if (!size)
	size = orig_size;
      if (align == 0)
	align = orig_align;

      /* Strip the padding: work on the inner type from here on.  */
      type = TREE_TYPE (TYPE_FIELDS (type));
      orig_size = TYPE_SIZE (type);
      orig_align = TYPE_ALIGN (type);
    }

  /* If the size is either not being changed or is being made smaller (which
     is not done here and is only valid for bitfields anyway), show the size
     isn't changing.  Likewise, clear the alignment if it isn't being
     changed.  Then return if we aren't doing anything.  */
  if (size
      && (operand_equal_p (size, orig_size, 0)
	  || (TREE_CODE (orig_size) == INTEGER_CST
	      && tree_int_cst_lt (size, orig_size))))
    size = NULL_TREE;

  if (align == orig_align)
    align = 0;

  if (align == 0 && !size)
    return type;

  /* If requested, complete the original type and give it a name.  */
  if (is_user_type)
    create_type_decl (get_entity_name (gnat_entity), type,
		      !Comes_From_Source (gnat_entity),
		      !(TYPE_NAME (type)
			&& TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
			&& DECL_IGNORED_P (TYPE_NAME (type))),
		      gnat_entity);

  /* We used to modify the record in place in some cases, but that could
     generate incorrect debugging information.  So make a new record
     type and name.  */
  record = make_node (RECORD_TYPE);
  TYPE_PADDING_P (record) = 1;

  if (Present (gnat_entity))
    TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");

  TYPE_ALIGN (record) = align ? align : orig_align;
  TYPE_SIZE (record) = size ? size : orig_size;
  TYPE_SIZE_UNIT (record)
    = convert (sizetype,
	       size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
			   bitsize_unit_node));

  /* If we are changing the alignment and the input type is a record with
     BLKmode and a small constant size, try to make a form that has an
     integral mode.  This might allow the padding record to also have an
     integral mode, which will be much more efficient.  There is no point
     in doing so if a size is specified unless it is also a small constant
     size and it is incorrect to do so if we cannot guarantee that the mode
     will be naturally aligned since the field must always be addressable.

     ??? This might not always be a win when done for a stand-alone object:
     since the nominal and the effective type of the object will now have
     different modes, a VIEW_CONVERT_EXPR will be required for converting
     between them and it might be hard to overcome afterwards, including
     at the RTL level when the stand-alone object is accessed as a whole.  */
  if (align != 0
      && RECORD_OR_UNION_TYPE_P (type)
      && TYPE_MODE (type) == BLKmode
      && !TYPE_BY_REFERENCE_P (type)
      && TREE_CODE (orig_size) == INTEGER_CST
      && !TREE_OVERFLOW (orig_size)
      && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
      && (!size
	  || (TREE_CODE (size) == INTEGER_CST
	      && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
    {
      tree packable_type = make_packable_type (type, true);
      /* Only adopt the packable form if it actually got an integral mode
	 and the requested alignment keeps it naturally aligned.  */
      if (TYPE_MODE (packable_type) != BLKmode
	  && align >= TYPE_ALIGN (packable_type))
	type = packable_type;
    }

  /* Now create the field with the original size.  */
  field = create_field_decl (get_identifier ("F"), type, record, orig_size,
			     bitsize_zero_node, 0, 1);
  DECL_INTERNAL_P (field) = 1;

  /* Do not emit debug info until after the auxiliary record is built.  */
  finish_record_type (record, field, 1, false);

  /* Set the RM size if requested.  */
  if (set_rm_size)
    {
      tree canonical_pad_type;

      SET_TYPE_ADA_SIZE (record, size ? size : orig_size);

      /* If the padded type is complete and has constant size, we canonicalize
	 it by means of the hash table.  This is consistent with the language
	 semantics and ensures that gigi and the middle-end have a common view
	 of these padded types.  */
      if (TREE_CONSTANT (TYPE_SIZE (record))
	  && (canonical_pad_type = lookup_and_insert_pad_type (record)))
	{
	  /* Reuse the canonical copy; skip debug-info generation, it was
	     already done for the canonical type.  */
	  record = canonical_pad_type;
	  goto built;
	}
    }

  /* Unless debugging information isn't being written for the input type,
     write a record that shows what we are a subtype of and also make a
     variable that indicates our size, if still variable.  */
  if (TREE_CODE (orig_size) != INTEGER_CST
      && TYPE_NAME (record)
      && TYPE_NAME (type)
      && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	   && DECL_IGNORED_P (TYPE_NAME (type))))
    {
      /* The "XVS" parallel record tells the debugger which type this
	 padding record is a subtype of.  */
      tree marker = make_node (RECORD_TYPE);
      tree name = TYPE_IDENTIFIER (record);
      tree orig_name = TYPE_IDENTIFIER (type);

      TYPE_NAME (marker) = concat_name (name, "XVS");
      finish_record_type (marker,
			  create_field_decl (orig_name,
					     build_reference_type (type),
					     marker, NULL_TREE, NULL_TREE,
					     0, 0),
			  0, true);

      add_parallel_type (record, marker);

      /* For a variable size, also emit an "XVZ" variable holding the size
	 in bytes, for the debugger's benefit.  */
      if (definition && size && TREE_CODE (size) != INTEGER_CST)
	TYPE_SIZE_UNIT (marker)
	  = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
			     TYPE_SIZE_UNIT (record), false, false, false,
			     false, NULL, gnat_entity);
    }

  rest_of_record_type_compilation (record);

built:
  /* If the size was widened explicitly, maybe give a warning.  Take the
     original size as the maximum size of the input if there was an
     unconstrained record involved and round it up to the specified alignment,
     if one was specified.  But don't do it if we are just annotating types
     and the type is tagged, since tagged types aren't fully laid out in this
     mode.  */
  if (!size
      || TREE_CODE (size) == COND_EXPR
      || TREE_CODE (size) == MAX_EXPR
      || No (gnat_entity)
      || (type_annotate_only && Is_Tagged_Type (Etype (gnat_entity))))
    return record;

  if (CONTAINS_PLACEHOLDER_P (orig_size))
    orig_size = max_size (orig_size, true);

  if (align)
    orig_size = round_up (orig_size, align);

  /* Warn only when SIZE is provably strictly larger than ORIG_SIZE.  */
  if (!operand_equal_p (size, orig_size, 0)
      && !(TREE_CODE (size) == INTEGER_CST
	   && TREE_CODE (orig_size) == INTEGER_CST
	   && (TREE_OVERFLOW (size)
	       || TREE_OVERFLOW (orig_size)
	       || tree_int_cst_lt (size, orig_size))))
    {
      Node_Id gnat_error_node = Empty;

      /* For a packed array, post the message on the original array type.  */
      if (Is_Packed_Array_Impl_Type (gnat_entity))
	gnat_entity = Original_Array_Type (gnat_entity);

      if ((Ekind (gnat_entity) == E_Component
	   || Ekind (gnat_entity) == E_Discriminant)
	  && Present (Component_Clause (gnat_entity)))
	gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
      else if (Present (Size_Clause (gnat_entity)))
	gnat_error_node = Expression (Size_Clause (gnat_entity));

      /* Generate message only for entities that come from source, since
	 if we have an entity created by expansion, the message will be
	 generated for some other corresponding source entity.  */
      if (Comes_From_Source (gnat_entity))
	{
	  if (Present (gnat_error_node))
	    post_error_ne_tree ("{^ }bits of & unused?",
				gnat_error_node, gnat_entity,
				size_diffop (size, orig_size));
	  else if (is_component_type)
	    post_error_ne_tree ("component of& padded{ by ^ bits}?",
				gnat_entity, gnat_entity,
				size_diffop (size, orig_size));
	}
    }

  return record;
}
1453 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1454 If this is a multi-dimensional array type, do this recursively.
1456 OP may be
1457 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1458 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1459 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
void
relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
{
  /* Remove any padding from GNU_OLD_TYPE.  It doesn't matter in the case
     of a one-dimensional array, since the padding has the same alias set
     as the field type, but if it's a multi-dimensional array, we need to
     see the inner types.  */
  while (TREE_CODE (gnu_old_type) == RECORD_TYPE
	 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
	     || TYPE_PADDING_P (gnu_old_type)))
    gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));

  /* Unconstrained array types are deemed incomplete and would thus be given
     alias set 0.  Retrieve the underlying array type.  */
  if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_old_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
  if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_new_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));

  /* For multi-dimensional arrays, relate the inner dimensions as well.  */
  if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
      && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
    relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);

  switch (op)
    {
    case ALIAS_SET_COPY:
      /* The alias set shouldn't be copied between array types with different
	 aliasing settings because this can break the aliasing relationship
	 between the array type and its element type.  */
#ifndef ENABLE_CHECKING
      if (flag_strict_aliasing)
#endif
	gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
		      && TREE_CODE (gnu_old_type) == ARRAY_TYPE
		      && TYPE_NONALIASED_COMPONENT (gnu_new_type)
			 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));

      TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
      break;

    case ALIAS_SET_SUBSET:
    case ALIAS_SET_SUPERSET:
      {
	alias_set_type old_set = get_alias_set (gnu_old_type);
	alias_set_type new_set = get_alias_set (gnu_new_type);

	/* Do nothing if the alias sets conflict.  This ensures that we
	   never call record_alias_subset several times for the same pair
	   or at all for alias set 0.  */
	if (!alias_sets_conflict_p (old_set, new_set))
	  {
	    if (op == ALIAS_SET_SUBSET)
	      record_alias_subset (old_set, new_set);
	    else
	      record_alias_subset (new_set, old_set);
	  }
      }
      break;

    default:
      gcc_unreachable ();
    }

  record_component_aliases (gnu_new_type);
}
1530 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1531 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
1533 void
1534 record_builtin_type (const char *name, tree type, bool artificial_p)
1536 tree type_decl = build_decl (input_location,
1537 TYPE_DECL, get_identifier (name), type);
1538 DECL_ARTIFICIAL (type_decl) = artificial_p;
1539 TYPE_ARTIFICIAL (type) = artificial_p;
1540 gnat_pushdecl (type_decl, Empty);
1542 if (debug_hooks->type_decl)
1543 debug_hooks->type_decl (type_decl, false);
1546 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1547 finish constructing the record type as a fat pointer type. */
void
finish_fat_pointer_type (tree record_type, tree field_list)
{
  /* Make sure we can put it into a register.  */
  if (STRICT_ALIGNMENT)
    TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);

  /* Show what it really is.  */
  TYPE_FAT_POINTER_P (record_type) = 1;

  /* Do not emit debug info for it since the types of its fields may still be
     incomplete at this point.  */
  finish_record_type (record_type, field_list, 0, false);

  /* Force type_contains_placeholder_p to return true on it.  Although the
     PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
     type but the representation of the unconstrained array.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
}
1569 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1570 finish constructing the record or union type. If REP_LEVEL is zero, this
1571 record has no representation clause and so will be entirely laid out here.
1572 If REP_LEVEL is one, this record has a representation clause and has been
1573 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1574 this record is derived from a parent record and thus inherits its layout;
1575 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1576 we need to write debug information about this type. */
void
finish_record_type (tree record_type, tree field_list, int rep_level,
		    bool debug_info_p)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_IDENTIFIER (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  /* Remember which layout properties were already set on entry, since a
     rep'ed record must not have them recomputed from scratch.  */
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  bool had_align = TYPE_ALIGN (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));

      if (!had_size_unit)
	TYPE_SIZE_UNIT (record_type) = size_zero_node;

      if (!had_size)
	TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
	 out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
	code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
	 case where there is a rep clause but all fields have errors and
	 no longer have a position.  */
      TYPE_SIZE (record_type) = 0;

      /* Ensure we use the traditional GCC layout for bitfields when we need
	 to pack the record type or have a representation clause.  The other
	 possible layout (Microsoft C compiler), if available, would prevent
	 efficient packing in almost all cases.  */
#ifdef TARGET_MS_BITFIELD_LAYOUT
      if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
	type_attributes (&record_type,
			 tree_cons (get_identifier ("gcc_struct"),
				    NULL_TREE, NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
#endif

      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      /* Use the field type's RM size, when meaningful, as the field's
	 contribution to the record's Ada size.  */
      if (RECORD_OR_UNION_TYPE_P (type)
	  && !TYPE_FAT_POINTER_P (type)
	  && !TYPE_CONTAINS_TEMPLATE_P (type)
	  && TYPE_ADA_SIZE (type))
	this_ada_size = TYPE_ADA_SIZE (type);
      else
	this_ada_size = this_size;

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
	{
	  unsigned int align = TYPE_ALIGN (type);

	  /* In the general case, type alignment is required.  */
	  if (value_factor_p (pos, align))
	    {
	      /* The enclosing record type must be sufficiently aligned.
		 Otherwise, if no alignment was specified for it and it
		 has been laid out already, bump its alignment to the
		 desired one if this is compatible with its size.  */
	      if (TYPE_ALIGN (record_type) >= align)
		{
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	      else if (!had_align
		       && rep_level == 0
		       && value_factor_p (TYPE_SIZE (record_type), align))
		{
		  TYPE_ALIGN (record_type) = align;
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	    }

	  /* In the non-strict alignment case, only byte alignment is.  */
	  if (!STRICT_ALIGNMENT
	      && DECL_BIT_FIELD (field)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    DECL_BIT_FIELD (field) = 0;
	}

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
	 field is technically not addressable.  Except that it can actually
	 be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
	  && !(DECL_MODE (field) == BLKmode
	       && value_factor_p (pos, BITS_PER_UNIT)))
	DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
	 a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
	TYPE_ALIGN (record_type)
	  = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

      switch (code)
	{
	case UNION_TYPE:
	  /* A union is as large as its largest member.  */
	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
	  size = size_binop (MAX_EXPR, size, this_size);
	  break;

	case QUAL_UNION_TYPE:
	  /* The size depends on which variant's qualifier holds; build a
	     conditional expression selecting among them.  */
	  ada_size
	    = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			   this_ada_size, ada_size);
	  size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			      this_size, size);
	  break;

	case RECORD_TYPE:
	  /* Since we know here that all fields are sorted in order of
	     increasing bit position, the size of the record is one
	     higher than the ending bit of the last field processed
	     unless we have a rep clause, since in that case we might
	     have a field outside a QUAL_UNION_TYPE that has a higher ending
	     position.  So use a MAX in that case.  Also, if this field is a
	     QUAL_UNION_TYPE, we need to take into account the previous size in
	     the case of empty variants.  */
	  ada_size
	    = merge_sizes (ada_size, pos, this_ada_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  size
	    = merge_sizes (size, pos, this_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Restore the original field order reversed above.  */
  if (code == QUAL_UNION_TYPE)
    nreverse (field_list);

  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
	 than what was specified in it, if any.  */
      if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
	size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_FAT_POINTER_P (record_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (record_type))
	SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
	{
	  tree size_unit = had_size_unit
			   ? TYPE_SIZE_UNIT (record_type)
			   : convert (sizetype,
				      size_binop (CEIL_DIV_EXPR, size,
						  bitsize_unit_node));
	  unsigned int align = TYPE_ALIGN (record_type);

	  TYPE_SIZE (record_type) = variable_size (round_up (size, align));
	  TYPE_SIZE_UNIT (record_type)
	    = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

	  compute_record_mode (record_type);
	}
    }

  if (debug_info_p)
    rest_of_record_type_compilation (record_type);
}
1785 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
1786 PARRALEL_TYPE has no context and its computation is not deferred yet, also
1787 propagate TYPE's context to PARALLEL_TYPE's or defer its propagation to the
1788 moment TYPE will get a context. */
1790 void
1791 add_parallel_type (tree type, tree parallel_type)
1793 tree decl = TYPE_STUB_DECL (type);
1795 while (DECL_PARALLEL_TYPE (decl))
1796 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1798 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1800 /* If PARALLEL_TYPE already has a context, we are done. */
1801 if (TYPE_CONTEXT (parallel_type) != NULL_TREE)
1802 return;
1804 /* Otherwise, try to get one from TYPE's context. */
1805 if (TYPE_CONTEXT (type) != NULL_TREE)
1806 /* TYPE already has a context, so simply propagate it to PARALLEL_TYPE. */
1807 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
1809 /* ... otherwise TYPE has not context yet. We know it will thanks to
1810 gnat_pushdecl, and then its context will be propagated to PARALLEL_TYPE.
1811 So we have nothing to do in this case. */
1814 /* Return true if TYPE has a parallel type. */
1816 static bool
1817 has_parallel_type (tree type)
1819 tree decl = TYPE_STUB_DECL (type);
1821 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
/* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
   associated with it.  It need not be invoked directly in most cases since
   finish_record_type takes care of doing so, but this can be necessary if
   a parallel type is to be attached to the record type.  */

void
rest_of_record_type_compilation (tree record_type)
{
  bool var_size = false;
  tree field;

  /* If this is a padded type, the bulk of the debug info has already been
     generated for the field's type.  */
  if (TYPE_IS_PADDING_P (record_type))
    return;

  /* If the type already has a parallel type (XVS type), then we're done.  */
  if (has_parallel_type (record_type))
    return;

  for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
	 whether or not the record does.  For example, if we have a union,
	 it may be that all fields, rounded up to the alignment, have the
	 same size, in which case we'll use that size.  But the debug
	 output routines (except Dwarf2) won't be able to output the fields,
	 so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
	  /* If a field has a non-constant qualifier, the record will have
	     variable size too.  */
	  || (TREE_CODE (record_type) == QUAL_UNION_TYPE
	      && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
	{
	  var_size = true;
	  break;
	}
    }

  /* If this record type is of variable size, make a parallel record type that
     will tell the debugger how the former is laid out (see exp_dbug.ads).  */
  if (var_size)
    {
      /* A variable-sized QUAL_UNION_TYPE is encoded as an XVU union; any
	 other variable-sized record is encoded as an XVE record.  */
      tree new_record_type
	= make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
		     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
      tree last_pos = bitsize_zero_node;
      tree old_field, prev_old_field = NULL_TREE;

      new_name
	= concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
		       ? "XVU" : "XVE");
      TYPE_NAME (new_record_type) = new_name;
      TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
      TYPE_STUB_DECL (new_record_type)
	= create_type_stub_decl (new_name, new_record_type);
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
	= DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
	= size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      /* Now scan all the fields, replacing each field with a new field
	 corresponding to the new encoding.  */
      for (old_field = TYPE_FIELDS (record_type); old_field;
	   old_field = DECL_CHAIN (old_field))
	{
	  tree field_type = TREE_TYPE (old_field);
	  tree field_name = DECL_NAME (old_field);
	  tree curpos = bit_position (old_field);
	  tree pos, new_field;
	  bool var = false;
	  unsigned int align = 0;

	  /* We're going to do some pattern matching below so remove as many
	     conversions as possible.  */
	  curpos = remove_conversions (curpos, true);

	  /* See how the position was modified from the last position.

	     There are two basic cases we support: a value was added
	     to the last position or the last position was rounded to
	     a boundary and then something was added.  Check for the
	     first case first.  If not, see if there is any evidence
	     of rounding.  If so, round the last position and retry.

	     If this is a union, the position can be taken as zero.  */
	  if (TREE_CODE (new_record_type) == UNION_TYPE)
	    pos = bitsize_zero_node;
	  else
	    pos = compute_related_constant (curpos, last_pos);

	  /* curpos is OFFSET * ALIGN: the field is aligned on ALIGN (after
	     accounting for factors of OFFSET) and placed at the next
	     boundary past last_pos.  */
	  if (!pos
	      && TREE_CODE (curpos) == MULT_EXPR
	      && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
	    {
	      tree offset = TREE_OPERAND (curpos, 0);
	      align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align = scale_by_factor_of (offset, align);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  /* curpos is OFFSET * FACTOR + ADDEND: alignment is limited by the
	     lowest set bit of ADDEND as well.  */
	  else if (!pos
		   && TREE_CODE (curpos) == PLUS_EXPR
		   && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
		   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
		   && tree_fits_uhwi_p
		      (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
	    {
	      tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
	      unsigned HOST_WIDE_INT addend
		= tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align
		= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
	      align = scale_by_factor_of (offset, align);
	      /* ADDEND & -ADDEND isolates the lowest set bit of ADDEND.  */
	      align = MIN (align, addend & -addend);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  else if (potential_alignment_gap (prev_old_field, old_field, pos))
	    {
	      align = TYPE_ALIGN (field_type);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }

	  /* If we can't compute a position, set it to zero.

	     ??? We really should abort here, but it's too much work
	     to get this correct for all cases.  */
	  if (!pos)
	    pos = bitsize_zero_node;

	  /* See if this type is variable-sized and make a pointer type
	     and indicate the indirection if so.  Beware that the debug
	     back-end may adjust the position computed above according
	     to the alignment of the field type, i.e. the pointer type
	     in this case, if we don't preventively counter that.  */
	  if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
	    {
	      field_type = build_pointer_type (field_type);
	      if (align != 0 && TYPE_ALIGN (field_type) > align)
		{
		  field_type = copy_node (field_type);
		  TYPE_ALIGN (field_type) = align;
		}
	      var = true;
	    }

	  /* Make a new field name, if necessary.  The XVL / XVAnn / XVLnn
	     suffixes encode indirection and alignment for the debugger
	     (see exp_dbug.ads).  */
	  if (var || align != 0)
	    {
	      char suffix[16];

	      if (align != 0)
		sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
			 align / BITS_PER_UNIT);
	      else
		strcpy (suffix, "XVL");

	      field_name = concat_name (field_name, suffix);
	    }

	  new_field
	    = create_field_decl (field_name, field_type, new_record_type,
				 DECL_SIZE (old_field), pos, 0, 0);
	  /* Fields are prepended here and the whole list reversed after
	     the loop, restoring source order.  */
	  DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
	  TYPE_FIELDS (new_record_type) = new_field;

	  /* If old_field is a QUAL_UNION_TYPE, take its size as being
	     zero.  The only time it's not the last field of the record
	     is when there are other components at fixed positions after
	     it (meaning there was a rep clause for every field) and we
	     want to be able to encode them.  */
	  last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
				 (TREE_CODE (TREE_TYPE (old_field))
				  == QUAL_UNION_TYPE)
				 ? bitsize_zero_node
				 : DECL_SIZE (old_field));
	  prev_old_field = old_field;
	}

      TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));

      add_parallel_type (record_type, new_record_type);
    }
}
/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is true if this
   represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
   replace a value of zero with the old size.  If HAS_REP is true, we take the
   MAX of the end position of this field with LAST_SIZE.  In all other cases,
   we use FIRST_BIT plus SIZE.  Return an expression for the size.  */

static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool special,
	     bool has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new_size;

  if (!special || TREE_CODE (size) != COND_EXPR)
    {
      /* Ordinary case: the field ends at FIRST_BIT + SIZE; with a rep
	 clause, the record size is the MAX of that and the previous size
	 since fields may not be in increasing position order.  */
      new_size = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
	new_size = size_binop (MAX_EXPR, last_size, new_size);
    }
  else
    /* QUAL_UNION_TYPE variant: SIZE is a COND_EXPR, so recurse into both
       arms.  An arm of literal zero denotes an empty variant and keeps the
       previous size instead.  */
    new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
			    integer_zerop (TREE_OPERAND (size, 1))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 1),
						       1, has_rep),
			    integer_zerop (TREE_OPERAND (size, 2))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 2),
						       1, has_rep));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
    new_size = TREE_OPERAND (new_size, 0);

  return new_size;
}
2054 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2055 related by the addition of a constant. Return that constant if so. */
2057 static tree
2058 compute_related_constant (tree op0, tree op1)
2060 tree op0_var, op1_var;
2061 tree op0_con = split_plus (op0, &op0_var);
2062 tree op1_con = split_plus (op1, &op1_var);
2063 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
2065 if (operand_equal_p (op0_var, op1_var, 0))
2066 return result;
2067 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
2068 return result;
2069 else
2070 return 0;
/* Utility function of above to split a tree OP which may be a sum, into a
   constant part, which is returned, and a variable part, which is stored
   in *PVAR.  *PVAR may be bitsize_zero_node.  All operations must be of
   bitsizetype.  */

static tree
split_plus (tree in, tree *pvar)
{
  /* Strip conversions in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery.  We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  in = remove_conversions (in, false);

  /* Default: the whole expression is the variable part and the constant
     part is zero (returned by the final else below).  The cases that
     follow override *PVAR when they can do better.  */
  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      /* Pure constant: no variable part at all.  */
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      /* Recurse on both operands and recombine the two constant parts and
	 the two variable parts with the same PLUS/MINUS operation.  */
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      /* If neither operand yielded a constant part, keep *PVAR as the whole
	 expression (set above) and report a zero constant part.  */
      if (lhs_var == TREE_OPERAND (in, 0)
	  && rhs_var == TREE_OPERAND (in, 1))
	return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}
2111 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
2112 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
2113 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
2114 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
2115 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
2116 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
2117 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
2118 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
2119 invisible reference. */
2121 tree
2122 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
2123 bool return_unconstrained_p, bool return_by_direct_ref_p,
2124 bool return_by_invisi_ref_p)
2126 /* A list of the data type nodes of the subprogram formal parameters.
2127 This list is generated by traversing the input list of PARM_DECL
2128 nodes. */
2129 vec<tree, va_gc> *param_type_list = NULL;
2130 tree t, type;
2132 for (t = param_decl_list; t; t = DECL_CHAIN (t))
2133 vec_safe_push (param_type_list, TREE_TYPE (t));
2135 type = build_function_type_vec (return_type, param_type_list);
2137 /* TYPE may have been shared since GCC hashes types. If it has a different
2138 CICO_LIST, make a copy. Likewise for the various flags. */
2139 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
2140 return_by_direct_ref_p, return_by_invisi_ref_p))
2142 type = copy_type (type);
2143 TYPE_CI_CO_LIST (type) = cico_list;
2144 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
2145 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
2146 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
2149 return type;
2152 /* Return a copy of TYPE but safe to modify in any way. */
2154 tree
2155 copy_type (tree type)
2157 tree new_type = copy_node (type);
2159 /* Unshare the language-specific data. */
2160 if (TYPE_LANG_SPECIFIC (type))
2162 TYPE_LANG_SPECIFIC (new_type) = NULL;
2163 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2166 /* And the contents of the language-specific slot if needed. */
2167 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2168 && TYPE_RM_VALUES (type))
2170 TYPE_RM_VALUES (new_type) = NULL_TREE;
2171 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2172 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2173 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2176 /* copy_node clears this field instead of copying it, because it is
2177 aliased with TREE_CHAIN. */
2178 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
2180 TYPE_POINTER_TO (new_type) = 0;
2181 TYPE_REFERENCE_TO (new_type) = 0;
2182 TYPE_MAIN_VARIANT (new_type) = new_type;
2183 TYPE_NEXT_VARIANT (new_type) = 0;
2185 return new_type;
2188 /* Return a subtype of sizetype with range MIN to MAX and whose
2189 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2190 of the associated TYPE_DECL. */
2192 tree
2193 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2195 /* First build a type for the desired range. */
2196 tree type = build_nonshared_range_type (sizetype, min, max);
2198 /* Then set the index type. */
2199 SET_TYPE_INDEX_TYPE (type, index);
2200 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2202 return type;
2205 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2206 sizetype is used. */
2208 tree
2209 create_range_type (tree type, tree min, tree max)
2211 tree range_type;
2213 if (type == NULL_TREE)
2214 type = sizetype;
2216 /* First build a type with the base range. */
2217 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2218 TYPE_MAX_VALUE (type));
2220 /* Then set the actual range. */
2221 SET_TYPE_RM_MIN_VALUE (range_type, min);
2222 SET_TYPE_RM_MAX_VALUE (range_type, max);
2224 return range_type;
2227 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2228 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2229 its data type. */
2231 tree
2232 create_type_stub_decl (tree type_name, tree type)
2234 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2235 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2236 emitted in DWARF. */
2237 tree type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
2238 DECL_ARTIFICIAL (type_decl) = 1;
2239 TYPE_ARTIFICIAL (type) = 1;
2240 return type_decl;
2243 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
2244 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
2245 is a declaration that was generated by the compiler. DEBUG_INFO_P is
2246 true if we need to write debug information about this type. GNAT_NODE
2247 is used for the position of the decl. */
2249 tree
2250 create_type_decl (tree type_name, tree type, bool artificial_p,
2251 bool debug_info_p, Node_Id gnat_node)
2253 enum tree_code code = TREE_CODE (type);
2254 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2255 tree type_decl;
2257 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2258 gcc_assert (!TYPE_IS_DUMMY_P (type));
2260 /* If the type hasn't been named yet, we're naming it; preserve an existing
2261 TYPE_STUB_DECL that has been attached to it for some purpose. */
2262 if (!named && TYPE_STUB_DECL (type))
2264 type_decl = TYPE_STUB_DECL (type);
2265 DECL_NAME (type_decl) = type_name;
2267 else
2268 type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
2270 DECL_ARTIFICIAL (type_decl) = artificial_p;
2271 TYPE_ARTIFICIAL (type) = artificial_p;
2273 /* Add this decl to the current binding level. */
2274 gnat_pushdecl (type_decl, gnat_node);
2276 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
2277 This causes the name to be also viewed as a "tag" by the debug
2278 back-end, with the advantage that no DW_TAG_typedef is emitted
2279 for artificial "tagged" types in DWARF. */
2280 if (!named)
2281 TYPE_STUB_DECL (type) = type_decl;
2283 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2284 back-end doesn't support, and for others if we don't need to. */
2285 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2286 DECL_IGNORED_P (type_decl) = 1;
2288 return type_decl;
/* Return a VAR_DECL or CONST_DECL node.

   VAR_NAME gives the name of the variable.  ASM_NAME is its assembler name
   (if provided).  TYPE is its data type (a GCC ..._TYPE node).  VAR_INIT is
   the GCC tree for an optional initial expression; NULL_TREE if none.

   CONST_FLAG is true if this variable is constant, in which case we might
   return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.

   PUBLIC_FLAG is true if this is for a reference to a public entity or for a
   definition to be made visible outside of the current compilation unit, for
   instance variable definitions in a package specification.

   EXTERN_FLAG is true when processing an external variable declaration (as
   opposed to a definition: no storage is to be allocated for the variable).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.

   ATTR_LIST is the list of attributes to be attached to the variable.

   GNAT_NODE is used for the position of the decl.  */

tree
create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
		   bool const_flag, bool public_flag, bool extern_flag,
		   bool static_flag, bool const_decl_allowed_p,
		   struct attrib *attr_list, Node_Id gnat_node)
{
  /* Whether the object has static storage duration, either explicitly or by
     virtue of being declared at the global level.  */
  const bool static_storage = static_flag || global_bindings_p ();

  /* Whether the initializer is constant: for an external object or an object
     with static storage duration, we check that the initializer is a valid
     constant expression for initializing a static variable; otherwise, we
     only check that it is constant.  */
  const bool init_const
    = (var_init
       && gnat_types_compatible_p (type, TREE_TYPE (var_init))
       && (extern_flag || static_storage
	   ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
	     != NULL_TREE
	   : TREE_CONSTANT (var_init)));

  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
     case the initializer may be used in lieu of the DECL node (as done in
     Identifier_to_gnu).  This is useful to prevent the need of elaboration
     code when an identifier for which such a DECL is made is in turn used
     as an initializer.  We used to rely on CONST_DECL vs VAR_DECL for this,
     but extra constraints apply to this choice (see below) and they are not
     relevant to the distinction we wish to make.  */
  const bool constant_p = const_flag && init_const;

  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
     and may be used for scalars in general but not for aggregates.  */
  tree var_decl
    = build_decl (input_location,
		  (constant_p && const_decl_allowed_p
		   && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
		  var_name, type);

  /* If this is external, throw away any initializations (they will be done
     elsewhere) unless this is a constant for which we would like to remain
     able to get the initializer.  If we are defining a global here, leave a
     constant initialization and save any variable elaborations for the
     elaboration routine.  If we are just annotating types, throw away the
     initialization if it isn't a constant.  */
  if ((extern_flag && !constant_p)
      || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
    var_init = NULL_TREE;

  /* At the global level, a non-constant initializer generates elaboration
     statements.  Check that such statements are allowed, that is to say,
     not violating a No_Elaboration_Code restriction.  */
  if (var_init && !init_const && global_bindings_p ())
    Check_Elaboration_Code_Allowed (gnat_node);

  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_CONSTANT (var_decl) = constant_p;

  /* We need to allocate static storage for an object with static storage
     duration if it isn't external.  */
  TREE_STATIC (var_decl) = !extern_flag && static_storage;

  /* The object is public if it is external or if it is declared public
     and has static storage duration.  */
  TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && TREE_PUBLIC (var_decl)
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;

  /* For an external constant whose initializer is not absolute, do not emit
     debug info.  In DWARF this would mean a global relocation in a read-only
     section which runs afoul of the PE-COFF run-time relocation mechanism.  */
  if (extern_flag
      && constant_p
      && var_init
      && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
	 != null_pointer_node)
    DECL_IGNORED_P (var_decl) = 1;

  /* A volatile object is also considered to have side effects and must be
     addressable, so that no copy is ever substituted for it.  */
  if (TYPE_VOLATILE (type))
    TREE_SIDE_EFFECTS (var_decl) = TREE_THIS_VOLATILE (var_decl) = 1;

  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  /* ??? Some attributes cannot be applied to CONST_DECLs.  */
  if (TREE_CODE (var_decl) == VAR_DECL)
    process_attributes (&var_decl, &attr_list, true, gnat_node);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  if (TREE_CODE (var_decl) == VAR_DECL)
    {
      if (asm_name)
	SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

      /* At the global level, emit assembler output now.  */
      if (global_bindings_p ())
	rest_of_decl_compilation (var_decl, true, 0);
    }

  return var_decl;
}
2425 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2427 static bool
2428 aggregate_type_contains_array_p (tree type)
2430 switch (TREE_CODE (type))
2432 case RECORD_TYPE:
2433 case UNION_TYPE:
2434 case QUAL_UNION_TYPE:
2436 tree field;
2437 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2438 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2439 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2440 return true;
2441 return false;
2444 case ARRAY_TYPE:
2445 return true;
2447 default:
2448 gcc_unreachable ();
/* Return a FIELD_DECL node.  FIELD_NAME is the field's name, FIELD_TYPE is
   its type and RECORD_TYPE is the type of the enclosing record.  If SIZE is
   nonzero, it is the specified size of the field.  If POS is nonzero, it is
   the bit position.  PACKED is 1 if the enclosing record is packed, -1 if it
   has Component_Alignment of Storage_Unit.  If ADDRESSABLE is nonzero, it
   means we are allowed to take the address of the field; if it is negative,
   we should not make a bitfield, which is used by make_aligning_type.  */

tree
create_field_decl (tree field_name, tree field_type, tree record_type,
		   tree size, tree pos, int packed, int addressable)
{
  tree field_decl = build_decl (input_location,
				FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
     Likewise for an aggregate without specified position that contains an
     array, because in this case slices of variable length of this array
     must be handled by GCC and variable-sized objects need to be aligned
     to at least a byte boundary.  */
  if (packed && (TYPE_MODE (field_type) == BLKmode
		 || (!pos
		     && AGGREGATE_TYPE_P (field_type)
		     && aggregate_type_contains_array_p (field_type))))
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);
      /* BLKmode fields must be byte-aligned, so round the packed size up
	 to a whole number of bytes.  */
      if (TYPE_MODE (field_type) == BLKmode)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* If we may, according to ADDRESSABLE, make a bitfield if a size is
     specified for two reasons: first if the size differs from the natural
     size.  Second, if the alignment is insufficient.  There are a number of
     ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     because no such entity requiring bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (addressable >= 0
      && size
      && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
	  || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (!packed && !pos)
	{
	  /* Without packing or an explicit position, align on the record's
	     alignment when it is smaller than the field type's, otherwise
	     on the field type's.  */
	  if (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
	    DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
	  else
	    DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
	}
    }

  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;

  /* Bump the alignment if need be, either for bitfield/packing purposes or
     to satisfy the type requirements if no such consideration applies.  When
     we get the alignment from the type, indicate if this is from an explicit
     user request, which prevents stor-layout from lowering it later on.  */
  {
    unsigned int bit_align
      = (DECL_BIT_FIELD (field_decl) ? 1
	 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);

    if (bit_align > DECL_ALIGN (field_decl))
      DECL_ALIGN (field_decl) = bit_align;
    else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
      {
	DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
	DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
      }
  }

  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (tree_fits_uhwi_p (pos))
	/* POS & -POS isolates the lowest set bit of POS.  */
	known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);
    }

  /* In addition to what our caller says, claim the field is addressable if we
     know that its type is not suitable.

     The field may also be "technically" nonaddressable, meaning that even if
     we attempt to take the field's address we will actually get the address
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
     value we have at this point is not accurate enough, so we don't account
     for this here and let finish_record_type decide.  */
  if (!addressable && !type_for_nonaliased_component_p (field_type))
    addressable = 1;

  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}
2595 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
2596 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
2597 (either an In parameter or an address of a pass-by-ref parameter). */
2599 tree
2600 create_param_decl (tree param_name, tree param_type, bool readonly)
2602 tree param_decl = build_decl (input_location,
2603 PARM_DECL, param_name, param_type);
2605 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2606 can lead to various ABI violations. */
2607 if (targetm.calls.promote_prototypes (NULL_TREE)
2608 && INTEGRAL_TYPE_P (param_type)
2609 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
2611 /* We have to be careful about biased types here. Make a subtype
2612 of integer_type_node with the proper biasing. */
2613 if (TREE_CODE (param_type) == INTEGER_TYPE
2614 && TYPE_BIASED_REPRESENTATION_P (param_type))
2616 tree subtype
2617 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2618 TREE_TYPE (subtype) = integer_type_node;
2619 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2620 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
2621 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
2622 param_type = subtype;
2624 else
2625 param_type = integer_type_node;
2628 DECL_ARG_TYPE (param_decl) = param_type;
2629 TREE_READONLY (param_decl) = readonly;
2630 return param_decl;
/* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
   a TYPE.  If IN_PLACE is true, the tree pointed to by NODE should not be
   changed.  GNAT_NODE is used for the position of error messages.  */

void
process_attributes (tree *node, struct attrib **attr_list, bool in_place,
		    Node_Id gnat_node)
{
  struct attrib *attr;

  for (attr = *attr_list; attr; attr = attr->next)
    switch (attr->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
	/* Point error messages at the source location of GNAT_NODE.  */
	Sloc_to_locus (Sloc (gnat_node), &input_location);
	if (TYPE_P (*node))
	  type_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
			   in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
	else
	  decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
			   in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
	break;

      case ATTR_LINK_ALIAS:
	/* Aliases only make sense for definitions, not external decls.  */
	if (!DECL_EXTERNAL (*node))
	  {
	    TREE_STATIC (*node) = 1;
	    assemble_alias (*node, attr->name);
	  }
	break;

      case ATTR_WEAK_EXTERNAL:
	if (SUPPORTS_WEAK)
	  declare_weak (*node);
	else
	  post_error ("?weak declarations not supported on this target",
		      attr->error_point);
	break;

      case ATTR_LINK_SECTION:
	if (targetm_common.have_named_sections)
	  {
	    set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
	    /* A decl placed in an explicit section must not be COMMON.  */
	    DECL_COMMON (*node) = 0;
	  }
	else
	  post_error ("?section attributes are not supported for this target",
		      attr->error_point);
	break;

      case ATTR_LINK_CONSTRUCTOR:
	DECL_STATIC_CONSTRUCTOR (*node) = 1;
	TREE_USED (*node) = 1;
	break;

      case ATTR_LINK_DESTRUCTOR:
	DECL_STATIC_DESTRUCTOR (*node) = 1;
	TREE_USED (*node) = 1;
	break;

      case ATTR_THREAD_LOCAL_STORAGE:
	set_decl_tls_model (*node, decl_default_tls_model (*node));
	/* A thread-local decl must not be COMMON.  */
	DECL_COMMON (*node) = 0;
	break;
      }

  /* The list has been consumed; clear it for the caller.  */
  *attr_list = NULL;
}
2702 /* Record DECL as a global renaming pointer. */
2704 void
2705 record_global_renaming_pointer (tree decl)
2707 gcc_assert (!DECL_LOOP_PARM_P (decl) && DECL_RENAMED_OBJECT (decl));
2708 vec_safe_push (global_renaming_pointers, decl);
2711 /* Invalidate the global renaming pointers that are not constant, lest their
2712 renamed object contains SAVE_EXPRs tied to an elaboration routine. Note
2713 that we should not blindly invalidate everything here because of the need
2714 to propagate constant values through renaming. */
2716 void
2717 invalidate_global_renaming_pointers (void)
2719 unsigned int i;
2720 tree iter;
2722 if (global_renaming_pointers == NULL)
2723 return;
2725 FOR_EACH_VEC_ELT (*global_renaming_pointers, i, iter)
2726 if (!TREE_CONSTANT (DECL_RENAMED_OBJECT (iter)))
2727 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
2729 vec_free (global_renaming_pointers);
2732 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2733 a power of 2. */
2735 bool
2736 value_factor_p (tree value, HOST_WIDE_INT factor)
2738 if (tree_fits_uhwi_p (value))
2739 return tree_to_uhwi (value) % factor == 0;
2741 if (TREE_CODE (value) == MULT_EXPR)
2742 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2743 || value_factor_p (TREE_OPERAND (value, 1), factor));
2745 return false;
/* Return whether GNAT_NODE is a defining identifier for a renaming that comes
   from the parameter association for the instantiation of a generic.  We do
   not want to emit source location for them: the code generated for their
   initialization is likely to disturb debugging.  */

bool
renaming_from_generic_instantiation_p (Node_Id gnat_node)
{
  /* Only compiler-generated defining identifiers of object kind that
     actually rename another object are candidates.  */
  if (Nkind (gnat_node) != N_Defining_Identifier
      || !IN (Ekind (gnat_node), Object_Kind)
      || Comes_From_Source (gnat_node)
      || !Present (Renamed_Object (gnat_node)))
    return false;

  /* Get the object declaration of the renamed object, if any and if the
     renamed object is a mere identifier.  */
  gnat_node = Renamed_Object (gnat_node);
  if (Nkind (gnat_node) != N_Identifier)
    return false;

  gnat_node = Entity (gnat_node);
  if (!Present (Parent (gnat_node)))
    return false;

  /* The renaming comes from a generic instantiation iff the declaration
     of the renamed entity has a corresponding generic association.  */
  gnat_node = Parent (gnat_node);
  return
    (Present (gnat_node)
     && Nkind (gnat_node) == N_Object_Declaration
     && Present (Corresponding_Generic_Association (gnat_node)));
}
2779 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
2780 feed it with the elaboration of GNAT_SCOPE. */
2782 static struct deferred_decl_context_node *
2783 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2785 struct deferred_decl_context_node *new_node;
2787 new_node
2788 = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
2789 new_node->decl = decl;
2790 new_node->gnat_scope = gnat_scope;
2791 new_node->force_global = force_global;
2792 new_node->types.create (1);
2793 new_node->next = deferred_decl_context_queue;
2794 deferred_decl_context_queue = new_node;
2795 return new_node;
/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed.  */

static void
add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
{
  /* TYPE will receive N->decl's context when the queue entry N is
     processed by process_deferred_decl_context.  */
  n->types.safe_push (type);
}
2808 /* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
2809 NULL_TREE if it is not available. */
2811 static tree
2812 compute_deferred_decl_context (Entity_Id gnat_scope)
2814 tree context;
2816 if (present_gnu_tree (gnat_scope))
2817 context = get_gnu_tree (gnat_scope);
2818 else
2819 return NULL_TREE;
2821 if (TREE_CODE (context) == TYPE_DECL)
2823 const tree context_type = TREE_TYPE (context);
2825 /* Skip dummy types: only the final ones can appear in the context
2826 chain. */
2827 if (TYPE_DUMMY_P (context_type))
2828 return NULL_TREE;
2830 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
2831 chain. */
2832 else
2833 context = context_type;
2836 return context;
/* Try to process all deferred nodes in the queue.  Keep in the queue the ones
   that cannot be processed yet, remove the other ones.  If FORCE is true,
   force the processing for all nodes, use the global context when nodes don't
   have a GNU translation.  */

void
process_deferred_decl_context (bool force)
{
  /* IT is a pointer-to-pointer so that unlinking a processed node from the
     singly-linked queue needs no separate "previous" bookkeeping.  */
  struct deferred_decl_context_node **it = &deferred_decl_context_queue;
  struct deferred_decl_context_node *node;

  while (*it != NULL)
    {
      bool processed = false;
      tree context = NULL_TREE;
      Entity_Id gnat_scope;

      node = *it;

      /* If FORCE, get the innermost elaborated scope.  Otherwise, just try to
         get the first scope.  */
      gnat_scope = node->gnat_scope;
      while (Present (gnat_scope))
        {
          context = compute_deferred_decl_context (gnat_scope);
          if (!force || context != NULL_TREE)
            break;
          gnat_scope = get_debug_scope (gnat_scope, NULL);
        }

      /* Imported declarations must not be in a local context (i.e. not inside
         a function).  Walk up the context chain to check the invariant.  */
      if (context != NULL_TREE && node->force_global > 0)
        {
          tree ctx = context;

          while (ctx != NULL_TREE)
            {
              gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
              ctx = (DECL_P (ctx))
                    ? DECL_CONTEXT (ctx)
                    : TYPE_CONTEXT (ctx);
            }
        }

      /* If FORCE, we want to get rid of all nodes in the queue: in case there
         was no elaborated scope, use the global context.  */
      if (force && context == NULL_TREE)
        context = get_global_context ();

      if (context != NULL_TREE)
        {
          tree t;
          int i;

          DECL_CONTEXT (node->decl) = context;

          /* Propagate it to the TYPE_CONTEXT attributes of the requested
             ..._TYPE nodes.  */
          FOR_EACH_VEC_ELT (node->types, i, t)
            {
              gnat_set_type_context (t, context);
            }
          processed = true;
        }

      /* If this node has been successfully processed, remove it from the
         queue.  Then move to the next node.  */
      if (processed)
        {
          *it = node->next;
          node->types.release ();
          free (node);
        }
      else
        it = &node->next;
    }
}
2919 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
2921 static unsigned int
2922 scale_by_factor_of (tree expr, unsigned int value)
2924 expr = remove_conversions (expr, true);
2926 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
2927 corresponding to the number of trailing zeros of the mask. */
2928 if (TREE_CODE (expr) == BIT_AND_EXPR
2929 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
2931 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
2932 unsigned int i = 0;
2934 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
2936 mask >>= 1;
2937 value *= 2;
2938 i++;
2942 return value;
2945 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
2946 unless we can prove these 2 fields are laid out in such a way that no gap
2947 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
2948 is the distance in bits between the end of PREV_FIELD and the starting
2949 position of CURR_FIELD. It is ignored if null. */
2951 static bool
2952 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
2954 /* If this is the first field of the record, there cannot be any gap */
2955 if (!prev_field)
2956 return false;
2958 /* If the previous field is a union type, then return false: The only
2959 time when such a field is not the last field of the record is when
2960 there are other components at fixed positions after it (meaning there
2961 was a rep clause for every field), in which case we don't want the
2962 alignment constraint to override them. */
2963 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
2964 return false;
2966 /* If the distance between the end of prev_field and the beginning of
2967 curr_field is constant, then there is a gap if the value of this
2968 constant is not null. */
2969 if (offset && tree_fits_uhwi_p (offset))
2970 return !integer_zerop (offset);
2972 /* If the size and position of the previous field are constant,
2973 then check the sum of this size and position. There will be a gap
2974 iff it is not multiple of the current field alignment. */
2975 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
2976 && tree_fits_uhwi_p (bit_position (prev_field)))
2977 return ((tree_to_uhwi (bit_position (prev_field))
2978 + tree_to_uhwi (DECL_SIZE (prev_field)))
2979 % DECL_ALIGN (curr_field) != 0);
2981 /* If both the position and size of the previous field are multiples
2982 of the current field alignment, there cannot be any gap. */
2983 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
2984 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
2985 return false;
2987 /* Fallback, return that there may be a potential gap */
2988 return true;
2991 /* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
2992 of the decl. */
2994 tree
2995 create_label_decl (tree label_name, Node_Id gnat_node)
2997 tree label_decl
2998 = build_decl (input_location, LABEL_DECL, label_name, void_type_node);
3000 DECL_MODE (label_decl) = VOIDmode;
3002 /* Add this decl to the current binding level. */
3003 gnat_pushdecl (label_decl, gnat_node);
3005 return label_decl;
/* Return a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the DECL_CHAIN field).

   INLINE_STATUS, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
   used to set the appropriate fields in the FUNCTION_DECL.  GNAT_NODE is
   used for the position of the decl.  */

tree
create_subprog_decl (tree subprog_name, tree asm_name, tree subprog_type,
                     tree param_decl_list, enum inline_status_t inline_status,
                     bool public_flag, bool extern_flag, bool artificial_flag,
                     struct attrib *attr_list, Node_Id gnat_node)
{
  tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
                                  subprog_type);
  tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
                                 TREE_TYPE (subprog_type));
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;

  DECL_ARTIFICIAL (subprog_decl) = artificial_flag;
  DECL_EXTERNAL (subprog_decl) = extern_flag;

  /* Translate the front-end inlining directive into DECL flags.  */
  switch (inline_status)
    {
    case is_suppressed:
      DECL_UNINLINABLE (subprog_decl) = 1;
      break;

    case is_disabled:
      break;

    case is_required:
      /* With back-end inlining, a required inline becomes always_inline.  */
      if (Back_End_Inlining)
        decl_attributes (&subprog_decl,
                         tree_cons (get_identifier ("always_inline"),
                                    NULL_TREE, NULL_TREE),
                         ATTR_FLAG_TYPE_IN_PLACE);

      /* ... fall through ... */

    case is_enabled:
      DECL_DECLARED_INLINE_P (subprog_decl) = 1;
      /* Do not warn if an artificial subprogram cannot be inlined.  */
      DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_flag;
      break;

    default:
      gcc_unreachable ();
    }

  TREE_PUBLIC (subprog_decl) = public_flag;
  /* Mirror the qualifiers of the function type onto the decl.  */
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);

  /* Set up the implicit RESULT_DECL holding the return value.  */
  DECL_ARTIFICIAL (result_decl) = 1;
  DECL_IGNORED_P (result_decl) = 1;
  DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
  DECL_RESULT (subprog_decl) = result_decl;

  if (asm_name)
    {
      SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

      /* The expand_main_function circuitry expects "main_identifier_node" to
         designate the DECL_NAME of the 'main' entry point, in turn expected
         to be declared as the "main" function literally by default.  Ada
         program entry points are typically declared with a different name
         within the binder generated file, exported as 'main' to satisfy the
         system expectations.  Force main_identifier_node in this case.  */
      if (asm_name == main_identifier_node)
        DECL_NAME (subprog_decl) = main_identifier_node;
    }

  process_attributes (&subprog_decl, &attr_list, true, gnat_node);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
}
3094 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3095 body. This routine needs to be invoked before processing the declarations
3096 appearing in the subprogram. */
3098 void
3099 begin_subprog_body (tree subprog_decl)
3101 tree param_decl;
3103 announce_function (subprog_decl);
3105 /* This function is being defined. */
3106 TREE_STATIC (subprog_decl) = 1;
3108 current_function_decl = subprog_decl;
3110 /* Enter a new binding level and show that all the parameters belong to
3111 this function. */
3112 gnat_pushlevel ();
3114 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
3115 param_decl = DECL_CHAIN (param_decl))
3116 DECL_CONTEXT (param_decl) = subprog_decl;
3118 make_decl_rtl (subprog_decl);
/* Finish translating the current subprogram and set its BODY.  */

void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Attach the BLOCK for this level to the function and pop the level.  */
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* Mark the RESULT_DECL as being in this subprogram.  */
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  /* The body should be a BIND_EXPR whose BLOCK is the top-level one.  */
  if (TREE_CODE (body) == BIND_EXPR)
    {
      BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
      DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
    }

  DECL_SAVED_TREE (fndecl) = body;

  /* Restore the enclosing function, if any, as the current one.  */
  current_function_decl = decl_function_context (fndecl);
}
3148 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
3150 void
3151 rest_of_subprog_body_compilation (tree subprog_decl)
3153 /* We cannot track the location of errors past this point. */
3154 error_gnat_node = Empty;
3156 /* If we're only annotating types, don't actually compile this function. */
3157 if (type_annotate_only)
3158 return;
3160 /* Dump functions before gimplification. */
3161 dump_function (TDI_original, subprog_decl);
3163 if (!decl_function_context (subprog_decl))
3164 cgraph_node::finalize_function (subprog_decl, false);
3165 else
3166 /* Register this function with cgraph just far enough to get it
3167 added to our parent's nested function list. */
3168 (void) cgraph_node::get_create (subprog_decl);
/* Record DECL, a builtin function, in the current binding level so that it
   is visible to subsequent lookups, and return it.  Empty means that no Ada
   source location is associated with the declaration.  */

tree
gnat_builtin_function (tree decl)
{
  gnat_pushdecl (decl, Empty);
  return decl;
}
3178 /* Return an integer type with the number of bits of precision given by
3179 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3180 it is a signed type. */
3182 tree
3183 gnat_type_for_size (unsigned precision, int unsignedp)
3185 tree t;
3186 char type_name[20];
3188 if (precision <= 2 * MAX_BITS_PER_WORD
3189 && signed_and_unsigned_types[precision][unsignedp])
3190 return signed_and_unsigned_types[precision][unsignedp];
3192 if (unsignedp)
3193 t = make_unsigned_type (precision);
3194 else
3195 t = make_signed_type (precision);
3197 if (precision <= 2 * MAX_BITS_PER_WORD)
3198 signed_and_unsigned_types[precision][unsignedp] = t;
3200 if (!TYPE_NAME (t))
3202 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
3203 TYPE_NAME (t) = get_identifier (type_name);
3206 return t;
3209 /* Likewise for floating-point types. */
3211 static tree
3212 float_type_for_precision (int precision, machine_mode mode)
3214 tree t;
3215 char type_name[20];
3217 if (float_types[(int) mode])
3218 return float_types[(int) mode];
3220 float_types[(int) mode] = t = make_node (REAL_TYPE);
3221 TYPE_PRECISION (t) = precision;
3222 layout_type (t);
3224 gcc_assert (TYPE_MODE (t) == mode);
3225 if (!TYPE_NAME (t))
3227 sprintf (type_name, "FLOAT_%d", precision);
3228 TYPE_NAME (t) = get_identifier (type_name);
3231 return t;
3234 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
3235 an unsigned type; otherwise a signed type is returned. */
3237 tree
3238 gnat_type_for_mode (machine_mode mode, int unsignedp)
3240 if (mode == BLKmode)
3241 return NULL_TREE;
3243 if (mode == VOIDmode)
3244 return void_type_node;
3246 if (COMPLEX_MODE_P (mode))
3247 return NULL_TREE;
3249 if (SCALAR_FLOAT_MODE_P (mode))
3250 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
3252 if (SCALAR_INT_MODE_P (mode))
3253 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
3255 if (VECTOR_MODE_P (mode))
3257 machine_mode inner_mode = GET_MODE_INNER (mode);
3258 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3259 if (inner_type)
3260 return build_vector_type_for_mode (inner_type, mode);
3263 return NULL_TREE;
3266 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
3268 tree
3269 gnat_unsigned_type (tree type_node)
3271 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
3273 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3275 type = copy_node (type);
3276 TREE_TYPE (type) = type_node;
3278 else if (TREE_TYPE (type_node)
3279 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3280 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3282 type = copy_node (type);
3283 TREE_TYPE (type) = TREE_TYPE (type_node);
3286 return type;
3289 /* Return the signed version of a TYPE_NODE, a scalar type. */
3291 tree
3292 gnat_signed_type (tree type_node)
3294 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
3296 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3298 type = copy_node (type);
3299 TREE_TYPE (type) = type_node;
3301 else if (TREE_TYPE (type_node)
3302 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3303 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3305 type = copy_node (type);
3306 TREE_TYPE (type) = TREE_TYPE (type_node);
3309 return type;
/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
   transparently converted to each other.  */

int
gnat_types_compatible_p (tree t1, tree t2)
{
  enum tree_code code;

  /* This is the default criterion.  */
  if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
    return 1;

  /* We only check structural equivalence here.  */
  if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
    return 0;

  /* Vector types are also compatible if they have the same number of subparts
     and the same form of (scalar) element type.  */
  if (code == VECTOR_TYPE
      && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
      && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
      && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
    return 1;

  /* Array types are also compatible if they are constrained and have the same
     domain(s) and the same component type.  Multi-dimensional Ada arrays are
     arrays of arrays, hence the recursion on the component type.  */
  if (code == ARRAY_TYPE
      && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
          || (TYPE_DOMAIN (t1)
              && TYPE_DOMAIN (t2)
              && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
                                     TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
              && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
                                     TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
      && (TREE_TYPE (t1) == TREE_TYPE (t2)
          || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
              && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
    return 1;

  return 0;
}
3354 /* Return true if EXPR is a useless type conversion. */
3356 bool
3357 gnat_useless_type_conversion (tree expr)
3359 if (CONVERT_EXPR_P (expr)
3360 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3361 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3362 return gnat_types_compatible_p (TREE_TYPE (expr),
3363 TREE_TYPE (TREE_OPERAND (expr, 0)));
3365 return false;
3368 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3370 bool
3371 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3372 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3374 return TYPE_CI_CO_LIST (t) == cico_list
3375 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3376 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3377 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      /* Declarations and constants contain no discriminant references.  */
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
        {
          tree t, *argarray;
          int n, i;

          /* Inline the call if possible and recurse on the result.  */
          t = maybe_inline_call_in_expr (exp);
          if (t)
            return max_size (t, max_p);

          /* Otherwise rebuild the call with max-/min-imized arguments.  */
          n = call_expr_nargs (exp);
          gcc_assert (n > 0);
          argarray = XALLOCAVEC (tree, n);
          for (i = 0; i < n; i++)
            argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
          return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
        }
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
         modify.  Otherwise, we treat it like a variable.  */
      if (CONTAINS_PLACEHOLDER_P (exp))
        {
          tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
          tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
          return max_size (convert (get_base_type (val_type), val), true);
        }

      return exp;

    case tcc_comparison:
      /* A comparison yields at most 1 and at least 0.  */
      return max_p ? size_one_node : size_zero_node;

    case tcc_unary:
      if (code == NON_LVALUE_EXPR)
        return max_size (TREE_OPERAND (exp, 0), max_p);

      /* Negation flips which bound maximizes the result.  */
      return fold_build1 (code, type,
                          max_size (TREE_OPERAND (exp, 0),
                                    code == NEGATE_EXPR ? !max_p : max_p));

    case tcc_binary:
      {
        /* For subtraction, the maximum is reached by minimizing the
           subtrahend, hence the inverted MAX_P on the RHS.  */
        tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
        tree rhs = max_size (TREE_OPERAND (exp, 1),
                             code == MINUS_EXPR ? !max_p : max_p);

        /* Special-case wanting the maximum value of a MIN_EXPR.
           In that case, if one side overflows, return the other.  */
        if (max_p && code == MIN_EXPR)
          {
            if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
              return lhs;

            if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
              return rhs;
          }

        /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
           overflowing and the RHS a variable.  */
        if ((code == MINUS_EXPR || code == PLUS_EXPR)
            && TREE_CODE (lhs) == INTEGER_CST
            && TREE_OVERFLOW (lhs)
            && !TREE_CONSTANT (rhs))
          return lhs;

        return size_binop (code, lhs, rhs);
      }

    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
        {
        case 1:
          /* A SAVE_EXPR must be preserved as-is to keep sharing.  */
          if (code == SAVE_EXPR)
            return exp;

          return fold_build1 (code, type,
                              max_size (TREE_OPERAND (exp, 0), max_p));

        case 2:
          /* Only the value operand of a COMPOUND_EXPR matters here.  */
          if (code == COMPOUND_EXPR)
            return max_size (TREE_OPERAND (exp, 1), max_p);

          return fold_build2 (code, type,
                              max_size (TREE_OPERAND (exp, 0), max_p),
                              max_size (TREE_OPERAND (exp, 1), max_p));

        case 3:
          /* For a conditional, take the extremum of the two arms.  */
          if (code == COND_EXPR)
            return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
                                max_size (TREE_OPERAND (exp, 1), max_p),
                                max_size (TREE_OPERAND (exp, 2), max_p));

        default:
          break;
        }

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  vec<constructor_elt, va_gc> *template_elts = NULL;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip padding and justified-modular wrappers to reach the array.  */
  while (TREE_CODE (array_type) == RECORD_TYPE
         && (TYPE_PADDING_P (array_type)
             || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  /* Use the recorded actual bounds when the type carries them.  */
  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
          && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.

     The template has one (min, max) field pair per dimension, so the loop
     advances two fields at a time, consuming one bound-list entry or one
     array-type nesting level per iteration.  */
  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
        ? (bound_list = TREE_CHAIN (bound_list))
        : (array_type = TREE_TYPE (array_type))),
       field = DECL_CHAIN (DECL_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
         for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
         DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
         This will give us a maximum range.  */
      if (bound_list)
        bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
        bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
               && DECL_BY_COMPONENT_PTR_P (expr))
        bounds = TREE_TYPE (field);
      else
        gcc_unreachable ();

      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
         substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
      CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
    }

  return gnat_build_constructor (template_type, template_elts);
}
3565 /* Return true if TYPE is suitable for the element type of a vector. */
3567 static bool
3568 type_for_vector_element_p (tree type)
3570 machine_mode mode;
3572 if (!INTEGRAL_TYPE_P (type)
3573 && !SCALAR_FLOAT_TYPE_P (type)
3574 && !FIXED_POINT_TYPE_P (type))
3575 return false;
3577 mode = TYPE_MODE (type);
3578 if (GET_MODE_CLASS (mode) != MODE_INT
3579 && !SCALAR_FLOAT_MODE_P (mode)
3580 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3581 return false;
3583 return true;
3586 /* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3587 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3588 attribute declaration and want to issue error messages on failure. */
3590 static tree
3591 build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3593 unsigned HOST_WIDE_INT size_int, inner_size_int;
3594 int nunits;
3596 /* Silently punt on variable sizes. We can't make vector types for them,
3597 need to ignore them on front-end generated subtypes of unconstrained
3598 base types, and this attribute is for binding implementors, not end
3599 users, so we should never get there from legitimate explicit uses. */
3600 if (!tree_fits_uhwi_p (size))
3601 return NULL_TREE;
3602 size_int = tree_to_uhwi (size);
3604 if (!type_for_vector_element_p (inner_type))
3606 if (attribute)
3607 error ("invalid element type for attribute %qs",
3608 IDENTIFIER_POINTER (attribute));
3609 return NULL_TREE;
3611 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3613 if (size_int % inner_size_int)
3615 if (attribute)
3616 error ("vector size not an integral multiple of component size");
3617 return NULL_TREE;
3620 if (size_int == 0)
3622 if (attribute)
3623 error ("zero vector size");
3624 return NULL_TREE;
3627 nunits = size_int / inner_size_int;
3628 if (nunits & (nunits - 1))
3630 if (attribute)
3631 error ("number of components of vector not a power of two");
3632 return NULL_TREE;
3635 return build_vector_type (inner_type, nunits);
3638 /* Return a vector type whose representative array type is ARRAY_TYPE, or
3639 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3640 processing the attribute and want to issue error messages on failure. */
3642 static tree
3643 build_vector_type_for_array (tree array_type, tree attribute)
3645 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3646 TYPE_SIZE_UNIT (array_type),
3647 attribute);
3648 if (!vector_type)
3649 return NULL_TREE;
3651 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3652 return vector_type;
/* Build a type to be used to represent an aliased object whose nominal type
   is an unconstrained array.  This consists of a RECORD_TYPE containing a
   field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
   If ARRAY_TYPE is that of an unconstrained array, this is used to represent
   an arbitrary unconstrained object.  Use NAME as the name of the record.
   DEBUG_INFO_P is true if we need to write debug information for the type.  */

tree
build_unc_object_type (tree template_type, tree object_type, tree name,
                       bool debug_info_p)
{
  tree decl;
  tree type = make_node (RECORD_TYPE);
  /* The bounds (template) come first, the array data second.  */
  tree template_field
    = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
                         NULL_TREE, NULL_TREE, 0, 1);
  tree array_field
    = create_field_decl (get_identifier ("ARRAY"), object_type, type,
                         NULL_TREE, NULL_TREE, 0, 1);

  TYPE_NAME (type) = name;
  TYPE_CONTAINS_TEMPLATE_P (type) = 1;
  DECL_CHAIN (template_field) = array_field;
  finish_record_type (type, template_field, 0, true);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  decl = create_type_decl (name, type, true, debug_info_p, Empty);

  /* template_type will not be used elsewhere than here, so to keep the debug
     info clean and in order to avoid scoping issues, make decl its
     context.  */
  gnat_set_type_context (template_type, decl);

  return type;
}
3692 /* Same, taking a thin or fat pointer type instead of a template type. */
3694 tree
3695 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3696 tree name, bool debug_info_p)
3698 tree template_type;
3700 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3702 template_type
3703 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3704 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3705 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3707 return
3708 build_unc_object_type (template_type, object_type, name, debug_info_p);
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
   In the normal case this is just two adjustments, but we have more to
   do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree t;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
      update_pointer_to (t, new_type);

  /* If no pointers and no references, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it; after the designated type's freeze, we get here with
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
     when the access type was processed.  We will make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy OLD_TYPE variant with different
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
     the qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
  new_type
    = build_qualified_type (new_type,
			    TYPE_QUALS (old_type) | TYPE_QUALS (new_type));

  /* If old type and new type are identical, there is nothing to do.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      tree new_ptr, new_ref;

      /* If pointer or reference already points to new type, nothing to do.
	 This can happen as update_pointer_to can be invoked multiple times
	 on the same couple of types because of the type variants.  */
      if ((ptr && TREE_TYPE (ptr) == new_type)
	  || (ref && TREE_TYPE (ref) == new_type))
	return;

      /* Chain PTR and its variants at the end.  */
      new_ptr = TYPE_POINTER_TO (new_type);
      if (new_ptr)
	{
	  while (TYPE_NEXT_PTR_TO (new_ptr))
	    new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
	  TYPE_NEXT_PTR_TO (new_ptr) = ptr;
	}
      else
	TYPE_POINTER_TO (new_type) = ptr;

      /* Now adjust them.  Also redirect the operand of the cached null
	 bounds, if any; it is an ADDR_EXPR built by convert_to_fat_pointer
	 whose type must be kept in sync with the designated type.  */
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
	  {
	    TREE_TYPE (t) = new_type;
	    if (TYPE_NULL_BOUNDS (t))
	      TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
	  }

      /* Chain REF and its variants at the end.  */
      new_ref = TYPE_REFERENCE_TO (new_type);
      if (new_ref)
	{
	  while (TYPE_NEXT_REF_TO (new_ref))
	    new_ref = TYPE_NEXT_REF_TO (new_ref);
	  TYPE_NEXT_REF_TO (new_ref) = ref;
	}
      else
	TYPE_REFERENCE_TO (new_type) = ref;

      /* Now adjust them.  */
      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
	for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
	  TREE_TYPE (t) = new_type;

      /* OLD_TYPE's pointer and reference chains have been transferred, so
	 detach them from the old type.  */
      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }

  /* Now deal with the unconstrained array case.  In this case the pointer
     is actually a record where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.
     Likewise for the pointer to the object record (thin pointer).  */
  else
    {
      tree new_ptr = TYPE_POINTER_TO (new_type);

      gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));

      /* If PTR already points to NEW_TYPE, nothing to do.  This can happen
	 since update_pointer_to can be invoked multiple times on the same
	 couple of types because of the type variants.  */
      if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
	return;

      /* Redirect what the first field of the fat pointer (the pointer to
	 the array) designates.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));

      /* Likewise for the second field (the pointer to the template).  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
	 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));

      /* And do the same for the object record type used by thin pointers.  */
      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
			 TYPE_OBJECT_RECORD_TYPE (new_type));

      TYPE_POINTER_TO (old_type) = NULL_TREE;
    }
}
/* Convert EXPR, a pointer to a constrained array, into a pointer to an
   unconstrained one.  This involves making or finding a template.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  /* Fat pointer layout: first field points to the array, second field
     points to the template (bounds).  */
  tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
  tree etype = TREE_TYPE (expr);
  tree template_addr;
  vec<constructor_elt, va_gc> *v;
  vec_alloc (v, 2);

  /* If EXPR is null, make a fat pointer that contains a null pointer to the
     array (compare_fat_pointers ensures that this is the full discriminant)
     and a valid pointer to the bounds.  This latter property is necessary
     since the compiler can hoist the load of the bounds done through it.  */
  if (integer_zerop (expr))
    {
      tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      tree null_bounds, t;

      /* Reuse the null bounds cached on the pointer-to-template type, if
	 already built by a previous call.  */
      if (TYPE_NULL_BOUNDS (ptr_template_type))
	null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
      else
	{
	  /* The template type can still be dummy at this point so we build an
	     empty constructor.  The middle-end will fill it in with zeros.  */
	  t = build_constructor (template_type, NULL);
	  TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
	  null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
	  SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
	}

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      fold_convert (p_array_type, null_pointer_node));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
      t = build_constructor (type, v);
      /* Do not set TREE_CONSTANT so as to force T to static memory.  */
      TREE_CONSTANT (t) = 0;
      TREE_STATIC (t) = 1;

      return t;
    }

  /* If EXPR is a thin pointer, make template and data from the record.  */
  if (TYPE_IS_THIN_POINTER_P (etype))
    {
      tree field = TYPE_FIELDS (TREE_TYPE (etype));

      /* Guard EXPR against multiple evaluations below.  */
      expr = gnat_protect_expr (expr);

      /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
	 the thin pointer value has been shifted so we shift it back to get
	 the template address.  */
      if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
	{
	  template_addr
	    = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
			       fold_build1 (NEGATE_EXPR, sizetype,
					    byte_position
					    (DECL_CHAIN (field))));
	  template_addr
	    = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
			    template_addr);
	}

      /* Otherwise we explicitly take the address of the fields.  */
      else
	{
	  expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
	  template_addr
	    = build_unary_op (ADDR_EXPR, NULL_TREE,
			      build_component_ref (expr, NULL_TREE, field,
						   false));
	  expr = build_unary_op (ADDR_EXPR, NULL_TREE,
				 build_component_ref (expr, NULL_TREE,
						      DECL_CHAIN (field),
						      false));
	}
    }

  /* Otherwise, build the constructor for the template.  */
  else
    template_addr
      = build_unary_op (ADDR_EXPR, NULL_TREE,
			build_template (template_type, TREE_TYPE (etype),
					expr));

  /* The final result is a constructor for the fat pointer.

     If EXPR is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     COMPONENT_REFs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine because it
     will only refer to the provided TEMPLATE_TYPE in this case.  */
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
  return gnat_build_constructor (type, v);
}
/* Create an expression whose value is that of EXPR,
   converted to type TYPE.  The TREE_TYPE of the value
   is always TYPE.  This function implements all reasonable
   conversions; callers should filter out those that are
   not permitted by the language being compiled.  */

tree
convert (tree type, tree expr)
{
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both input and output have padding and are of variable size, do this
     as an unchecked conversion.  Likewise if one is a mere variant of the
     other, so we avoid a pointless unpad/repad sequence.  */
  else if (code == RECORD_TYPE && ecode == RECORD_TYPE
	   && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
	   && (!TREE_CONSTANT (TYPE_SIZE (type))
	       || !TREE_CONSTANT (TYPE_SIZE (etype))
	       || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
	       || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
		  == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
    /* Deliberately empty: skip the padding-specific handling below and let
       the common code at the bottom perform the unchecked conversion.  */
    ;

  /* If the output type has padding, convert to the inner type and make a
     constructor to build the record, unless a variable size is involved.  */
  else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
    {
      vec<constructor_elt, va_gc> *v;

      /* If we previously converted from another type and our type is
	 of variable size, remove the conversion to avoid the need for
	 variable-sized temporaries.  Likewise for a conversion between
	 original and packable version.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
	  && (!TREE_CONSTANT (TYPE_SIZE (type))
	      || (ecode == RECORD_TYPE
		  && TYPE_NAME (etype)
		     == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
	expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from expr, convert the original
	 object if we have variable size in order to avoid the need for some
	 variable-sized temporaries.  Likewise if the padding is a variant
	 of the other, so we avoid a pointless unpad/repad sequence.  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && (!TREE_CONSTANT (TYPE_SIZE (type))
	      || TYPE_MAIN_VARIANT (type)
		 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
	      || (ecode == RECORD_TYPE
		  && TYPE_NAME (etype)
		     == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
	return convert (type, TREE_OPERAND (expr, 0));

      /* If the inner type is of self-referential size and the expression type
	 is a record, do this as an unchecked conversion.  But first pad the
	 expression if possible to have the same size on both sides.  */
      if (ecode == RECORD_TYPE
	  && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
	{
	  if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
	    expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
					    false, false, false, true),
			    expr);
	  return unchecked_convert (type, expr, false);
	}

      /* If we are converting between array types with variable size, do the
	 final conversion as an unchecked conversion, again to avoid the need
	 for some variable-sized temporaries.  If valid, this conversion is
	 very likely purely technical and without real effects.  */
      if (ecode == ARRAY_TYPE
	  && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
	  && !TREE_CONSTANT (TYPE_SIZE (etype))
	  && !TREE_CONSTANT (TYPE_SIZE (type)))
	return unchecked_convert (type,
				  convert (TREE_TYPE (TYPE_FIELDS (type)),
					   expr),
				  false);

      /* Otherwise, wrap the converted inner object in a one-field
	 constructor for the padding record.  */
      vec_alloc (v, 1);
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
      return gnat_build_constructor (type, v);
    }

  /* If the input type has padding, remove it and convert to the output type.
     The conditions ordering is arranged to ensure that the output type is not
     a padding type here, as it is not clear whether the conversion would
     always be correct if this was to happen.  */
  else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
    {
      tree unpadded;

      /* If we have just converted to this padded type, just get the
	 inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR
	  && !vec_safe_is_empty (CONSTRUCTOR_ELTS (expr))
	  && (*CONSTRUCTOR_ELTS (expr))[0].index == TYPE_FIELDS (etype))
	unpadded = (*CONSTRUCTOR_ELTS (expr))[0].value;

      /* Otherwise, build an explicit component reference.  */
      else
	unpadded
	  = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);

      return convert (type, unpadded);
    }

  /* If the input is a biased type, adjust first.  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
				       fold_convert (TREE_TYPE (etype), expr),
				       fold_convert (TREE_TYPE (etype),
						     TYPE_MIN_VALUE (etype))));

  /* If the input is a justified modular type, we need to extract the actual
     object before converting it to any other type with the exceptions of an
     unconstrained array or of a mere type variant.  It is useful to avoid the
     extraction and conversion in the type variant case because it could end
     up replacing a VAR_DECL expr by a constructor and we might be about to
     take the address of the result.  */
  if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
      && code != UNCONSTRAINED_ARRAY_TYPE
      && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
    return convert (type, build_component_ref (expr, NULL_TREE,
					       TYPE_FIELDS (etype), false));

  /* If converting to a type that contains a template, convert to the data
     type and then build the template.  */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
    {
      tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, 2);

      /* If the source already has a template, get a reference to the
	 associated array only, as we are going to rebuild a template
	 for the target type anyway.  */
      expr = maybe_unconstrained_array (expr);

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      build_template (TREE_TYPE (TYPE_FIELDS (type)),
					      obj_type, NULL_TREE));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
			      convert (obj_type, expr));
      return gnat_build_constructor (type, v);
    }

  /* There are some cases of expressions that we process specially.  */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
	 conversion in gnat_expand_expr.  NULL_EXPR does not represent
	 an actual value, so no conversion is needed.  */
      expr = copy_node (expr);
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
      /* If we are converting a STRING_CST to another constrained array type,
	 just make a new one in the proper type.  */
      if (code == ecode && AGGREGATE_TYPE_P (etype)
	  && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
	       && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}
      break;

    case VECTOR_CST:
      /* If we are converting a VECTOR_CST to a mere type variant, just make
	 a new one in the proper type.  */
      if (code == ecode && gnat_types_compatible_p (type, etype))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}

      /* ... fall through to the CONSTRUCTOR handling (harmless: its guards
	 cannot match a VECTOR_TYPE source, so it ends in the break) ...  */

    case CONSTRUCTOR:
      /* If we are converting a CONSTRUCTOR to a mere type variant, or to
	 another padding type around the same type, just make a new one in
	 the proper type.  */
      if (code == ecode
	  && (gnat_types_compatible_p (type, etype)
	      || (code == RECORD_TYPE
		  && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
		  && TREE_TYPE (TYPE_FIELDS (type))
		     == TREE_TYPE (TYPE_FIELDS (etype)))))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
	  return expr;
	}

      /* Likewise for a conversion between original and packable version, or
	 conversion between types of the same size and with the same list of
	 fields, but we have to work harder to preserve type consistency.  */
      if (code == ecode
	  && code == RECORD_TYPE
	  && (TYPE_NAME (type) == TYPE_NAME (etype)
	      || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
	{
	  vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
	  unsigned HOST_WIDE_INT len = vec_safe_length (e);
	  vec<constructor_elt, va_gc> *v;
	  vec_alloc (v, len);
	  tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
	  unsigned HOST_WIDE_INT idx;
	  tree index, value;

	  /* Whether we need to clear TREE_CONSTANT et al. on the output
	     constructor when we convert in place.  */
	  bool clear_constant = false;

	  FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
	    {
	      /* Skip the missing fields in the CONSTRUCTOR.  */
	      while (efield && field && !SAME_FIELD_P (efield, index))
		{
		  efield = DECL_CHAIN (efield);
		  field = DECL_CHAIN (field);
		}

	      /* The field must be the same.  */
	      if (!(efield && field && SAME_FIELD_P (efield, field)))
		break;

	      constructor_elt elt
		= {field, convert (TREE_TYPE (field), value)};
	      v->quick_push (elt);

	      /* If packing has made this field a bitfield and the input
		 value couldn't be emitted statically any more, we need to
		 clear TREE_CONSTANT on our output.  */
	      if (!clear_constant
		  && TREE_CONSTANT (expr)
		  && !CONSTRUCTOR_BITFIELD_P (efield)
		  && CONSTRUCTOR_BITFIELD_P (field)
		  && !initializer_constant_valid_for_bitfield_p (value))
		clear_constant = true;

	      efield = DECL_CHAIN (efield);
	      field = DECL_CHAIN (field);
	    }

	  /* If we have been able to match and convert all the input fields
	     to their output type, convert in place now.  We'll fallback to a
	     view conversion downstream otherwise.  */
	  if (idx == len)
	    {
	      expr = copy_node (expr);
	      TREE_TYPE (expr) = type;
	      CONSTRUCTOR_ELTS (expr) = v;
	      if (clear_constant)
		TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
	      return expr;
	    }
	}

      /* Likewise for a conversion between array type and vector type with a
	 compatible representative array.  */
      else if (code == VECTOR_TYPE
	       && ecode == ARRAY_TYPE
	       && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
					   etype))
	{
	  vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
	  unsigned HOST_WIDE_INT len = vec_safe_length (e);
	  vec<constructor_elt, va_gc> *v;
	  unsigned HOST_WIDE_INT ix;
	  tree value;

	  /* Build a VECTOR_CST from a *constant* array constructor.  */
	  if (TREE_CONSTANT (expr))
	    {
	      bool constant_p = true;

	      /* Iterate through elements and check if all constructor
		 elements are *_CSTs.  */
	      FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
		if (!CONSTANT_CLASS_P (value))
		  {
		    constant_p = false;
		    break;
		  }

	      if (constant_p)
		return build_vector_from_ctor (type,
					       CONSTRUCTOR_ELTS (expr));
	    }

	  /* Otherwise, build a regular vector constructor.  */
	  vec_alloc (v, len);
	  FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
	    {
	      constructor_elt elt = {NULL_TREE, value};
	      v->quick_push (elt);
	    }
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  CONSTRUCTOR_ELTS (expr) = v;
	  return expr;
	}
      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* First retrieve the underlying array.  */
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      {
	/* GCC 4.x is very sensitive to type consistency overall, and view
	   conversions thus are very frequent.  Even though just "convert"ing
	   the inner operand to the output type is fine in most cases, it
	   might expose unexpected input/output type mismatches in special
	   circumstances so we avoid such recursive calls when we can.  */
	tree op0 = TREE_OPERAND (expr, 0);

	/* If we are converting back to the original type, we can just
	   lift the input conversion.  This is a common occurrence with
	   switches back-and-forth amongst type variants.  */
	if (type == TREE_TYPE (op0))
	  return op0;

	/* Otherwise, if we're converting between two aggregate or vector
	   types, we might be allowed to substitute the VIEW_CONVERT_EXPR
	   target type in place or to just convert the inner expression.  */
	if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
	    || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
	  {
	    /* If we are converting between mere variants, we can just
	       substitute the VIEW_CONVERT_EXPR in place.  */
	    if (gnat_types_compatible_p (type, etype))
	      return build1 (VIEW_CONVERT_EXPR, type, op0);

	    /* Otherwise, we may just bypass the input view conversion unless
	       one of the types is a fat pointer, which is handled by
	       specialized code below which relies on exact type matching.  */
	    else if (!TYPE_IS_FAT_POINTER_P (type)
		     && !TYPE_IS_FAT_POINTER_P (etype))
	      return convert (type, op0);
	  }

	break;
      }

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  /* If we are converting between two aggregate or vector types that are mere
     variants, just make a VIEW_CONVERT_EXPR.  Likewise when we are converting
     to a vector type from its representative array type.  */
  else if ((code == ecode
	    && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
	    && gnat_types_compatible_p (type, etype))
	   || (code == VECTOR_TYPE
	       && ecode == ARRAY_TYPE
	       && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
					   etype)))
    return build1 (VIEW_CONVERT_EXPR, type, expr);

  /* If we are converting between tagged types, try to upcast properly.
     Walk the chain of parent fields looking for the target type.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
    {
      tree child_etype = etype;
      do {
	tree field = TYPE_FIELDS (child_etype);
	if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
	  return build_component_ref (expr, NULL_TREE, field, false);
	child_etype = TREE_TYPE (field);
      } while (TREE_CODE (child_etype) == RECORD_TYPE);
    }

  /* If we are converting from a smaller form of record type back to it, just
     make a VIEW_CONVERT_EXPR.  But first pad the expression to have the same
     size on both sides.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && smaller_form_type_p (etype, type))
    {
      expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
				      false, false, false, true),
		      expr);
      return build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* In all other cases of related types, make a NOP_EXPR.  */
  else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
    return fold_convert (type, expr);

  /* Finally, dispatch on the code of the target type.  */
  switch (code)
    {
    case VOID_TYPE:
      return fold_build1 (CONVERT_EXPR, type, expr);

    case INTEGER_TYPE:
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
	  && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
	      || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
	return unchecked_convert (type, expr, false);
      else if (TYPE_BIASED_REPRESENTATION_P (type))
	return fold_convert (type,
			     fold_build2 (MINUS_EXPR, TREE_TYPE (type),
					  convert (TREE_TYPE (type), expr),
					  convert (TREE_TYPE (type),
						   TYPE_MIN_VALUE (type))));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* If we are converting an additive expression to an integer type
	 with lower precision, be wary of the optimization that can be
	 applied by convert_to_integer.  There are 2 problematic cases:
	   - if the first operand was originally of a biased type,
	     because we could be recursively called to convert it
	     to an intermediate type and thus rematerialize the
	     additive operator endlessly,
	   - if the expression contains a placeholder, because an
	     intermediate conversion that changes the sign could
	     be inserted and thus introduce an artificial overflow
	     at compile time when the placeholder is substituted.  */
      if (code == INTEGER_TYPE
	  && ecode == INTEGER_TYPE
	  && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
	  && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
	{
	  tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);

	  if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
	       && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
	      || CONTAINS_PLACEHOLDER_P (expr))
	    return build1 (NOP_EXPR, type, expr);
	}

      return fold (convert_to_integer (type, expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two thin pointers, adjust if needed to account
	 for differing offsets from the base pointer, depending on whether
	 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type.  */
      if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
	{
	  tree etype_pos
	    = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)) != NULL_TREE
	      ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
	      : size_zero_node;
	  tree type_pos
	    = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != NULL_TREE
	      ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
	      : size_zero_node;
	  tree byte_diff = size_diffop (type_pos, etype_pos);

	  expr = build1 (NOP_EXPR, type, expr);
	  if (integer_zerop (byte_diff))
	    return expr;

	  return build_binary_op (POINTER_PLUS_EXPR, type, expr,
				  fold_convert (sizetype, byte_diff));
	}

      /* If converting fat pointer to normal or thin pointer, get the pointer
	 to the array and then convert it.  */
      if (TYPE_IS_FAT_POINTER_P (etype))
	expr
	  = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      /* For a justified modular target, wrap a non-aggregate source in a
	 one-field constructor.  */
      if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
	{
	  vec<constructor_elt, va_gc> *v;
	  vec_alloc (v, 1);

	  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
				  convert (TREE_TYPE (TYPE_FIELDS (type)),
					   expr));
	  return gnat_build_constructor (type, v);
	}

      /* ... fall through ... */

    case ARRAY_TYPE:
      /* In these cases, assume the front-end has validated the conversion.
	 If the conversion is valid, it will be a bit-wise conversion, so
	 it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, false);

    case UNION_TYPE:
      /* This is either a conversion between a tagged type and some
	 subtype, which we have to mark as a UNION_TYPE because of
	 overlapping fields or a conversion of an Unchecked_Union.  */
      return unchecked_convert (type, expr, false);

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If the input is a VECTOR_TYPE, convert to the representative
	 array type first.  */
      if (ecode == VECTOR_TYPE)
	{
	  expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
	  etype = TREE_TYPE (expr);
	  ecode = TREE_CODE (etype);
	}

      /* If EXPR is a constrained array, take its address, convert it to a
	 fat pointer, and then dereference it.  Likewise if EXPR is a
	 record containing both a template and a constrained array.
	 Note that a record representing a justified modular type
	 always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
	  || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
	return
	  build_unary_op
	    (INDIRECT_REF, NULL_TREE,
	     convert_to_fat_pointer (TREE_TYPE (type),
				     build_unary_op (ADDR_EXPR,
						     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
	 array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
	return
	  build_unary_op (INDIRECT_REF, NULL_TREE,
			  convert (TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR,
						   NULL_TREE, expr)));
      else
	gcc_unreachable ();

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gcc_unreachable ();
    }
}
/* Create an expression whose value is that of EXPR converted to the common
   index type, which is sizetype.  EXPR is supposed to be in the base type
   of the GNAT index type.  Calling it is equivalent to doing

     convert (sizetype, expr)

   but we try to distribute the type conversion with the knowledge that EXPR
   cannot overflow in its type.  This is a best-effort approach and we fall
   back to the above expression as soon as difficulties are encountered.

   This is necessary to overcome issues that arise when the GNAT base index
   type and the GCC common index type (sizetype) don't have the same size,
   which is quite frequent on 64-bit architectures.  In this case, and if
   the GNAT base index type is signed but the iteration type of the loop has
   been forced to unsigned, the loop scalar evolution engine cannot compute
   a simple evolution for the general induction variables associated with the
   array indices, because it will preserve the wrap-around semantics in the
   unsigned type of their "inner" part.  As a result, many loop optimizations
   are blocked.

   The solution is to use a special (basic) induction variable that is at
   least as large as sizetype, and to express the aforementioned general
   induction variables in terms of this induction variable, eliminating
   the problematic intermediate truncation to the GNAT base index type.
   This is possible as long as the original expression doesn't overflow
   and if the middle-end hasn't introduced artificial overflows in the
   course of the various simplification it can make to the expression.  */

tree
convert_to_index_type (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  tree type = TREE_TYPE (expr);

  /* If the type is unsigned, overflow is allowed so we cannot be sure that
     EXPR doesn't overflow.  Keep it simple if optimization is disabled.  */
  if (TYPE_UNSIGNED (type) || !optimize)
    return convert (sizetype, expr);

  switch (code)
    {
    case VAR_DECL:
      /* The main effect of the function: replace a loop parameter with its
	 associated special induction variable.  */
      if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
	expr = DECL_INDUCTION_VAR (expr);
      break;

    CASE_CONVERT:
      {
	tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
	/* Bail out as soon as we suspect some sort of type frobbing.  */
	if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
	    || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
	  break;
      }

      /* ... fall through ... */

    case NON_LVALUE_EXPR:
      /* Rebuild the conversion on top of the recursively converted
	 operand.  */
      return fold_build1 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Distribute the conversion into both operands of the additive or
	 multiplicative operator.  */
      return fold_build2 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COMPOUND_EXPR:
      /* Only the value operand (the second one) needs converting.  */
      return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COND_EXPR:
      /* Convert both arms and keep the condition as-is.  */
      return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)),
			  convert_to_index_type (TREE_OPERAND (expr, 2)));

    default:
      break;
    }

  /* Fallback: the plain conversion to sizetype.  */
  return convert (sizetype, expr);
}
4599 /* Remove all conversions that are done in EXP. This includes converting
4600 from a padded type or to a justified modular type. If TRUE_ADDRESS
4601 is true, always return the address of the containing object even if
4602 the address is not bit-aligned. */
4604 tree
4605 remove_conversions (tree exp, bool true_address)
4607 switch (TREE_CODE (exp))
4609 case CONSTRUCTOR:
4610 if (true_address
4611 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4612 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4613 return
4614 remove_conversions ((*CONSTRUCTOR_ELTS (exp))[0].value, true);
4615 break;
4617 case COMPONENT_REF:
4618 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4619 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4620 break;
4622 CASE_CONVERT:
4623 case VIEW_CONVERT_EXPR:
4624 case NON_LVALUE_EXPR:
4625 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4627 default:
4628 break;
4631 return exp;
4634 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4635 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4636 likewise return an expression pointing to the underlying array. */
4638 tree
4639 maybe_unconstrained_array (tree exp)
4641 enum tree_code code = TREE_CODE (exp);
4642 tree type = TREE_TYPE (exp);
4644 switch (TREE_CODE (type))
4646 case UNCONSTRAINED_ARRAY_TYPE:
4647 if (code == UNCONSTRAINED_ARRAY_REF)
4649 const bool read_only = TREE_READONLY (exp);
4650 const bool no_trap = TREE_THIS_NOTRAP (exp);
4652 exp = TREE_OPERAND (exp, 0);
4653 type = TREE_TYPE (exp);
4655 if (TREE_CODE (exp) == COND_EXPR)
4657 tree op1
4658 = build_unary_op (INDIRECT_REF, NULL_TREE,
4659 build_component_ref (TREE_OPERAND (exp, 1),
4660 NULL_TREE,
4661 TYPE_FIELDS (type),
4662 false));
4663 tree op2
4664 = build_unary_op (INDIRECT_REF, NULL_TREE,
4665 build_component_ref (TREE_OPERAND (exp, 2),
4666 NULL_TREE,
4667 TYPE_FIELDS (type),
4668 false));
4670 exp = build3 (COND_EXPR,
4671 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
4672 TREE_OPERAND (exp, 0), op1, op2);
4674 else
4676 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
4677 build_component_ref (exp, NULL_TREE,
4678 TYPE_FIELDS (type),
4679 false));
4680 TREE_READONLY (exp) = read_only;
4681 TREE_THIS_NOTRAP (exp) = no_trap;
4685 else if (code == NULL_EXPR)
4686 exp = build1 (NULL_EXPR,
4687 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
4688 TREE_OPERAND (exp, 0));
4689 break;
4691 case RECORD_TYPE:
4692 /* If this is a padded type and it contains a template, convert to the
4693 unpadded type first. */
4694 if (TYPE_PADDING_P (type)
4695 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
4696 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
4698 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4699 type = TREE_TYPE (exp);
4702 if (TYPE_CONTAINS_TEMPLATE_P (type))
4704 exp = build_component_ref (exp, NULL_TREE,
4705 DECL_CHAIN (TYPE_FIELDS (type)),
4706 false);
4707 type = TREE_TYPE (exp);
4709 /* If the array type is padded, convert to the unpadded type. */
4710 if (TYPE_IS_PADDING_P (type))
4711 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4713 break;
4715 default:
4716 break;
4719 return exp;
4722 /* Return true if EXPR is an expression that can be folded as an operand
4723 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4725 static bool
4726 can_fold_for_view_convert_p (tree expr)
4728 tree t1, t2;
4730 /* The folder will fold NOP_EXPRs between integral types with the same
4731 precision (in the middle-end's sense). We cannot allow it if the
4732 types don't have the same precision in the Ada sense as well. */
4733 if (TREE_CODE (expr) != NOP_EXPR)
4734 return true;
4736 t1 = TREE_TYPE (expr);
4737 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4739 /* Defer to the folder for non-integral conversions. */
4740 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4741 return true;
4743 /* Only fold conversions that preserve both precisions. */
4744 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4745 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4746 return true;
4748 return false;
4751 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4752 If NOTRUNC_P is true, truncation operations should be suppressed.
4754 Special care is required with (source or target) integral types whose
4755 precision is not equal to their size, to make sure we fetch or assign
4756 the value bits whose location might depend on the endianness, e.g.
4758 Rmsize : constant := 8;
4759 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4761 type Bit_Array is array (1 .. Rmsize) of Boolean;
4762 pragma Pack (Bit_Array);
4764 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4766 Value : Int := 2#1000_0001#;
4767 Vbits : Bit_Array := To_Bit_Array (Value);
4769 we expect the 8 bits at Vbits'Address to always contain Value, while
4770 their original location depends on the endianness, at Value'Address
4771 on a little-endian architecture but not on a big-endian one. */
4773 tree
4774 unchecked_convert (tree type, tree expr, bool notrunc_p)
4776 tree etype = TREE_TYPE (expr);
4777 enum tree_code ecode = TREE_CODE (etype);
4778 enum tree_code code = TREE_CODE (type);
4779 tree tem;
4780 int c;
4782 /* If the expression is already of the right type, we are done. */
4783 if (etype == type)
4784 return expr;
4786 /* If both types types are integral just do a normal conversion.
4787 Likewise for a conversion to an unconstrained array. */
4788 if (((INTEGRAL_TYPE_P (type)
4789 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
4790 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
4791 && (INTEGRAL_TYPE_P (etype)
4792 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
4793 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
4794 || code == UNCONSTRAINED_ARRAY_TYPE)
4796 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4798 tree ntype = copy_type (etype);
4799 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4800 TYPE_MAIN_VARIANT (ntype) = ntype;
4801 expr = build1 (NOP_EXPR, ntype, expr);
4804 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4806 tree rtype = copy_type (type);
4807 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4808 TYPE_MAIN_VARIANT (rtype) = rtype;
4809 expr = convert (rtype, expr);
4810 expr = build1 (NOP_EXPR, type, expr);
4812 else
4813 expr = convert (type, expr);
4816 /* If we are converting to an integral type whose precision is not equal
4817 to its size, first unchecked convert to a record type that contains an
4818 field of the given precision. Then extract the field. */
4819 else if (INTEGRAL_TYPE_P (type)
4820 && TYPE_RM_SIZE (type)
4821 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4822 GET_MODE_BITSIZE (TYPE_MODE (type))))
4824 tree rec_type = make_node (RECORD_TYPE);
4825 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
4826 tree field_type, field;
4828 if (TYPE_UNSIGNED (type))
4829 field_type = make_unsigned_type (prec);
4830 else
4831 field_type = make_signed_type (prec);
4832 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
4834 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
4835 NULL_TREE, bitsize_zero_node, 1, 0);
4837 finish_record_type (rec_type, field, 1, false);
4839 expr = unchecked_convert (rec_type, expr, notrunc_p);
4840 expr = build_component_ref (expr, NULL_TREE, field, false);
4841 expr = fold_build1 (NOP_EXPR, type, expr);
4844 /* Similarly if we are converting from an integral type whose precision is
4845 not equal to its size, first copy into a field of the given precision
4846 and unchecked convert the record type. */
4847 else if (INTEGRAL_TYPE_P (etype)
4848 && TYPE_RM_SIZE (etype)
4849 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4850 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4852 tree rec_type = make_node (RECORD_TYPE);
4853 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
4854 vec<constructor_elt, va_gc> *v;
4855 vec_alloc (v, 1);
4856 tree field_type, field;
4858 if (TYPE_UNSIGNED (etype))
4859 field_type = make_unsigned_type (prec);
4860 else
4861 field_type = make_signed_type (prec);
4862 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
4864 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
4865 NULL_TREE, bitsize_zero_node, 1, 0);
4867 finish_record_type (rec_type, field, 1, false);
4869 expr = fold_build1 (NOP_EXPR, field_type, expr);
4870 CONSTRUCTOR_APPEND_ELT (v, field, expr);
4871 expr = gnat_build_constructor (rec_type, v);
4872 expr = unchecked_convert (type, expr, notrunc_p);
4875 /* If we are converting from a scalar type to a type with a different size,
4876 we need to pad to have the same size on both sides.
4878 ??? We cannot do it unconditionally because unchecked conversions are
4879 used liberally by the front-end to implement polymorphism, e.g. in:
4881 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
4882 return p___size__4 (p__object!(S191s.all));
4884 so we skip all expressions that are references. */
4885 else if (!REFERENCE_CLASS_P (expr)
4886 && !AGGREGATE_TYPE_P (etype)
4887 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4888 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
4890 if (c < 0)
4892 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4893 false, false, false, true),
4894 expr);
4895 expr = unchecked_convert (type, expr, notrunc_p);
4897 else
4899 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
4900 false, false, false, true);
4901 expr = unchecked_convert (rec_type, expr, notrunc_p);
4902 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
4903 false);
4907 /* We have a special case when we are converting between two unconstrained
4908 array types. In that case, take the address, convert the fat pointer
4909 types, and dereference. */
4910 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
4911 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4912 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4913 build_unary_op (ADDR_EXPR, NULL_TREE,
4914 expr)));
4916 /* Another special case is when we are converting to a vector type from its
4917 representative array type; this a regular conversion. */
4918 else if (code == VECTOR_TYPE
4919 && ecode == ARRAY_TYPE
4920 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4921 etype))
4922 expr = convert (type, expr);
4924 /* And, if the array type is not the representative, we try to build an
4925 intermediate vector type of which the array type is the representative
4926 and to do the unchecked conversion between the vector types, in order
4927 to enable further simplifications in the middle-end. */
4928 else if (code == VECTOR_TYPE
4929 && ecode == ARRAY_TYPE
4930 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
4932 expr = convert (tem, expr);
4933 return unchecked_convert (type, expr, notrunc_p);
4936 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
4937 the alignment of the CONSTRUCTOR to speed up the copy operation. */
4938 else if (TREE_CODE (expr) == CONSTRUCTOR
4939 && code == RECORD_TYPE
4940 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
4942 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
4943 Empty, false, false, false, true),
4944 expr);
4945 return unchecked_convert (type, expr, notrunc_p);
4948 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
4949 else
4951 expr = maybe_unconstrained_array (expr);
4952 etype = TREE_TYPE (expr);
4953 ecode = TREE_CODE (etype);
4954 if (can_fold_for_view_convert_p (expr))
4955 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4956 else
4957 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4960 /* If the result is an integral type whose precision is not equal to its
4961 size, sign- or zero-extend the result. We need not do this if the input
4962 is an integral type of the same precision and signedness or if the output
4963 is a biased type or if both the input and output are unsigned. */
4964 if (!notrunc_p
4965 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4966 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4967 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4968 GET_MODE_BITSIZE (TYPE_MODE (type)))
4969 && !(INTEGRAL_TYPE_P (etype)
4970 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4971 && operand_equal_p (TYPE_RM_SIZE (type),
4972 (TYPE_RM_SIZE (etype) != 0
4973 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4975 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4977 tree base_type
4978 = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
4979 tree shift_expr
4980 = convert (base_type,
4981 size_binop (MINUS_EXPR,
4982 bitsize_int
4983 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4984 TYPE_RM_SIZE (type)));
4985 expr
4986 = convert (type,
4987 build_binary_op (RSHIFT_EXPR, base_type,
4988 build_binary_op (LSHIFT_EXPR, base_type,
4989 convert (base_type, expr),
4990 shift_expr),
4991 shift_expr));
4994 /* An unchecked conversion should never raise Constraint_Error. The code
4995 below assumes that GCC's conversion routines overflow the same way that
4996 the underlying hardware does. This is probably true. In the rare case
4997 when it is false, we can rely on the fact that such conversions are
4998 erroneous anyway. */
4999 if (TREE_CODE (expr) == INTEGER_CST)
5000 TREE_OVERFLOW (expr) = 0;
5002 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
5003 show no longer constant. */
5004 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5005 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5006 OEP_ONLY_CONST))
5007 TREE_CONSTANT (expr) = 0;
5009 return expr;
5012 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5013 the latter being a record type as predicated by Is_Record_Type. */
5015 enum tree_code
5016 tree_code_for_record_type (Entity_Id gnat_type)
5018 Node_Id component_list, component;
5020 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5021 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5022 if (!Is_Unchecked_Union (gnat_type))
5023 return RECORD_TYPE;
5025 gnat_type = Implementation_Base_Type (gnat_type);
5026 component_list
5027 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5029 for (component = First_Non_Pragma (Component_Items (component_list));
5030 Present (component);
5031 component = Next_Non_Pragma (component))
5032 if (Ekind (Defining_Entity (component)) == E_Component)
5033 return RECORD_TYPE;
5035 return UNION_TYPE;
5038 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5039 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5040 according to the presence of an alignment clause on the type or, if it
5041 is an array, on the component type. */
5043 bool
5044 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5046 gnat_type = Underlying_Type (gnat_type);
5048 *align_clause = Present (Alignment_Clause (gnat_type));
5050 if (Is_Array_Type (gnat_type))
5052 gnat_type = Underlying_Type (Component_Type (gnat_type));
5053 if (Present (Alignment_Clause (gnat_type)))
5054 *align_clause = true;
5057 if (!Is_Floating_Point_Type (gnat_type))
5058 return false;
5060 if (UI_To_Int (Esize (gnat_type)) != 64)
5061 return false;
5063 return true;
5066 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5067 size is greater or equal to 64 bits, or an array of such a type. Set
5068 ALIGN_CLAUSE according to the presence of an alignment clause on the
5069 type or, if it is an array, on the component type. */
5071 bool
5072 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5074 gnat_type = Underlying_Type (gnat_type);
5076 *align_clause = Present (Alignment_Clause (gnat_type));
5078 if (Is_Array_Type (gnat_type))
5080 gnat_type = Underlying_Type (Component_Type (gnat_type));
5081 if (Present (Alignment_Clause (gnat_type)))
5082 *align_clause = true;
5085 if (!Is_Scalar_Type (gnat_type))
5086 return false;
5088 if (UI_To_Int (Esize (gnat_type)) < 64)
5089 return false;
5091 return true;
5094 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5095 component of an aggregate type. */
5097 bool
5098 type_for_nonaliased_component_p (tree gnu_type)
5100 /* If the type is passed by reference, we may have pointers to the
5101 component so it cannot be made non-aliased. */
5102 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5103 return false;
5105 /* We used to say that any component of aggregate type is aliased
5106 because the front-end may take 'Reference of it. The front-end
5107 has been enhanced in the meantime so as to use a renaming instead
5108 in most cases, but the back-end can probably take the address of
5109 such a component too so we go for the conservative stance.
5111 For instance, we might need the address of any array type, even
5112 if normally passed by copy, to construct a fat pointer if the
5113 component is used as an actual for an unconstrained formal.
5115 Likewise for record types: even if a specific record subtype is
5116 passed by copy, the parent type might be passed by ref (e.g. if
5117 it's of variable size) and we might take the address of a child
5118 component to pass to a parent formal. We have no way to check
5119 for such conditions here. */
5120 if (AGGREGATE_TYPE_P (gnu_type))
5121 return false;
5123 return true;
5126 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5128 bool
5129 smaller_form_type_p (tree type, tree orig_type)
5131 tree size, osize;
5133 /* We're not interested in variants here. */
5134 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5135 return false;
5137 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5138 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5139 return false;
5141 size = TYPE_SIZE (type);
5142 osize = TYPE_SIZE (orig_type);
5144 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5145 return false;
5147 return tree_int_cst_lt (size, osize) != 0;
5150 /* Perform final processing on global variables. */
5152 static GTY (()) tree dummy_global;
5154 void
5155 gnat_write_global_declarations (void)
5157 unsigned int i;
5158 tree iter;
5160 /* If we have declared types as used at the global level, insert them in
5161 the global hash table. We use a dummy variable for this purpose. */
5162 if (types_used_by_cur_var_decl && !types_used_by_cur_var_decl->is_empty ())
5164 struct varpool_node *node;
5165 char *label;
5167 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
5168 dummy_global
5169 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5170 void_type_node);
5171 DECL_HARD_REGISTER (dummy_global) = 1;
5172 TREE_STATIC (dummy_global) = 1;
5173 node = varpool_node::get_create (dummy_global);
5174 node->definition = 1;
5175 node->force_output = 1;
5177 while (!types_used_by_cur_var_decl->is_empty ())
5179 tree t = types_used_by_cur_var_decl->pop ();
5180 types_used_by_var_decl_insert (t, dummy_global);
5184 /* Output debug information for all global type declarations first. This
5185 ensures that global types whose compilation hasn't been finalized yet,
5186 for example pointers to Taft amendment types, have their compilation
5187 finalized in the right context. */
5188 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5189 if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
5190 debug_hooks->global_decl (iter);
5192 /* Proceed to optimize and emit assembly. */
5193 symtab->finalize_compilation_unit ();
5195 /* After cgraph has had a chance to emit everything that's going to
5196 be emitted, output debug information for the rest of globals. */
5197 if (!seen_error ())
5199 timevar_push (TV_SYMOUT);
5200 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5201 if (TREE_CODE (iter) != TYPE_DECL && !DECL_IGNORED_P (iter))
5202 debug_hooks->global_decl (iter);
5203 timevar_pop (TV_SYMOUT);
5207 /* ************************************************************************
5208 * * GCC builtins support *
5209 * ************************************************************************ */
5211 /* The general scheme is fairly simple:
5213 For each builtin function/type to be declared, gnat_install_builtins calls
5214 internal facilities which eventually get to gnat_push_decl, which in turn
5215 tracks the so declared builtin function decls in the 'builtin_decls' global
5216 datastructure. When an Intrinsic subprogram declaration is processed, we
5217 search this global datastructure to retrieve the associated BUILT_IN DECL
5218 node. */
5220 /* Search the chain of currently available builtin declarations for a node
5221 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5222 found, if any, or NULL_TREE otherwise. */
5223 tree
5224 builtin_decl_for (tree name)
5226 unsigned i;
5227 tree decl;
5229 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5230 if (DECL_NAME (decl) == name)
5231 return decl;
5233 return NULL_TREE;
5236 /* The code below eventually exposes gnat_install_builtins, which declares
5237 the builtin types and functions we might need, either internally or as
5238 user accessible facilities.
5240 ??? This is a first implementation shot, still in rough shape. It is
5241 heavily inspired from the "C" family implementation, with chunks copied
5242 verbatim from there.
5244 Two obvious TODO candidates are
5245 o Use a more efficient name/decl mapping scheme
5246 o Devise a middle-end infrastructure to avoid having to copy
5247 pieces between front-ends. */
5249 /* ----------------------------------------------------------------------- *
5250 * BUILTIN ELEMENTARY TYPES *
5251 * ----------------------------------------------------------------------- */
5253 /* Standard data types to be used in builtin argument declarations. */
5255 enum c_tree_index
5257 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5258 CTI_STRING_TYPE,
5259 CTI_CONST_STRING_TYPE,
5261 CTI_MAX
5264 static tree c_global_trees[CTI_MAX];
5266 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5267 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5268 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5270 /* ??? In addition some attribute handlers, we currently don't support a
5271 (small) number of builtin-types, which in turns inhibits support for a
5272 number of builtin functions. */
5273 #define wint_type_node void_type_node
5274 #define intmax_type_node void_type_node
5275 #define uintmax_type_node void_type_node
5277 /* Build the void_list_node (void_type_node having been created). */
5279 static tree
5280 build_void_list_node (void)
5282 tree t = build_tree_list (NULL_TREE, void_type_node);
5283 return t;
5286 /* Used to help initialize the builtin-types.def table. When a type of
5287 the correct size doesn't exist, use error_mark_node instead of NULL.
5288 The later results in segfaults even when a decl using the type doesn't
5289 get invoked. */
5291 static tree
5292 builtin_type_for_size (int size, bool unsignedp)
5294 tree type = gnat_type_for_size (size, unsignedp);
5295 return type ? type : error_mark_node;
5298 /* Build/push the elementary type decls that builtin functions/types
5299 will need. */
5301 static void
5302 install_builtin_elementary_types (void)
5304 signed_size_type_node = gnat_signed_type (size_type_node);
5305 pid_type_node = integer_type_node;
5306 void_list_node = build_void_list_node ();
5308 string_type_node = build_pointer_type (char_type_node);
5309 const_string_type_node
5310 = build_pointer_type (build_qualified_type
5311 (char_type_node, TYPE_QUAL_CONST));
5314 /* ----------------------------------------------------------------------- *
5315 * BUILTIN FUNCTION TYPES *
5316 * ----------------------------------------------------------------------- */
5318 /* Now, builtin function types per se. */
5320 enum c_builtin_type
5322 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5323 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5324 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5325 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5326 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5327 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5328 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5329 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5330 ARG6) NAME,
5331 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5332 ARG6, ARG7) NAME,
5333 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5334 ARG6, ARG7, ARG8) NAME,
5335 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5336 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5337 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5338 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5339 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5340 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5341 NAME,
5342 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5343 #include "builtin-types.def"
5344 #undef DEF_PRIMITIVE_TYPE
5345 #undef DEF_FUNCTION_TYPE_0
5346 #undef DEF_FUNCTION_TYPE_1
5347 #undef DEF_FUNCTION_TYPE_2
5348 #undef DEF_FUNCTION_TYPE_3
5349 #undef DEF_FUNCTION_TYPE_4
5350 #undef DEF_FUNCTION_TYPE_5
5351 #undef DEF_FUNCTION_TYPE_6
5352 #undef DEF_FUNCTION_TYPE_7
5353 #undef DEF_FUNCTION_TYPE_8
5354 #undef DEF_FUNCTION_TYPE_VAR_0
5355 #undef DEF_FUNCTION_TYPE_VAR_1
5356 #undef DEF_FUNCTION_TYPE_VAR_2
5357 #undef DEF_FUNCTION_TYPE_VAR_3
5358 #undef DEF_FUNCTION_TYPE_VAR_4
5359 #undef DEF_FUNCTION_TYPE_VAR_5
5360 #undef DEF_POINTER_TYPE
5361 BT_LAST
5364 typedef enum c_builtin_type builtin_type;
5366 /* A temporary array used in communication with def_fn_type. */
5367 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5369 /* A helper function for install_builtin_types. Build function type
5370 for DEF with return type RET and N arguments. If VAR is true, then the
5371 function should be variadic after those N arguments.
5373 Takes special care not to ICE if any of the types involved are
5374 error_mark_node, which indicates that said type is not in fact available
5375 (see builtin_type_for_size). In which case the function type as a whole
5376 should be error_mark_node. */
5378 static void
5379 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5381 tree t;
5382 tree *args = XALLOCAVEC (tree, n);
5383 va_list list;
5384 int i;
5386 va_start (list, n);
5387 for (i = 0; i < n; ++i)
5389 builtin_type a = (builtin_type) va_arg (list, int);
5390 t = builtin_types[a];
5391 if (t == error_mark_node)
5392 goto egress;
5393 args[i] = t;
5396 t = builtin_types[ret];
5397 if (t == error_mark_node)
5398 goto egress;
5399 if (var)
5400 t = build_varargs_function_type_array (t, n, args);
5401 else
5402 t = build_function_type_array (t, n, args);
5404 egress:
5405 builtin_types[def] = t;
5406 va_end (list);
5409 /* Build the builtin function types and install them in the builtin_types
5410 array for later use in builtin function decls. */
5412 static void
5413 install_builtin_function_types (void)
5415 tree va_list_ref_type_node;
5416 tree va_list_arg_type_node;
5418 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5420 va_list_arg_type_node = va_list_ref_type_node =
5421 build_pointer_type (TREE_TYPE (va_list_type_node));
5423 else
5425 va_list_arg_type_node = va_list_type_node;
5426 va_list_ref_type_node = build_reference_type (va_list_type_node);
5429 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5430 builtin_types[ENUM] = VALUE;
5431 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5432 def_fn_type (ENUM, RETURN, 0, 0);
5433 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5434 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5435 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5436 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5437 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5438 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5439 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5440 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5441 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5442 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5443 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5444 ARG6) \
5445 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5446 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5447 ARG6, ARG7) \
5448 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5449 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5450 ARG6, ARG7, ARG8) \
5451 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5452 ARG7, ARG8);
5453 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5454 def_fn_type (ENUM, RETURN, 1, 0);
5455 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5456 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5457 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5458 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5459 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5460 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5461 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5462 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5463 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5464 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5465 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5466 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5468 #include "builtin-types.def"
5470 #undef DEF_PRIMITIVE_TYPE
5471 #undef DEF_FUNCTION_TYPE_0
5472 #undef DEF_FUNCTION_TYPE_1
5473 #undef DEF_FUNCTION_TYPE_2
5474 #undef DEF_FUNCTION_TYPE_3
5475 #undef DEF_FUNCTION_TYPE_4
5476 #undef DEF_FUNCTION_TYPE_5
5477 #undef DEF_FUNCTION_TYPE_6
5478 #undef DEF_FUNCTION_TYPE_7
5479 #undef DEF_FUNCTION_TYPE_8
5480 #undef DEF_FUNCTION_TYPE_VAR_0
5481 #undef DEF_FUNCTION_TYPE_VAR_1
5482 #undef DEF_FUNCTION_TYPE_VAR_2
5483 #undef DEF_FUNCTION_TYPE_VAR_3
5484 #undef DEF_FUNCTION_TYPE_VAR_4
5485 #undef DEF_FUNCTION_TYPE_VAR_5
5486 #undef DEF_POINTER_TYPE
5487 builtin_types[(int) BT_LAST] = NULL_TREE;
5490 /* ----------------------------------------------------------------------- *
5491 * BUILTIN ATTRIBUTES *
5492 * ----------------------------------------------------------------------- */
/* Enumerate the attribute lists defined by builtin-attrs.def: each
   DEF_ATTR_* entry there contributes one enumerator, and ATTR_LAST gives
   the count used to size the table below.  */
enum built_in_attribute
{
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
  ATTR_LAST
};

/* Table of attribute trees, indexed by built_in_attribute; filled in by
   install_builtin_attributes below and rooted for the GC via GTY.  */
static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
/* Fill in the built_in_attributes table from builtin-attrs.def.  Each
   DEF_ATTR_* macro builds the tree node for one enumerator; note that
   DEF_ATTR_TREE_LIST reads the PURPOSE/VALUE/CHAIN slots of the table,
   so the entries it references must appear earlier in the .def file.  */

static void
install_builtin_attributes (void)
{
  /* Fill in the built_in_attributes array.  */
#define DEF_ATTR_NULL_TREE(ENUM)				\
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_STRING(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING)				\
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN)	\
  built_in_attributes[(int) ENUM]			\
    = tree_cons (built_in_attributes[(int) PURPOSE],	\
		 built_in_attributes[(int) VALUE],	\
		 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
}
5537 /* Handle a "const" attribute; arguments as in
5538 struct attribute_spec.handler. */
5540 static tree
5541 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5542 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5543 bool *no_add_attrs)
5545 if (TREE_CODE (*node) == FUNCTION_DECL)
5546 TREE_READONLY (*node) = 1;
5547 else
5548 *no_add_attrs = true;
5550 return NULL_TREE;
5553 /* Handle a "nothrow" attribute; arguments as in
5554 struct attribute_spec.handler. */
5556 static tree
5557 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5558 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5559 bool *no_add_attrs)
5561 if (TREE_CODE (*node) == FUNCTION_DECL)
5562 TREE_NOTHROW (*node) = 1;
5563 else
5564 *no_add_attrs = true;
5566 return NULL_TREE;
5569 /* Handle a "pure" attribute; arguments as in
5570 struct attribute_spec.handler. */
5572 static tree
5573 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5574 int ARG_UNUSED (flags), bool *no_add_attrs)
5576 if (TREE_CODE (*node) == FUNCTION_DECL)
5577 DECL_PURE_P (*node) = 1;
5578 /* ??? TODO: Support types. */
5579 else
5581 warning (OPT_Wattributes, "%qs attribute ignored",
5582 IDENTIFIER_POINTER (name));
5583 *no_add_attrs = true;
5586 return NULL_TREE;
/* Handle a "no vops" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
			 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			 bool *ARG_UNUSED (no_add_attrs))
{
  /* Unlike most handlers in this file, a non-function target is treated
     as a compiler bug (assert) rather than a user error -- presumably
     because this attribute is only applied internally.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
  DECL_IS_NOVOPS (*node) = 1;
  return NULL_TREE;
}
5602 /* Helper for nonnull attribute handling; fetch the operand number
5603 from the attribute argument list. */
5605 static bool
5606 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5608 /* Verify the arg number is a constant. */
5609 if (!tree_fits_uhwi_p (arg_num_expr))
5610 return false;
5612 *valp = TREE_INT_CST_LOW (arg_num_expr);
5613 return true;
5616 /* Handle the "nonnull" attribute. */
5617 static tree
5618 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5619 tree args, int ARG_UNUSED (flags),
5620 bool *no_add_attrs)
5622 tree type = *node;
5623 unsigned HOST_WIDE_INT attr_arg_num;
5625 /* If no arguments are specified, all pointer arguments should be
5626 non-null. Verify a full prototype is given so that the arguments
5627 will have the correct types when we actually check them later. */
5628 if (!args)
5630 if (!prototype_p (type))
5632 error ("nonnull attribute without arguments on a non-prototype");
5633 *no_add_attrs = true;
5635 return NULL_TREE;
5638 /* Argument list specified. Verify that each argument number references
5639 a pointer argument. */
5640 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5642 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5644 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5646 error ("nonnull argument has invalid operand number (argument %lu)",
5647 (unsigned long) attr_arg_num);
5648 *no_add_attrs = true;
5649 return NULL_TREE;
5652 if (prototype_p (type))
5654 function_args_iterator iter;
5655 tree argument;
5657 function_args_iter_init (&iter, type);
5658 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
5660 argument = function_args_iter_cond (&iter);
5661 if (!argument || ck_num == arg_num)
5662 break;
5665 if (!argument
5666 || TREE_CODE (argument) == VOID_TYPE)
5668 error ("nonnull argument with out-of-range operand number "
5669 "(argument %lu, operand %lu)",
5670 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5671 *no_add_attrs = true;
5672 return NULL_TREE;
5675 if (TREE_CODE (argument) != POINTER_TYPE)
5677 error ("nonnull argument references non-pointer operand "
5678 "(argument %lu, operand %lu)",
5679 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5680 *no_add_attrs = true;
5681 return NULL_TREE;
5686 return NULL_TREE;
/* Handle a "sentinel" attribute.  NODE is the function type; ARGS, if
   present, holds the requested sentinel position.  Warn and set
   *NO_ADD_ATTRS when the attribute cannot apply.  */

static tree
handle_sentinel_attribute (tree *node, tree name, tree args,
			   int ARG_UNUSED (flags), bool *no_add_attrs)
{
  /* The sentinel check needs typed, named arguments, so require a full
     prototype...  */
  if (!prototype_p (*node))
    {
      warning (OPT_Wattributes,
	       "%qs attribute requires prototypes with named arguments",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }
  else
    {
      /* ... and the function must be variadic, since the sentinel is
	 counted among the "..." arguments.  */
      if (!stdarg_p (*node))
	{
	  warning (OPT_Wattributes,
		   "%qs attribute only applies to variadic functions",
		   IDENTIFIER_POINTER (name));
	  *no_add_attrs = true;
	}
    }

  /* An explicit position, when given, must be a non-negative integer
     constant.  Note this is checked even if a warning was already
     issued above.  */
  if (args)
    {
      tree position = TREE_VALUE (args);

      if (TREE_CODE (position) != INTEGER_CST)
	{
	  warning (0, "requested position is not an integer constant");
	  *no_add_attrs = true;
	}
      else
	{
	  if (tree_int_cst_lt (position, integer_zero_node))
	    {
	      warning (0, "requested position is less than zero");
	      *no_add_attrs = true;
	    }
	}
    }

  return NULL_TREE;
}
/* Handle a "noreturn" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			   int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = TREE_TYPE (*node);

  /* See FIXME comment in c_common_attribute_table.  */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    /* On a function declaration, "noreturn" is recorded by setting
       TREE_THIS_VOLATILE.  */
    TREE_THIS_VOLATILE (*node) = 1;
  else if (TREE_CODE (type) == POINTER_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
    /* On a pointer to function, rebuild the pointee as a volatile
       variant of the function type, preserving its readonly bit.  */
    TREE_TYPE (*node)
      = build_pointer_type
	(build_type_variant (TREE_TYPE (type),
			     TYPE_READONLY (TREE_TYPE (type)), 1));
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
/* Handle a "leaf" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
		       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }
  /* NOTE(review): this test is deliberately not chained with "else", so
     a non-public non-function draws both warnings -- confirm this
     matches the c-family handler it mirrors.  TREE_PUBLIC is a common
     tree flag, so reading it on a non-decl is safe.  */
  if (!TREE_PUBLIC (*node))
    {
      warning (OPT_Wattributes, "%qE attribute has no effect", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
5784 /* Handle a "always_inline" attribute; arguments as in
5785 struct attribute_spec.handler. */
5787 static tree
5788 handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5789 int ARG_UNUSED (flags), bool *no_add_attrs)
5791 if (TREE_CODE (*node) == FUNCTION_DECL)
5793 /* Set the attribute and mark it for disregarding inline limits. */
5794 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
5796 else
5798 warning (OPT_Wattributes, "%qE attribute ignored", name);
5799 *no_add_attrs = true;
5802 return NULL_TREE;
5805 /* Handle a "malloc" attribute; arguments as in
5806 struct attribute_spec.handler. */
5808 static tree
5809 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5810 int ARG_UNUSED (flags), bool *no_add_attrs)
5812 if (TREE_CODE (*node) == FUNCTION_DECL
5813 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5814 DECL_IS_MALLOC (*node) = 1;
5815 else
5817 warning (OPT_Wattributes, "%qs attribute ignored",
5818 IDENTIFIER_POINTER (name));
5819 *no_add_attrs = true;
5822 return NULL_TREE;
5825 /* Fake handler for attributes we don't properly support. */
5827 tree
5828 fake_attribute_handler (tree * ARG_UNUSED (node),
5829 tree ARG_UNUSED (name),
5830 tree ARG_UNUSED (args),
5831 int ARG_UNUSED (flags),
5832 bool * ARG_UNUSED (no_add_attrs))
5834 return NULL_TREE;
/* Handle a "type_generic" attribute.  Both checks are asserts rather
   than user diagnostics -- presumably because this attribute is only
   attached internally to builtins.  */

static tree
handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
			       tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			       bool * ARG_UNUSED (no_add_attrs))
{
  /* Ensure we have a function type.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);

  /* Ensure we have a variadic function.  */
  gcc_assert (!prototype_p (*node) || stdarg_p (*node));

  return NULL_TREE;
}
/* Handle a "vector_size" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_vector_size_attribute (tree *node, tree name, tree args,
			      int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = *node;
  tree vector_type;

  /* The attribute is implemented by rewriting the type itself, so it is
     never kept on the attribute list.  */
  *no_add_attrs = true;

  /* We need to provide for vector pointers, vector arrays, and
     functions returning vectors.  For example:

       __attribute__((vector_size(16))) short *foo;

     In this case, the mode is SI, but the type being modified is
     HI, so we need to look further.  */
  while (POINTER_TYPE_P (type)
	 || TREE_CODE (type) == FUNCTION_TYPE
	 || TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  /* A null result means the size was rejected; assumes the builder has
     already emitted its own diagnostic -- TODO confirm.  */
  vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
  if (!vector_type)
    return NULL_TREE;

  /* Build back pointers if needed.  */
  *node = reconstruct_complex_type (*node, vector_type);

  return NULL_TREE;
}
5887 /* Handle a "vector_type" attribute; arguments as in
5888 struct attribute_spec.handler. */
5890 static tree
5891 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5892 int ARG_UNUSED (flags), bool *no_add_attrs)
5894 tree type = *node;
5895 tree vector_type;
5897 *no_add_attrs = true;
5899 if (TREE_CODE (type) != ARRAY_TYPE)
5901 error ("attribute %qs applies to array types only",
5902 IDENTIFIER_POINTER (name));
5903 return NULL_TREE;
5906 vector_type = build_vector_type_for_array (type, name);
5907 if (!vector_type)
5908 return NULL_TREE;
5910 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
5911 *node = vector_type;
5913 return NULL_TREE;
5916 /* ----------------------------------------------------------------------- *
5917 * BUILTIN FUNCTIONS *
5918 * ----------------------------------------------------------------------- */
5920 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5921 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5922 if nonansi_p and flag_no_nonansi_builtin. */
5924 static void
5925 def_builtin_1 (enum built_in_function fncode,
5926 const char *name,
5927 enum built_in_class fnclass,
5928 tree fntype, tree libtype,
5929 bool both_p, bool fallback_p,
5930 bool nonansi_p ATTRIBUTE_UNUSED,
5931 tree fnattrs, bool implicit_p)
5933 tree decl;
5934 const char *libname;
5936 /* Preserve an already installed decl. It most likely was setup in advance
5937 (e.g. as part of the internal builtins) for specific reasons. */
5938 if (builtin_decl_explicit (fncode) != NULL_TREE)
5939 return;
5941 gcc_assert ((!both_p && !fallback_p)
5942 || !strncmp (name, "__builtin_",
5943 strlen ("__builtin_")));
5945 libname = name + strlen ("__builtin_");
5946 decl = add_builtin_function (name, fntype, fncode, fnclass,
5947 (fallback_p ? libname : NULL),
5948 fnattrs);
5949 if (both_p)
5950 /* ??? This is normally further controlled by command-line options
5951 like -fno-builtin, but we don't have them for Ada. */
5952 add_builtin_function (libname, libtype, fncode, fnclass,
5953 NULL, fnattrs);
5955 set_builtin_decl (fncode, decl, implicit_p);
/* C-dialect flags expected by the DEF_BUILTIN conditions included below.
   Ada has no corresponding command-line options, so they are hardwired
   to 0 -- presumably disabling the C94/C99/C11-only entries of
   builtins.def; verify against that file.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
static int flag_isoc11 = 0;
/* Install what the common builtins.def offers: expand each DEF_BUILTIN
   entry into a guarded call to def_builtin_1, indexing the builtin_types
   and built_in_attributes tables built earlier in this file.  */

static void
install_builtin_functions (void)
{
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
		    NONANSI_P, ATTRS, IMPLICIT, COND)			\
  if (NAME && COND)							\
    def_builtin_1 (ENUM, NAME, CLASS,					\
		   builtin_types[(int) TYPE],				\
		   builtin_types[(int) LIBTYPE],			\
		   BOTH_P, FALLBACK_P, NONANSI_P,			\
		   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
#undef DEF_BUILTIN
}
/* ----------------------------------------------------------------------- *
 *                        BUILTIN INSTALLATION ENTRY POINT                 *
 * ----------------------------------------------------------------------- */
/* Install the builtin functions we might need.  The type and attribute
   tables are filled first, since the installation steps below index into
   them.  */

void
gnat_install_builtins (void)
{
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}
6004 #include "gt-ada-utils.h"
6005 #include "gtype-ada.h"