/* Provenance (from git web viewer capture):
   svn merge -r215707:216846 svn+ssh://gcc.gnu.org/svn/gcc/trunk
   [official-gcc.git] / gcc / ada / gcc-interface / utils.c
   blob 4289c5f1fa82c6bb5feba539e63c2e4a38551a22  */
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2014, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "tree.h"
31 #include "stringpool.h"
32 #include "stor-layout.h"
33 #include "attribs.h"
34 #include "varasm.h"
35 #include "flags.h"
36 #include "toplev.h"
37 #include "diagnostic-core.h"
38 #include "output.h"
39 #include "ggc.h"
40 #include "debug.h"
41 #include "convert.h"
42 #include "target.h"
43 #include "common/common-target.h"
44 #include "langhooks.h"
45 #include "hash-map.h"
46 #include "is-a.h"
47 #include "plugin-api.h"
48 #include "vec.h"
49 #include "hashtab.h"
50 #include "hash-set.h"
51 #include "machmode.h"
52 #include "hard-reg-set.h"
53 #include "input.h"
54 #include "function.h"
55 #include "ipa-ref.h"
56 #include "cgraph.h"
57 #include "diagnostic.h"
58 #include "timevar.h"
59 #include "tree-dump.h"
60 #include "tree-inline.h"
61 #include "tree-iterator.h"
63 #include "ada.h"
64 #include "types.h"
65 #include "atree.h"
66 #include "elists.h"
67 #include "namet.h"
68 #include "nlists.h"
69 #include "stringt.h"
70 #include "uintp.h"
71 #include "fe.h"
72 #include "sinfo.h"
73 #include "einfo.h"
74 #include "ada-tree.h"
75 #include "gigi.h"
77 /* If nonzero, pretend we are allocating at global level. */
78 int force_global;
80 /* The default alignment of "double" floating-point types, i.e. floating
81 point types whose size is equal to 64 bits, or 0 if this alignment is
82 not specifically capped. */
83 int double_float_alignment;
85 /* The default alignment of "double" or larger scalar types, i.e. scalar
86 types whose size is greater or equal to 64 bits, or 0 if this alignment
87 is not specifically capped. */
88 int double_scalar_alignment;
90 /* True if floating-point arithmetics may use wider intermediate results. */
91 bool fp_arith_may_widen = true;
93 /* Tree nodes for the various types and decls we create. */
94 tree gnat_std_decls[(int) ADT_LAST];
96 /* Functions to call for each of the possible raise reasons. */
97 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
99 /* Likewise, but with extra info for each of the possible raise reasons. */
100 tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
102 /* Forward declarations for handlers of attributes. */
103 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
104 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
105 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
106 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
107 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
108 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
109 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
110 static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
111 static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
112 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
113 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
114 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
115 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
117 /* Fake handler for attributes we don't properly support, typically because
118 they'd require dragging a lot of the common-c front-end circuitry. */
119 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
121 /* Table of machine-independent internal attributes for Ada. We support
122 this minimal set of attributes to accommodate the needs of builtins. */
123 const struct attribute_spec gnat_internal_attribute_table[] =
125 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
126 affects_type_identity } */
127 { "const", 0, 0, true, false, false, handle_const_attribute,
128 false },
129 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute,
130 false },
131 { "pure", 0, 0, true, false, false, handle_pure_attribute,
132 false },
133 { "no vops", 0, 0, true, false, false, handle_novops_attribute,
134 false },
135 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute,
136 false },
137 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute,
138 false },
139 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute,
140 false },
141 { "leaf", 0, 0, true, false, false, handle_leaf_attribute,
142 false },
143 { "always_inline",0, 0, true, false, false, handle_always_inline_attribute,
144 false },
145 { "malloc", 0, 0, true, false, false, handle_malloc_attribute,
146 false },
147 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute,
148 false },
150 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute,
151 false },
152 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute,
153 false },
154 { "may_alias", 0, 0, false, true, false, NULL, false },
156 /* ??? format and format_arg are heavy and not supported, which actually
157 prevents support for stdio builtins, which we however declare as part
158 of the common builtins.def contents. */
159 { "format", 3, 3, false, true, true, fake_attribute_handler, false },
160 { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },
162 { NULL, 0, 0, false, false, false, NULL, false }
165 /* Associates a GNAT tree node to a GCC tree node. It is used in
166 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
167 of `save_gnu_tree' for more info. */
168 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
170 #define GET_GNU_TREE(GNAT_ENTITY) \
171 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
173 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
174 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
176 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
177 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
179 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
180 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
182 #define GET_DUMMY_NODE(GNAT_ENTITY) \
183 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
185 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
186 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
188 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
189 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
191 /* This variable keeps a table for types for each precision so that we only
192 allocate each of them once. Signed and unsigned types are kept separate.
194 Note that these types are only used when fold-const requests something
195 special. Perhaps we should NOT share these types; we'll see how it
196 goes later. */
197 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
199 /* Likewise for float types, but record these by mode. */
200 static GTY(()) tree float_types[NUM_MACHINE_MODES];
202 /* For each binding contour we allocate a binding_level structure to indicate
203 the binding depth. */
205 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
206 /* The binding level containing this one (the enclosing binding level). */
207 struct gnat_binding_level *chain;
208 /* The BLOCK node for this level. */
209 tree block;
210 /* If nonzero, the setjmp buffer that needs to be updated for any
211 variable-sized definition within this context. */
212 tree jmpbuf_decl;
215 /* The binding level currently in effect. */
216 static GTY(()) struct gnat_binding_level *current_binding_level;
218 /* A chain of gnat_binding_level structures awaiting reuse. */
219 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
221 /* The context to be used for global declarations. */
222 static GTY(()) tree global_context;
224 /* An array of global declarations. */
225 static GTY(()) vec<tree, va_gc> *global_decls;
227 /* An array of builtin function declarations. */
228 static GTY(()) vec<tree, va_gc> *builtin_decls;
230 /* An array of global renaming pointers. */
231 static GTY(()) vec<tree, va_gc> *global_renaming_pointers;
233 /* A chain of unused BLOCK nodes. */
234 static GTY((deletable)) tree free_block_chain;
236 static int pad_type_hash_marked_p (const void *p);
237 static hashval_t pad_type_hash_hash (const void *p);
238 static int pad_type_hash_eq (const void *p1, const void *p2);
240 /* A hash table of padded types. It is modelled on the generic type
241 hash table in tree.c, which must thus be used as a reference. */
242 struct GTY(()) pad_type_hash {
243 unsigned long hash;
244 tree type;
247 static GTY ((if_marked ("pad_type_hash_marked_p"),
248 param_is (struct pad_type_hash)))
249 htab_t pad_type_hash_table;
251 static tree merge_sizes (tree, tree, tree, bool, bool);
252 static tree compute_related_constant (tree, tree);
253 static tree split_plus (tree, tree *);
254 static tree float_type_for_precision (int, machine_mode);
255 static tree convert_to_fat_pointer (tree, tree);
256 static unsigned int scale_by_factor_of (tree, unsigned int);
257 static bool potential_alignment_gap (tree, tree, tree);
259 /* A linked list used as a queue to defer the initialization of the
260 DECL_CONTEXT attribute of ..._DECL nodes and of the TYPE_CONTEXT attribute
261 of ..._TYPE nodes. */
262 struct deferred_decl_context_node
264 tree decl; /* The ..._DECL node to work on. */
265 Entity_Id gnat_scope; /* The corresponding entity's Scope attribute. */
266 int force_global; /* force_global value when pushing DECL. */
267 vec<tree, va_heap, vl_ptr> types; /* A list of ..._TYPE nodes to propagate the
268 context to. */
269 struct deferred_decl_context_node *next; /* The next queue item. */
272 static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;
274 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
275 feed it with the elaboration of GNAT_SCOPE. */
276 static struct deferred_decl_context_node *
277 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);
279 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
280 feed it with the DECL_CONTEXT computed as part of N as soon as it is
281 computed. */
282 static void add_deferred_type_context (struct deferred_decl_context_node *n,
283 tree type);
285 /* Initialize data structures of the utils.c module. */
287 void
288 init_gnat_utils (void)
290 /* Initialize the association of GNAT nodes to GCC trees. */
291 associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
293 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
294 dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
296 /* Initialize the hash table of padded types. */
297 pad_type_hash_table
298 = htab_create_ggc (512, pad_type_hash_hash, pad_type_hash_eq, 0);
301 /* Destroy data structures of the utils.c module. */
303 void
304 destroy_gnat_utils (void)
306 /* Destroy the association of GNAT nodes to GCC trees. */
307 ggc_free (associate_gnat_to_gnu);
308 associate_gnat_to_gnu = NULL;
310 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
311 ggc_free (dummy_node_table);
312 dummy_node_table = NULL;
314 /* Destroy the hash table of padded types. */
315 htab_delete (pad_type_hash_table);
316 pad_type_hash_table = NULL;
318 /* Invalidate the global renaming pointers. */
319 invalidate_global_renaming_pointers ();
322 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
323 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
324 If NO_CHECK is true, the latter check is suppressed.
326 If GNU_DECL is zero, reset a previous association. */
328 void
329 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
331 /* Check that GNAT_ENTITY is not already defined and that it is being set
332 to something which is a decl. If that is not the case, this usually
333 means GNAT_ENTITY is defined twice, but occasionally is due to some
334 Gigi problem. */
335 gcc_assert (!(gnu_decl
336 && (PRESENT_GNU_TREE (gnat_entity)
337 || (!no_check && !DECL_P (gnu_decl)))));
339 SET_GNU_TREE (gnat_entity, gnu_decl);
342 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
343 that was associated with it. If there is no such tree node, abort.
345 In some cases, such as delayed elaboration or expressions that need to
346 be elaborated only once, GNAT_ENTITY is really not an entity. */
348 tree
349 get_gnu_tree (Entity_Id gnat_entity)
351 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
352 return GET_GNU_TREE (gnat_entity);
355 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
357 bool
358 present_gnu_tree (Entity_Id gnat_entity)
360 return PRESENT_GNU_TREE (gnat_entity);
363 /* Make a dummy type corresponding to GNAT_TYPE. */
365 tree
366 make_dummy_type (Entity_Id gnat_type)
368 Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
369 tree gnu_type;
371 /* If there was no equivalent type (can only happen when just annotating
372 types) or underlying type, go back to the original type. */
373 if (No (gnat_equiv))
374 gnat_equiv = gnat_type;
376 /* If it there already a dummy type, use that one. Else make one. */
377 if (PRESENT_DUMMY_NODE (gnat_equiv))
378 return GET_DUMMY_NODE (gnat_equiv);
380 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
381 an ENUMERAL_TYPE. */
382 gnu_type = make_node (Is_Record_Type (gnat_equiv)
383 ? tree_code_for_record_type (gnat_equiv)
384 : ENUMERAL_TYPE);
385 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
386 TYPE_DUMMY_P (gnu_type) = 1;
387 TYPE_STUB_DECL (gnu_type)
388 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
389 if (Is_By_Reference_Type (gnat_equiv))
390 TYPE_BY_REFERENCE_P (gnu_type) = 1;
392 SET_DUMMY_NODE (gnat_equiv, gnu_type);
394 return gnu_type;
397 /* Return the dummy type that was made for GNAT_TYPE, if any. */
399 tree
400 get_dummy_type (Entity_Id gnat_type)
402 return GET_DUMMY_NODE (gnat_type);
405 /* Build dummy fat and thin pointer types whose designated type is specified
406 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
408 void
409 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
411 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
412 tree gnu_fat_type, fields, gnu_object_type;
414 gnu_template_type = make_node (RECORD_TYPE);
415 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
416 TYPE_DUMMY_P (gnu_template_type) = 1;
417 gnu_ptr_template = build_pointer_type (gnu_template_type);
419 gnu_array_type = make_node (ENUMERAL_TYPE);
420 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
421 TYPE_DUMMY_P (gnu_array_type) = 1;
422 gnu_ptr_array = build_pointer_type (gnu_array_type);
424 gnu_fat_type = make_node (RECORD_TYPE);
425 /* Build a stub DECL to trigger the special processing for fat pointer types
426 in gnat_pushdecl. */
427 TYPE_NAME (gnu_fat_type)
428 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
429 gnu_fat_type);
430 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
431 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
432 DECL_CHAIN (fields)
433 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
434 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
435 finish_fat_pointer_type (gnu_fat_type, fields);
436 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
437 /* Suppress debug info until after the type is completed. */
438 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
440 gnu_object_type = make_node (RECORD_TYPE);
441 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
442 TYPE_DUMMY_P (gnu_object_type) = 1;
444 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
445 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
448 /* Return true if we are in the global binding level. */
450 bool
451 global_bindings_p (void)
453 return force_global || current_function_decl == NULL_TREE;
456 /* Enter a new binding level. */
458 void
459 gnat_pushlevel (void)
461 struct gnat_binding_level *newlevel = NULL;
463 /* Reuse a struct for this binding level, if there is one. */
464 if (free_binding_level)
466 newlevel = free_binding_level;
467 free_binding_level = free_binding_level->chain;
469 else
470 newlevel = ggc_alloc<gnat_binding_level> ();
472 /* Use a free BLOCK, if any; otherwise, allocate one. */
473 if (free_block_chain)
475 newlevel->block = free_block_chain;
476 free_block_chain = BLOCK_CHAIN (free_block_chain);
477 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
479 else
480 newlevel->block = make_node (BLOCK);
482 /* Point the BLOCK we just made to its parent. */
483 if (current_binding_level)
484 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
486 BLOCK_VARS (newlevel->block) = NULL_TREE;
487 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
488 TREE_USED (newlevel->block) = 1;
490 /* Add this level to the front of the chain (stack) of active levels. */
491 newlevel->chain = current_binding_level;
492 newlevel->jmpbuf_decl = NULL_TREE;
493 current_binding_level = newlevel;
496 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
497 and point FNDECL to this BLOCK. */
499 void
500 set_current_block_context (tree fndecl)
502 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
503 DECL_INITIAL (fndecl) = current_binding_level->block;
504 set_block_for_group (current_binding_level->block);
507 /* Set the jmpbuf_decl for the current binding level to DECL. */
509 void
510 set_block_jmpbuf_decl (tree decl)
512 current_binding_level->jmpbuf_decl = decl;
515 /* Get the jmpbuf_decl, if any, for the current binding level. */
517 tree
518 get_block_jmpbuf_decl (void)
520 return current_binding_level->jmpbuf_decl;
523 /* Exit a binding level. Set any BLOCK into the current code group. */
525 void
526 gnat_poplevel (void)
528 struct gnat_binding_level *level = current_binding_level;
529 tree block = level->block;
531 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
532 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
534 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
535 are no variables free the block and merge its subblocks into those of its
536 parent block. Otherwise, add it to the list of its parent. */
537 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
539 else if (BLOCK_VARS (block) == NULL_TREE)
541 BLOCK_SUBBLOCKS (level->chain->block)
542 = block_chainon (BLOCK_SUBBLOCKS (block),
543 BLOCK_SUBBLOCKS (level->chain->block));
544 BLOCK_CHAIN (block) = free_block_chain;
545 free_block_chain = block;
547 else
549 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
550 BLOCK_SUBBLOCKS (level->chain->block) = block;
551 TREE_USED (block) = 1;
552 set_block_for_group (block);
555 /* Free this binding structure. */
556 current_binding_level = level->chain;
557 level->chain = free_binding_level;
558 free_binding_level = level;
561 /* Exit a binding level and discard the associated BLOCK. */
563 void
564 gnat_zaplevel (void)
566 struct gnat_binding_level *level = current_binding_level;
567 tree block = level->block;
569 BLOCK_CHAIN (block) = free_block_chain;
570 free_block_chain = block;
572 /* Free this binding structure. */
573 current_binding_level = level->chain;
574 level->chain = free_binding_level;
575 free_binding_level = level;
578 /* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
580 static void
581 gnat_set_type_context (tree type, tree context)
583 tree decl = TYPE_STUB_DECL (type);
585 TYPE_CONTEXT (type) = context;
587 while (decl && DECL_PARALLEL_TYPE (decl))
589 tree parallel_type = DECL_PARALLEL_TYPE (decl);
591 /* Give a context to the parallel types and their stub decl, if any.
592 Some parallel types seems to be present in multiple parallel type
593 chains, so don't mess with their context if they already have one. */
594 if (TYPE_CONTEXT (parallel_type) == NULL_TREE)
596 if (TYPE_STUB_DECL (parallel_type) != NULL_TREE)
597 DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
598 TYPE_CONTEXT (parallel_type) = context;
601 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
605 /* Return the innermost scope, starting at GNAT_NODE, we are be interested in
606 the debug info, or Empty if there is no such scope. If not NULL, set
607 IS_SUBPROGRAM to whether the returned entity is a subprogram. */
609 static Entity_Id
610 get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
612 Entity_Id gnat_entity;
614 if (is_subprogram)
615 *is_subprogram = false;
617 if (Nkind (gnat_node) == N_Defining_Identifier)
618 gnat_entity = Scope (gnat_node);
619 else
620 return Empty;
622 while (Present (gnat_entity))
624 switch (Ekind (gnat_entity))
626 case E_Function:
627 case E_Procedure:
628 if (Present (Protected_Body_Subprogram (gnat_entity)))
629 gnat_entity = Protected_Body_Subprogram (gnat_entity);
631 /* If the scope is a subprogram, then just rely on
632 current_function_decl, so that we don't have to defer
633 anything. This is needed because other places rely on the
634 validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes. */
635 if (is_subprogram)
636 *is_subprogram = true;
637 return gnat_entity;
639 case E_Record_Type:
640 case E_Record_Subtype:
641 return gnat_entity;
643 default:
644 /* By default, we are not interested in this particular scope: go to
645 the outer one. */
646 break;
648 gnat_entity = Scope (gnat_entity);
650 return Empty;
653 /* If N is NULL, set TYPE's context to CONTEXT. Defer this to the processing of
654 N otherwise. */
656 static void
657 defer_or_set_type_context (tree type,
658 tree context,
659 struct deferred_decl_context_node *n)
661 if (n)
662 add_deferred_type_context (n, type);
663 else
664 gnat_set_type_context (type, context);
667 /* Return global_context. Create it if needed, first. */
669 static tree
670 get_global_context (void)
672 if (!global_context)
673 global_context = build_translation_unit_decl (NULL_TREE);
674 return global_context;
677 /* Record DECL as belonging to the current lexical scope and use GNAT_NODE
678 for location information and flag propagation. */
680 void
681 gnat_pushdecl (tree decl, Node_Id gnat_node)
683 tree context = NULL_TREE;
684 struct deferred_decl_context_node *deferred_decl_context = NULL;
686 /* If explicitely asked to make DECL global or if it's an imported nested
687 object, short-circuit the regular Scope-based context computation. */
688 if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
690 /* Rely on the GNAT scope, or fallback to the current_function_decl if
691 the GNAT scope reached the global scope, if it reached a subprogram
692 or the declaration is a subprogram or a variable (for them we skip
693 intermediate context types because the subprogram body elaboration
694 machinery and the inliner both expect a subprogram context).
696 Falling back to current_function_decl is necessary for implicit
697 subprograms created by gigi, such as the elaboration subprograms. */
698 bool context_is_subprogram = false;
699 const Entity_Id gnat_scope
700 = get_debug_scope (gnat_node, &context_is_subprogram);
702 if (Present (gnat_scope)
703 && !context_is_subprogram
704 && TREE_CODE (decl) != FUNCTION_DECL
705 && TREE_CODE (decl) != VAR_DECL)
706 /* Always assume the scope has not been elaborated, thus defer the
707 context propagation to the time its elaboration will be
708 available. */
709 deferred_decl_context
710 = add_deferred_decl_context (decl, gnat_scope, force_global);
712 /* External declarations (when force_global > 0) may not be in a
713 local context. */
714 else if (current_function_decl != NULL_TREE && force_global == 0)
715 context = current_function_decl;
718 /* If either we are forced to be in global mode or if both the GNAT scope and
719 the current_function_decl did not help determining the context, use the
720 global scope. */
721 if (!deferred_decl_context && context == NULL_TREE)
722 context = get_global_context ();
724 /* Functions imported in another function are not really nested.
725 For really nested functions mark them initially as needing
726 a static chain for uses of that flag before unnesting;
727 lower_nested_functions will then recompute it. */
728 if (TREE_CODE (decl) == FUNCTION_DECL
729 && !TREE_PUBLIC (decl)
730 && context != NULL_TREE
731 && (TREE_CODE (context) == FUNCTION_DECL
732 || decl_function_context (context) != NULL_TREE))
733 DECL_STATIC_CHAIN (decl) = 1;
735 if (!deferred_decl_context)
736 DECL_CONTEXT (decl) = context;
738 TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));
740 /* Set the location of DECL and emit a declaration for it. */
741 if (Present (gnat_node) && !renaming_from_generic_instantiation_p (gnat_node))
742 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
744 add_decl_expr (decl, gnat_node);
746 /* Put the declaration on the list. The list of declarations is in reverse
747 order. The list will be reversed later. Put global declarations in the
748 globals list and local ones in the current block. But skip TYPE_DECLs
749 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
750 with the debugger and aren't needed anyway. */
751 if (!(TREE_CODE (decl) == TYPE_DECL
752 && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
754 if (DECL_EXTERNAL (decl))
756 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
757 vec_safe_push (builtin_decls, decl);
759 else if (global_bindings_p ())
760 vec_safe_push (global_decls, decl);
761 else
763 DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
764 BLOCK_VARS (current_binding_level->block) = decl;
768 /* For the declaration of a type, set its name if it either is not already
769 set or if the previous type name was not derived from a source name.
770 We'd rather have the type named with a real name and all the pointer
771 types to the same object have the same POINTER_TYPE node. Code in the
772 equivalent function of c-decl.c makes a copy of the type node here, but
773 that may cause us trouble with incomplete types. We make an exception
774 for fat pointer types because the compiler automatically builds them
775 for unconstrained array types and the debugger uses them to represent
776 both these and pointers to these. */
777 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
779 tree t = TREE_TYPE (decl);
781 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
783 /* Array and pointer types aren't "tagged" types so we force the
784 type to be associated with its typedef in the DWARF back-end,
785 in order to make sure that the latter is always preserved. */
786 if (!DECL_ARTIFICIAL (decl)
787 && (TREE_CODE (t) == ARRAY_TYPE
788 || TREE_CODE (t) == POINTER_TYPE))
790 tree tt = build_distinct_type_copy (t);
791 if (TREE_CODE (t) == POINTER_TYPE)
792 TYPE_NEXT_PTR_TO (t) = tt;
793 TYPE_NAME (tt) = DECL_NAME (decl);
794 defer_or_set_type_context (tt,
795 DECL_CONTEXT (decl),
796 deferred_decl_context);
797 TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
798 DECL_ORIGINAL_TYPE (decl) = tt;
801 else if (TYPE_IS_FAT_POINTER_P (t))
803 /* We need a variant for the placeholder machinery to work. */
804 tree tt = build_variant_type_copy (t);
805 TYPE_NAME (tt) = decl;
806 defer_or_set_type_context (tt,
807 DECL_CONTEXT (decl),
808 deferred_decl_context);
809 TREE_USED (tt) = TREE_USED (t);
810 TREE_TYPE (decl) = tt;
811 if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
812 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
813 else
814 DECL_ORIGINAL_TYPE (decl) = t;
815 DECL_ARTIFICIAL (decl) = 0;
816 t = NULL_TREE;
818 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
820 else
821 t = NULL_TREE;
823 /* Propagate the name to all the anonymous variants. This is needed
824 for the type qualifiers machinery to work properly. Also propagate
825 the context to them. Note that the context will be propagated to all
826 parallel types too thanks to gnat_set_type_context. */
827 if (t)
828 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
829 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
831 TYPE_NAME (t) = decl;
832 defer_or_set_type_context (t,
833 DECL_CONTEXT (decl),
834 deferred_decl_context);
839 /* Create a record type that contains a SIZE bytes long field of TYPE with a
840 starting bit position so that it is aligned to ALIGN bits, and leaving at
841 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
842 record is guaranteed to get. GNAT_NODE is used for the position of the
843 associated TYPE_DECL. */
845 tree
/* Build a padding record whose single field of TYPE and size SIZE sits at
   the next multiple of ALIGN bits past the record's own address plus ROOM
   bytes.  BASE_ALIGN is the alignment claimed for the record itself and
   GNAT_NODE is where the declaration of the new type is posted.  */
846 make_aligning_type (tree type, unsigned int align, tree size,
847 unsigned int base_align, int room, Node_Id gnat_node)
849 /* We will be crafting a record type with one field at a position set to be
850 the next multiple of ALIGN past record'address + room bytes. We use a
851 record placeholder to express record'address. */
852 tree record_type = make_node (RECORD_TYPE);
853 tree record = build0 (PLACEHOLDER_EXPR, record_type);
855 tree record_addr_st
856 = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));
858 /* The diagram below summarizes the shape of what we manipulate:
860 <--------- pos ---------->
861 { +------------+-------------+-----------------+
862 record =>{ |############| ... | field (type) |
863 { +------------+-------------+-----------------+
864 |<-- room -->|<- voffset ->|<---- size ----->|
867 record_addr vblock_addr
869 Every length is in sizetype bytes there, except "pos" which has to be
870 set as a bit position in the GCC tree for the record. */
871 tree room_st = size_int (room);
872 tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
873 tree voffset_st, pos, field;
875 tree name = TYPE_IDENTIFIER (type);
877 name = concat_name (name, "ALIGN");
878 TYPE_NAME (record_type) = name;
880 /* Compute VOFFSET and then POS. The next byte position multiple of some
881 alignment after some address is obtained by "and"ing the alignment minus
882 1 with the two's complement of the address. */
883 voffset_st = size_binop (BIT_AND_EXPR,
884 fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
885 size_int ((align / BITS_PER_UNIT) - 1));
887 /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype. */
888 pos = size_binop (MULT_EXPR,
889 convert (bitsizetype,
890 size_binop (PLUS_EXPR, room_st, voffset_st)),
891 bitsize_unit_node);
893 /* Craft the GCC record representation. We exceptionally do everything
894 manually here because 1) our generic circuitry is not quite ready to
895 handle the complex position/size expressions we are setting up, 2) we
896 have a strong simplifying factor at hand: we know the maximum possible
897 value of voffset, and 3) we have to set/reset at least the sizes in
898 accordance with this maximum value anyway, as we need them to convey
899 what should be "alloc"ated for this type.
901 Use -1 as the 'addressable' indication for the field to prevent the
902 creation of a bitfield. We don't need one, it would have damaging
903 consequences on the alignment computation, and create_field_decl would
904 make one without this special argument, for instance because of the
905 complex position expression. */
906 field = create_field_decl (get_identifier ("F"), type, record_type, size,
907 pos, 1, -1);
908 TYPE_FIELDS (record_type) = field;
910 TYPE_ALIGN (record_type) = base_align;
911 TYPE_USER_ALIGN (record_type) = 1;
/* The record size covers the worst case: the field itself, ROOM bytes in
   front of it, and up to ALIGN bits of variable offset.  */
913 TYPE_SIZE (record_type)
914 = size_binop (PLUS_EXPR,
915 size_binop (MULT_EXPR, convert (bitsizetype, size),
916 bitsize_unit_node),
917 bitsize_int (align + room * BITS_PER_UNIT));
918 TYPE_SIZE_UNIT (record_type)
919 = size_binop (PLUS_EXPR, size,
920 size_int (room + align / BITS_PER_UNIT));
922 SET_TYPE_MODE (record_type, BLKmode);
923 relate_alias_sets (record_type, type, ALIAS_SET_COPY);
925 /* Declare it now since it will never be declared otherwise. This is
926 necessary to ensure that its subtrees are properly marked. */
927 create_type_decl (name, record_type, true, false, gnat_node);
929 return record_type;
932 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
933 as the field type of a packed record if IN_RECORD is true, or as the
934 component type of a packed array if IN_RECORD is false. See if we can
935 rewrite it either as a type that has a non-BLKmode, which we can pack
936 tighter in the packed record case, or as a smaller type. If so, return
937 the new type. If not, return the original type. */
939 tree
940 make_packable_type (tree type, bool in_record)
942 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
943 unsigned HOST_WIDE_INT new_size;
944 tree new_type, old_field, field_list = NULL_TREE;
945 unsigned int align;
947 /* No point in doing anything if the size is zero. */
948 if (size == 0)
949 return type;
951 new_type = make_node (TREE_CODE (type));
953 /* Copy the name and flags from the old type to that of the new.
954 Note that we rely on the pointer equality created here for
955 TYPE_NAME to look through conversions in various places. */
956 TYPE_NAME (new_type) = TYPE_NAME (type);
957 TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
958 TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
959 if (TREE_CODE (type) == RECORD_TYPE)
960 TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
962 /* If we are in a record and have a small size, set the alignment to
963 try for an integral mode. Otherwise set it to try for a smaller
964 type with BLKmode. */
965 if (in_record && size <= MAX_FIXED_MODE_SIZE)
967 align = ceil_pow2 (size);
968 TYPE_ALIGN (new_type) = align;
969 new_size = (size + align - 1) & -align;
971 else
/* NOTE: this inner ALIGN shadows the outer declaration, which is only
   used in the IN_RECORD branch above.  */
973 unsigned HOST_WIDE_INT align;
975 /* Do not try to shrink the size if the RM size is not constant. */
976 if (TYPE_CONTAINS_TEMPLATE_P (type)
977 || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
978 return type;
980 /* Round the RM size up to a unit boundary to get the minimal size
981 for a BLKmode record. Give up if it's already the size. */
982 new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
983 new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
984 if (new_size == size)
985 return type;
/* NEW_SIZE & -NEW_SIZE is the largest power of 2 dividing NEW_SIZE,
   i.e. its natural alignment.  */
987 align = new_size & -new_size;
988 TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
991 TYPE_USER_ALIGN (new_type) = 1;
993 /* Now copy the fields, keeping the position and size as we don't want
994 to change the layout by propagating the packedness downwards. */
995 for (old_field = TYPE_FIELDS (type); old_field;
996 old_field = DECL_CHAIN (old_field))
998 tree new_field_type = TREE_TYPE (old_field);
999 tree new_field, new_size;
1001 if (RECORD_OR_UNION_TYPE_P (new_field_type)
1002 && !TYPE_FAT_POINTER_P (new_field_type)
1003 && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
1004 new_field_type = make_packable_type (new_field_type, true);
1006 /* However, for the last field in a not already packed record type
1007 that is of an aggregate type, we need to use the RM size in the
1008 packable version of the record type, see finish_record_type. */
1009 if (!DECL_CHAIN (old_field)
1010 && !TYPE_PACKED (type)
1011 && RECORD_OR_UNION_TYPE_P (new_field_type)
1012 && !TYPE_FAT_POINTER_P (new_field_type)
1013 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
1014 && TYPE_ADA_SIZE (new_field_type))
1015 new_size = TYPE_ADA_SIZE (new_field_type);
1016 else
1017 new_size = DECL_SIZE (old_field);
1019 new_field
1020 = create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
1021 new_size, bit_position (old_field),
1022 TYPE_PACKED (type),
1023 !DECL_NONADDRESSABLE_P (old_field));
1025 DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
1026 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
1027 if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
1028 DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);
/* Fields are prepended here, then put back in order by the nreverse
   below.  */
1030 DECL_CHAIN (new_field) = field_list;
1031 field_list = new_field;
1034 finish_record_type (new_type, nreverse (field_list), 2, false);
1035 relate_alias_sets (new_type, type, ALIAS_SET_COPY);
1036 if (TYPE_STUB_DECL (type))
1037 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
1038 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
1040 /* If this is a padding record, we never want to make the size smaller
1041 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
1042 if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
1044 TYPE_SIZE (new_type) = TYPE_SIZE (type);
1045 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
/* Sizes are unchanged, so the shrinkage test below cannot succeed.  */
1046 new_size = size;
1048 else
1050 TYPE_SIZE (new_type) = bitsize_int (new_size);
1051 TYPE_SIZE_UNIT (new_type)
1052 = size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
1055 if (!TYPE_CONTAINS_TEMPLATE_P (type))
1056 SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
1058 compute_record_mode (new_type);
1060 /* Try harder to get a packable type if necessary, for example
1061 in case the record itself contains a BLKmode field. */
1062 if (in_record && TYPE_MODE (new_type) == BLKmode)
1063 SET_TYPE_MODE (new_type,
1064 mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));
1066 /* If neither the mode nor the size has shrunk, return the old type. */
1067 if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
1068 return type;
1070 return new_type;
1073 /* Given a type TYPE, return a new type whose size is appropriate for SIZE.
1074 If TYPE is the best type, return it. Otherwise, make a new type. We
1075 only support new integral and pointer types. FOR_BIASED is true if
1076 we are making a biased type. */
1078 tree
1079 make_type_from_size (tree type, tree size_tree, bool for_biased)
1081 unsigned HOST_WIDE_INT size;
1082 bool biased_p;
1083 tree new_type;
1085 /* If size indicates an error, just return TYPE to avoid propagating
1086 the error. Likewise if it's too large to represent. */
1087 if (!size_tree || !tree_fits_uhwi_p (size_tree))
1088 return type;
1090 size = tree_to_uhwi (size_tree);
1092 switch (TREE_CODE (type))
1094 case INTEGER_TYPE:
1095 case ENUMERAL_TYPE:
1096 case BOOLEAN_TYPE:
1097 biased_p = (TREE_CODE (type) == INTEGER_TYPE
1098 && TYPE_BIASED_REPRESENTATION_P (type));
1100 /* Integer types with precision 0 are forbidden. */
1101 if (size == 0)
1102 size = 1;
1104 /* Only do something if the type isn't a packed array type and doesn't
1105 already have the proper size and the size isn't too large. */
1106 if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
1107 || (TYPE_PRECISION (type) == size && biased_p == for_biased)
1108 || size > LONG_LONG_TYPE_SIZE)
1109 break;
1111 biased_p |= for_biased;
1112 if (TYPE_UNSIGNED (type) || biased_p)
1113 new_type = make_unsigned_type (size);
1114 else
1115 new_type = make_signed_type (size);
/* Link the new type to its base type, falling back to TYPE itself when
   TYPE has no base.  */
1116 TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
1117 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
1118 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
1119 /* Copy the name to show that it's essentially the same type and
1120 not a subrange type. */
1121 TYPE_NAME (new_type) = TYPE_NAME (type);
1122 TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
1123 SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
1124 return new_type;
1126 case RECORD_TYPE:
1127 /* Do something if this is a fat pointer, in which case we
1128 may need to return the thin pointer. */
1129 if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
1131 machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
1132 if (!targetm.valid_pointer_mode (p_mode))
1133 p_mode = ptr_mode;
1134 return
1135 build_pointer_type_for_mode
1136 (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
1137 p_mode, 0);
1139 break;
1141 case POINTER_TYPE:
1142 /* Only do something if this is a thin pointer, in which case we
1143 may need to return the fat pointer. */
1144 if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
1145 return
1146 build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
1147 break;
1149 default:
1150 break;
/* No resizing was possible or necessary: hand back the input type.  */
1153 return type;
1156 /* See if the data pointed to by the hash table slot is marked. */
1158 static int
1159 pad_type_hash_marked_p (const void *p)
1161 const_tree const type = ((const struct pad_type_hash *) p)->type;
1163 return ggc_marked_p (type);
1166 /* Return the cached hash value. */
1168 static hashval_t
1169 pad_type_hash_hash (const void *p)
1171 return ((const struct pad_type_hash *) p)->hash;
1174 /* Return 1 iff the padded types are equivalent. */
1176 static int
1177 pad_type_hash_eq (const void *p1, const void *p2)
1179 const struct pad_type_hash *const t1 = (const struct pad_type_hash *) p1;
1180 const struct pad_type_hash *const t2 = (const struct pad_type_hash *) p2;
1181 tree type1, type2;
1183 if (t1->hash != t2->hash)
1184 return 0;
1186 type1 = t1->type;
1187 type2 = t2->type;
1189 /* We consider that the padded types are equivalent if they pad the same
1190 type and have the same size, alignment and RM size. Taking the mode
1191 into account is redundant since it is determined by the others. */
1192 return
1193 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1194 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1195 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1196 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2);
1199 /* Look up the padded TYPE in the hash table and return its canonical version
1200 if it exists; otherwise, insert it into the hash table. */
1202 static tree
1203 lookup_and_insert_pad_type (tree type)
1205 hashval_t hashcode;
1206 struct pad_type_hash in, *h;
1207 void **loc;
/* Hash the padded component type, size, alignment and RM size: the same
   quadruple that pad_type_hash_eq compares.  */
1209 hashcode
1210 = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
1211 hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
1212 hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
1213 hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);
1215 in.hash = hashcode;
1216 in.type = type;
1217 h = (struct pad_type_hash *)
1218 htab_find_with_hash (pad_type_hash_table, &in, hashcode);
1219 if (h)
1220 return h->type;
/* Not found: record TYPE as the canonical version for this quadruple.  */
1222 h = ggc_alloc<pad_type_hash> ();
1223 h->hash = hashcode;
1224 h->type = type;
1225 loc = htab_find_slot_with_hash (pad_type_hash_table, h, hashcode, INSERT);
1226 *loc = (void *)h;
/* NULL_TREE tells the caller that TYPE itself is now canonical.  */
1227 return NULL_TREE;
1230 /* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
1231 if needed. We have already verified that SIZE and ALIGN are large enough.
1232 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1233 IS_COMPONENT_TYPE is true if this is being done for the component type of
1234 an array. IS_USER_TYPE is true if the original type needs to be completed.
1235 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
1236 the RM size of the resulting type is to be set to SIZE too. */
1238 tree
1239 maybe_pad_type (tree type, tree size, unsigned int align,
1240 Entity_Id gnat_entity, bool is_component_type,
1241 bool is_user_type, bool definition, bool set_rm_size)
1243 tree orig_size = TYPE_SIZE (type);
1244 unsigned int orig_align = TYPE_ALIGN (type);
1245 tree record, field;
1247 /* If TYPE is a padded type, see if it agrees with any size and alignment
1248 we were given. If so, return the original type. Otherwise, strip
1249 off the padding, since we will either be returning the inner type
1250 or repadding it. If no size or alignment is specified, use that of
1251 the original padded type. */
1252 if (TYPE_IS_PADDING_P (type))
1254 if ((!size
1255 || operand_equal_p (round_up (size, orig_align), orig_size, 0))
1256 && (align == 0 || align == orig_align))
1257 return type;
1259 if (!size)
1260 size = orig_size;
1261 if (align == 0)
1262 align = orig_align;
/* Strip the padding: the inner type is the single field's type.  */
1264 type = TREE_TYPE (TYPE_FIELDS (type));
1265 orig_size = TYPE_SIZE (type);
1266 orig_align = TYPE_ALIGN (type);
1269 /* If the size is either not being changed or is being made smaller (which
1270 is not done here and is only valid for bitfields anyway), show the size
1271 isn't changing. Likewise, clear the alignment if it isn't being
1272 changed. Then return if we aren't doing anything. */
1273 if (size
1274 && (operand_equal_p (size, orig_size, 0)
1275 || (TREE_CODE (orig_size) == INTEGER_CST
1276 && tree_int_cst_lt (size, orig_size))))
1277 size = NULL_TREE;
1279 if (align == orig_align)
1280 align = 0;
1282 if (align == 0 && !size)
1283 return type;
1285 /* If requested, complete the original type and give it a name. */
1286 if (is_user_type)
1287 create_type_decl (get_entity_name (gnat_entity), type,
1288 !Comes_From_Source (gnat_entity),
1289 !(TYPE_NAME (type)
1290 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1291 && DECL_IGNORED_P (TYPE_NAME (type))),
1292 gnat_entity);
1294 /* We used to modify the record in place in some cases, but that could
1295 generate incorrect debugging information. So make a new record
1296 type and name. */
1297 record = make_node (RECORD_TYPE);
1298 TYPE_PADDING_P (record) = 1;
1300 if (Present (gnat_entity))
1301 TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD")
;
1303 TYPE_ALIGN (record) = align ? align : orig_align;
1304 TYPE_SIZE (record) = size ? size : orig_size;
1305 TYPE_SIZE_UNIT (record)
1306 = convert (sizetype,
1307 size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
1308 bitsize_unit_node));
1310 /* If we are changing the alignment and the input type is a record with
1311 BLKmode and a small constant size, try to make a form that has an
1312 integral mode. This might allow the padding record to also have an
1313 integral mode, which will be much more efficient. There is no point
1314 in doing so if a size is specified unless it is also a small constant
1315 size and it is incorrect to do so if we cannot guarantee that the mode
1316 will be naturally aligned since the field must always be addressable.
1318 ??? This might not always be a win when done for a stand-alone object:
1319 since the nominal and the effective type of the object will now have
1320 different modes, a VIEW_CONVERT_EXPR will be required for converting
1321 between them and it might be hard to overcome afterwards, including
1322 at the RTL level when the stand-alone object is accessed as a whole. */
1323 if (align != 0
1324 && RECORD_OR_UNION_TYPE_P (type)
1325 && TYPE_MODE (type) == BLKmode
1326 && !TYPE_BY_REFERENCE_P (type)
1327 && TREE_CODE (orig_size) == INTEGER_CST
1328 && !TREE_OVERFLOW (orig_size)
1329 && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
1330 && (!size
1331 || (TREE_CODE (size) == INTEGER_CST
1332 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
1334 tree packable_type = make_packable_type (type, true);
1335 if (TYPE_MODE (packable_type) != BLKmode
1336 && align >= TYPE_ALIGN (packable_type))
1337 type = packable_type;
1340 /* Now create the field with the original size. */
1341 field = create_field_decl (get_identifier ("F"), type, record, orig_size,
1342 bitsize_zero_node, 0, 1);
1343 DECL_INTERNAL_P (field) = 1;
1345 /* Do not emit debug info until after the auxiliary record is built. */
1346 finish_record_type (record, field, 1, false);
1348 /* Set the RM size if requested. */
1349 if (set_rm_size)
1351 tree canonical_pad_type;
1353 SET_TYPE_ADA_SIZE (record, size ? size : orig_size);
1355 /* If the padded type is complete and has constant size, we canonicalize
1356 it by means of the hash table. This is consistent with the language
1357 semantics and ensures that gigi and the middle-end have a common view
1358 of these padded types. */
1359 if (TREE_CONSTANT (TYPE_SIZE (record))
1360 && (canonical_pad_type = lookup_and_insert_pad_type (record)))
1362 record = canonical_pad_type;
1363 goto built;
1367 /* Unless debugging information isn't being written for the input type,
1368 write a record that shows what we are a subtype of and also make a
1369 variable that indicates our size, if still variable. */
1370 if (TREE_CODE (orig_size) != INTEGER_CST
1371 && TYPE_NAME (record)
1372 && TYPE_NAME (type)
1373 && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1374 && DECL_IGNORED_P (TYPE_NAME (type))))
1376 tree marker = make_node (RECORD_TYPE);
1377 tree name = TYPE_IDENTIFIER (record);
1378 tree orig_name = TYPE_IDENTIFIER (type);
1380 TYPE_NAME (marker) = concat_name (name, "XVS");
1381 finish_record_type (marker,
1382 create_field_decl (orig_name,
1383 build_reference_type (type),
1384 marker, NULL_TREE, NULL_TREE,
1385 0, 0),
1386 0, true);
1388 add_parallel_type (record, marker);
1390 if (definition && size && TREE_CODE (size) != INTEGER_CST)
1391 TYPE_SIZE_UNIT (marker)
1392 = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
1393 TYPE_SIZE_UNIT (record), false, false, false,
1394 false, NULL, gnat_entity);
1397 rest_of_record_type_compilation (record);
/* At this point RECORD is either the freshly built padded type or the
   canonical equivalent found in the hash table.  */
1399 built:
1400 /* If the size was widened explicitly, maybe give a warning. Take the
1401 original size as the maximum size of the input if there was an
1402 unconstrained record involved and round it up to the specified alignment,
1403 if one was specified. But don't do it if we are just annotating types
1404 and the type is tagged, since tagged types aren't fully laid out in this
1405 mode. */
1406 if (!size
1407 || TREE_CODE (size) == COND_EXPR
1408 || TREE_CODE (size) == MAX_EXPR
1409 || No (gnat_entity)
1410 || (type_annotate_only && Is_Tagged_Type (Etype (gnat_entity))))
1411 return record;
1413 if (CONTAINS_PLACEHOLDER_P (orig_size))
1414 orig_size = max_size (orig_size, true);
1416 if (align)
1417 orig_size = round_up (orig_size, align);
1419 if (!operand_equal_p (size, orig_size, 0)
1420 && !(TREE_CODE (size) == INTEGER_CST
1421 && TREE_CODE (orig_size) == INTEGER_CST
1422 && (TREE_OVERFLOW (size)
1423 || TREE_OVERFLOW (orig_size)
1424 || tree_int_cst_lt (size, orig_size))))
1426 Node_Id gnat_error_node = Empty;
1428 /* For a packed array, post the message on the original array type. */
1429 if (Is_Packed_Array_Impl_Type (gnat_entity))
1430 gnat_entity = Original_Array_Type (gnat_entity);
1432 if ((Ekind (gnat_entity) == E_Component
1433 || Ekind (gnat_entity) == E_Discriminant)
1434 && Present (Component_Clause (gnat_entity)))
1435 gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
1436 else if (Present (Size_Clause (gnat_entity)))
1437 gnat_error_node = Expression (Size_Clause (gnat_entity));
1439 /* Generate message only for entities that come from source, since
1440 if we have an entity created by expansion, the message will be
1441 generated for some other corresponding source entity. */
1442 if (Comes_From_Source (gnat_entity))
1444 if (Present (gnat_error_node))
1445 post_error_ne_tree ("{^ }bits of & unused?",
1446 gnat_error_node, gnat_entity,
1447 size_diffop (size, orig_size));
1448 else if (is_component_type)
1449 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1450 gnat_entity, gnat_entity,
1451 size_diffop (size, orig_size));
1455 return record;
1458 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1459 If this is a multi-dimensional array type, do this recursively.
1461 OP may be
1462 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1463 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1464 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1466 void
1467 relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1469 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1470 of a one-dimensional array, since the padding has the same alias set
1471 as the field type, but if it's a multi-dimensional array, we need to
1472 see the inner types. */
1473 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1474 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1475 || TYPE_PADDING_P (gnu_old_type)))
1476 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1478 /* Unconstrained array types are deemed incomplete and would thus be given
1479 alias set 0. Retrieve the underlying array type. */
1480 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1481 gnu_old_type
1482 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1483 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1484 gnu_new_type
1485 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
/* Recurse on the element types of a multi-dimensional array.  */
1487 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1488 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1489 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1490 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1492 switch (op)
1494 case ALIAS_SET_COPY:
1495 /* The alias set shouldn't be copied between array types with different
1496 aliasing settings because this can break the aliasing relationship
1497 between the array type and its element type. */
/* With checking enabled the assertion below is unconditional; otherwise
   it is only performed when strict aliasing is in effect.  */
1498 #ifndef ENABLE_CHECKING
1499 if (flag_strict_aliasing)
1500 #endif
1501 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1502 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1503 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1504 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1506 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1507 break;
1509 case ALIAS_SET_SUBSET:
1510 case ALIAS_SET_SUPERSET:
1512 alias_set_type old_set = get_alias_set (gnu_old_type);
1513 alias_set_type new_set = get_alias_set (gnu_new_type);
1515 /* Do nothing if the alias sets conflict. This ensures that we
1516 never call record_alias_subset several times for the same pair
1517 or at all for alias set 0. */
1518 if (!alias_sets_conflict_p (old_set, new_set))
1520 if (op == ALIAS_SET_SUBSET)
1521 record_alias_subset (old_set, new_set);
1522 else
1523 record_alias_subset (new_set, old_set);
1526 break;
1528 default:
1529 gcc_unreachable ();
1532 record_component_aliases (gnu_new_type);
1535 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1536 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
1538 void
1539 record_builtin_type (const char *name, tree type, bool artificial_p)
1541 tree type_decl = build_decl (input_location,
1542 TYPE_DECL, get_identifier (name), type);
1543 DECL_ARTIFICIAL (type_decl) = artificial_p;
1544 TYPE_ARTIFICIAL (type) = artificial_p;
1545 gnat_pushdecl (type_decl, Empty);
1547 if (debug_hooks->type_decl)
1548 debug_hooks->type_decl (type_decl, false);
1551 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1552 finish constructing the record type as a fat pointer type. */
1554 void
1555 finish_fat_pointer_type (tree record_type, tree field_list)
1557 /* Make sure we can put it into a register. */
1558 if (STRICT_ALIGNMENT)
1559 TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);
1561 /* Show what it really is. */
1562 TYPE_FAT_POINTER_P (record_type) = 1;
1564 /* Do not emit debug info for it since the types of its fields may still be
1565 incomplete at this point. */
1566 finish_record_type (record_type, field_list, 0, false);
1568 /* Force type_contains_placeholder_p to return true on it. Although the
1569 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1570 type but the representation of the unconstrained array. */
1571 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1574 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1575 finish constructing the record or union type. If REP_LEVEL is zero, this
1576 record has no representation clause and so will be entirely laid out here.
1577 If REP_LEVEL is one, this record has a representation clause and has been
1578 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1579 this record is derived from a parent record and thus inherits its layout;
1580 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1581 we need to write debug information about this type. */
1583 void
1584 finish_record_type (tree record_type, tree field_list, int rep_level,
1585 bool debug_info_p)
1587 enum tree_code code = TREE_CODE (record_type);
1588 tree name = TYPE_IDENTIFIER (record_type);
1589 tree ada_size = bitsize_zero_node;
1590 tree size = bitsize_zero_node;
1591 bool had_size = TYPE_SIZE (record_type) != 0;
1592 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1593 bool had_align = TYPE_ALIGN (record_type) != 0;
1594 tree field;
1596 TYPE_FIELDS (record_type) = field_list;
1598 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1599 generate debug info and have a parallel type. */
1600 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
1602 /* Globally initialize the record first. If this is a rep'ed record,
1603 that just means some initializations; otherwise, layout the record. */
1604 if (rep_level > 0)
1606 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
1608 if (!had_size_unit)
1609 TYPE_SIZE_UNIT (record_type) = size_zero_node;
1611 if (!had_size)
1612 TYPE_SIZE (record_type) = bitsize_zero_node;
1614 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1615 out just like a UNION_TYPE, since the size will be fixed. */
1616 else if (code == QUAL_UNION_TYPE)
1617 code = UNION_TYPE;
1619 else
1621 /* Ensure there isn't a size already set. There can be in an error
1622 case where there is a rep clause but all fields have errors and
1623 no longer have a position. */
1624 TYPE_SIZE (record_type) = 0;
1626 /* Ensure we use the traditional GCC layout for bitfields when we need
1627 to pack the record type or have a representation clause. The other
1628 possible layout (Microsoft C compiler), if available, would prevent
1629 efficient packing in almost all cases. */
1630 #ifdef TARGET_MS_BITFIELD_LAYOUT
1631 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1632 decl_attributes (&record_type,
1633 tree_cons (get_identifier ("gcc_struct"),
1634 NULL_TREE, NULL_TREE),
1635 ATTR_FLAG_TYPE_IN_PLACE);
1636 #endif
1638 layout_type (record_type);
1641 /* At this point, the position and size of each field is known. It was
1642 either set before entry by a rep clause, or by laying out the type above.
1644 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1645 to compute the Ada size; the GCC size and alignment (for rep'ed records
1646 that are not padding types); and the mode (for rep'ed records). We also
1647 clear the DECL_BIT_FIELD indication for the cases we know have not been
1648 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1650 if (code == QUAL_UNION_TYPE)
1651 field_list = nreverse (field_list);
1653 for (field = field_list; field; field = DECL_CHAIN (field))
1655 tree type = TREE_TYPE (field);
1656 tree pos = bit_position (field);
1657 tree this_size = DECL_SIZE (field);
1658 tree this_ada_size;
1660 if (RECORD_OR_UNION_TYPE_P (type)
1661 && !TYPE_FAT_POINTER_P (type)
1662 && !TYPE_CONTAINS_TEMPLATE_P (type)
1663 && TYPE_ADA_SIZE (type))
1664 this_ada_size = TYPE_ADA_SIZE (type);
1665 else
1666 this_ada_size = this_size;
1668 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1669 if (DECL_BIT_FIELD (field)
1670 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1672 unsigned int align = TYPE_ALIGN (type);
1674 /* In the general case, type alignment is required. */
1675 if (value_factor_p (pos, align))
1677 /* The enclosing record type must be sufficiently aligned.
1678 Otherwise, if no alignment was specified for it and it
1679 has been laid out already, bump its alignment to the
1680 desired one if this is compatible with its size. */
1681 if (TYPE_ALIGN (record_type) >= align)
1683 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1684 DECL_BIT_FIELD (field) = 0;
1686 else if (!had_align
1687 && rep_level == 0
1688 && value_factor_p (TYPE_SIZE (record_type), align))
1690 TYPE_ALIGN (record_type) = align;
1691 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1692 DECL_BIT_FIELD (field) = 0;
1696 /* In the non-strict alignment case, only byte alignment is required. */
1697 if (!STRICT_ALIGNMENT
1698 && DECL_BIT_FIELD (field)
1699 && value_factor_p (pos, BITS_PER_UNIT))
1700 DECL_BIT_FIELD (field) = 0;
1703 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1704 field is technically not addressable. Except that it can actually
1705 be addressed if it is BLKmode and happens to be properly aligned. */
1706 if (DECL_BIT_FIELD (field)
1707 && !(DECL_MODE (field) == BLKmode
1708 && value_factor_p (pos, BITS_PER_UNIT)))
1709 DECL_NONADDRESSABLE_P (field) = 1;
1711 /* A type must be as aligned as its most aligned field that is not
1712 a bit-field. But this is already enforced by layout_type. */
1713 if (rep_level > 0 && !DECL_BIT_FIELD (field))
1714 TYPE_ALIGN (record_type)
1715 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
1717 switch (code)
1719 case UNION_TYPE:
1720 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1721 size = size_binop (MAX_EXPR, size, this_size);
1722 break;
1724 case QUAL_UNION_TYPE:
1725 ada_size
1726 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1727 this_ada_size, ada_size);
1728 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1729 this_size, size);
1730 break;
1732 case RECORD_TYPE:
1733 /* Since we know here that all fields are sorted in order of
1734 increasing bit position, the size of the record is one
1735 higher than the ending bit of the last field processed
1736 unless we have a rep clause, since in that case we might
1737 have a field outside a QUAL_UNION_TYPE that has a higher ending
1738 position. So use a MAX in that case. Also, if this field is a
1739 QUAL_UNION_TYPE, we need to take into account the previous size in
1740 the case of empty variants. */
1741 ada_size
1742 = merge_sizes (ada_size, pos, this_ada_size,
1743 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1744 size
1745 = merge_sizes (size, pos, this_size,
1746 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1747 break;
1749 default:
1750 gcc_unreachable ();
/* Restore the original order of a QUAL_UNION_TYPE's field list.  */
1754 if (code == QUAL_UNION_TYPE)
1755 nreverse (field_list);
/* For an inherited layout (REP_LEVEL == 2), the sizes and mode come from
   the parent type and are left untouched.  */
1757 if (rep_level < 2)
1759 /* If this is a padding record, we never want to make the size smaller
1760 than what was specified in it, if any. */
1761 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
1762 size = TYPE_SIZE (record_type);
1764 /* Now set any of the values we've just computed that apply. */
1765 if (!TYPE_FAT_POINTER_P (record_type)
1766 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1767 SET_TYPE_ADA_SIZE (record_type, ada_size);
1769 if (rep_level > 0)
1771 tree size_unit = had_size_unit
1772 ? TYPE_SIZE_UNIT (record_type)
1773 : convert (sizetype,
1774 size_binop (CEIL_DIV_EXPR, size,
1775 bitsize_unit_node));
1776 unsigned int align = TYPE_ALIGN (record_type);
1778 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1779 TYPE_SIZE_UNIT (record_type)
1780 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1782 compute_record_mode (record_type);
1786 if (debug_info_p)
1787 rest_of_record_type_compilation (record_type);
1790 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
1791 PARRALEL_TYPE has no context and its computation is not deferred yet, also
1792 propagate TYPE's context to PARALLEL_TYPE's or defer its propagation to the
1793 moment TYPE will get a context. */
1795 void
1796 add_parallel_type (tree type, tree parallel_type)
1798 tree decl = TYPE_STUB_DECL (type);
1800 while (DECL_PARALLEL_TYPE (decl))
1801 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1803 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1805 /* If PARALLEL_TYPE already has a context, we are done. */
1806 if (TYPE_CONTEXT (parallel_type) != NULL_TREE)
1807 return;
1809 /* Otherwise, try to get one from TYPE's context. */
1810 if (TYPE_CONTEXT (type) != NULL_TREE)
1811 /* TYPE already has a context, so simply propagate it to PARALLEL_TYPE. */
1812 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
1814 /* ... otherwise TYPE has not context yet. We know it will thanks to
1815 gnat_pushdecl, and then its context will be propagated to PARALLEL_TYPE.
1816 So we have nothing to do in this case. */
1819 /* Return true if TYPE has a parallel type. */
1821 static bool
1822 has_parallel_type (tree type)
1824 tree decl = TYPE_STUB_DECL (type);
1826 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1829 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1830 associated with it. It need not be invoked directly in most cases since
1831 finish_record_type takes care of doing so, but this can be necessary if
1832 a parallel type is to be attached to the record type. */
1834 void
1835 rest_of_record_type_compilation (tree record_type)
1837 bool var_size = false;
1838 tree field;
1840 /* If this is a padded type, the bulk of the debug info has already been
1841 generated for the field's type. */
1842 if (TYPE_IS_PADDING_P (record_type))
1843 return;
1845 /* If the type already has a parallel type (XVS type), then we're done. */
1846 if (has_parallel_type (record_type))
1847 return;
1849 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
1851 /* We need to make an XVE/XVU record if any field has variable size,
1852 whether or not the record does. For example, if we have a union,
1853 it may be that all fields, rounded up to the alignment, have the
1854 same size, in which case we'll use that size. But the debug
1855 output routines (except Dwarf2) won't be able to output the fields,
1856 so we need to make the special record. */
1857 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1858 /* If a field has a non-constant qualifier, the record will have
1859 variable size too. */
1860 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
1861 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1863 var_size = true;
1864 break;
1868 /* If this record type is of variable size, make a parallel record type that
1869 will tell the debugger how the former is laid out (see exp_dbug.ads). */
1870 if (var_size)
1872 tree new_record_type
1873 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1874 ? UNION_TYPE : TREE_CODE (record_type));
1875 tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
1876 tree last_pos = bitsize_zero_node;
1877 tree old_field, prev_old_field = NULL_TREE;
1879 new_name
1880 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
1881 ? "XVU" : "XVE");
1882 TYPE_NAME (new_record_type) = new_name;
1883 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1884 TYPE_STUB_DECL (new_record_type)
1885 = create_type_stub_decl (new_name, new_record_type);
1886 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1887 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1888 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1889 TYPE_SIZE_UNIT (new_record_type)
1890 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1892 /* Now scan all the fields, replacing each field with a new field
1893 corresponding to the new encoding. */
1894 for (old_field = TYPE_FIELDS (record_type); old_field;
1895 old_field = DECL_CHAIN (old_field))
1897 tree field_type = TREE_TYPE (old_field);
1898 tree field_name = DECL_NAME (old_field);
1899 tree curpos = bit_position (old_field);
1900 tree pos, new_field;
1901 bool var = false;
1902 unsigned int align = 0;
1904 /* We're going to do some pattern matching below so remove as many
1905 conversions as possible. */
1906 curpos = remove_conversions (curpos, true);
1908 /* See how the position was modified from the last position.
1910 There are two basic cases we support: a value was added
1911 to the last position or the last position was rounded to
1912 a boundary and they something was added. Check for the
1913 first case first. If not, see if there is any evidence
1914 of rounding. If so, round the last position and retry.
1916 If this is a union, the position can be taken as zero. */
1917 if (TREE_CODE (new_record_type) == UNION_TYPE)
1918 pos = bitsize_zero_node;
1919 else
1920 pos = compute_related_constant (curpos, last_pos);
1922 if (!pos
1923 && TREE_CODE (curpos) == MULT_EXPR
1924 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
1926 tree offset = TREE_OPERAND (curpos, 0);
1927 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
1928 align = scale_by_factor_of (offset, align);
1929 last_pos = round_up (last_pos, align);
1930 pos = compute_related_constant (curpos, last_pos);
1932 else if (!pos
1933 && TREE_CODE (curpos) == PLUS_EXPR
1934 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
1935 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1936 && tree_fits_uhwi_p
1937 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
1939 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
1940 unsigned HOST_WIDE_INT addend
1941 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
1942 align
1943 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
1944 align = scale_by_factor_of (offset, align);
1945 align = MIN (align, addend & -addend);
1946 last_pos = round_up (last_pos, align);
1947 pos = compute_related_constant (curpos, last_pos);
1949 else if (potential_alignment_gap (prev_old_field, old_field, pos))
1951 align = TYPE_ALIGN (field_type);
1952 last_pos = round_up (last_pos, align);
1953 pos = compute_related_constant (curpos, last_pos);
1956 /* If we can't compute a position, set it to zero.
1958 ??? We really should abort here, but it's too much work
1959 to get this correct for all cases. */
1960 if (!pos)
1961 pos = bitsize_zero_node;
1963 /* See if this type is variable-sized and make a pointer type
1964 and indicate the indirection if so. Beware that the debug
1965 back-end may adjust the position computed above according
1966 to the alignment of the field type, i.e. the pointer type
1967 in this case, if we don't preventively counter that. */
1968 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1970 field_type = build_pointer_type (field_type);
1971 if (align != 0 && TYPE_ALIGN (field_type) > align)
1973 field_type = copy_node (field_type);
1974 TYPE_ALIGN (field_type) = align;
1976 var = true;
1979 /* Make a new field name, if necessary. */
1980 if (var || align != 0)
1982 char suffix[16];
1984 if (align != 0)
1985 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1986 align / BITS_PER_UNIT);
1987 else
1988 strcpy (suffix, "XVL");
1990 field_name = concat_name (field_name, suffix);
1993 new_field
1994 = create_field_decl (field_name, field_type, new_record_type,
1995 DECL_SIZE (old_field), pos, 0, 0);
1996 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1997 TYPE_FIELDS (new_record_type) = new_field;
1999 /* If old_field is a QUAL_UNION_TYPE, take its size as being
2000 zero. The only time it's not the last field of the record
2001 is when there are other components at fixed positions after
2002 it (meaning there was a rep clause for every field) and we
2003 want to be able to encode them. */
2004 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
2005 (TREE_CODE (TREE_TYPE (old_field))
2006 == QUAL_UNION_TYPE)
2007 ? bitsize_zero_node
2008 : DECL_SIZE (old_field));
2009 prev_old_field = old_field;
2012 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
2014 add_parallel_type (record_type, new_record_type);
2018 /* Utility function of above to merge LAST_SIZE, the previous size of a record
2019 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2020 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2021 replace a value of zero with the old size. If HAS_REP is true, we take the
2022 MAX of the end position of this field with LAST_SIZE. In all other cases,
2023 we use FIRST_BIT plus SIZE. Return an expression for the size. */
2025 static tree
2026 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
2027 bool has_rep)
2029 tree type = TREE_TYPE (last_size);
2030 tree new_size;
2032 if (!special || TREE_CODE (size) != COND_EXPR)
2034 new_size = size_binop (PLUS_EXPR, first_bit, size);
2035 if (has_rep)
2036 new_size = size_binop (MAX_EXPR, last_size, new_size);
2039 else
2040 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
2041 integer_zerop (TREE_OPERAND (size, 1))
2042 ? last_size : merge_sizes (last_size, first_bit,
2043 TREE_OPERAND (size, 1),
2044 1, has_rep),
2045 integer_zerop (TREE_OPERAND (size, 2))
2046 ? last_size : merge_sizes (last_size, first_bit,
2047 TREE_OPERAND (size, 2),
2048 1, has_rep));
2050 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
2051 when fed through substitute_in_expr) into thinking that a constant
2052 size is not constant. */
2053 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
2054 new_size = TREE_OPERAND (new_size, 0);
2056 return new_size;
2059 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2060 related by the addition of a constant. Return that constant if so. */
2062 static tree
2063 compute_related_constant (tree op0, tree op1)
2065 tree op0_var, op1_var;
2066 tree op0_con = split_plus (op0, &op0_var);
2067 tree op1_con = split_plus (op1, &op1_var);
2068 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
2070 if (operand_equal_p (op0_var, op1_var, 0))
2071 return result;
2072 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
2073 return result;
2074 else
2075 return 0;
2078 /* Utility function of above to split a tree OP which may be a sum, into a
2079 constant part, which is returned, and a variable part, which is stored
2080 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2081 bitsizetype. */
2083 static tree
2084 split_plus (tree in, tree *pvar)
2086 /* Strip conversions in order to ease the tree traversal and maximize the
2087 potential for constant or plus/minus discovery. We need to be careful
2088 to always return and set *pvar to bitsizetype trees, but it's worth
2089 the effort. */
2090 in = remove_conversions (in, false);
2092 *pvar = convert (bitsizetype, in);
2094 if (TREE_CODE (in) == INTEGER_CST)
2096 *pvar = bitsize_zero_node;
2097 return convert (bitsizetype, in);
2099 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2101 tree lhs_var, rhs_var;
2102 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2103 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2105 if (lhs_var == TREE_OPERAND (in, 0)
2106 && rhs_var == TREE_OPERAND (in, 1))
2107 return bitsize_zero_node;
2109 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2110 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2112 else
2113 return bitsize_zero_node;
2116 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
2117 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
2118 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
2119 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
2120 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
2121 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
2122 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
2123 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
2124 invisible reference. */
2126 tree
2127 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
2128 bool return_unconstrained_p, bool return_by_direct_ref_p,
2129 bool return_by_invisi_ref_p)
2131 /* A list of the data type nodes of the subprogram formal parameters.
2132 This list is generated by traversing the input list of PARM_DECL
2133 nodes. */
2134 vec<tree, va_gc> *param_type_list = NULL;
2135 tree t, type;
2137 for (t = param_decl_list; t; t = DECL_CHAIN (t))
2138 vec_safe_push (param_type_list, TREE_TYPE (t));
2140 type = build_function_type_vec (return_type, param_type_list);
2142 /* TYPE may have been shared since GCC hashes types. If it has a different
2143 CICO_LIST, make a copy. Likewise for the various flags. */
2144 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
2145 return_by_direct_ref_p, return_by_invisi_ref_p))
2147 type = copy_type (type);
2148 TYPE_CI_CO_LIST (type) = cico_list;
2149 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
2150 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
2151 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
2154 return type;
2157 /* Return a copy of TYPE but safe to modify in any way. */
2159 tree
2160 copy_type (tree type)
2162 tree new_type = copy_node (type);
2164 /* Unshare the language-specific data. */
2165 if (TYPE_LANG_SPECIFIC (type))
2167 TYPE_LANG_SPECIFIC (new_type) = NULL;
2168 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2171 /* And the contents of the language-specific slot if needed. */
2172 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2173 && TYPE_RM_VALUES (type))
2175 TYPE_RM_VALUES (new_type) = NULL_TREE;
2176 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2177 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2178 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2181 /* copy_node clears this field instead of copying it, because it is
2182 aliased with TREE_CHAIN. */
2183 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
2185 TYPE_POINTER_TO (new_type) = 0;
2186 TYPE_REFERENCE_TO (new_type) = 0;
2187 TYPE_MAIN_VARIANT (new_type) = new_type;
2188 TYPE_NEXT_VARIANT (new_type) = 0;
2190 return new_type;
2193 /* Return a subtype of sizetype with range MIN to MAX and whose
2194 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2195 of the associated TYPE_DECL. */
2197 tree
2198 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2200 /* First build a type for the desired range. */
2201 tree type = build_nonshared_range_type (sizetype, min, max);
2203 /* Then set the index type. */
2204 SET_TYPE_INDEX_TYPE (type, index);
2205 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2207 return type;
2210 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2211 sizetype is used. */
2213 tree
2214 create_range_type (tree type, tree min, tree max)
2216 tree range_type;
2218 if (type == NULL_TREE)
2219 type = sizetype;
2221 /* First build a type with the base range. */
2222 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2223 TYPE_MAX_VALUE (type));
2225 /* Then set the actual range. */
2226 SET_TYPE_RM_MIN_VALUE (range_type, min);
2227 SET_TYPE_RM_MAX_VALUE (range_type, max);
2229 return range_type;
2232 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2233 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2234 its data type. */
2236 tree
2237 create_type_stub_decl (tree type_name, tree type)
2239 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2240 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2241 emitted in DWARF. */
2242 tree type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
2243 DECL_ARTIFICIAL (type_decl) = 1;
2244 TYPE_ARTIFICIAL (type) = 1;
2245 return type_decl;
2248 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
2249 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
2250 is a declaration that was generated by the compiler. DEBUG_INFO_P is
2251 true if we need to write debug information about this type. GNAT_NODE
2252 is used for the position of the decl. */
2254 tree
2255 create_type_decl (tree type_name, tree type, bool artificial_p,
2256 bool debug_info_p, Node_Id gnat_node)
2258 enum tree_code code = TREE_CODE (type);
2259 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2260 tree type_decl;
2262 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2263 gcc_assert (!TYPE_IS_DUMMY_P (type));
2265 /* If the type hasn't been named yet, we're naming it; preserve an existing
2266 TYPE_STUB_DECL that has been attached to it for some purpose. */
2267 if (!named && TYPE_STUB_DECL (type))
2269 type_decl = TYPE_STUB_DECL (type);
2270 DECL_NAME (type_decl) = type_name;
2272 else
2273 type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
2275 DECL_ARTIFICIAL (type_decl) = artificial_p;
2276 TYPE_ARTIFICIAL (type) = artificial_p;
2278 /* Add this decl to the current binding level. */
2279 gnat_pushdecl (type_decl, gnat_node);
2281 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
2282 This causes the name to be also viewed as a "tag" by the debug
2283 back-end, with the advantage that no DW_TAG_typedef is emitted
2284 for artificial "tagged" types in DWARF. */
2285 if (!named)
2286 TYPE_STUB_DECL (type) = type_decl;
2288 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2289 back-end doesn't support, and for others if we don't need to. */
2290 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2291 DECL_IGNORED_P (type_decl) = 1;
2293 return type_decl;
2296 /* Return a VAR_DECL or CONST_DECL node.
2298 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
2299 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
2300 the GCC tree for an optional initial expression; NULL_TREE if none.
2302 CONST_FLAG is true if this variable is constant, in which case we might
2303 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2305 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2306 definition to be made visible outside of the current compilation unit, for
2307 instance variable definitions in a package specification.
2309 EXTERN_FLAG is true when processing an external variable declaration (as
2310 opposed to a definition: no storage is to be allocated for the variable).
2312 STATIC_FLAG is only relevant when not at top level. In that case
2313 it indicates whether to always allocate storage to the variable.
2315 GNAT_NODE is used for the position of the decl. */
2317 tree
2318 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
2319 bool const_flag, bool public_flag, bool extern_flag,
2320 bool static_flag, bool const_decl_allowed_p,
2321 struct attrib *attr_list, Node_Id gnat_node)
2323 /* Whether the object has static storage duration, either explicitly or by
2324 virtue of being declared at the global level. */
2325 const bool static_storage = static_flag || global_bindings_p ();
2327 /* Whether the initializer is constant: for an external object or an object
2328 with static storage duration, we check that the initializer is a valid
2329 constant expression for initializing a static variable; otherwise, we
2330 only check that it is constant. */
2331 const bool init_const
2332 = (var_init
2333 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
2334 && (extern_flag || static_storage
2335 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
2336 != NULL_TREE
2337 : TREE_CONSTANT (var_init)));
2339 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2340 case the initializer may be used in lieu of the DECL node (as done in
2341 Identifier_to_gnu). This is useful to prevent the need of elaboration
2342 code when an identifier for which such a DECL is made is in turn used
2343 as an initializer. We used to rely on CONST_DECL vs VAR_DECL for this,
2344 but extra constraints apply to this choice (see below) and they are not
2345 relevant to the distinction we wish to make. */
2346 const bool constant_p = const_flag && init_const;
2348 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2349 and may be used for scalars in general but not for aggregates. */
2350 tree var_decl
2351 = build_decl (input_location,
2352 (constant_p && const_decl_allowed_p
2353 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
2354 var_name, type);
2356 /* If this is external, throw away any initializations (they will be done
2357 elsewhere) unless this is a constant for which we would like to remain
2358 able to get the initializer. If we are defining a global here, leave a
2359 constant initialization and save any variable elaborations for the
2360 elaboration routine. If we are just annotating types, throw away the
2361 initialization if it isn't a constant. */
2362 if ((extern_flag && !constant_p)
2363 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
2364 var_init = NULL_TREE;
2366 /* At the global level, a non-constant initializer generates elaboration
2367 statements. Check that such statements are allowed, that is to say,
2368 not violating a No_Elaboration_Code restriction. */
2369 if (var_init && !init_const && global_bindings_p ())
2370 Check_Elaboration_Code_Allowed (gnat_node);
2372 DECL_INITIAL (var_decl) = var_init;
2373 TREE_READONLY (var_decl) = const_flag;
2374 DECL_EXTERNAL (var_decl) = extern_flag;
2375 TREE_CONSTANT (var_decl) = constant_p;
2377 /* We need to allocate static storage for an object with static storage
2378 duration if it isn't external. */
2379 TREE_STATIC (var_decl) = !extern_flag && static_storage;
2381 /* The object is public if it is external or if it is declared public
2382 and has static storage duration. */
2383 TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);
2385 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2386 try to fiddle with DECL_COMMON. However, on platforms that don't
2387 support global BSS sections, uninitialized global variables would
2388 go in DATA instead, thus increasing the size of the executable. */
2389 if (!flag_no_common
2390 && TREE_CODE (var_decl) == VAR_DECL
2391 && TREE_PUBLIC (var_decl)
2392 && !have_global_bss_p ())
2393 DECL_COMMON (var_decl) = 1;
2395 /* For an external constant whose initializer is not absolute, do not emit
2396 debug info. In DWARF this would mean a global relocation in a read-only
2397 section which runs afoul of the PE-COFF run-time relocation mechanism. */
2398 if (extern_flag
2399 && constant_p
2400 && var_init
2401 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
2402 != null_pointer_node)
2403 DECL_IGNORED_P (var_decl) = 1;
2405 if (TYPE_VOLATILE (type))
2406 TREE_SIDE_EFFECTS (var_decl) = TREE_THIS_VOLATILE (var_decl) = 1;
2408 if (TREE_SIDE_EFFECTS (var_decl))
2409 TREE_ADDRESSABLE (var_decl) = 1;
2411 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2412 if (TREE_CODE (var_decl) == VAR_DECL)
2413 process_attributes (&var_decl, &attr_list, true, gnat_node);
2415 /* Add this decl to the current binding level. */
2416 gnat_pushdecl (var_decl, gnat_node);
2418 if (TREE_CODE (var_decl) == VAR_DECL)
2420 if (asm_name)
2421 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
2423 if (global_bindings_p ())
2424 rest_of_decl_compilation (var_decl, true, 0);
2427 return var_decl;
2430 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2432 static bool
2433 aggregate_type_contains_array_p (tree type)
2435 switch (TREE_CODE (type))
2437 case RECORD_TYPE:
2438 case UNION_TYPE:
2439 case QUAL_UNION_TYPE:
2441 tree field;
2442 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2443 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2444 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2445 return true;
2446 return false;
2449 case ARRAY_TYPE:
2450 return true;
2452 default:
2453 gcc_unreachable ();
2457 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
2458 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
2459 nonzero, it is the specified size of the field. If POS is nonzero, it is
2460 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
2461 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2462 means we are allowed to take the address of the field; if it is negative,
2463 we should not make a bitfield, which is used by make_aligning_type. */
2465 tree
2466 create_field_decl (tree field_name, tree field_type, tree record_type,
2467 tree size, tree pos, int packed, int addressable)
2469 tree field_decl = build_decl (input_location,
2470 FIELD_DECL, field_name, field_type);
2472 DECL_CONTEXT (field_decl) = record_type;
2473 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
2475 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2476 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2477 Likewise for an aggregate without specified position that contains an
2478 array, because in this case slices of variable length of this array
2479 must be handled by GCC and variable-sized objects need to be aligned
2480 to at least a byte boundary. */
2481 if (packed && (TYPE_MODE (field_type) == BLKmode
2482 || (!pos
2483 && AGGREGATE_TYPE_P (field_type)
2484 && aggregate_type_contains_array_p (field_type))))
2485 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
2487 /* If a size is specified, use it. Otherwise, if the record type is packed
2488 compute a size to use, which may differ from the object's natural size.
2489 We always set a size in this case to trigger the checks for bitfield
2490 creation below, which is typically required when no position has been
2491 specified. */
2492 if (size)
2493 size = convert (bitsizetype, size);
2494 else if (packed == 1)
2496 size = rm_size (field_type);
2497 if (TYPE_MODE (field_type) == BLKmode)
2498 size = round_up (size, BITS_PER_UNIT);
2501 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2502 specified for two reasons: first if the size differs from the natural
2503 size. Second, if the alignment is insufficient. There are a number of
2504 ways the latter can be true.
2506 We never make a bitfield if the type of the field has a nonconstant size,
2507 because no such entity requiring bitfield operations should reach here.
2509 We do *preventively* make a bitfield when there might be the need for it
2510 but we don't have all the necessary information to decide, as is the case
2511 of a field with no specified position in a packed record.
2513 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2514 in layout_decl or finish_record_type to clear the bit_field indication if
2515 it is in fact not needed. */
2516 if (addressable >= 0
2517 && size
2518 && TREE_CODE (size) == INTEGER_CST
2519 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
2520 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
2521 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
2522 || packed
2523 || (TYPE_ALIGN (record_type) != 0
2524 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
2526 DECL_BIT_FIELD (field_decl) = 1;
2527 DECL_SIZE (field_decl) = size;
2528 if (!packed && !pos)
2530 if (TYPE_ALIGN (record_type) != 0
2531 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
2532 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
2533 else
2534 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2538 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2540 /* Bump the alignment if need be, either for bitfield/packing purposes or
2541 to satisfy the type requirements if no such consideration applies. When
2542 we get the alignment from the type, indicate if this is from an explicit
2543 user request, which prevents stor-layout from lowering it later on. */
2545 unsigned int bit_align
2546 = (DECL_BIT_FIELD (field_decl) ? 1
2547 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
2549 if (bit_align > DECL_ALIGN (field_decl))
2550 DECL_ALIGN (field_decl) = bit_align;
2551 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
2553 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2554 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
2558 if (pos)
2560 /* We need to pass in the alignment the DECL is known to have.
2561 This is the lowest-order bit set in POS, but no more than
2562 the alignment of the record, if one is specified. Note
2563 that an alignment of 0 is taken as infinite. */
2564 unsigned int known_align;
2566 if (tree_fits_uhwi_p (pos))
2567 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
2568 else
2569 known_align = BITS_PER_UNIT;
2571 if (TYPE_ALIGN (record_type)
2572 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2573 known_align = TYPE_ALIGN (record_type);
2575 layout_decl (field_decl, known_align);
2576 SET_DECL_OFFSET_ALIGN (field_decl,
2577 tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
2578 : BITS_PER_UNIT);
2579 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2580 &DECL_FIELD_BIT_OFFSET (field_decl),
2581 DECL_OFFSET_ALIGN (field_decl), pos);
2584 /* In addition to what our caller says, claim the field is addressable if we
2585 know that its type is not suitable.
2587 The field may also be "technically" nonaddressable, meaning that even if
2588 we attempt to take the field's address we will actually get the address
2589 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2590 value we have at this point is not accurate enough, so we don't account
2591 for this here and let finish_record_type decide. */
2592 if (!addressable && !type_for_nonaliased_component_p (field_type))
2593 addressable = 1;
2595 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2597 return field_decl;
2600 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
2601 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
2602 (either an In parameter or an address of a pass-by-ref parameter). */
2604 tree
2605 create_param_decl (tree param_name, tree param_type, bool readonly)
2607 tree param_decl = build_decl (input_location,
2608 PARM_DECL, param_name, param_type);
2610 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2611 can lead to various ABI violations. */
2612 if (targetm.calls.promote_prototypes (NULL_TREE)
2613 && INTEGRAL_TYPE_P (param_type)
2614 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
2616 /* We have to be careful about biased types here. Make a subtype
2617 of integer_type_node with the proper biasing. */
2618 if (TREE_CODE (param_type) == INTEGER_TYPE
2619 && TYPE_BIASED_REPRESENTATION_P (param_type))
2621 tree subtype
2622 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2623 TREE_TYPE (subtype) = integer_type_node;
2624 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2625 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
2626 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
2627 param_type = subtype;
2629 else
2630 param_type = integer_type_node;
2633 DECL_ARG_TYPE (param_decl) = param_type;
2634 TREE_READONLY (param_decl) = readonly;
2635 return param_decl;
/* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
   a TYPE.  If IN_PLACE is true, the tree pointed to by NODE should not be
   changed.  GNAT_NODE is used for the position of error messages.  */

void
process_attributes (tree *node, struct attrib **attr_list, bool in_place,
		    Node_Id gnat_node)
{
  struct attrib *attr;

  for (attr = *attr_list; attr; attr = attr->next)
    switch (attr->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
	/* Point input_location at GNAT_NODE so that any diagnostic issued
	   by decl_attributes is properly located.  */
	Sloc_to_locus (Sloc (gnat_node), &input_location);
	decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
			 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
	break;

      case ATTR_LINK_ALIAS:
	/* Only define the alias for non-external declarations.  */
	if (!DECL_EXTERNAL (*node))
	  {
	    TREE_STATIC (*node) = 1;
	    assemble_alias (*node, attr->name);
	  }
	break;

      case ATTR_WEAK_EXTERNAL:
	if (SUPPORTS_WEAK)
	  declare_weak (*node);
	else
	  post_error ("?weak declarations not supported on this target",
		      attr->error_point);
	break;

      case ATTR_LINK_SECTION:
	if (targetm_common.have_named_sections)
	  {
	    set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
	    /* A decl placed in a specific section cannot be common.  */
	    DECL_COMMON (*node) = 0;
	  }
	else
	  post_error ("?section attributes are not supported for this target",
		      attr->error_point);
	break;

      case ATTR_LINK_CONSTRUCTOR:
	DECL_STATIC_CONSTRUCTOR (*node) = 1;
	TREE_USED (*node) = 1;
	break;

      case ATTR_LINK_DESTRUCTOR:
	DECL_STATIC_DESTRUCTOR (*node) = 1;
	TREE_USED (*node) = 1;
	break;

      case ATTR_THREAD_LOCAL_STORAGE:
	set_decl_tls_model (*node, decl_default_tls_model (*node));
	/* A thread-local decl cannot be common.  */
	DECL_COMMON (*node) = 0;
	break;
      }

  /* The list has been entirely consumed; clear it for the caller.  */
  *attr_list = NULL;
}
/* Record DECL as a global renaming pointer.  */

void
record_global_renaming_pointer (tree decl)
{
  /* DECL must actually carry a renamed object and must not be a loop
     parameter (loop parameters are handled separately).  */
  gcc_assert (!DECL_LOOP_PARM_P (decl) && DECL_RENAMED_OBJECT (decl));
  vec_safe_push (global_renaming_pointers, decl);
}
/* Invalidate the global renaming pointers that are not constant, lest their
   renamed object contains SAVE_EXPRs tied to an elaboration routine.  Note
   that we should not blindly invalidate everything here because of the need
   to propagate constant values through renaming.  */

void
invalidate_global_renaming_pointers (void)
{
  unsigned int i;
  tree iter;

  /* Nothing was recorded, nothing to do.  */
  if (global_renaming_pointers == NULL)
    return;

  /* Clear the renamed object of every non-constant renaming.  */
  FOR_EACH_VEC_ELT (*global_renaming_pointers, i, iter)
    if (!TREE_CONSTANT (DECL_RENAMED_OBJECT (iter)))
      SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);

  /* The vector will not be used again, so release it.  */
  vec_free (global_renaming_pointers);
}
2733 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2734 a power of 2. */
2736 bool
2737 value_factor_p (tree value, HOST_WIDE_INT factor)
2739 if (tree_fits_uhwi_p (value))
2740 return tree_to_uhwi (value) % factor == 0;
2742 if (TREE_CODE (value) == MULT_EXPR)
2743 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2744 || value_factor_p (TREE_OPERAND (value, 1), factor));
2746 return false;
/* Return whether GNAT_NODE is a defining identifier for a renaming that comes
   from the parameter association for the instantiation of a generic.  We do
   not want to emit source location for them: the code generated for their
   initialization is likely to disturb debugging.  */

bool
renaming_from_generic_instantiation_p (Node_Id gnat_node)
{
  /* Only compiler-generated object renamings are candidates.  */
  if (Nkind (gnat_node) != N_Defining_Identifier
      || !IN (Ekind (gnat_node), Object_Kind)
      || Comes_From_Source (gnat_node)
      || !Present (Renamed_Object (gnat_node)))
    return false;

  /* Get the object declaration of the renamed object, if any and if the
     renamed object is a mere identifier.  */
  gnat_node = Renamed_Object (gnat_node);
  if (Nkind (gnat_node) != N_Identifier)
    return false;

  gnat_node = Entity (gnat_node);
  if (!Present (Parent (gnat_node)))
    return false;

  /* The renaming comes from a generic instantiation iff the declaration of
     the renamed entity has a corresponding generic association.  */
  gnat_node = Parent (gnat_node);
  return
    (Present (gnat_node)
     && Nkind (gnat_node) == N_Object_Declaration
     && Present (Corresponding_Generic_Association (gnat_node)));
}
/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
   feed it with the elaboration of GNAT_SCOPE.  */

static struct deferred_decl_context_node *
add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
{
  struct deferred_decl_context_node *new_node;

  /* Allocate a fresh node and push it at the head of the global queue;
     process_deferred_decl_context is responsible for freeing it.  */
  new_node
    = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
  new_node->decl = decl;
  new_node->gnat_scope = gnat_scope;
  new_node->force_global = force_global;
  new_node->types.create (1);
  new_node->next = deferred_decl_context_queue;
  deferred_decl_context_queue = new_node;
  return new_node;
}
2799 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
2800 feed it with the DECL_CONTEXT computed as part of N as soon as it is
2801 computed. */
2803 static void
2804 add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
2806 n->types.safe_push (type);
/* Get the GENERIC node corresponding to GNAT_SCOPE, if available.  Return
   NULL_TREE if it is not available.  */

static tree
compute_deferred_decl_context (Entity_Id gnat_scope)
{
  tree context;

  /* Nothing to return until GNAT_SCOPE has been elaborated.  */
  if (present_gnu_tree (gnat_scope))
    context = get_gnu_tree (gnat_scope);
  else
    return NULL_TREE;

  if (TREE_CODE (context) == TYPE_DECL)
    {
      const tree context_type = TREE_TYPE (context);

      /* Skip dummy types: only the final ones can appear in the context
	 chain.  */
      if (TYPE_DUMMY_P (context_type))
	return NULL_TREE;

      /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
	 chain.  */
      else
	context = context_type;
    }

  return context;
}
/* Try to process all deferred nodes in the queue.  Keep in the queue the ones
   that cannot be processed yet, remove the other ones.  If FORCE is true,
   force the processing for all nodes, use the global context when nodes don't
   have a GNU translation.  */

void
process_deferred_decl_context (bool force)
{
  /* IT always points to the link (queue head or a node's NEXT field) that
     designates the node under examination, so removal is a single store.  */
  struct deferred_decl_context_node **it = &deferred_decl_context_queue;
  struct deferred_decl_context_node *node;

  while (*it != NULL)
    {
      bool processed = false;
      tree context = NULL_TREE;
      Entity_Id gnat_scope;

      node = *it;

      /* If FORCE, get the innermost elaborated scope.  Otherwise, just try to
	 get the first scope.  */
      gnat_scope = node->gnat_scope;
      while (Present (gnat_scope))
	{
	  context = compute_deferred_decl_context (gnat_scope);
	  if (!force || context != NULL_TREE)
	    break;
	  gnat_scope = get_debug_scope (gnat_scope, NULL);
	}

      /* Imported declarations must not be in a local context (i.e. not inside
	 a function).  Walk up the context chain and assert that no function
	 is found.  */
      if (context != NULL_TREE && node->force_global > 0)
	{
	  tree ctx = context;

	  while (ctx != NULL_TREE)
	    {
	      gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
	      ctx = (DECL_P (ctx))
		    ? DECL_CONTEXT (ctx)
		    : TYPE_CONTEXT (ctx);
	    }
	}

      /* If FORCE, we want to get rid of all nodes in the queue: in case there
	 was no elaborated scope, use the global context.  */
      if (force && context == NULL_TREE)
	context = get_global_context ();

      if (context != NULL_TREE)
	{
	  tree t;
	  int i;

	  DECL_CONTEXT (node->decl) = context;

	  /* Propagate it to the TYPE_CONTEXT attributes of the requested
	     ..._TYPE nodes.  */
	  FOR_EACH_VEC_ELT (node->types, i, t)
	    {
	      gnat_set_type_context (t, context);
	    }
	  processed = true;
	}

      /* If this node has been successfully processed, remove it from the
	 queue.  Then move to the next node.  */
      if (processed)
	{
	  *it = node->next;
	  node->types.release ();
	  free (node);
	}
      else
	it = &node->next;
    }
}
2920 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
2922 static unsigned int
2923 scale_by_factor_of (tree expr, unsigned int value)
2925 expr = remove_conversions (expr, true);
2927 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
2928 corresponding to the number of trailing zeros of the mask. */
2929 if (TREE_CODE (expr) == BIT_AND_EXPR
2930 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
2932 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
2933 unsigned int i = 0;
2935 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
2937 mask >>= 1;
2938 value *= 2;
2939 i++;
2943 return value;
/* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these 2 fields are laid out in such a way that no gap
   exist between the end of PREV_FIELD and the beginning of CURR_FIELD.  OFFSET
   is the distance in bits between the end of PREV_FIELD and the starting
   position of CURR_FIELD.  It is ignored if null.  */

static bool
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap */
  if (!prev_field)
    return false;

  /* If the previous field is a union type, then return false: The only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return false;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not null.  */
  if (offset && tree_fits_uhwi_p (offset))
    return !integer_zerop (offset);

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not multiple of the current field alignment.  */
  if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
      && tree_fits_uhwi_p (bit_position (prev_field)))
    return ((tree_to_uhwi (bit_position (prev_field))
	     + tree_to_uhwi (DECL_SIZE (prev_field)))
	    % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return false;

  /* Fallback, return that there may be a potential gap */
  return true;
}
2992 /* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
2993 of the decl. */
2995 tree
2996 create_label_decl (tree label_name, Node_Id gnat_node)
2998 tree label_decl
2999 = build_decl (input_location, LABEL_DECL, label_name, void_type_node);
3001 DECL_MODE (label_decl) = VOIDmode;
3003 /* Add this decl to the current binding level. */
3004 gnat_pushdecl (label_decl, gnat_node);
3006 return label_decl;
/* Return a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the DECL_CHAIN field).

   INLINE_STATUS, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
   used to set the appropriate fields in the FUNCTION_DECL.  GNAT_NODE is
   used for the position of the decl.  */

tree
create_subprog_decl (tree subprog_name, tree asm_name, tree subprog_type,
		     tree param_decl_list, enum inline_status_t inline_status,
		     bool public_flag, bool extern_flag, bool artificial_flag,
		     struct attrib *attr_list, Node_Id gnat_node)
{
  tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
				  subprog_type);
  tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
				 TREE_TYPE (subprog_type));
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;

  /* If this is a non-inline function nested inside an inlined external
     function, we cannot honor both requests without cloning the nested
     function in the current unit since it is private to the other unit.
     We could inline the nested function as well but it's probably better
     to err on the side of too little inlining.  */
  if ((inline_status == is_suppressed || inline_status == is_disabled)
      && !public_flag
      && current_function_decl
      && DECL_DECLARED_INLINE_P (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))
    DECL_DECLARED_INLINE_P (current_function_decl) = 0;

  DECL_ARTIFICIAL (subprog_decl) = artificial_flag;
  DECL_EXTERNAL (subprog_decl) = extern_flag;

  /* Translate INLINE_STATUS into the middle-end inlining flags.  */
  switch (inline_status)
    {
    case is_suppressed:
      DECL_UNINLINABLE (subprog_decl) = 1;
      break;

    case is_disabled:
      break;

    case is_required:
      if (Back_End_Inlining)
	decl_attributes (&subprog_decl,
			 tree_cons (get_identifier ("always_inline"),
				    NULL_TREE, NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);

      /* ... fall through ... */

    case is_enabled:
      DECL_DECLARED_INLINE_P (subprog_decl) = 1;
      /* Do not warn about failure to inline artificial subprograms.  */
      DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_flag;
      break;

    default:
      gcc_unreachable ();
    }

  TREE_PUBLIC (subprog_decl) = public_flag;
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);

  /* Build the artificial RESULT_DECL holding the function's return value.  */
  DECL_ARTIFICIAL (result_decl) = 1;
  DECL_IGNORED_P (result_decl) = 1;
  DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
  DECL_RESULT (subprog_decl) = result_decl;

  if (asm_name)
    {
      SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

      /* The expand_main_function circuitry expects "main_identifier_node" to
	 designate the DECL_NAME of the 'main' entry point, in turn expected
	 to be declared as the "main" function literally by default.  Ada
	 program entry points are typically declared with a different name
	 within the binder generated file, exported as 'main' to satisfy the
	 system expectations.  Force main_identifier_node in this case.  */
      if (asm_name == main_identifier_node)
	DECL_NAME (subprog_decl) = main_identifier_node;
    }

  process_attributes (&subprog_decl, &attr_list, true, gnat_node);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
}
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

void
begin_subprog_body (tree subprog_decl)
{
  tree param_decl;

  announce_function (subprog_decl);

  /* This function is being defined.  */
  TREE_STATIC (subprog_decl) = 1;

  /* The body will be translated with this function as the current one.  */
  current_function_decl = subprog_decl;

  /* Enter a new binding level and show that all the parameters belong to
     this function.  */
  gnat_pushlevel ();

  for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
       param_decl = DECL_CHAIN (param_decl))
    DECL_CONTEXT (param_decl) = subprog_decl;

  make_decl_rtl (subprog_decl);
}
/* Finish translating the current subprogram and set its BODY.  */

void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Attach the BLOCK for this level to the function and pop the level.  */
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* Mark the RESULT_DECL as being in this subprogram. */
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  /* The body should be a BIND_EXPR whose BLOCK is the top-level one.  */
  if (TREE_CODE (body) == BIND_EXPR)
    {
      BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
      DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
    }

  DECL_SAVED_TREE (fndecl) = body;

  /* Restore the enclosing function, if any, as the current one.  */
  current_function_decl = decl_function_context (fndecl);
}
/* Wrap up compilation of SUBPROG_DECL, a subprogram body.  */

void
rest_of_subprog_body_compilation (tree subprog_decl)
{
  /* We cannot track the location of errors past this point.  */
  error_gnat_node = Empty;

  /* If we're only annotating types, don't actually compile this function.  */
  if (type_annotate_only)
    return;

  /* Dump functions before gimplification.  */
  dump_function (TDI_original, subprog_decl);

  /* Top-level functions are handed to the cgraph for compilation; nested
     ones are merely registered so the parent knows about them.  */
  if (!decl_function_context (subprog_decl))
    cgraph_node::finalize_function (subprog_decl, false);
  else
    /* Register this function with cgraph just far enough to get it
       added to our parent's nested function list.  */
    (void) cgraph_node::get_create (subprog_decl);
}
3184 tree
3185 gnat_builtin_function (tree decl)
3187 gnat_pushdecl (decl, Empty);
3188 return decl;
/* Return an integer type with the number of bits of precision given by
   PRECISION.  UNSIGNEDP is nonzero if the type is unsigned; otherwise
   it is a signed type.  */

tree
gnat_type_for_size (unsigned precision, int unsignedp)
{
  tree t;
  char type_name[20];

  /* Reuse a previously-built type when the precision fits in the cache.  */
  if (precision <= 2 * MAX_BITS_PER_WORD
      && signed_and_unsigned_types[precision][unsignedp])
    return signed_and_unsigned_types[precision][unsignedp];

  if (unsignedp)
    t = make_unsigned_type (precision);
  else
    t = make_signed_type (precision);

  /* Cache the new type for later calls.  */
  if (precision <= 2 * MAX_BITS_PER_WORD)
    signed_and_unsigned_types[precision][unsignedp] = t;

  /* Give the type a name of the form [UN]SIGNED_<precision>.  */
  if (!TYPE_NAME (t))
    {
      sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
/* Likewise for floating-point types.  */

static tree
float_type_for_precision (int precision, machine_mode mode)
{
  tree t;
  char type_name[20];

  /* One type per machine mode is cached in float_types.  */
  if (float_types[(int) mode])
    return float_types[(int) mode];

  float_types[(int) mode] = t = make_node (REAL_TYPE);
  TYPE_PRECISION (t) = precision;
  layout_type (t);

  /* PRECISION must select exactly MODE after layout.  */
  gcc_assert (TYPE_MODE (t) == mode);
  if (!TYPE_NAME (t))
    {
      sprintf (type_name, "FLOAT_%d", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
3247 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
3248 an unsigned type; otherwise a signed type is returned. */
3250 tree
3251 gnat_type_for_mode (machine_mode mode, int unsignedp)
3253 if (mode == BLKmode)
3254 return NULL_TREE;
3256 if (mode == VOIDmode)
3257 return void_type_node;
3259 if (COMPLEX_MODE_P (mode))
3260 return NULL_TREE;
3262 if (SCALAR_FLOAT_MODE_P (mode))
3263 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
3265 if (SCALAR_INT_MODE_P (mode))
3266 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
3268 if (VECTOR_MODE_P (mode))
3270 machine_mode inner_mode = GET_MODE_INNER (mode);
3271 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3272 if (inner_type)
3273 return build_vector_type_for_mode (inner_type, mode);
3276 return NULL_TREE;
/* Return the unsigned version of a TYPE_NODE, a scalar type.  */

tree
gnat_unsigned_type (tree type_node)
{
  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);

  /* For modular types, make a private copy carrying the TREE_TYPE link of
     the original — presumably so the modular information stays reachable
     from the result (NOTE(review): confirm against callers).  */
  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
    {
      type = copy_node (type);
      TREE_TYPE (type) = type_node;
    }
  else if (TREE_TYPE (type_node)
	   && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
	   && TYPE_MODULAR_P (TREE_TYPE (type_node)))
    {
      type = copy_node (type);
      TREE_TYPE (type) = TREE_TYPE (type_node);
    }

  return type;
}
/* Return the signed version of a TYPE_NODE, a scalar type.  */

tree
gnat_signed_type (tree type_node)
{
  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);

  /* Mirror of gnat_unsigned_type: for modular types, make a private copy
     carrying the TREE_TYPE link of the original.  */
  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
    {
      type = copy_node (type);
      TREE_TYPE (type) = type_node;
    }
  else if (TREE_TYPE (type_node)
	   && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
	   && TYPE_MODULAR_P (TREE_TYPE (type_node)))
    {
      type = copy_node (type);
      TREE_TYPE (type) = TREE_TYPE (type_node);
    }

  return type;
}
/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
   transparently converted to each other.  */

int
gnat_types_compatible_p (tree t1, tree t2)
{
  enum tree_code code;

  /* This is the default criterion.  */
  if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
    return 1;

  /* We only check structural equivalence here.  */
  if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
    return 0;

  /* Vector types are also compatible if they have the same number of subparts
     and the same form of (scalar) element type.  */
  if (code == VECTOR_TYPE
      && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
      && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
      && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
    return 1;

  /* Array types are also compatible if they are constrained and have the same
     domain(s) and the same component type.  */
  if (code == ARRAY_TYPE
      && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
	  || (TYPE_DOMAIN (t1)
	      && TYPE_DOMAIN (t2)
	      && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
	      && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
      && (TREE_TYPE (t1) == TREE_TYPE (t2)
	  || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
	      && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
    return 1;

  return 0;
}
3367 /* Return true if EXPR is a useless type conversion. */
3369 bool
3370 gnat_useless_type_conversion (tree expr)
3372 if (CONVERT_EXPR_P (expr)
3373 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3374 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3375 return gnat_types_compatible_p (TREE_TYPE (expr),
3376 TREE_TYPE (TREE_OPERAND (expr, 0)));
3378 return false;
3381 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3383 bool
3384 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3385 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3387 return TYPE_CI_CO_LIST (t) == cico_list
3388 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3389 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3390 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
	{
	  tree t, *argarray;
	  int n, i;

	  /* Prefer inlining the call so the result can be simplified.  */
	  t = maybe_inline_call_in_expr (exp);
	  if (t)
	    return max_size (t, max_p);

	  /* Otherwise rebuild the call with maximized/minimized args.  */
	  n = call_expr_nargs (exp);
	  gcc_assert (n > 0);
	  argarray = XALLOCAVEC (tree, n);
	  for (i = 0; i < n; i++)
	    argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
	  return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
	}
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (CONTAINS_PLACEHOLDER_P (exp))
	{
	  tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
	  tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
	  return max_size (convert (get_base_type (val_type), val), true);
	}

      return exp;

    case tcc_comparison:
      /* A size comparison is conservatively bounded by 0 and 1.  */
      return max_p ? size_one_node : size_zero_node;

    case tcc_unary:
      if (code == NON_LVALUE_EXPR)
	return max_size (TREE_OPERAND (exp, 0), max_p);

      /* Negation flips the direction of the bound.  */
      return fold_build1 (code, type,
			  max_size (TREE_OPERAND (exp, 0),
				    code == NEGATE_EXPR ? !max_p : max_p));

    case tcc_binary:
      {
	tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	tree rhs = max_size (TREE_OPERAND (exp, 1),
			     code == MINUS_EXPR ? !max_p : max_p);

	/* Special-case wanting the maximum value of a MIN_EXPR.
	   In that case, if one side overflows, return the other.  */
	if (max_p && code == MIN_EXPR)
	  {
	    if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
	      return lhs;

	    if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
	      return rhs;
	  }

	/* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	   overflowing and the RHS a variable.  */
	if ((code == MINUS_EXPR || code == PLUS_EXPR)
	    && TREE_CODE (lhs) == INTEGER_CST
	    && TREE_OVERFLOW (lhs)
	    && !TREE_CONSTANT (rhs))
	  return lhs;

	return size_binop (code, lhs, rhs);
      }

    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == SAVE_EXPR)
	    return exp;

	  return fold_build1 (code, type,
			      max_size (TREE_OPERAND (exp, 0), max_p));

	case 2:
	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  return fold_build2 (code, type,
			      max_size (TREE_OPERAND (exp, 0), max_p),
			      max_size (TREE_OPERAND (exp, 1), max_p));

	case 3:
	  /* For a conditional, bound by the extremum of the two arms.  */
	  if (code == COND_EXPR)
	    return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p));

	default:
	  break;
	}

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  vec<constructor_elt, va_gc> *template_elts = NULL;
  tree bound_list = NULL_TREE;
  tree field;

  /* Peel off padding and justified modular wrappers to reach the actual
     array type.  */
  while (TREE_CODE (array_type) == RECORD_TYPE
	 && (TYPE_PADDING_P (array_type)
	     || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.  Template fields come
     in (min, max) pairs, hence the double DECL_CHAIN step.  */

  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = DECL_CHAIN (DECL_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gcc_unreachable ();

      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
      CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
    }

  return gnat_build_constructor (template_type, template_elts);
}
3578 /* Return true if TYPE is suitable for the element type of a vector. */
3580 static bool
3581 type_for_vector_element_p (tree type)
3583 machine_mode mode;
3585 if (!INTEGRAL_TYPE_P (type)
3586 && !SCALAR_FLOAT_TYPE_P (type)
3587 && !FIXED_POINT_TYPE_P (type))
3588 return false;
3590 mode = TYPE_MODE (type);
3591 if (GET_MODE_CLASS (mode) != MODE_INT
3592 && !SCALAR_FLOAT_MODE_P (mode)
3593 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3594 return false;
3596 return true;
/* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
   this is not possible.  If ATTRIBUTE is non-zero, we are processing the
   attribute declaration and want to issue error messages on failure.  */

static tree
build_vector_type_for_size (tree inner_type, tree size, tree attribute)
{
  unsigned HOST_WIDE_INT size_int, inner_size_int;
  int nunits;

  /* Silently punt on variable sizes.  We can't make vector types for them,
     need to ignore them on front-end generated subtypes of unconstrained
     base types, and this attribute is for binding implementors, not end
     users, so we should never get there from legitimate explicit uses.  */
  if (!tree_fits_uhwi_p (size))
    return NULL_TREE;
  size_int = tree_to_uhwi (size);

  /* The element type must be vectorizable.  */
  if (!type_for_vector_element_p (inner_type))
    {
      if (attribute)
	error ("invalid element type for attribute %qs",
	       IDENTIFIER_POINTER (attribute));
      return NULL_TREE;
    }
  inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));

  /* The vector size must be an exact multiple of the element size.  */
  if (size_int % inner_size_int)
    {
      if (attribute)
	error ("vector size not an integral multiple of component size");
      return NULL_TREE;
    }

  if (size_int == 0)
    {
      if (attribute)
	error ("zero vector size");
      return NULL_TREE;
    }

  /* The number of elements must be a power of two.  */
  nunits = size_int / inner_size_int;
  if (nunits & (nunits - 1))
    {
      if (attribute)
	error ("number of components of vector not a power of two");
      return NULL_TREE;
    }

  return build_vector_type (inner_type, nunits);
}
3651 /* Return a vector type whose representative array type is ARRAY_TYPE, or
3652 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3653 processing the attribute and want to issue error messages on failure. */
3655 static tree
3656 build_vector_type_for_array (tree array_type, tree attribute)
3658 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3659 TYPE_SIZE_UNIT (array_type),
3660 attribute);
3661 if (!vector_type)
3662 return NULL_TREE;
3664 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3665 return vector_type;
/* Build a type to be used to represent an aliased object whose nominal type
   is an unconstrained array.  This consists of a RECORD_TYPE containing a
   field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
   If ARRAY_TYPE is that of an unconstrained array, this is used to represent
   an arbitrary unconstrained object.  Use NAME as the name of the record.
   DEBUG_INFO_P is true if we need to write debug information for the type.  */

tree
build_unc_object_type (tree template_type, tree object_type, tree name,
		       bool debug_info_p)
{
  tree decl;
  tree type = make_node (RECORD_TYPE);
  /* The bounds (template) come first, then the array data itself.  */
  tree template_field
    = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
			 NULL_TREE, NULL_TREE, 0, 1);
  tree array_field
    = create_field_decl (get_identifier ("ARRAY"), object_type, type,
			 NULL_TREE, NULL_TREE, 0, 1);

  TYPE_NAME (type) = name;
  TYPE_CONTAINS_TEMPLATE_P (type) = 1;
  DECL_CHAIN (template_field) = array_field;
  finish_record_type (type, template_field, 0, true);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  decl = create_type_decl (name, type, true, debug_info_p, Empty);

  /* template_type will not be used elsewhere than here, so to keep the debug
     info clean and in order to avoid scoping issues, make decl its
     context.  */
  gnat_set_type_context (template_type, decl);

  return type;
}
3705 /* Same, taking a thin or fat pointer type instead of a template type. */
3707 tree
3708 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3709 tree name, bool debug_info_p)
3711 tree template_type;
3713 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3715 template_type
3716 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3717 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3718 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3720 return
3721 build_unc_object_type (template_type, object_type, name, debug_info_p);
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
   In the normal case this is just two adjustments, but we have more to
   do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree t;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
      update_pointer_to (t, new_type);

  /* If no pointers and no references, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it; after the designated type's freeze, we get here with
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
     when the access type was processed.  We will make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy OLD_TYPE variant with different
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
     the qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
  new_type
    = build_qualified_type (new_type,
			    TYPE_QUALS (old_type) | TYPE_QUALS (new_type));

  /* If old type and new type are identical, there is nothing to do.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      tree new_ptr, new_ref;

      /* If pointer or reference already points to new type, nothing to do.
	 This can happen as update_pointer_to can be invoked multiple times
	 on the same couple of types because of the type variants.  */
      if ((ptr && TREE_TYPE (ptr) == new_type)
	  || (ref && TREE_TYPE (ref) == new_type))
	return;

      /* Chain PTR and its variants at the end of NEW_TYPE's existing
	 pointer-to chain, so nothing already recorded is lost.  */
      new_ptr = TYPE_POINTER_TO (new_type);
      if (new_ptr)
	{
	  while (TYPE_NEXT_PTR_TO (new_ptr))
	    new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
	  TYPE_NEXT_PTR_TO (new_ptr) = ptr;
	}
      else
	TYPE_POINTER_TO (new_type) = ptr;

      /* Now adjust them: every pointer (and each of its variants) must
	 designate NEW_TYPE.  A recorded null-bounds constant also embeds
	 the designated type, so fix it up as well.  */
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
	  {
	    TREE_TYPE (t) = new_type;
	    if (TYPE_NULL_BOUNDS (t))
	      TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
	  }

      /* Chain REF and its variants at the end, as above for pointers.  */
      new_ref = TYPE_REFERENCE_TO (new_type);
      if (new_ref)
	{
	  while (TYPE_NEXT_REF_TO (new_ref))
	    new_ref = TYPE_NEXT_REF_TO (new_ref);
	  TYPE_NEXT_REF_TO (new_ref) = ref;
	}
      else
	TYPE_REFERENCE_TO (new_type) = ref;

      /* Now adjust them.  */
      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
	for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
	  TREE_TYPE (t) = new_type;

      /* OLD_TYPE no longer owns these chains.  */
      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }

  /* Now deal with the unconstrained array case.  In this case the pointer
     is actually a record where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.
     Likewise for the pointer to the object record (thin pointer).  */
  else
    {
      tree new_ptr = TYPE_POINTER_TO (new_type);

      gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));

      /* If PTR already points to NEW_TYPE, nothing to do.  This can happen
	 since update_pointer_to can be invoked multiple times on the same
	 couple of types because of the type variants.  */
      if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
	return;

      /* First field of the fat pointer: pointer to the array data.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));

      /* Second field: pointer to the bounds (template).  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
	 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
			 TYPE_OBJECT_RECORD_TYPE (new_type));

      TYPE_POINTER_TO (old_type) = NULL_TREE;
    }
}
/* Convert EXPR, a pointer to a constrained array, into a pointer to an
   unconstrained one.  This involves making or finding a template.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  /* TYPE is a fat pointer record: first field points to the array data,
     second field points to the bounds template.  */
  tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
  tree etype = TREE_TYPE (expr);
  tree template_addr;
  vec<constructor_elt, va_gc> *v;
  vec_alloc (v, 2);

  /* If EXPR is null, make a fat pointer that contains a null pointer to the
     array (compare_fat_pointers ensures that this is the full discriminant)
     and a valid pointer to the bounds.  This latter property is necessary
     since the compiler can hoist the load of the bounds done through it.  */
  if (integer_zerop (expr))
    {
      tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      tree null_bounds, t;

      /* Reuse the cached null-bounds object if one was already made for
	 this pointer-to-template type.  */
      if (TYPE_NULL_BOUNDS (ptr_template_type))
	null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
      else
	{
	  /* The template type can still be dummy at this point so we build an
	     empty constructor.  The middle-end will fill it in with zeros.  */
	  t = build_constructor (template_type,
				 NULL);
	  TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
	  null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
	  SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
	}

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      fold_convert (p_array_type, null_pointer_node));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
      t = build_constructor (type, v);
      /* Do not set TREE_CONSTANT so as to force T to static memory.  */
      TREE_CONSTANT (t) = 0;
      TREE_STATIC (t) = 1;

      return t;
    }

  /* If EXPR is a thin pointer, make template and data from the record.  */
  if (TYPE_IS_THIN_POINTER_P (etype))
    {
      tree field = TYPE_FIELDS (TREE_TYPE (etype));

      /* EXPR is used twice below, so protect it against double evaluation.  */
      expr = gnat_protect_expr (expr);

      /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
	 the thin pointer value has been shifted so we shift it back to get
	 the template address.  */
      if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
	{
	  template_addr
	    = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
			       fold_build1 (NEGATE_EXPR, sizetype,
					    byte_position
					    (DECL_CHAIN (field))));
	  template_addr
	    = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
			    template_addr);
	}

      /* Otherwise we explicitly take the address of the fields.  */
      else
	{
	  expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
	  template_addr
	    = build_unary_op (ADDR_EXPR, NULL_TREE,
			      build_component_ref (expr, NULL_TREE, field,
						   false));
	  expr = build_unary_op (ADDR_EXPR, NULL_TREE,
				 build_component_ref (expr, NULL_TREE,
						      DECL_CHAIN (field),
						      false));
	}
    }

  /* Otherwise, build the constructor for the template.  */
  else
    template_addr
      = build_unary_op (ADDR_EXPR, NULL_TREE,
			build_template (template_type, TREE_TYPE (etype),
					expr));

  /* The final result is a constructor for the fat pointer.

     If EXPR is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     COMPONENT_REFs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine because it
     will only refer to the provided TEMPLATE_TYPE in this case.  */
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
  return gnat_build_constructor (type, v);
}
/* Create an expression whose value is that of EXPR,
   converted to type TYPE.  The TREE_TYPE of the value
   is always TYPE.  This function implements all reasonable
   conversions; callers should filter out those that are
   not permitted by the language being compiled.  */

tree
convert (tree type, tree expr)
{
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both input and output have padding and are of variable size, do this
     as an unchecked conversion.  Likewise if one is a mere variant of the
     other, so we avoid a pointless unpad/repad sequence.  */
  else if (code == RECORD_TYPE && ecode == RECORD_TYPE
	   && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
	   && (!TREE_CONSTANT (TYPE_SIZE (type))
	       || !TREE_CONSTANT (TYPE_SIZE (etype))
	       || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
	       || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
		  == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
    /* Fall through to the unchecked conversion at the bottom.  */
    ;

  /* If the output type has padding, convert to the inner type and make a
     constructor to build the record, unless a variable size is involved.  */
  else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
    {
      vec<constructor_elt, va_gc> *v;

      /* If we previously converted from another type and our type is
	 of variable size, remove the conversion to avoid the need for
	 variable-sized temporaries.  Likewise for a conversion between
	 original and packable version.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
	  && (!TREE_CONSTANT (TYPE_SIZE (type))
	      || (ecode == RECORD_TYPE
		  && TYPE_NAME (etype)
		     == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
	expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from expr, convert the original
	 object if we have variable size in order to avoid the need for some
	 variable-sized temporaries.  Likewise if the padding is a variant
	 of the other, so we avoid a pointless unpad/repad sequence.  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && (!TREE_CONSTANT (TYPE_SIZE (type))
	      || TYPE_MAIN_VARIANT (type)
		 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
	      || (ecode == RECORD_TYPE
		  && TYPE_NAME (etype)
		     == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
	return convert (type, TREE_OPERAND (expr, 0));

      /* If the inner type is of self-referential size and the expression type
	 is a record, do this as an unchecked conversion.  But first pad the
	 expression if possible to have the same size on both sides.  */
      if (ecode == RECORD_TYPE
	  && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
	{
	  if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
	    expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
					    false, false, false, true),
			    expr);
	  return unchecked_convert (type, expr, false);
	}

      /* If we are converting between array types with variable size, do the
	 final conversion as an unchecked conversion, again to avoid the need
	 for some variable-sized temporaries.  If valid, this conversion is
	 very likely purely technical and without real effects.  */
      if (ecode == ARRAY_TYPE
	  && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
	  && !TREE_CONSTANT (TYPE_SIZE (etype))
	  && !TREE_CONSTANT (TYPE_SIZE (type)))
	return unchecked_convert (type,
				  convert (TREE_TYPE (TYPE_FIELDS (type)),
					   expr),
				  false);

      /* Otherwise wrap the converted inner object in a one-field
	 constructor of the padding record type.  */
      vec_alloc (v, 1);
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
      return gnat_build_constructor (type, v);
    }

  /* If the input type has padding, remove it and convert to the output type.
     The conditions ordering is arranged to ensure that the output type is not
     a padding type here, as it is not clear whether the conversion would
     always be correct if this was to happen.  */
  else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
    {
      tree unpadded;

      /* If we have just converted to this padded type, just get the
	 inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR
	  && !vec_safe_is_empty (CONSTRUCTOR_ELTS (expr))
	  && (*CONSTRUCTOR_ELTS (expr))[0].index == TYPE_FIELDS (etype))
	unpadded = (*CONSTRUCTOR_ELTS (expr))[0].value;

      /* Otherwise, build an explicit component reference.  */
      else
	unpadded
	  = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);

      return convert (type, unpadded);
    }

  /* If the input is a biased type, adjust first: add back the bias
     (TYPE_MIN_VALUE) in the unbiased base type before converting.  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
				       fold_convert (TREE_TYPE (etype), expr),
				       fold_convert (TREE_TYPE (etype),
						     TYPE_MIN_VALUE (etype))));

  /* If the input is a justified modular type, we need to extract the actual
     object before converting it to any other type with the exceptions of an
     unconstrained array or of a mere type variant.  It is useful to avoid the
     extraction and conversion in the type variant case because it could end
     up replacing a VAR_DECL expr by a constructor and we might be about to
     take the address of the result.  */
  if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
      && code != UNCONSTRAINED_ARRAY_TYPE
      && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
    return convert (type, build_component_ref (expr, NULL_TREE,
					       TYPE_FIELDS (etype), false));

  /* If converting to a type that contains a template, convert to the data
     type and then build the template. */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
    {
      tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, 2);

      /* If the source already has a template, get a reference to the
	 associated array only, as we are going to rebuild a template
	 for the target type anyway.  */
      expr = maybe_unconstrained_array (expr);

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      build_template (TREE_TYPE (TYPE_FIELDS (type)),
					      obj_type, NULL_TREE));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
			      convert (obj_type, expr));
      return gnat_build_constructor (type, v);
    }

  /* There are some cases of expressions that we process specially.  */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
	 conversion in gnat_expand_expr.  NULL_EXPR does not represent
	 an actual value, so no conversion is needed.  */
      expr = copy_node (expr);
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
      /* If we are converting a STRING_CST to another constrained array type,
	 just make a new one in the proper type.  */
      if (code == ecode && AGGREGATE_TYPE_P (etype)
	  && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
	       && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}
      break;

    case VECTOR_CST:
      /* If we are converting a VECTOR_CST to a mere type variant, just make
	 a new one in the proper type.  */
      if (code == ecode && gnat_types_compatible_p (type, etype))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}

      /* ... fall through ...  (NOTE(review): no break here; the guards in
	 the CONSTRUCTOR case below all fail for a VECTOR_CST, so we reach
	 the common code after the switch — confirm this is intended.)  */

    case CONSTRUCTOR:
      /* If we are converting a CONSTRUCTOR to a mere type variant, or to
	 another padding type around the same type, just make a new one in
	 the proper type.  */
      if (code == ecode
	  && (gnat_types_compatible_p (type, etype)
	      || (code == RECORD_TYPE
		  && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
		  && TREE_TYPE (TYPE_FIELDS (type))
		     == TREE_TYPE (TYPE_FIELDS (etype)))))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
	  return expr;
	}

      /* Likewise for a conversion between original and packable version, or
	 conversion between types of the same size and with the same list of
	 fields, but we have to work harder to preserve type consistency.  */
      if (code == ecode
	  && code == RECORD_TYPE
	  && (TYPE_NAME (type) == TYPE_NAME (etype)
	      || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))

	{
	  vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
	  unsigned HOST_WIDE_INT len = vec_safe_length (e);
	  vec<constructor_elt, va_gc> *v;
	  vec_alloc (v, len);
	  tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
	  unsigned HOST_WIDE_INT idx;
	  tree index, value;

	  /* Whether we need to clear TREE_CONSTANT et al. on the output
	     constructor when we convert in place.  */
	  bool clear_constant = false;

	  /* Walk the input and output field lists in lockstep, converting
	     each constructor value to the corresponding output field.  */
	  FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
	    {
	      /* Skip the missing fields in the CONSTRUCTOR.  */
	      while (efield && field && !SAME_FIELD_P (efield, index))
		{
		  efield = DECL_CHAIN (efield);
		  field = DECL_CHAIN (field);
		}
	      /* The field must be the same.  */
	      if (!(efield && field && SAME_FIELD_P (efield, field)))
		break;
	      constructor_elt elt
		= {field, convert (TREE_TYPE (field), value)};
	      v->quick_push (elt);

	      /* If packing has made this field a bitfield and the input
		 value couldn't be emitted statically any more, we need to
		 clear TREE_CONSTANT on our output.  */
	      if (!clear_constant
		  && TREE_CONSTANT (expr)
		  && !CONSTRUCTOR_BITFIELD_P (efield)
		  && CONSTRUCTOR_BITFIELD_P (field)
		  && !initializer_constant_valid_for_bitfield_p (value))
		clear_constant = true;

	      efield = DECL_CHAIN (efield);
	      field = DECL_CHAIN (field);
	    }

	  /* If we have been able to match and convert all the input fields
	     to their output type, convert in place now.  We'll fallback to a
	     view conversion downstream otherwise.  */
	  if (idx == len)
	    {
	      expr = copy_node (expr);
	      TREE_TYPE (expr) = type;
	      CONSTRUCTOR_ELTS (expr) = v;
	      if (clear_constant)
		TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
	      return expr;
	    }
	}

      /* Likewise for a conversion between array type and vector type with a
	 compatible representative array.  */
      else if (code == VECTOR_TYPE
	       && ecode == ARRAY_TYPE
	       && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
					   etype))
	{
	  vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
	  unsigned HOST_WIDE_INT len = vec_safe_length (e);
	  vec<constructor_elt, va_gc> *v;
	  unsigned HOST_WIDE_INT ix;
	  tree value;

	  /* Build a VECTOR_CST from a *constant* array constructor.  */
	  if (TREE_CONSTANT (expr))
	    {
	      bool constant_p = true;

	      /* Iterate through elements and check if all constructor
		 elements are *_CSTs.  */
	      FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
		if (!CONSTANT_CLASS_P (value))
		  {
		    constant_p = false;
		    break;
		  }

	      if (constant_p)
		return build_vector_from_ctor (type,
					       CONSTRUCTOR_ELTS (expr));
	    }

	  /* Otherwise, build a regular vector constructor.  */
	  vec_alloc (v, len);
	  FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
	    {
	      constructor_elt elt = {NULL_TREE, value};
	      v->quick_push (elt);
	    }
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  CONSTRUCTOR_ELTS (expr) = v;
	  return expr;
	}
      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* First retrieve the underlying array.  */
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      {
	/* GCC 4.x is very sensitive to type consistency overall, and view
	   conversions thus are very frequent.  Even though just "convert"ing
	   the inner operand to the output type is fine in most cases, it
	   might expose unexpected input/output type mismatches in special
	   circumstances so we avoid such recursive calls when we can.  */
	tree op0 = TREE_OPERAND (expr, 0);

	/* If we are converting back to the original type, we can just
	   lift the input conversion.  This is a common occurrence with
	   switches back-and-forth amongst type variants.  */
	if (type == TREE_TYPE (op0))
	  return op0;

	/* Otherwise, if we're converting between two aggregate or vector
	   types, we might be allowed to substitute the VIEW_CONVERT_EXPR
	   target type in place or to just convert the inner expression.  */
	if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
	    || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
	  {
	    /* If we are converting between mere variants, we can just
	       substitute the VIEW_CONVERT_EXPR in place.  */
	    if (gnat_types_compatible_p (type, etype))
	      return build1 (VIEW_CONVERT_EXPR, type, op0);

	    /* Otherwise, we may just bypass the input view conversion unless
	       one of the types is a fat pointer,  which is handled by
	       specialized code below which relies on exact type matching.  */
	    else if (!TYPE_IS_FAT_POINTER_P (type)
		     && !TYPE_IS_FAT_POINTER_P (etype))
	      return convert (type, op0);
	  }

	break;
      }

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  /* If we are converting between two aggregate or vector types that are mere
     variants, just make a VIEW_CONVERT_EXPR.  Likewise when we are converting
     to a vector type from its representative array type.  */
  else if ((code == ecode
	    && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
	    && gnat_types_compatible_p (type, etype))
	   || (code == VECTOR_TYPE
	       && ecode == ARRAY_TYPE
	       && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
					   etype)))
    return build1 (VIEW_CONVERT_EXPR, type, expr);

  /* If we are converting between tagged types, try to upcast properly.
     Walk the chain of _parent fields looking for the target type.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
    {
      tree child_etype = etype;
      do {
	tree field = TYPE_FIELDS (child_etype);
	if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
	  return build_component_ref (expr, NULL_TREE, field, false);
	child_etype = TREE_TYPE (field);
      } while (TREE_CODE (child_etype) == RECORD_TYPE);
    }

  /* If we are converting from a smaller form of record type back to it, just
     make a VIEW_CONVERT_EXPR.  But first pad the expression to have the same
     size on both sides.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && smaller_form_type_p (etype, type))
    {
      expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
				      false, false, false, true),
		      expr);
      return build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* In all other cases of related types, make a NOP_EXPR.  */
  else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
    return fold_convert (type, expr);

  switch (code)
    {
    case VOID_TYPE:
      return fold_build1 (CONVERT_EXPR, type, expr);

    case INTEGER_TYPE:
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
	  && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
	      || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
	return unchecked_convert (type, expr, false);
      else if (TYPE_BIASED_REPRESENTATION_P (type))
	/* Subtract the bias to encode EXPR in the biased representation.  */
	return fold_convert (type,
			     fold_build2 (MINUS_EXPR, TREE_TYPE (type),
					  convert (TREE_TYPE (type), expr),
					  convert (TREE_TYPE (type),
						   TYPE_MIN_VALUE (type))));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* If we are converting an additive expression to an integer type
	 with lower precision, be wary of the optimization that can be
	 applied by convert_to_integer.  There are 2 problematic cases:
	   - if the first operand was originally of a biased type,
	     because we could be recursively called to convert it
	     to an intermediate type and thus rematerialize the
	     additive operator endlessly,
	   - if the expression contains a placeholder, because an
	     intermediate conversion that changes the sign could
	     be inserted and thus introduce an artificial overflow
	     at compile time when the placeholder is substituted.  */
      if (code == INTEGER_TYPE
	  && ecode == INTEGER_TYPE
	  && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
	  && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
	{
	  tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);

	  if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
	       && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
	      || CONTAINS_PLACEHOLDER_P (expr))
	    return build1 (NOP_EXPR, type, expr);
	}

      return fold (convert_to_integer (type, expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two thin pointers, adjust if needed to account
	 for differing offsets from the base pointer, depending on whether
	 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type.  */
      if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
	{
	  tree etype_pos
	    = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)) != NULL_TREE
	      ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
	      : size_zero_node;
	  tree type_pos
	    = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != NULL_TREE
	      ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
	      : size_zero_node;
	  tree byte_diff = size_diffop (type_pos, etype_pos);

	  expr = build1 (NOP_EXPR, type, expr);
	  if (integer_zerop (byte_diff))
	    return expr;

	  return build_binary_op (POINTER_PLUS_EXPR, type, expr,
				  fold_convert (sizetype, byte_diff));
	}

      /* If converting fat pointer to normal or thin pointer, get the pointer
	 to the array and then convert it.  */
      if (TYPE_IS_FAT_POINTER_P (etype))
	expr
	  = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
	{
	  vec<constructor_elt, va_gc> *v;
	  vec_alloc (v, 1);

	  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
				  convert (TREE_TYPE (TYPE_FIELDS (type)),
					   expr));
	  return gnat_build_constructor (type, v);
	}

      /* ... fall through ... */

    case ARRAY_TYPE:
      /* In these cases, assume the front-end has validated the conversion.
	 If the conversion is valid, it will be a bit-wise conversion, so
	 it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, false);

    case UNION_TYPE:
      /* This is either a conversion between a tagged type and some
	 subtype, which we have to mark as a UNION_TYPE because of
	 overlapping fields or a conversion of an Unchecked_Union.  */
      return unchecked_convert (type, expr, false);

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If the input is a VECTOR_TYPE, convert to the representative
	 array type first.  */
      if (ecode == VECTOR_TYPE)
	{
	  expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
	  etype = TREE_TYPE (expr);
	  ecode = TREE_CODE (etype);
	}

      /* If EXPR is a constrained array, take its address, convert it to a
	 fat pointer, and then dereference it.  Likewise if EXPR is a
	 record containing both a template and a constrained array.
	 Note that a record representing a justified modular type
	 always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
	  || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
	return
	  build_unary_op
	    (INDIRECT_REF, NULL_TREE,
	     convert_to_fat_pointer (TREE_TYPE (type),
				     build_unary_op (ADDR_EXPR,
						     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
	 array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
	return
	  build_unary_op (INDIRECT_REF, NULL_TREE,
			  convert (TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR,
						   NULL_TREE, expr)));
      else
	gcc_unreachable ();

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gcc_unreachable ();
    }
}
/* Create an expression whose value is that of EXPR converted to the common
   index type, which is sizetype.  EXPR is supposed to be in the base type
   of the GNAT index type.  Calling it is equivalent to doing

     convert (sizetype, expr)

   but we try to distribute the type conversion with the knowledge that EXPR
   cannot overflow in its type.  This is a best-effort approach and we fall
   back to the above expression as soon as difficulties are encountered.

   This is necessary to overcome issues that arise when the GNAT base index
   type and the GCC common index type (sizetype) don't have the same size,
   which is quite frequent on 64-bit architectures.  In this case, and if
   the GNAT base index type is signed but the iteration type of the loop has
   been forced to unsigned, the loop scalar evolution engine cannot compute
   a simple evolution for the general induction variables associated with the
   array indices, because it will preserve the wrap-around semantics in the
   unsigned type of their "inner" part.  As a result, many loop optimizations
   are blocked.

   The solution is to use a special (basic) induction variable that is at
   least as large as sizetype, and to express the aforementioned general
   induction variables in terms of this induction variable, eliminating
   the problematic intermediate truncation to the GNAT base index type.
   This is possible as long as the original expression doesn't overflow
   and if the middle-end hasn't introduced artificial overflows in the
   course of the various simplification it can make to the expression.  */

tree
convert_to_index_type (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  tree type = TREE_TYPE (expr);

  /* If the type is unsigned, overflow is allowed so we cannot be sure that
     EXPR doesn't overflow.  Keep it simple if optimization is disabled.  */
  if (TYPE_UNSIGNED (type) || !optimize)
    return convert (sizetype, expr);

  switch (code)
    {
    case VAR_DECL:
      /* The main effect of the function: replace a loop parameter with its
	 associated special induction variable.  */
      if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
	expr = DECL_INDUCTION_VAR (expr);
      break;

    CASE_CONVERT:
      {
	tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
	/* Bail out as soon as we suspect some sort of type frobbing.  */
	if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
	    || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
	  break;
      }

      /* ... fall through ... */

    case NON_LVALUE_EXPR:
      /* Push the conversion down into the operand, recursively.  */
      return fold_build1 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Distribute the conversion over both operands of the arithmetic;
	 valid because EXPR cannot overflow in its (signed) type.  */
      return fold_build2 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COMPOUND_EXPR:
      /* Only the value operand (the second) needs converting.  */
      return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COND_EXPR:
      /* Convert both arms; the condition keeps its own type.  */
      return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)),
			  convert_to_index_type (TREE_OPERAND (expr, 2)));

    default:
      break;
    }

  /* Fallback: plain conversion to sizetype.  */
  return convert (sizetype, expr);
}
4613 /* Remove all conversions that are done in EXP. This includes converting
4614 from a padded type or to a justified modular type. If TRUE_ADDRESS
4615 is true, always return the address of the containing object even if
4616 the address is not bit-aligned. */
4618 tree
4619 remove_conversions (tree exp, bool true_address)
4621 switch (TREE_CODE (exp))
4623 case CONSTRUCTOR:
/* A justified modular aggregate wraps a single value; when the caller
   wants a true address, dig into that first (and only) element.  */
4624 if (true_address
4625 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4626 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4627 return
4628 remove_conversions ((*CONSTRUCTOR_ELTS (exp))[0].value, true);
4629 break;
4631 case COMPONENT_REF:
/* Look through the field reference used to access a padded object.  */
4632 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4633 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4634 break;
/* Plain conversions, view conversions and non-lvalue wrappers are always
   stripped recursively.  */
4636 CASE_CONVERT:
4637 case VIEW_CONVERT_EXPR:
4638 case NON_LVALUE_EXPR:
4639 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4641 default:
4642 break;
4645 return exp;
4648 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4649 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4650 likewise return an expression pointing to the underlying array. */
4652 tree
4653 maybe_unconstrained_array (tree exp)
4655 enum tree_code code = TREE_CODE (exp);
4656 tree type = TREE_TYPE (exp);
4658 switch (TREE_CODE (type))
4660 case UNCONSTRAINED_ARRAY_TYPE:
4661 if (code == UNCONSTRAINED_ARRAY_REF)
/* Save the flags of the reference so they can be reinstated on the new
   dereference built below.  */
4663 const bool read_only = TREE_READONLY (exp);
4664 const bool no_trap = TREE_THIS_NOTRAP (exp);
4666 exp = TREE_OPERAND (exp, 0);
4667 type = TREE_TYPE (exp);
4669 if (TREE_CODE (exp) == COND_EXPR)
/* Distribute the dereference over the two arms of the COND_EXPR so each
   arm dereferences the array pointer field of its own fat pointer.  */
4671 tree op1
4672 = build_unary_op (INDIRECT_REF, NULL_TREE,
4673 build_component_ref (TREE_OPERAND (exp, 1),
4674 NULL_TREE,
4675 TYPE_FIELDS (type),
4676 false));
4677 tree op2
4678 = build_unary_op (INDIRECT_REF, NULL_TREE,
4679 build_component_ref (TREE_OPERAND (exp, 2),
4680 NULL_TREE,
4681 TYPE_FIELDS (type),
4682 false));
4684 exp = build3 (COND_EXPR,
4685 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
4686 TREE_OPERAND (exp, 0), op1, op2);
4688 else
/* Dereference the array pointer field of the fat pointer directly.  */
4690 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
4691 build_component_ref (exp, NULL_TREE,
4692 TYPE_FIELDS (type),
4693 false));
4694 TREE_READONLY (exp) = read_only;
4695 TREE_THIS_NOTRAP (exp) = no_trap;
4699 else if (code == NULL_EXPR)
4700 exp = build1 (NULL_EXPR,
4701 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
4702 TREE_OPERAND (exp, 0));
4703 break;
4705 case RECORD_TYPE:
4706 /* If this is a padded type and it contains a template, convert to the
4707 unpadded type first. */
4708 if (TYPE_PADDING_P (type)
4709 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
4710 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
4712 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4713 type = TREE_TYPE (exp);
/* Skip over the template (the first field) to reach the array field.  */
4716 if (TYPE_CONTAINS_TEMPLATE_P (type))
4718 exp = build_component_ref (exp, NULL_TREE,
4719 DECL_CHAIN (TYPE_FIELDS (type)),
4720 false);
4721 type = TREE_TYPE (exp);
4723 /* If the array type is padded, convert to the unpadded type. */
4724 if (TYPE_IS_PADDING_P (type))
4725 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4727 break;
4729 default:
4730 break;
4733 return exp;
4736 /* Return true if EXPR is an expression that can be folded as an operand
4737 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4739 static bool
4740 can_fold_for_view_convert_p (tree expr)
4742 tree t1, t2;
4744 /* The folder will fold NOP_EXPRs between integral types with the same
4745 precision (in the middle-end's sense). We cannot allow it if the
4746 types don't have the same precision in the Ada sense as well. */
4747 if (TREE_CODE (expr) != NOP_EXPR)
4748 return true;
4750 t1 = TREE_TYPE (expr);
4751 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4753 /* Defer to the folder for non-integral conversions. */
4754 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4755 return true;
4757 /* Only fold conversions that preserve both precisions. */
4758 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4759 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4760 return true;
4762 return false;
4765 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4766 If NOTRUNC_P is true, truncation operations should be suppressed.
4768 Special care is required with (source or target) integral types whose
4769 precision is not equal to their size, to make sure we fetch or assign
4770 the value bits whose location might depend on the endianness, e.g.
4772 Rmsize : constant := 8;
4773 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4775 type Bit_Array is array (1 .. Rmsize) of Boolean;
4776 pragma Pack (Bit_Array);
4778 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4780 Value : Int := 2#1000_0001#;
4781 Vbits : Bit_Array := To_Bit_Array (Value);
4783 we expect the 8 bits at Vbits'Address to always contain Value, while
4784 their original location depends on the endianness, at Value'Address
4785 on a little-endian architecture but not on a big-endian one. */
4787 tree
4788 unchecked_convert (tree type, tree expr, bool notrunc_p)
4790 tree etype = TREE_TYPE (expr);
4791 enum tree_code ecode = TREE_CODE (etype);
4792 enum tree_code code = TREE_CODE (type);
/* TEM: scratch for an intermediate vector type; C: result of the size
   comparison between ETYPE and TYPE.  */
4793 tree tem;
4794 int c;
4796 /* If the expression is already of the right type, we are done. */
4797 if (etype == type)
4798 return expr;
4800 /* If both types are integral just do a normal conversion.
4801 Likewise for a conversion to an unconstrained array. */
4802 if (((INTEGRAL_TYPE_P (type)
4803 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
4804 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
4805 && (INTEGRAL_TYPE_P (etype)
4806 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
4807 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
4808 || code == UNCONSTRAINED_ARRAY_TYPE)
/* First strip any biased representation from the source: convert through
   an unbiased copy of the source type.  */
4810 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4812 tree ntype = copy_type (etype);
4813 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4814 TYPE_MAIN_VARIANT (ntype) = ntype;
4815 expr = build1 (NOP_EXPR, ntype, expr);
/* Likewise, go through an unbiased copy of the target type before the
   final NOP_EXPR to the biased target.  */
4818 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4820 tree rtype = copy_type (type);
4821 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4822 TYPE_MAIN_VARIANT (rtype) = rtype;
4823 expr = convert (rtype, expr);
4824 expr = build1 (NOP_EXPR, type, expr);
4826 else
4827 expr = convert (type, expr);
4830 /* If we are converting to an integral type whose precision is not equal
4831 to its size, first unchecked convert to a record type that contains a
4832 field of the given precision. Then extract the field. */
4833 else if (INTEGRAL_TYPE_P (type)
4834 && TYPE_RM_SIZE (type)
4835 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4836 GET_MODE_BITSIZE (TYPE_MODE (type))))
4838 tree rec_type = make_node (RECORD_TYPE);
4839 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
4840 tree field_type, field;
4842 if (TYPE_UNSIGNED (type))
4843 field_type = make_unsigned_type (prec);
4844 else
4845 field_type = make_signed_type (prec);
4846 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
4848 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
4849 NULL_TREE, bitsize_zero_node, 1, 0);
4851 finish_record_type (rec_type, field, 1, false);
4853 expr = unchecked_convert (rec_type, expr, notrunc_p);
4854 expr = build_component_ref (expr, NULL_TREE, field, false);
4855 expr = fold_build1 (NOP_EXPR, type, expr);
4858 /* Similarly if we are converting from an integral type whose precision is
4859 not equal to its size, first copy into a field of the given precision
4860 and unchecked convert the record type. */
4861 else if (INTEGRAL_TYPE_P (etype)
4862 && TYPE_RM_SIZE (etype)
4863 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4864 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4866 tree rec_type = make_node (RECORD_TYPE);
4867 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
4868 vec<constructor_elt, va_gc> *v;
4869 vec_alloc (v, 1);
4870 tree field_type, field;
4872 if (TYPE_UNSIGNED (etype))
4873 field_type = make_unsigned_type (prec);
4874 else
4875 field_type = make_signed_type (prec);
4876 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
4878 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
4879 NULL_TREE, bitsize_zero_node, 1, 0);
4881 finish_record_type (rec_type, field, 1, false);
4883 expr = fold_build1 (NOP_EXPR, field_type, expr);
4884 CONSTRUCTOR_APPEND_ELT (v, field, expr);
4885 expr = gnat_build_constructor (rec_type, v);
4886 expr = unchecked_convert (type, expr, notrunc_p);
4889 /* If we are converting from a scalar type to a type with a different size,
4890 we need to pad to have the same size on both sides.
4892 ??? We cannot do it unconditionally because unchecked conversions are
4893 used liberally by the front-end to implement polymorphism, e.g. in:
4895 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
4896 return p___size__4 (p__object!(S191s.all));
4898 so we skip all expressions that are references. */
4899 else if (!REFERENCE_CLASS_P (expr)
4900 && !AGGREGATE_TYPE_P (etype)
4901 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4902 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
/* The source is smaller than the target: pad the source up, then do the
   unchecked conversion on same-sized types.  */
4904 if (c < 0)
4906 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4907 false, false, false, true),
4908 expr);
4909 expr = unchecked_convert (type, expr, notrunc_p);
/* The source is larger: convert to a padded version of the target and
   extract the target-typed field from it.  */
4911 else
4913 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
4914 false, false, false, true);
4915 expr = unchecked_convert (rec_type, expr, notrunc_p);
4916 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
4917 false);
4921 /* We have a special case when we are converting between two unconstrained
4922 array types. In that case, take the address, convert the fat pointer
4923 types, and dereference. */
4924 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
4925 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4926 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4927 build_unary_op (ADDR_EXPR, NULL_TREE,
4928 expr)));
4930 /* Another special case is when we are converting to a vector type from its
4931 representative array type; this is a regular conversion. */
4932 else if (code == VECTOR_TYPE
4933 && ecode == ARRAY_TYPE
4934 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4935 etype))
4936 expr = convert (type, expr);
4938 /* And, if the array type is not the representative, we try to build an
4939 intermediate vector type of which the array type is the representative
4940 and to do the unchecked conversion between the vector types, in order
4941 to enable further simplifications in the middle-end. */
4942 else if (code == VECTOR_TYPE
4943 && ecode == ARRAY_TYPE
4944 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
4946 expr = convert (tem, expr);
4947 return unchecked_convert (type, expr, notrunc_p);
4950 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
4951 the alignment of the CONSTRUCTOR to speed up the copy operation. */
4952 else if (TREE_CODE (expr) == CONSTRUCTOR
4953 && code == RECORD_TYPE
4954 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
4956 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
4957 Empty, false, false, false, true),
4958 expr);
4959 return unchecked_convert (type, expr, notrunc_p);
4962 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
4963 else
4965 expr = maybe_unconstrained_array (expr);
4966 etype = TREE_TYPE (expr);
4967 ecode = TREE_CODE (etype);
4968 if (can_fold_for_view_convert_p (expr))
4969 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4970 else
4971 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4974 /* If the result is an integral type whose precision is not equal to its
4975 size, sign- or zero-extend the result. We need not do this if the input
4976 is an integral type of the same precision and signedness or if the output
4977 is a biased type or if both the input and output are unsigned. */
4978 if (!notrunc_p
4979 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4980 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4981 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4982 GET_MODE_BITSIZE (TYPE_MODE (type)))
4983 && !(INTEGRAL_TYPE_P (etype)
4984 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4985 && operand_equal_p (TYPE_RM_SIZE (type),
4986 (TYPE_RM_SIZE (etype) != 0
4987 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4989 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
/* Extend by shifting left then right by (mode size - RM size) bits; the
   right shift in BASE_TYPE performs the sign or zero extension.  */
4991 tree base_type
4992 = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
4993 tree shift_expr
4994 = convert (base_type,
4995 size_binop (MINUS_EXPR,
4996 bitsize_int
4997 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4998 TYPE_RM_SIZE (type)));
4999 expr
5000 = convert (type,
5001 build_binary_op (RSHIFT_EXPR, base_type,
5002 build_binary_op (LSHIFT_EXPR, base_type,
5003 convert (base_type, expr),
5004 shift_expr),
5005 shift_expr));
5008 /* An unchecked conversion should never raise Constraint_Error. The code
5009 below assumes that GCC's conversion routines overflow the same way that
5010 the underlying hardware does. This is probably true. In the rare case
5011 when it is false, we can rely on the fact that such conversions are
5012 erroneous anyway. */
5013 if (TREE_CODE (expr) == INTEGER_CST)
5014 TREE_OVERFLOW (expr) = 0;
5016 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
5017 show no longer constant. */
5018 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5019 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5020 OEP_ONLY_CONST))
5021 TREE_CONSTANT (expr) = 0;
5023 return expr;
5026 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5027 the latter being a record type as predicated by Is_Record_Type. */
5029 enum tree_code
5030 tree_code_for_record_type (Entity_Id gnat_type)
5032 Node_Id component_list, component;
5034 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5035 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5036 if (!Is_Unchecked_Union (gnat_type))
5037 return RECORD_TYPE;
5039 gnat_type = Implementation_Base_Type (gnat_type);
5040 component_list
5041 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5043 for (component = First_Non_Pragma (Component_Items (component_list));
5044 Present (component);
5045 component = Next_Non_Pragma (component))
5046 if (Ekind (Defining_Entity (component)) == E_Component)
5047 return RECORD_TYPE;
5049 return UNION_TYPE;
5052 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5053 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5054 according to the presence of an alignment clause on the type or, if it
5055 is an array, on the component type. */
5057 bool
5058 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5060 gnat_type = Underlying_Type (gnat_type);
5062 *align_clause = Present (Alignment_Clause (gnat_type));
5064 if (Is_Array_Type (gnat_type))
5066 gnat_type = Underlying_Type (Component_Type (gnat_type));
5067 if (Present (Alignment_Clause (gnat_type)))
5068 *align_clause = true;
5071 if (!Is_Floating_Point_Type (gnat_type))
5072 return false;
5074 if (UI_To_Int (Esize (gnat_type)) != 64)
5075 return false;
5077 return true;
5080 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5081 size is greater or equal to 64 bits, or an array of such a type. Set
5082 ALIGN_CLAUSE according to the presence of an alignment clause on the
5083 type or, if it is an array, on the component type. */
5085 bool
5086 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5088 gnat_type = Underlying_Type (gnat_type);
5090 *align_clause = Present (Alignment_Clause (gnat_type));
5092 if (Is_Array_Type (gnat_type))
5094 gnat_type = Underlying_Type (Component_Type (gnat_type));
5095 if (Present (Alignment_Clause (gnat_type)))
5096 *align_clause = true;
5099 if (!Is_Scalar_Type (gnat_type))
5100 return false;
5102 if (UI_To_Int (Esize (gnat_type)) < 64)
5103 return false;
5105 return true;
5108 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5109 component of an aggregate type. */
5111 bool
5112 type_for_nonaliased_component_p (tree gnu_type)
5114 /* If the type is passed by reference, we may have pointers to the
5115 component so it cannot be made non-aliased. */
5116 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5117 return false;
5119 /* We used to say that any component of aggregate type is aliased
5120 because the front-end may take 'Reference of it. The front-end
5121 has been enhanced in the meantime so as to use a renaming instead
5122 in most cases, but the back-end can probably take the address of
5123 such a component too so we go for the conservative stance.
5125 For instance, we might need the address of any array type, even
5126 if normally passed by copy, to construct a fat pointer if the
5127 component is used as an actual for an unconstrained formal.
5129 Likewise for record types: even if a specific record subtype is
5130 passed by copy, the parent type might be passed by ref (e.g. if
5131 it's of variable size) and we might take the address of a child
5132 component to pass to a parent formal. We have no way to check
5133 for such conditions here. */
5134 if (AGGREGATE_TYPE_P (gnu_type))
5135 return false;
5137 return true;
5140 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5142 bool
5143 smaller_form_type_p (tree type, tree orig_type)
5145 tree size, osize;
5147 /* We're not interested in variants here. */
5148 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5149 return false;
5151 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5152 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5153 return false;
5155 size = TYPE_SIZE (type);
5156 osize = TYPE_SIZE (orig_type);
5158 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5159 return false;
5161 return tree_int_cst_lt (size, osize) != 0;
5164 /* Perform final processing on global variables. */
5166 static GTY (()) tree dummy_global;
5168 void
5169 gnat_write_global_declarations (void)
5171 unsigned int i;
5172 tree iter;
5174 /* If we have declared types as used at the global level, insert them in
5175 the global hash table. We use a dummy variable for this purpose. */
5176 if (types_used_by_cur_var_decl && !types_used_by_cur_var_decl->is_empty ())
5178 struct varpool_node *node;
5179 char *label;
5181 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
/* The dummy is a static, hard-register, void-typed VAR_DECL; NOTE(review):
   presumably these settings ensure no storage is actually emitted for it
   while still keeping it in the varpool -- confirm against varasm.  */
5182 dummy_global
5183 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5184 void_type_node);
5185 DECL_HARD_REGISTER (dummy_global) = 1;
5186 TREE_STATIC (dummy_global) = 1;
5187 node = varpool_node::get_create (dummy_global);
5188 node->definition = 1;
5189 node->force_output = 1;
/* Drain the pending list, registering each type against the dummy.  */
5191 while (!types_used_by_cur_var_decl->is_empty ())
5193 tree t = types_used_by_cur_var_decl->pop ();
5194 types_used_by_var_decl_insert (t, dummy_global);
5198 /* Output debug information for all global type declarations first. This
5199 ensures that global types whose compilation hasn't been finalized yet,
5200 for example pointers to Taft amendment types, have their compilation
5201 finalized in the right context. */
5202 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5203 if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
5204 debug_hooks->global_decl (iter);
5206 /* Proceed to optimize and emit assembly. */
5207 symtab->finalize_compilation_unit ();
5209 /* After cgraph has had a chance to emit everything that's going to
5210 be emitted, output debug information for the rest of globals. */
5211 if (!seen_error ())
5213 timevar_push (TV_SYMOUT);
/* Second pass: everything that is not a TYPE_DECL (handled above).  */
5214 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5215 if (TREE_CODE (iter) != TYPE_DECL && !DECL_IGNORED_P (iter))
5216 debug_hooks->global_decl (iter);
5217 timevar_pop (TV_SYMOUT);
5221 /* ************************************************************************
5222 * * GCC builtins support *
5223 * ************************************************************************ */
5225 /* The general scheme is fairly simple:
5227 For each builtin function/type to be declared, gnat_install_builtins calls
5228 internal facilities which eventually get to gnat_push_decl, which in turn
5229 tracks the so declared builtin function decls in the 'builtin_decls' global
5230 datastructure. When an Intrinsic subprogram declaration is processed, we
5231 search this global datastructure to retrieve the associated BUILT_IN DECL
5232 node. */
5234 /* Search the chain of currently available builtin declarations for a node
5235 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5236 found, if any, or NULL_TREE otherwise. */
5237 tree
5238 builtin_decl_for (tree name)
5240 unsigned i;
5241 tree decl;
5243 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5244 if (DECL_NAME (decl) == name)
5245 return decl;
5247 return NULL_TREE;
5250 /* The code below eventually exposes gnat_install_builtins, which declares
5251 the builtin types and functions we might need, either internally or as
5252 user accessible facilities.
5254 ??? This is a first implementation shot, still in rough shape. It is
5255 heavily inspired from the "C" family implementation, with chunks copied
5256 verbatim from there.
5258 Two obvious TODO candidates are
5259 o Use a more efficient name/decl mapping scheme
5260 o Devise a middle-end infrastructure to avoid having to copy
5261 pieces between front-ends. */
5263 /* ----------------------------------------------------------------------- *
5264 * BUILTIN ELEMENTARY TYPES *
5265 * ----------------------------------------------------------------------- */
5267 /* Standard data types to be used in builtin argument declarations. */
5269 enum c_tree_index
/* Indices into c_global_trees for the few C-family helper types that the
   builtin declarations below need.  */
5271 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5272 CTI_STRING_TYPE,
5273 CTI_CONST_STRING_TYPE,
5275 CTI_MAX
5278 static tree c_global_trees[CTI_MAX];
/* Convenience accessors mirroring the C front-end's names.  */
5280 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5281 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5282 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5284 /* ??? In addition to some attribute handlers, we currently don't support a
5285 (small) number of builtin-types, which in turn inhibits support for a
5286 number of builtin functions. */
5287 #define wint_type_node void_type_node
5288 #define intmax_type_node void_type_node
5289 #define uintmax_type_node void_type_node
5291 /* Build the void_list_node (void_type_node having been created). */
5293 static tree
5294 build_void_list_node (void)
5296 tree t = build_tree_list (NULL_TREE, void_type_node);
5297 return t;
5300 /* Used to help initialize the builtin-types.def table. When a type of
5301 the correct size doesn't exist, use error_mark_node instead of NULL.
5302 The latter results in segfaults even when a decl using the type doesn't
5303 get invoked. */
5305 static tree
5306 builtin_type_for_size (int size, bool unsignedp)
5308 tree type = gnat_type_for_size (size, unsignedp);
5309 return type ? type : error_mark_node;
5312 /* Build/push the elementary type decls that builtin functions/types
5313 will need. */
5315 static void
5316 install_builtin_elementary_types (void)
/* Populate the elementary type nodes referenced by the builtin function
   prototypes built later in this file.  */
5318 signed_size_type_node = gnat_signed_type (size_type_node);
/* pid_type_node is simply mapped to int here.  */
5319 pid_type_node = integer_type_node;
5320 void_list_node = build_void_list_node ();
/* "string" is char *, "const string" is const char *.  */
5322 string_type_node = build_pointer_type (char_type_node);
5323 const_string_type_node
5324 = build_pointer_type (build_qualified_type
5325 (char_type_node, TYPE_QUAL_CONST));
5328 /* ----------------------------------------------------------------------- *
5329 * BUILTIN FUNCTION TYPES *
5330 * ----------------------------------------------------------------------- */
5332 /* Now, builtin function types per se. */
5334 enum c_builtin_type
/* One enumerator per entry of builtin-types.def; each DEF_* macro below
   expands to just the entry's NAME so the include produces the list of
   enumerators.  The builtin_types array further down is indexed by these
   values.  */
5336 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5337 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5338 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5339 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5340 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5341 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5342 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5343 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5344 ARG6) NAME,
5345 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5346 ARG6, ARG7) NAME,
5347 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5348 ARG6, ARG7, ARG8) NAME,
5349 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5350 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5351 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5352 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5353 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5354 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5355 NAME,
5356 #define DEF_FUNCTION_TYPE_VAR_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5357 ARG6, ARG7, ARG8) \
5358 NAME,
5359 #define DEF_FUNCTION_TYPE_VAR_12(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5360 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11, ARG12) \
5361 NAME,
5362 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5363 #include "builtin-types.def"
5364 #undef DEF_PRIMITIVE_TYPE
5365 #undef DEF_FUNCTION_TYPE_0
5366 #undef DEF_FUNCTION_TYPE_1
5367 #undef DEF_FUNCTION_TYPE_2
5368 #undef DEF_FUNCTION_TYPE_3
5369 #undef DEF_FUNCTION_TYPE_4
5370 #undef DEF_FUNCTION_TYPE_5
5371 #undef DEF_FUNCTION_TYPE_6
5372 #undef DEF_FUNCTION_TYPE_7
5373 #undef DEF_FUNCTION_TYPE_8
5374 #undef DEF_FUNCTION_TYPE_VAR_0
5375 #undef DEF_FUNCTION_TYPE_VAR_1
5376 #undef DEF_FUNCTION_TYPE_VAR_2
5377 #undef DEF_FUNCTION_TYPE_VAR_3
5378 #undef DEF_FUNCTION_TYPE_VAR_4
5379 #undef DEF_FUNCTION_TYPE_VAR_5
5380 #undef DEF_FUNCTION_TYPE_VAR_8
5381 #undef DEF_FUNCTION_TYPE_VAR_12
5382 #undef DEF_POINTER_TYPE
5383 BT_LAST
5386 typedef enum c_builtin_type builtin_type;
5388 /* A temporary array used in communication with def_fn_type. */
5389 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5391 /* A helper function for install_builtin_types. Build function type
5392 for DEF with return type RET and N arguments. If VAR is true, then the
5393 function should be variadic after those N arguments.
5395 Takes special care not to ICE if any of the types involved are
5396 error_mark_node, which indicates that said type is not in fact available
5397 (see builtin_type_for_size). In which case the function type as a whole
5398 should be error_mark_node. */
5400 static void
5401 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5403 tree t;
5404 tree *args = XALLOCAVEC (tree, n);
5405 va_list list;
5406 int i;
5408 va_start (list, n);
/* Collect the N argument types; a single error_mark_node among them
   poisons the whole function type (see comment above).  */
5409 for (i = 0; i < n; ++i)
/* builtin_type enumerators are promoted to int through the varargs.  */
5411 builtin_type a = (builtin_type) va_arg (list, int);
5412 t = builtin_types[a];
5413 if (t == error_mark_node)
5414 goto egress;
5415 args[i] = t;
5418 t = builtin_types[ret];
5419 if (t == error_mark_node)
5420 goto egress;
5421 if (var)
5422 t = build_varargs_function_type_array (t, n, args);
5423 else
5424 t = build_function_type_array (t, n, args);
/* On the error path, T still holds error_mark_node here.  */
5426 egress:
5427 builtin_types[def] = t;
5428 va_end (list);
5431 /* Build the builtin function types and install them in the builtin_types
5432 array for later use in builtin function decls. */
5434 static void
5435 install_builtin_function_types (void)
/* Build every function type described by builtin-types.def into the
   builtin_types array, for use by the builtin function declarations.  */
5437 tree va_list_ref_type_node;
5438 tree va_list_arg_type_node;
/* Compute the type that a va_list parameter decays to (pointer to the
   element type when va_list is an array) and its by-reference form.  */
5440 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5442 va_list_arg_type_node = va_list_ref_type_node =
5443 build_pointer_type (TREE_TYPE (va_list_type_node))
5445 else
5447 va_list_arg_type_node = va_list_type_node;
5448 va_list_ref_type_node = build_reference_type (va_list_type_node);
5451 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5452 builtin_types[ENUM] = VALUE;
5453 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5454 def_fn_type (ENUM, RETURN, 0, 0);
5455 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5456 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5457 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5458 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5459 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5460 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5461 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5462 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5463 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5464 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5465 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5466 ARG6) \
5467 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5468 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5469 ARG6, ARG7) \
5470 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5471 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5472 ARG6, ARG7, ARG8) \
5473 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5474 ARG7, ARG8);
5475 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5476 def_fn_type (ENUM, RETURN, 1, 0);
5477 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5478 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5479 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5480 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5481 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5482 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5483 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5484 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5485 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5486 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
/* The count passed to def_fn_type must match the number of argument types
   supplied, since def_fn_type reads exactly that many via va_arg; it was
   wrongly 5 for the VAR_8 and VAR_12 variants, silently truncating the
   built function types after the fifth argument.  */
5487 #define DEF_FUNCTION_TYPE_VAR_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5488 ARG6, ARG7, ARG8) \
5489 def_fn_type (ENUM, RETURN, 1, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5490 ARG7, ARG8);
5491 #define DEF_FUNCTION_TYPE_VAR_12(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5492 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11, ARG12) \
5493 def_fn_type (ENUM, RETURN, 1, 12, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5494 ARG7, ARG8, ARG9, ARG10, ARG11, ARG12);
5495 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5496 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5498 #include "builtin-types.def"
5500 #undef DEF_PRIMITIVE_TYPE
5501 #undef DEF_FUNCTION_TYPE_0
5502 #undef DEF_FUNCTION_TYPE_1
5503 #undef DEF_FUNCTION_TYPE_2
5504 #undef DEF_FUNCTION_TYPE_3
5505 #undef DEF_FUNCTION_TYPE_4
5506 #undef DEF_FUNCTION_TYPE_5
5507 #undef DEF_FUNCTION_TYPE_6
5508 #undef DEF_FUNCTION_TYPE_7
5509 #undef DEF_FUNCTION_TYPE_8
5510 #undef DEF_FUNCTION_TYPE_VAR_0
5511 #undef DEF_FUNCTION_TYPE_VAR_1
5512 #undef DEF_FUNCTION_TYPE_VAR_2
5513 #undef DEF_FUNCTION_TYPE_VAR_3
5514 #undef DEF_FUNCTION_TYPE_VAR_4
5515 #undef DEF_FUNCTION_TYPE_VAR_5
5516 #undef DEF_FUNCTION_TYPE_VAR_8
5517 #undef DEF_FUNCTION_TYPE_VAR_12
5518 #undef DEF_POINTER_TYPE
5519 builtin_types[(int) BT_LAST] = NULL_TREE;
/* ----------------------------------------------------------------------- *
 *                          BUILTIN ATTRIBUTES                             *
 * ----------------------------------------------------------------------- */

/* Enumerate one identifier per entry of builtin-attrs.def; each kind of
   entry (null tree, int, string, identifier, tree list) contributes its
   ENUM name.  ATTR_LAST is the total count and sizes the array below.  */

enum built_in_attribute
{
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
  ATTR_LAST
};

/* One GC-rooted tree per enumerator above, filled in by
   install_builtin_attributes.  */
static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
/* Populate the built_in_attributes array: each DEF_ATTR_* entry of
   builtin-attrs.def expands to an assignment into the slot named by its
   ENUM.  DEF_ATTR_TREE_LIST entries reference slots filled by earlier
   entries, so the .def file's ordering is significant.  */

static void
install_builtin_attributes (void)
{
  /* Fill in the built_in_attributes array.  */
#define DEF_ATTR_NULL_TREE(ENUM)				\
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_STRING(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING)				\
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN)	\
  built_in_attributes[(int) ENUM]			\
    = tree_cons (built_in_attributes[(int) PURPOSE],	\
		 built_in_attributes[(int) VALUE],	\
		 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
}
5569 /* Handle a "const" attribute; arguments as in
5570 struct attribute_spec.handler. */
5572 static tree
5573 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5574 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5575 bool *no_add_attrs)
5577 if (TREE_CODE (*node) == FUNCTION_DECL)
5578 TREE_READONLY (*node) = 1;
5579 else
5580 *no_add_attrs = true;
5582 return NULL_TREE;
5585 /* Handle a "nothrow" attribute; arguments as in
5586 struct attribute_spec.handler. */
5588 static tree
5589 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5590 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5591 bool *no_add_attrs)
5593 if (TREE_CODE (*node) == FUNCTION_DECL)
5594 TREE_NOTHROW (*node) = 1;
5595 else
5596 *no_add_attrs = true;
5598 return NULL_TREE;
5601 /* Handle a "pure" attribute; arguments as in
5602 struct attribute_spec.handler. */
5604 static tree
5605 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5606 int ARG_UNUSED (flags), bool *no_add_attrs)
5608 if (TREE_CODE (*node) == FUNCTION_DECL)
5609 DECL_PURE_P (*node) = 1;
5610 /* ??? TODO: Support types. */
5611 else
5613 warning (OPT_Wattributes, "%qs attribute ignored",
5614 IDENTIFIER_POINTER (name));
5615 *no_add_attrs = true;
5618 return NULL_TREE;
/* Handle a "no vops" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
			 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			 bool *ARG_UNUSED (no_add_attrs))
{
  /* Unlike the user-visible handlers above, this one asserts instead of
     warning — NOTE(review): presumably the attribute is only attached
     internally, never from user source; confirm.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
  DECL_IS_NOVOPS (*node) = 1;
  return NULL_TREE;
}
5634 /* Helper for nonnull attribute handling; fetch the operand number
5635 from the attribute argument list. */
5637 static bool
5638 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5640 /* Verify the arg number is a constant. */
5641 if (!tree_fits_uhwi_p (arg_num_expr))
5642 return false;
5644 *valp = TREE_INT_CST_LOW (arg_num_expr);
5645 return true;
/* Handle the "nonnull" attribute.  */
static tree
handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
			  tree args, int ARG_UNUSED (flags),
			  bool *no_add_attrs)
{
  tree type = *node;
  unsigned HOST_WIDE_INT attr_arg_num;

  /* If no arguments are specified, all pointer arguments should be
     non-null.  Verify a full prototype is given so that the arguments
     will have the correct types when we actually check them later.  */
  if (!args)
    {
      if (!prototype_p (type))
	{
	  error ("nonnull attribute without arguments on a non-prototype");
	  *no_add_attrs = true;
	}
      return NULL_TREE;
    }

  /* Argument list specified.  Verify that each argument number references
     a pointer argument.  */
  /* NOTE(review): attr_arg_num is initialized to 1 but never incremented
     in this loop, so the diagnostics below always report "argument 1"
     regardless of which attribute operand is bad — verify against the C
     front end's handle_nonnull_attribute.  */
  for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
    {
      unsigned HOST_WIDE_INT arg_num = 0, ck_num;

      if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
	{
	  error ("nonnull argument has invalid operand number (argument %lu)",
		 (unsigned long) attr_arg_num);
	  *no_add_attrs = true;
	  return NULL_TREE;
	}

      if (prototype_p (type))
	{
	  function_args_iterator iter;
	  tree argument;

	  /* Walk the parameter types until we either run out (ARG_NUM is
	     out of range) or reach the ARG_NUM-th parameter.  */
	  function_args_iter_init (&iter, type);
	  for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
	    {
	      argument = function_args_iter_cond (&iter);
	      if (!argument || ck_num == arg_num)
		break;
	    }

	  /* A trailing VOID_TYPE marks the end of a prototype's argument
	     list, so hitting it also means ARG_NUM is out of range.  */
	  if (!argument
	      || TREE_CODE (argument) == VOID_TYPE)
	    {
	      error ("nonnull argument with out-of-range operand number "
		     "(argument %lu, operand %lu)",
		     (unsigned long) attr_arg_num, (unsigned long) arg_num);
	      *no_add_attrs = true;
	      return NULL_TREE;
	    }

	  if (TREE_CODE (argument) != POINTER_TYPE)
	    {
	      error ("nonnull argument references non-pointer operand "
		     "(argument %lu, operand %lu)",
		     (unsigned long) attr_arg_num, (unsigned long) arg_num);
	      *no_add_attrs = true;
	      return NULL_TREE;
	    }
	}
    }

  return NULL_TREE;
}
/* Handle a "sentinel" attribute.  */

static tree
handle_sentinel_attribute (tree *node, tree name, tree args,
			   int ARG_UNUSED (flags), bool *no_add_attrs)
{
  /* The sentinel check only makes sense for a variadic function with a
     full prototype; warn and drop the attribute otherwise.  */
  if (!prototype_p (*node))
    {
      warning (OPT_Wattributes,
	       "%qs attribute requires prototypes with named arguments",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }
  else
    {
      if (!stdarg_p (*node))
	{
	  warning (OPT_Wattributes,
		   "%qs attribute only applies to variadic functions",
		   IDENTIFIER_POINTER (name));
	  *no_add_attrs = true;
	}
    }

  /* An optional argument gives the position of the sentinel counted
     back from the end of the actual arguments; it must be a
     non-negative integer constant.  */
  if (args)
    {
      tree position = TREE_VALUE (args);

      if (TREE_CODE (position) != INTEGER_CST)
	{
	  warning (0, "requested position is not an integer constant");
	  *no_add_attrs = true;
	}
      else
	{
	  if (tree_int_cst_lt (position, integer_zero_node))
	    {
	      warning (0, "requested position is less than zero");
	      *no_add_attrs = true;
	    }
	}
    }

  return NULL_TREE;
}
/* Handle a "noreturn" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			   int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = TREE_TYPE (*node);

  /* See FIXME comment in c_common_attribute_table.  */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    /* On a function decl, noreturn is recorded via TREE_THIS_VOLATILE.  */
    TREE_THIS_VOLATILE (*node) = 1;
  else if (TREE_CODE (type) == POINTER_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
    /* On a pointer-to-function, rebuild the pointer type over the
       volatile (= noreturn) variant of the pointed-to function type,
       preserving its readonly qualification.  */
    TREE_TYPE (*node)
      = build_pointer_type
	(build_type_variant (TREE_TYPE (type),
			     TYPE_READONLY (TREE_TYPE (type)), 1));
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
/* Handle a "leaf" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
		       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }
  /* "leaf" only matters for externally visible functions.  NOTE(review):
     this second test is not under an "else", so TREE_PUBLIC is also read
     when *node is not a FUNCTION_DECL — verify that is intended.  */
  if (!TREE_PUBLIC (*node))
    {
      warning (OPT_Wattributes, "%qE attribute has no effect", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
5816 /* Handle a "always_inline" attribute; arguments as in
5817 struct attribute_spec.handler. */
5819 static tree
5820 handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5821 int ARG_UNUSED (flags), bool *no_add_attrs)
5823 if (TREE_CODE (*node) == FUNCTION_DECL)
5825 /* Set the attribute and mark it for disregarding inline limits. */
5826 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
5828 else
5830 warning (OPT_Wattributes, "%qE attribute ignored", name);
5831 *no_add_attrs = true;
5834 return NULL_TREE;
5837 /* Handle a "malloc" attribute; arguments as in
5838 struct attribute_spec.handler. */
5840 static tree
5841 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5842 int ARG_UNUSED (flags), bool *no_add_attrs)
5844 if (TREE_CODE (*node) == FUNCTION_DECL
5845 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5846 DECL_IS_MALLOC (*node) = 1;
5847 else
5849 warning (OPT_Wattributes, "%qs attribute ignored",
5850 IDENTIFIER_POINTER (name));
5851 *no_add_attrs = true;
5854 return NULL_TREE;
/* Fake handler for attributes we don't properly support.  Accepts the
   attribute without recording anything, so such attributes are parsed
   and attached but otherwise ignored.  */

tree
fake_attribute_handler (tree * ARG_UNUSED (node),
			tree ARG_UNUSED (name),
			tree ARG_UNUSED (args),
			int ARG_UNUSED (flags),
			bool * ARG_UNUSED (no_add_attrs))
{
  return NULL_TREE;
}
/* Handle a "type_generic" attribute.  Nothing is recorded: the handler
   only sanity-checks the decoration, which internal code puts on
   variadic function types.  */

static tree
handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
			       tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			       bool * ARG_UNUSED (no_add_attrs))
{
  /* Ensure we have a function type.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);

  /* Ensure we have a variadic function.  */
  gcc_assert (!prototype_p (*node) || stdarg_p (*node));

  return NULL_TREE;
}
/* Handle a "vector_size" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_vector_size_attribute (tree *node, tree name, tree args,
			      int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = *node;
  tree vector_type;

  /* The attribute itself is never kept: on success *node is replaced
     by the rebuilt type instead.  */
  *no_add_attrs = true;

  /* We need to provide for vector pointers, vector arrays, and
     functions returning vectors.  For example:

       __attribute__((vector_size(16))) short *foo;

     In this case, the mode is SI, but the type being modified is
     HI, so we need to look further.  */
  while (POINTER_TYPE_P (type)
	 || TREE_CODE (type) == FUNCTION_TYPE
	 || TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  /* Returns NULL_TREE when the requested size is unusable —
     presumably after emitting its own diagnostic; confirm in
     build_vector_type_for_size.  */
  vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
  if (!vector_type)
    return NULL_TREE;

  /* Build back pointers if needed.  */
  *node = reconstruct_complex_type (*node, vector_type);

  return NULL_TREE;
}
/* Handle a "vector_type" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			      int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = *node;
  tree vector_type;

  /* The attribute is never kept: on success the array type itself is
     replaced by the vector type built from it.  */
  *no_add_attrs = true;

  if (TREE_CODE (type) != ARRAY_TYPE)
    {
      error ("attribute %qs applies to array types only",
	     IDENTIFIER_POINTER (name));
      return NULL_TREE;
    }

  /* NULL_TREE signals the array could not be turned into a vector —
     presumably diagnosed inside build_vector_type_for_array; confirm.  */
  vector_type = build_vector_type_for_array (type, name);
  if (!vector_type)
    return NULL_TREE;

  /* Remember the original array type so it can be recovered later.  */
  TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
  *node = vector_type;

  return NULL_TREE;
}
5948 /* ----------------------------------------------------------------------- *
5949 * BUILTIN FUNCTIONS *
5950 * ----------------------------------------------------------------------- */
/* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
   names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
   if nonansi_p and flag_no_nonansi_builtin.

   FNCODE identifies the builtin; NAME is its "__builtin_"-prefixed name and
   FNCLASS its class.  FNTYPE/LIBTYPE are the function types used for the
   __builtin_ and plain entry points respectively.  If BOTH_P, the plain
   (unprefixed) name is declared as well; if FALLBACK_P, the __builtin_ decl
   is given the plain name as its library fallback.  FNATTRS are the
   attributes to attach; IMPLICIT_P is forwarded to set_builtin_decl.  */

static void
def_builtin_1 (enum built_in_function fncode,
	       const char *name,
	       enum built_in_class fnclass,
	       tree fntype, tree libtype,
	       bool both_p, bool fallback_p,
	       bool nonansi_p ATTRIBUTE_UNUSED,
	       tree fnattrs, bool implicit_p)
{
  tree decl;
  const char *libname;

  /* Preserve an already installed decl.  It most likely was setup in advance
     (e.g. as part of the internal builtins) for specific reasons.  */
  if (builtin_decl_explicit (fncode) != NULL_TREE)
    return;

  /* A second entry point or a fallback only makes sense when NAME carries
     the "__builtin_" prefix that LIBNAME strips below.  */
  gcc_assert ((!both_p && !fallback_p)
	      || !strncmp (name, "__builtin_",
			   strlen ("__builtin_")));

  libname = name + strlen ("__builtin_");
  decl = add_builtin_function (name, fntype, fncode, fnclass,
			       (fallback_p ? libname : NULL),
			       fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada.  */
    add_builtin_function (libname, libtype, fncode, fnclass,
			  NULL, fnattrs);

  set_builtin_decl (fncode, decl, implicit_p);
}
/* ISO C dialect flags tested by the expansion of builtins.def below.
   Ada has no ISO C dialect options, so they stay 0 and the dialect-gated
   builtins are treated as unavailable — NOTE(review): presumably only
   consumed by the COND expressions of builtins.def; confirm.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
static int flag_isoc11 = 0;
/* Install what the common builtins.def offers.  Each DEF_BUILTIN entry
   whose NAME is non-null and whose COND holds is forwarded to
   def_builtin_1, with its type and attribute indices resolved through the
   builtin_types and built_in_attributes arrays filled in earlier.  */

static void
install_builtin_functions (void)
{
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
		    NONANSI_P, ATTRS, IMPLICIT, COND)			\
  if (NAME && COND)							\
    def_builtin_1 (ENUM, NAME, CLASS,					\
		   builtin_types[(int) TYPE],				\
		   builtin_types[(int) LIBTYPE],			\
		   BOTH_P, FALLBACK_P, NONANSI_P,			\
		   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
#undef DEF_BUILTIN
}
6011 /* ----------------------------------------------------------------------- *
6012 * BUILTIN FUNCTIONS *
6013 * ----------------------------------------------------------------------- */
/* Install the builtin functions we might need.  Entry point called by the
   Ada front end; the ordering below matters: types and attributes must
   exist before any builtin decl is created, and decls installed early are
   preserved by def_builtin_1 against later redefinition.  */

void
gnat_install_builtins (void)
{
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}
6036 #include "gt-ada-utils.h"
6037 #include "gtype-ada.h"