gcc/ada/gcc-interface/utils.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2015, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "hash-set.h"
31 #include "machmode.h"
32 #include "vec.h"
33 #include "double-int.h"
34 #include "input.h"
35 #include "alias.h"
36 #include "symtab.h"
37 #include "wide-int.h"
38 #include "inchash.h"
39 #include "tree.h"
40 #include "fold-const.h"
41 #include "stringpool.h"
42 #include "stor-layout.h"
43 #include "attribs.h"
44 #include "varasm.h"
45 #include "flags.h"
46 #include "toplev.h"
47 #include "diagnostic-core.h"
48 #include "output.h"
49 #include "ggc.h"
50 #include "debug.h"
51 #include "convert.h"
52 #include "target.h"
53 #include "common/common-target.h"
54 #include "langhooks.h"
55 #include "hash-map.h"
56 #include "is-a.h"
57 #include "plugin-api.h"
58 #include "hard-reg-set.h"
59 #include "input.h"
60 #include "function.h"
61 #include "ipa-ref.h"
62 #include "cgraph.h"
63 #include "diagnostic.h"
64 #include "timevar.h"
65 #include "tree-dump.h"
66 #include "tree-inline.h"
67 #include "tree-iterator.h"
69 #include "ada.h"
70 #include "types.h"
71 #include "atree.h"
72 #include "elists.h"
73 #include "namet.h"
74 #include "nlists.h"
75 #include "stringt.h"
76 #include "uintp.h"
77 #include "fe.h"
78 #include "sinfo.h"
79 #include "einfo.h"
80 #include "ada-tree.h"
81 #include "gigi.h"
83 /* If nonzero, pretend we are allocating at global level. */
84 int force_global;
86 /* The default alignment of "double" floating-point types, i.e. floating
87 point types whose size is equal to 64 bits, or 0 if this alignment is
88 not specifically capped. */
89 int double_float_alignment;
91 /* The default alignment of "double" or larger scalar types, i.e. scalar
92 types whose size is greater or equal to 64 bits, or 0 if this alignment
93 is not specifically capped. */
94 int double_scalar_alignment;
96 /* True if floating-point arithmetic may use wider intermediate results. */
97 bool fp_arith_may_widen = true;
99 /* Tree nodes for the various types and decls we create. */
100 tree gnat_std_decls[(int) ADT_LAST];
102 /* Functions to call for each of the possible raise reasons. */
103 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
105 /* Likewise, but with extra info for each of the possible raise reasons. */
106 tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
108 /* Forward declarations for handlers of attributes. */
109 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
110 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
111 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
112 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
113 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
114 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
115 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
116 static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
117 static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
118 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
119 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
120 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
121 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
123 /* Fake handler for attributes we don't properly support, typically because
124 they'd require dragging a lot of the common-c front-end circuitry. */
125 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
127 /* Table of machine-independent internal attributes for Ada. We support
128 this minimal set of attributes to accommodate the needs of builtins. */
129 const struct attribute_spec gnat_internal_attribute_table[] =
131 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
132 affects_type_identity } */
133 { "const", 0, 0, true, false, false, handle_const_attribute,
134 false },
135 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute,
136 false },
137 { "pure", 0, 0, true, false, false, handle_pure_attribute,
138 false },
139 { "no vops", 0, 0, true, false, false, handle_novops_attribute,
140 false },
141 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute,
142 false },
143 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute,
144 false },
145 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute,
146 false },
147 { "leaf", 0, 0, true, false, false, handle_leaf_attribute,
148 false },
149 { "always_inline",0, 0, true, false, false, handle_always_inline_attribute,
150 false },
151 { "malloc", 0, 0, true, false, false, handle_malloc_attribute,
152 false },
153 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute,
154 false },
156 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute,
157 false },
158 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute,
159 false },
160 { "may_alias", 0, 0, false, true, false, NULL, false },
162 /* ??? format and format_arg are heavy and not supported, which actually
163 prevents support for the stdio builtins; we nevertheless declare them
164 as part of the common builtins.def contents. */
165 { "format", 3, 3, false, true, true, fake_attribute_handler, false },
166 { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },
168 { NULL, 0, 0, false, false, false, NULL, false }
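/* A hedged sketch of how an entry in the table above comes into play:
   when a builtin is declared with, say, attribute "pure", the common
   decl_attributes machinery matches the name against this table, checks
   the argument counts and the decl/type requirements, and then invokes
   the handler, which for "pure" simply sets DECL_PURE_P:

     tree attrs = tree_cons (get_identifier ("pure"), NULL_TREE, NULL_TREE);
     decl_attributes (&fndecl, attrs, 0);

   Here fndecl stands for some FUNCTION_DECL at hand.  */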
171 /* Associates a GNAT tree node to a GCC tree node. It is used in
172 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
173 of `save_gnu_tree' for more info. */
174 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
176 #define GET_GNU_TREE(GNAT_ENTITY) \
177 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
179 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
180 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
182 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
183 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
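/* A minimal usage sketch of the accessors above (hypothetical helper,
   not part of the original file): look up the GCC tree saved for
   GNAT_ENTITY, recording GNU_FALLBACK first if nothing was saved yet.
   The real entry points are save_gnu_tree and get_gnu_tree below.  */

static inline tree
get_or_save_gnu_tree (Entity_Id gnat_entity, tree gnu_fallback)
{
  if (!PRESENT_GNU_TREE (gnat_entity))
    SET_GNU_TREE (gnat_entity, gnu_fallback);
  return GET_GNU_TREE (gnat_entity);
}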
185 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
186 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
188 #define GET_DUMMY_NODE(GNAT_ENTITY) \
189 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
191 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
192 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
194 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
195 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
197 /* This variable keeps a table of types for each precision so that we only
198 allocate each of them once. Signed and unsigned types are kept separate.
200 Note that these types are only used when fold-const requests something
201 special. Perhaps we should NOT share these types; we'll see how it
202 goes later. */
203 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
205 /* Likewise for float types, but record these by mode. */
206 static GTY(()) tree float_types[NUM_MACHINE_MODES];
208 /* For each binding contour we allocate a binding_level structure to indicate
209 the binding depth. */
211 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
212 /* The binding level containing this one (the enclosing binding level). */
213 struct gnat_binding_level *chain;
214 /* The BLOCK node for this level. */
215 tree block;
216 /* If nonzero, the setjmp buffer that needs to be updated for any
217 variable-sized definition within this context. */
218 tree jmpbuf_decl;
221 /* The binding level currently in effect. */
222 static GTY(()) struct gnat_binding_level *current_binding_level;
224 /* A chain of gnat_binding_level structures awaiting reuse. */
225 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
227 /* The context to be used for global declarations. */
228 static GTY(()) tree global_context;
230 /* An array of global declarations. */
231 static GTY(()) vec<tree, va_gc> *global_decls;
233 /* An array of builtin function declarations. */
234 static GTY(()) vec<tree, va_gc> *builtin_decls;
236 /* An array of global renaming pointers. */
237 static GTY(()) vec<tree, va_gc> *global_renaming_pointers;
239 /* A chain of unused BLOCK nodes. */
240 static GTY((deletable)) tree free_block_chain;
242 /* A hash table of padded types. It is modelled on the generic type
243 hash table in tree.c, which must thus be used as a reference. */
245 struct GTY((for_user)) pad_type_hash {
246 unsigned long hash;
247 tree type;
250 struct pad_type_hasher : ggc_cache_hasher<pad_type_hash *>
252 static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
253 static bool equal (pad_type_hash *a, pad_type_hash *b);
254 static void handle_cache_entry (pad_type_hash *&);
257 static GTY ((cache))
258 hash_table<pad_type_hasher> *pad_type_hash_table;
260 static tree merge_sizes (tree, tree, tree, bool, bool);
261 static tree compute_related_constant (tree, tree);
262 static tree split_plus (tree, tree *);
263 static tree float_type_for_precision (int, machine_mode);
264 static tree convert_to_fat_pointer (tree, tree);
265 static unsigned int scale_by_factor_of (tree, unsigned int);
266 static bool potential_alignment_gap (tree, tree, tree);
268 /* A linked list used as a queue to defer the initialization of the
269 DECL_CONTEXT attribute of ..._DECL nodes and of the TYPE_CONTEXT attribute
270 of ..._TYPE nodes. */
271 struct deferred_decl_context_node
273 tree decl; /* The ..._DECL node to work on. */
274 Entity_Id gnat_scope; /* The corresponding entity's Scope attribute. */
275 int force_global; /* force_global value when pushing DECL. */
276 vec<tree, va_heap, vl_ptr> types; /* A list of ..._TYPE nodes to propagate the
277 context to. */
278 struct deferred_decl_context_node *next; /* The next queue item. */
281 static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;
283 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
284 feed it with the elaboration of GNAT_SCOPE. */
285 static struct deferred_decl_context_node *
286 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);
288 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
289 feed it with the DECL_CONTEXT computed as part of N as soon as it is
290 computed. */
291 static void add_deferred_type_context (struct deferred_decl_context_node *n,
292 tree type);
294 /* Initialize data structures of the utils.c module. */
296 void
297 init_gnat_utils (void)
299 /* Initialize the association of GNAT nodes to GCC trees. */
300 associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
302 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
303 dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
305 /* Initialize the hash table of padded types. */
306 pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
309 /* Destroy data structures of the utils.c module. */
311 void
312 destroy_gnat_utils (void)
314 /* Destroy the association of GNAT nodes to GCC trees. */
315 ggc_free (associate_gnat_to_gnu);
316 associate_gnat_to_gnu = NULL;
318 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
319 ggc_free (dummy_node_table);
320 dummy_node_table = NULL;
322 /* Destroy the hash table of padded types. */
323 pad_type_hash_table->empty ();
324 pad_type_hash_table = NULL;
326 /* Invalidate the global renaming pointers. */
327 invalidate_global_renaming_pointers ();
330 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
331 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
332 If NO_CHECK is true, the latter check is suppressed.
334 If GNU_DECL is zero, reset a previous association. */
336 void
337 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
339 /* Check that GNAT_ENTITY is not already defined and that it is being set
340 to something which is a decl. If that is not the case, this usually
341 means GNAT_ENTITY is defined twice, but occasionally is due to some
342 Gigi problem. */
343 gcc_assert (!(gnu_decl
344 && (PRESENT_GNU_TREE (gnat_entity)
345 || (!no_check && !DECL_P (gnu_decl)))));
347 SET_GNU_TREE (gnat_entity, gnu_decl);
350 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
351 that was associated with it. If there is no such tree node, abort.
353 In some cases, such as delayed elaboration or expressions that need to
354 be elaborated only once, GNAT_ENTITY is really not an entity. */
356 tree
357 get_gnu_tree (Entity_Id gnat_entity)
359 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
360 return GET_GNU_TREE (gnat_entity);
363 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
365 bool
366 present_gnu_tree (Entity_Id gnat_entity)
368 return PRESENT_GNU_TREE (gnat_entity);
371 /* Make a dummy type corresponding to GNAT_TYPE. */
373 tree
374 make_dummy_type (Entity_Id gnat_type)
376 Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
377 tree gnu_type;
379 /* If there was no equivalent type (can only happen when just annotating
380 types) or underlying type, go back to the original type. */
381 if (No (gnat_equiv))
382 gnat_equiv = gnat_type;
384 /* If there is already a dummy type, use that one. Else make one. */
385 if (PRESENT_DUMMY_NODE (gnat_equiv))
386 return GET_DUMMY_NODE (gnat_equiv);
388 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
389 an ENUMERAL_TYPE. */
390 gnu_type = make_node (Is_Record_Type (gnat_equiv)
391 ? tree_code_for_record_type (gnat_equiv)
392 : ENUMERAL_TYPE);
393 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
394 TYPE_DUMMY_P (gnu_type) = 1;
395 TYPE_STUB_DECL (gnu_type)
396 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
397 if (Is_By_Reference_Type (gnat_equiv))
398 TYPE_BY_REFERENCE_P (gnu_type) = 1;
400 SET_DUMMY_NODE (gnat_equiv, gnu_type);
402 return gnu_type;
405 /* Return the dummy type that was made for GNAT_TYPE, if any. */
407 tree
408 get_dummy_type (Entity_Id gnat_type)
410 return GET_DUMMY_NODE (gnat_type);
413 /* Build dummy fat and thin pointer types whose designated type is specified
414 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
416 void
417 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
419 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
420 tree gnu_fat_type, fields, gnu_object_type;
422 gnu_template_type = make_node (RECORD_TYPE);
423 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
424 TYPE_DUMMY_P (gnu_template_type) = 1;
425 gnu_ptr_template = build_pointer_type (gnu_template_type);
427 gnu_array_type = make_node (ENUMERAL_TYPE);
428 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
429 TYPE_DUMMY_P (gnu_array_type) = 1;
430 gnu_ptr_array = build_pointer_type (gnu_array_type);
432 gnu_fat_type = make_node (RECORD_TYPE);
433 /* Build a stub DECL to trigger the special processing for fat pointer types
434 in gnat_pushdecl. */
435 TYPE_NAME (gnu_fat_type)
436 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
437 gnu_fat_type);
438 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
439 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
440 DECL_CHAIN (fields)
441 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
442 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
443 finish_fat_pointer_type (gnu_fat_type, fields);
444 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
445 /* Suppress debug info until after the type is completed. */
446 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
448 gnu_object_type = make_node (RECORD_TYPE);
449 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
450 TYPE_DUMMY_P (gnu_object_type) = 1;
452 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
453 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
456 /* Return true if we are in the global binding level. */
458 bool
459 global_bindings_p (void)
461 return force_global || current_function_decl == NULL_TREE;
464 /* Enter a new binding level. */
466 void
467 gnat_pushlevel (void)
469 struct gnat_binding_level *newlevel = NULL;
471 /* Reuse a struct for this binding level, if there is one. */
472 if (free_binding_level)
474 newlevel = free_binding_level;
475 free_binding_level = free_binding_level->chain;
477 else
478 newlevel = ggc_alloc<gnat_binding_level> ();
480 /* Use a free BLOCK, if any; otherwise, allocate one. */
481 if (free_block_chain)
483 newlevel->block = free_block_chain;
484 free_block_chain = BLOCK_CHAIN (free_block_chain);
485 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
487 else
488 newlevel->block = make_node (BLOCK);
490 /* Point the BLOCK we just made to its parent. */
491 if (current_binding_level)
492 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
494 BLOCK_VARS (newlevel->block) = NULL_TREE;
495 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
496 TREE_USED (newlevel->block) = 1;
498 /* Add this level to the front of the chain (stack) of active levels. */
499 newlevel->chain = current_binding_level;
500 newlevel->jmpbuf_decl = NULL_TREE;
501 current_binding_level = newlevel;
504 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
505 and point FNDECL to this BLOCK. */
507 void
508 set_current_block_context (tree fndecl)
510 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
511 DECL_INITIAL (fndecl) = current_binding_level->block;
512 set_block_for_group (current_binding_level->block);
515 /* Set the jmpbuf_decl for the current binding level to DECL. */
517 void
518 set_block_jmpbuf_decl (tree decl)
520 current_binding_level->jmpbuf_decl = decl;
523 /* Get the jmpbuf_decl, if any, for the current binding level. */
525 tree
526 get_block_jmpbuf_decl (void)
528 return current_binding_level->jmpbuf_decl;
531 /* Exit a binding level. Set any BLOCK into the current code group. */
533 void
534 gnat_poplevel (void)
536 struct gnat_binding_level *level = current_binding_level;
537 tree block = level->block;
539 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
540 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
542 /* If this is a function-level BLOCK, don't do anything. Otherwise, if
543 there are no variables, free the block and merge its subblocks into
544 those of its parent block; otherwise, add it to the list of its parent. */
545 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
547 else if (BLOCK_VARS (block) == NULL_TREE)
549 BLOCK_SUBBLOCKS (level->chain->block)
550 = block_chainon (BLOCK_SUBBLOCKS (block),
551 BLOCK_SUBBLOCKS (level->chain->block));
552 BLOCK_CHAIN (block) = free_block_chain;
553 free_block_chain = block;
555 else
557 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
558 BLOCK_SUBBLOCKS (level->chain->block) = block;
559 TREE_USED (block) = 1;
560 set_block_for_group (block);
563 /* Free this binding structure. */
564 current_binding_level = level->chain;
565 level->chain = free_binding_level;
566 free_binding_level = level;
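/* Illustrative pairing of the entry/exit points above (a sketch, not
   code from this file): a scope is translated as

     gnat_pushlevel ();
     ...each local declaration goes through gnat_pushdecl...
     gnat_poplevel ();

   and, for the outermost level of a subprogram body,
   set_current_block_context (fndecl) links the BLOCK to the
   FUNCTION_DECL before the level is exited.  */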
569 /* Exit a binding level and discard the associated BLOCK. */
571 void
572 gnat_zaplevel (void)
574 struct gnat_binding_level *level = current_binding_level;
575 tree block = level->block;
577 BLOCK_CHAIN (block) = free_block_chain;
578 free_block_chain = block;
580 /* Free this binding structure. */
581 current_binding_level = level->chain;
582 level->chain = free_binding_level;
583 free_binding_level = level;
586 /* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
588 static void
589 gnat_set_type_context (tree type, tree context)
591 tree decl = TYPE_STUB_DECL (type);
593 TYPE_CONTEXT (type) = context;
595 while (decl && DECL_PARALLEL_TYPE (decl))
597 tree parallel_type = DECL_PARALLEL_TYPE (decl);
599 /* Give a context to the parallel types and their stub decl, if any.
600 Some parallel types seem to be present in multiple parallel type
601 chains, so don't mess with their context if they already have one. */
602 if (TYPE_CONTEXT (parallel_type) == NULL_TREE)
604 if (TYPE_STUB_DECL (parallel_type) != NULL_TREE)
605 DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
606 TYPE_CONTEXT (parallel_type) = context;
609 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
613 /* Return the innermost scope, starting at GNAT_NODE, that we are interested
614 in for the debug info, or Empty if there is no such scope. If not NULL, set
615 IS_SUBPROGRAM to whether the returned entity is a subprogram. */
617 static Entity_Id
618 get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
620 Entity_Id gnat_entity;
622 if (is_subprogram)
623 *is_subprogram = false;
625 if (Nkind (gnat_node) == N_Defining_Identifier)
626 gnat_entity = Scope (gnat_node);
627 else
628 return Empty;
630 while (Present (gnat_entity))
632 switch (Ekind (gnat_entity))
634 case E_Function:
635 case E_Procedure:
636 if (Present (Protected_Body_Subprogram (gnat_entity)))
637 gnat_entity = Protected_Body_Subprogram (gnat_entity);
639 /* If the scope is a subprogram, then just rely on
640 current_function_decl, so that we don't have to defer
641 anything. This is needed because other places rely on the
642 validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes. */
643 if (is_subprogram)
644 *is_subprogram = true;
645 return gnat_entity;
647 case E_Record_Type:
648 case E_Record_Subtype:
649 return gnat_entity;
651 default:
652 /* By default, we are not interested in this particular scope: go to
653 the outer one. */
654 break;
656 gnat_entity = Scope (gnat_entity);
658 return Empty;
661 /* If N is NULL, set TYPE's context to CONTEXT; otherwise, defer this to
662 the processing of N. */
664 static void
665 defer_or_set_type_context (tree type,
666 tree context,
667 struct deferred_decl_context_node *n)
669 if (n)
670 add_deferred_type_context (n, type);
671 else
672 gnat_set_type_context (type, context);
675 /* Return global_context, creating it first if needed. */
677 static tree
678 get_global_context (void)
680 if (!global_context)
681 global_context = build_translation_unit_decl (NULL_TREE);
682 return global_context;
685 /* Record DECL as belonging to the current lexical scope and use GNAT_NODE
686 for location information and flag propagation. */
688 void
689 gnat_pushdecl (tree decl, Node_Id gnat_node)
691 tree context = NULL_TREE;
692 struct deferred_decl_context_node *deferred_decl_context = NULL;
694 /* If explicitly asked to make DECL global or if it's an imported nested
695 object, short-circuit the regular Scope-based context computation. */
696 if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
698 /* Rely on the GNAT scope, or fall back to the current_function_decl if
699 the GNAT scope reached the global scope, if it reached a subprogram
700 or the declaration is a subprogram or a variable (for them we skip
701 intermediate context types because the subprogram body elaboration
702 machinery and the inliner both expect a subprogram context).
704 Falling back to current_function_decl is necessary for implicit
705 subprograms created by gigi, such as the elaboration subprograms. */
706 bool context_is_subprogram = false;
707 const Entity_Id gnat_scope
708 = get_debug_scope (gnat_node, &context_is_subprogram);
710 if (Present (gnat_scope)
711 && !context_is_subprogram
712 && TREE_CODE (decl) != FUNCTION_DECL
713 && TREE_CODE (decl) != VAR_DECL)
714 /* Always assume the scope has not been elaborated, and thus defer
715 the context propagation until its elaboration becomes
716 available. */
717 deferred_decl_context
718 = add_deferred_decl_context (decl, gnat_scope, force_global);
720 /* External declarations (when force_global > 0) may not be in a
721 local context. */
722 else if (current_function_decl != NULL_TREE && force_global == 0)
723 context = current_function_decl;
726 /* If we are forced to be in global mode, or if both the GNAT scope and
727 the current_function_decl did not help determine the context, use the
728 global scope. */
729 if (!deferred_decl_context && context == NULL_TREE)
730 context = get_global_context ();
732 /* Functions imported in another function are not really nested.
733 For really nested functions mark them initially as needing
734 a static chain for uses of that flag before unnesting;
735 lower_nested_functions will then recompute it. */
736 if (TREE_CODE (decl) == FUNCTION_DECL
737 && !TREE_PUBLIC (decl)
738 && context != NULL_TREE
739 && (TREE_CODE (context) == FUNCTION_DECL
740 || decl_function_context (context) != NULL_TREE))
741 DECL_STATIC_CHAIN (decl) = 1;
743 if (!deferred_decl_context)
744 DECL_CONTEXT (decl) = context;
746 TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));
748 /* Set the location of DECL and emit a declaration for it. */
749 if (Present (gnat_node) && !renaming_from_generic_instantiation_p (gnat_node))
750 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
752 add_decl_expr (decl, gnat_node);
754 /* Put the declaration on the list. The list of declarations is in reverse
755 order. The list will be reversed later. Put global declarations in the
756 globals list and local ones in the current block. But skip TYPE_DECLs
757 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
758 with the debugger and aren't needed anyway. */
759 if (!(TREE_CODE (decl) == TYPE_DECL
760 && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
762 if (DECL_EXTERNAL (decl))
764 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
765 vec_safe_push (builtin_decls, decl);
767 else if (global_bindings_p ())
768 vec_safe_push (global_decls, decl);
769 else
771 DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
772 BLOCK_VARS (current_binding_level->block) = decl;
776 /* For the declaration of a type, set its name if it isn't already set
777 or if the previous type name was not derived from a source name.
778 We'd rather have the type named with a real name and all the pointer
779 types to the same object have the same node, except when the names are
780 both derived from source names. */
781 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
783 tree t = TREE_TYPE (decl);
785 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
786 && (TREE_CODE (t) != POINTER_TYPE || DECL_ARTIFICIAL (decl)))
788 /* Array types aren't "tagged" types so we force the type to be
789 associated with its typedef in the DWARF back-end, in order to
790 make sure that the latter is always preserved, by creating an
791 on-side copy for DECL_ORIGINAL_TYPE. We used to do the same
792 for pointer types, but to have consistent DWARF output we now
793 create a copy for the type itself and use the original type
794 for DECL_ORIGINAL_TYPE like the C front-end. */
795 if (!DECL_ARTIFICIAL (decl) && TREE_CODE (t) == ARRAY_TYPE)
797 tree tt = build_distinct_type_copy (t);
798 /* Array types need to have a name so that they can be related
799 to their GNAT encodings. */
800 TYPE_NAME (tt) = DECL_NAME (decl);
801 defer_or_set_type_context (tt,
802 DECL_CONTEXT (decl),
803 deferred_decl_context);
804 TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
805 DECL_ORIGINAL_TYPE (decl) = tt;
808 else if (!DECL_ARTIFICIAL (decl)
809 && (TREE_CODE (t) == POINTER_TYPE || TYPE_IS_FAT_POINTER_P (t)))
811 tree tt;
812 /* ??? Copy and original type are not supposed to be variants but we
813 really need a variant for the placeholder machinery to work. */
814 if (TYPE_IS_FAT_POINTER_P (t))
815 tt = build_variant_type_copy (t);
816 else
818 /* TYPE_NEXT_PTR_TO is a chain of main variants. */
819 tt = build_distinct_type_copy (TYPE_MAIN_VARIANT (t));
820 TYPE_NEXT_PTR_TO (TYPE_MAIN_VARIANT (t)) = tt;
821 tt = build_qualified_type (tt, TYPE_QUALS (t));
823 TYPE_NAME (tt) = decl;
824 defer_or_set_type_context (tt,
825 DECL_CONTEXT (decl),
826 deferred_decl_context);
827 TREE_USED (tt) = TREE_USED (t);
828 TREE_TYPE (decl) = tt;
829 if (TYPE_NAME (t) != NULL_TREE
830 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
831 && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
832 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
833 else
834 DECL_ORIGINAL_TYPE (decl) = t;
835 t = NULL_TREE;
837 else if (TYPE_NAME (t) != NULL_TREE
838 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
839 && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
841 else
842 t = NULL_TREE;
844 /* Propagate the name to all the anonymous variants. This is needed
845 for the type qualifiers machinery to work properly (see
846 check_qualified_type). Also propagate the context to them. Note that
847 the context will be propagated to all parallel types too thanks to
848 gnat_set_type_context. */
849 if (t)
850 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
851 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
853 TYPE_NAME (t) = decl;
854 defer_or_set_type_context (t,
855 DECL_CONTEXT (decl),
856 deferred_decl_context);
861 /* Create a record type that contains a SIZE bytes long field of TYPE with a
862 starting bit position so that it is aligned to ALIGN bits, and leaving at
863 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
864 record is guaranteed to get. GNAT_NODE is used for the position of the
865 associated TYPE_DECL. */
867 tree
868 make_aligning_type (tree type, unsigned int align, tree size,
869 unsigned int base_align, int room, Node_Id gnat_node)
871 /* We will be crafting a record type with one field at a position set to be
872 the next multiple of ALIGN past record'address + room bytes. We use a
873 record placeholder to express record'address. */
874 tree record_type = make_node (RECORD_TYPE);
875 tree record = build0 (PLACEHOLDER_EXPR, record_type);
877 tree record_addr_st
878 = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));
880 /* The diagram below summarizes the shape of what we manipulate:
882                  <--------- pos ---------->
883              {  +------------+-------------+-----------------+
884    record  =>{  |############|     ...     | field (type)    |
885              {  +------------+-------------+-----------------+
886                 |<-- room -->|<- voffset ->|<---- size ----->|
887                 o            o
888                 |            |
889                 record_addr  vblock_addr
891    Every length is in sizetype bytes there, except "pos" which has to be
892    set as a bit position in the GCC tree for the record. */
893 tree room_st = size_int (room);
894 tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
895 tree voffset_st, pos, field;
897 tree name = TYPE_IDENTIFIER (type);
899 name = concat_name (name, "ALIGN");
900 TYPE_NAME (record_type) = name;
902 /* Compute VOFFSET and then POS. The offset to the next byte position
903 that is a multiple of some alignment after some address is obtained by
904 "and"ing the alignment minus 1 with the two's complement of the address. */
905 voffset_st = size_binop (BIT_AND_EXPR,
906 fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
907 size_int ((align / BITS_PER_UNIT) - 1));
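/* Worked example for the computation above (illustrative numbers): with
   a 64-bit alignment, i.e. 8 bytes, and vblock_addr = 21, the two's
   complement -21 reads ...101011 in binary, so (-21) & 7 = 3; voffset
   is 3 and 21 + 3 = 24 is indeed the next multiple of 8.  An address
   that is already aligned yields a zero voffset.  */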
909 /* POS = (ROOM + VOFFSET) * BITS_PER_UNIT, in bitsizetype. */
910 pos = size_binop (MULT_EXPR,
911 convert (bitsizetype,
912 size_binop (PLUS_EXPR, room_st, voffset_st)),
913 bitsize_unit_node);
915 /* Craft the GCC record representation. We exceptionally do everything
916 manually here because 1) our generic circuitry is not quite ready to
917 handle the complex position/size expressions we are setting up, 2) we
918 have a strong simplifying factor at hand: we know the maximum possible
919 value of voffset, and 3) we have to set/reset at least the sizes in
920 accordance with this maximum value anyway, as we need them to convey
921 what should be "alloc"ated for this type.
923 Use -1 as the 'addressable' indication for the field to prevent the
924 creation of a bitfield. We don't need one, it would have damaging
925 consequences on the alignment computation, and create_field_decl would
926 make one without this special argument, for instance because of the
927 complex position expression. */
928 field = create_field_decl (get_identifier ("F"), type, record_type, size,
929 pos, 1, -1);
930 TYPE_FIELDS (record_type) = field;
932 TYPE_ALIGN (record_type) = base_align;
933 TYPE_USER_ALIGN (record_type) = 1;
935 TYPE_SIZE (record_type)
936 = size_binop (PLUS_EXPR,
937 size_binop (MULT_EXPR, convert (bitsizetype, size),
938 bitsize_unit_node),
939 bitsize_int (align + room * BITS_PER_UNIT));
940 TYPE_SIZE_UNIT (record_type)
941 = size_binop (PLUS_EXPR, size,
942 size_int (room + align / BITS_PER_UNIT));
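/* Illustrative numbers for the sizes just set: with size = 16 bytes,
   align = 64 bits and room = 0, TYPE_SIZE_UNIT is 16 + 0 + 64/8 = 24
   bytes, enough for the worst-case voffset of 7 bytes followed by the
   16-byte field.  */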
944 SET_TYPE_MODE (record_type, BLKmode);
945 relate_alias_sets (record_type, type, ALIAS_SET_COPY);
947 /* Declare it now since it will never be declared otherwise. This is
948 necessary to ensure that its subtrees are properly marked. */
949 create_type_decl (name, record_type, true, false, gnat_node);
951 return record_type;
954 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
955 as the field type of a packed record if IN_RECORD is true, or as the
956 component type of a packed array if IN_RECORD is false. See if we can
957 rewrite it either as a type that has a non-BLKmode, which we can pack
958 tighter in the packed record case, or as a smaller type. If so, return
959 the new type. If not, return the original type. */
961 tree
962 make_packable_type (tree type, bool in_record)
964 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
965 unsigned HOST_WIDE_INT new_size;
966 tree new_type, old_field, field_list = NULL_TREE;
967 unsigned int align;
969 /* No point in doing anything if the size is zero. */
970 if (size == 0)
971 return type;
973 new_type = make_node (TREE_CODE (type));
975 /* Copy the name and flags from the old type to that of the new.
976 Note that we rely on the pointer equality created here for
977 TYPE_NAME to look through conversions in various places. */
978 TYPE_NAME (new_type) = TYPE_NAME (type);
979 TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
980 TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
981 TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
982 if (TREE_CODE (type) == RECORD_TYPE)
983 TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
985 /* If we are in a record and have a small size, set the alignment to
986 try for an integral mode. Otherwise set it to try for a smaller
987 type with BLKmode. */
988 if (in_record && size <= MAX_FIXED_MODE_SIZE)
990 align = ceil_pow2 (size);
991 TYPE_ALIGN (new_type) = align;
992 new_size = (size + align - 1) & -align;
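/* Illustrative numbers for the computation above: a 20-bit size gives
   align = ceil_pow2 (20) = 32 and new_size = (20 + 31) & -32 = 32,
   i.e. the smallest 32-bit-aligned size that can hold the 20 bits.  */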
994 else
996 unsigned HOST_WIDE_INT align;
998 /* Do not try to shrink the size if the RM size is not constant. */
999 if (TYPE_CONTAINS_TEMPLATE_P (type)
1000 || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
1001 return type;
1003 /* Round the RM size up to a unit boundary to get the minimal size
1004 for a BLKmode record. Give up if it's already the size. */
1005 new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
1006 new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
1007 if (new_size == size)
1008 return type;
1010 align = new_size & -new_size;
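/* The expression above isolates the lowest set bit of NEW_SIZE: e.g. an
   RM size of 17 bits was rounded up to new_size = 24 just before, and
   24 & -24 = 8, so the new BLKmode type gets at most byte alignment
   (illustrative numbers).  */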
1011 TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
1014 TYPE_USER_ALIGN (new_type) = 1;
1016 /* Now copy the fields, keeping the position and size as we don't want
1017 to change the layout by propagating the packedness downwards. */
1018 for (old_field = TYPE_FIELDS (type); old_field;
1019 old_field = DECL_CHAIN (old_field))
1021 tree new_field_type = TREE_TYPE (old_field);
1022 tree new_field, new_size;
1024 if (RECORD_OR_UNION_TYPE_P (new_field_type)
1025 && !TYPE_FAT_POINTER_P (new_field_type)
1026 && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
1027 new_field_type = make_packable_type (new_field_type, true);
1029 /* However, for the last field in a not already packed record type
1030 that is of an aggregate type, we need to use the RM size in the
1031 packable version of the record type, see finish_record_type. */
1032 if (!DECL_CHAIN (old_field)
1033 && !TYPE_PACKED (type)
1034 && RECORD_OR_UNION_TYPE_P (new_field_type)
1035 && !TYPE_FAT_POINTER_P (new_field_type)
1036 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
1037 && TYPE_ADA_SIZE (new_field_type))
1038 new_size = TYPE_ADA_SIZE (new_field_type);
1039 else
1040 new_size = DECL_SIZE (old_field);
1042 new_field
1043 = create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
1044 new_size, bit_position (old_field),
1045 TYPE_PACKED (type),
1046 !DECL_NONADDRESSABLE_P (old_field));
1048 DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
1049 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
1050 if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
1051 DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);
1053 DECL_CHAIN (new_field) = field_list;
1054 field_list = new_field;
1057 finish_record_type (new_type, nreverse (field_list), 2, false);
1058 relate_alias_sets (new_type, type, ALIAS_SET_COPY);
1059 if (TYPE_STUB_DECL (type))
1060 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
1061 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
1063 /* If this is a padding record, we never want to make the size smaller
1064 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
1065 if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
1067 TYPE_SIZE (new_type) = TYPE_SIZE (type);
1068 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
1069 new_size = size;
1071 else
1073 TYPE_SIZE (new_type) = bitsize_int (new_size);
1074 TYPE_SIZE_UNIT (new_type)
1075 = size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
1078 if (!TYPE_CONTAINS_TEMPLATE_P (type))
1079 SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
1081 compute_record_mode (new_type);
1083 /* Try harder to get a packable type if necessary, for example
1084 in case the record itself contains a BLKmode field. */
1085 if (in_record && TYPE_MODE (new_type) == BLKmode)
1086 SET_TYPE_MODE (new_type,
1087 mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));
1089 /* If neither the mode nor the size has shrunk, return the old type. */
1090 if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
1091 return type;
1093 return new_type;
1096 /* Given a type TYPE, return a new type whose size is appropriate for SIZE.
1097 If TYPE is the best type, return it. Otherwise, make a new type. We
1098 only support new integral and pointer types. FOR_BIASED is true if
1099 we are making a biased type. */
1101 tree
1102 make_type_from_size (tree type, tree size_tree, bool for_biased)
1104 unsigned HOST_WIDE_INT size;
1105 bool biased_p;
1106 tree new_type;
1108 /* If size indicates an error, just return TYPE to avoid propagating
1109 the error. Likewise if it's too large to represent. */
1110 if (!size_tree || !tree_fits_uhwi_p (size_tree))
1111 return type;
1113 size = tree_to_uhwi (size_tree);
1115 switch (TREE_CODE (type))
1117 case INTEGER_TYPE:
1118 case ENUMERAL_TYPE:
1119 case BOOLEAN_TYPE:
1120 biased_p = (TREE_CODE (type) == INTEGER_TYPE
1121 && TYPE_BIASED_REPRESENTATION_P (type));
1123 /* Integer types with precision 0 are forbidden. */
1124 if (size == 0)
1125 size = 1;
1127 /* Only do something if the type isn't a packed array type and doesn't
1128 already have the proper size and the size isn't too large. */
1129 if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
1130 || (TYPE_PRECISION (type) == size && biased_p == for_biased)
1131 || size > LONG_LONG_TYPE_SIZE)
1132 break;
1134 biased_p |= for_biased;
1135 if (TYPE_UNSIGNED (type) || biased_p)
1136 new_type = make_unsigned_type (size);
1137 else
1138 new_type = make_signed_type (size);
1139 TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
1140 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
1141 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
1142 /* Copy the name to show that it's essentially the same type and
1143 not a subrange type. */
1144 TYPE_NAME (new_type) = TYPE_NAME (type);
1145 TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
1146 SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
1147 return new_type;
1149 case RECORD_TYPE:
1150 /* Do something if this is a fat pointer, in which case we
1151 may need to return the thin pointer. */
1152 if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
1154 machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
1155 if (!targetm.valid_pointer_mode (p_mode))
1156 p_mode = ptr_mode;
1157 return
1158 build_pointer_type_for_mode
1159 (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
1160 p_mode, 0);
1162 break;
1164 case POINTER_TYPE:
1165 /* Only do something if this is a thin pointer, in which case we
1166 may need to return the fat pointer. */
1167 if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
1168 return
1169 build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
1170 break;
1172 default:
1173 break;
1176 return type;
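/* Usage sketch (illustrative, hypothetical operands): shrinking a 32-bit
   integer type down to 8 bits

     tree t8 = make_type_from_size (integer_type_node,
                                    bitsize_int (8), false);

   returns a fresh 8-bit INTEGER_TYPE that keeps the original name and
   RM bounds, while the unsupported cases above simply fall through and
   return the input type unchanged.  */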
1179 /* See if the data pointed to by the hash table slot is marked. */
1181 void
1182 pad_type_hasher::handle_cache_entry (pad_type_hash *&t)
1184 extern void gt_ggc_mx (pad_type_hash *&);
1185 if (t == HTAB_EMPTY_ENTRY || t == HTAB_DELETED_ENTRY)
1186 return;
1187 else if (ggc_marked_p (t->type))
1188 gt_ggc_mx (t);
1189 else
1190 t = static_cast<pad_type_hash *> (HTAB_DELETED_ENTRY);
1193 /* Return true iff the padded types are equivalent. */
1195 bool
1196 pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
1198 tree type1, type2;
1200 if (t1->hash != t2->hash)
1201 return 0;
1203 type1 = t1->type;
1204 type2 = t2->type;
1206 /* We consider that the padded types are equivalent if they pad the same type
1207 and have the same size, alignment, RM size and storage order. Taking the
1208 mode into account is redundant since it is determined by the others. */
1209 return
1210 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1211 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1212 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1213 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
1214 && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
1217 /* Look up the padded TYPE in the hash table and return its canonical version
1218 if it exists; otherwise, insert it into the hash table. */
1220 static tree
1221 lookup_and_insert_pad_type (tree type)
1223 hashval_t hashcode;
1224 struct pad_type_hash in, *h;
1226 hashcode
1227 = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
1228 hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
1229 hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
1230 hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);
1232 in.hash = hashcode;
1233 in.type = type;
1234 h = pad_type_hash_table->find_with_hash (&in, hashcode);
1235 if (h)
1236 return h->type;
1238 h = ggc_alloc<pad_type_hash> ();
1239 h->hash = hashcode;
1240 h->type = type;
1241 *pad_type_hash_table->find_slot_with_hash (h, hashcode, INSERT) = h;
1242 return NULL_TREE;
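/* Usage sketch of the canonicalization protocol above (illustrative):

     tree canonical = lookup_and_insert_pad_type (record);
     if (canonical)
       record = canonical;

   i.e. reuse the equivalent type if one already existed, as done by
   maybe_pad_type and set_reverse_storage_order_on_pad_type below.  */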
1245 /* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
1246 if needed. We have already verified that SIZE and ALIGN are large enough.
1247 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1248 IS_COMPONENT_TYPE is true if this is being done for the component type of
1249 an array. IS_USER_TYPE is true if the original type needs to be completed.
1250 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
1251 the RM size of the resulting type is to be set to SIZE too. */
1253 tree
1254 maybe_pad_type (tree type, tree size, unsigned int align,
1255 Entity_Id gnat_entity, bool is_component_type,
1256 bool is_user_type, bool definition, bool set_rm_size)
1258 tree orig_size = TYPE_SIZE (type);
1259 unsigned int orig_align = TYPE_ALIGN (type);
1260 tree record, field;
1262 /* If TYPE is a padded type, see if it agrees with any size and alignment
1263 we were given. If so, return the original type. Otherwise, strip
1264 off the padding, since we will either be returning the inner type
1265 or repadding it. If no size or alignment is specified, use that of
1266 the original padded type. */
1267 if (TYPE_IS_PADDING_P (type))
1269 if ((!size
1270 || operand_equal_p (round_up (size, orig_align), orig_size, 0))
1271 && (align == 0 || align == orig_align))
1272 return type;
1274 if (!size)
1275 size = orig_size;
1276 if (align == 0)
1277 align = orig_align;
1279 type = TREE_TYPE (TYPE_FIELDS (type));
1280 orig_size = TYPE_SIZE (type);
1281 orig_align = TYPE_ALIGN (type);
1284 /* If the size is either not being changed or is being made smaller (which
1285 is not done here and is only valid for bitfields anyway), show the size
1286 isn't changing. Likewise, clear the alignment if it isn't being
1287 changed. Then return if we aren't doing anything. */
1288 if (size
1289 && (operand_equal_p (size, orig_size, 0)
1290 || (TREE_CODE (orig_size) == INTEGER_CST
1291 && tree_int_cst_lt (size, orig_size))))
1292 size = NULL_TREE;
1294 if (align == orig_align)
1295 align = 0;
1297 if (align == 0 && !size)
1298 return type;
1300 /* If requested, complete the original type and give it a name. */
1301 if (is_user_type)
1302 create_type_decl (get_entity_name (gnat_entity), type,
1303 !Comes_From_Source (gnat_entity),
1304 !(TYPE_NAME (type)
1305 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1306 && DECL_IGNORED_P (TYPE_NAME (type))),
1307 gnat_entity);
1309 /* We used to modify the record in place in some cases, but that could
1310 generate incorrect debugging information. So make a new record
1311 type and name. */
1312 record = make_node (RECORD_TYPE);
1313 TYPE_PADDING_P (record) = 1;
1315 if (Present (gnat_entity))
1316 TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");
1318 TYPE_ALIGN (record) = align ? align : orig_align;
1319 TYPE_SIZE (record) = size ? size : orig_size;
1320 TYPE_SIZE_UNIT (record)
1321 = convert (sizetype,
1322 size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
1323 bitsize_unit_node));
1325 /* If we are changing the alignment and the input type is a record with
1326 BLKmode and a small constant size, try to make a form that has an
1327 integral mode. This might allow the padding record to also have an
1328 integral mode, which will be much more efficient. There is no point
1329 in doing so if a size is specified unless it is also a small constant
1330 size and it is incorrect to do so if we cannot guarantee that the mode
1331 will be naturally aligned since the field must always be addressable.
1333 ??? This might not always be a win when done for a stand-alone object:
1334 since the nominal and the effective type of the object will now have
1335 different modes, a VIEW_CONVERT_EXPR will be required for converting
1336 between them and it might be hard to overcome afterwards, including
1337 at the RTL level when the stand-alone object is accessed as a whole. */
1338 if (align != 0
1339 && RECORD_OR_UNION_TYPE_P (type)
1340 && TYPE_MODE (type) == BLKmode
1341 && !TYPE_BY_REFERENCE_P (type)
1342 && TREE_CODE (orig_size) == INTEGER_CST
1343 && !TREE_OVERFLOW (orig_size)
1344 && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
1345 && (!size
1346 || (TREE_CODE (size) == INTEGER_CST
1347 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
1349 tree packable_type = make_packable_type (type, true);
1350 if (TYPE_MODE (packable_type) != BLKmode
1351 && align >= TYPE_ALIGN (packable_type))
1352 type = packable_type;
1355 /* Now create the field with the original size. */
1356 field = create_field_decl (get_identifier ("F"), type, record, orig_size,
1357 bitsize_zero_node, 0, 1);
1358 DECL_INTERNAL_P (field) = 1;
1360 /* Do not emit debug info until after the auxiliary record is built. */
1361 finish_record_type (record, field, 1, false);
1363 /* Set the RM size if requested. */
1364 if (set_rm_size)
1366 tree canonical_pad_type;
1368 SET_TYPE_ADA_SIZE (record, size ? size : orig_size);
1370 /* If the padded type is complete and has constant size, we canonicalize
1371 it by means of the hash table. This is consistent with the language
1372 semantics and ensures that gigi and the middle-end have a common view
1373 of these padded types. */
1374 if (TREE_CONSTANT (TYPE_SIZE (record))
1375 && (canonical_pad_type = lookup_and_insert_pad_type (record)))
1377 record = canonical_pad_type;
1378 goto built;
1382 /* If debugging information is being written for the input type, write a
1383 record that shows what we are a subtype of and also make a variable
1384 that indicates our size, if it is still variable. */
1385 if (TREE_CODE (orig_size) != INTEGER_CST
1386 && TYPE_NAME (record)
1387 && TYPE_NAME (type)
1388 && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1389 && DECL_IGNORED_P (TYPE_NAME (type))))
1391 tree marker = make_node (RECORD_TYPE);
1392 tree name = TYPE_IDENTIFIER (record);
1393 tree orig_name = TYPE_IDENTIFIER (type);
1395 TYPE_NAME (marker) = concat_name (name, "XVS");
1396 finish_record_type (marker,
1397 create_field_decl (orig_name,
1398 build_reference_type (type),
1399 marker, NULL_TREE, NULL_TREE,
1400 0, 0),
1401 0, true);
1403 add_parallel_type (record, marker);
1405 if (definition && size && TREE_CODE (size) != INTEGER_CST)
1406 TYPE_SIZE_UNIT (marker)
1407 = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
1408 TYPE_SIZE_UNIT (record), false, false, false,
1409 false, NULL, gnat_entity);
1412 rest_of_record_type_compilation (record);
1414 built:
1415 /* If the size was widened explicitly, maybe give a warning. Take the
1416 original size as the maximum size of the input if there was an
1417 unconstrained record involved and round it up to the specified alignment,
1418 if one was specified. But don't do it if we are just annotating types
1419 and the type is tagged, since tagged types aren't fully laid out in this
1420 mode. */
1421 if (!size
1422 || TREE_CODE (size) == COND_EXPR
1423 || TREE_CODE (size) == MAX_EXPR
1424 || No (gnat_entity)
1425 || (type_annotate_only && Is_Tagged_Type (Etype (gnat_entity))))
1426 return record;
1428 if (CONTAINS_PLACEHOLDER_P (orig_size))
1429 orig_size = max_size (orig_size, true);
1431 if (align)
1432 orig_size = round_up (orig_size, align);
1434 if (!operand_equal_p (size, orig_size, 0)
1435 && !(TREE_CODE (size) == INTEGER_CST
1436 && TREE_CODE (orig_size) == INTEGER_CST
1437 && (TREE_OVERFLOW (size)
1438 || TREE_OVERFLOW (orig_size)
1439 || tree_int_cst_lt (size, orig_size))))
1441 Node_Id gnat_error_node = Empty;
1443 /* For a packed array, post the message on the original array type. */
1444 if (Is_Packed_Array_Impl_Type (gnat_entity))
1445 gnat_entity = Original_Array_Type (gnat_entity);
1447 if ((Ekind (gnat_entity) == E_Component
1448 || Ekind (gnat_entity) == E_Discriminant)
1449 && Present (Component_Clause (gnat_entity)))
1450 gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
1451 else if (Present (Size_Clause (gnat_entity)))
1452 gnat_error_node = Expression (Size_Clause (gnat_entity));
1454 /* Generate message only for entities that come from source, since
1455 if we have an entity created by expansion, the message will be
1456 generated for some other corresponding source entity. */
1457 if (Comes_From_Source (gnat_entity))
1459 if (Present (gnat_error_node))
1460 post_error_ne_tree ("{^ }bits of & unused?",
1461 gnat_error_node, gnat_entity,
1462 size_diffop (size, orig_size));
1463 else if (is_component_type)
1464 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1465 gnat_entity, gnat_entity,
1466 size_diffop (size, orig_size));
1470 return record;
1473 /* Return a copy of the padded TYPE but with reverse storage order. */
1475 tree
1476 set_reverse_storage_order_on_pad_type (tree type)
1478 tree field, canonical_pad_type;
1480 #ifdef ENABLE_CHECKING
1481 /* If the inner type is not scalar then the function does nothing. */
1482 tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
1483 gcc_assert (!AGGREGATE_TYPE_P (inner_type));
1484 #endif
1486 /* This is required for the canonicalization. */
1487 gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));
1489 field = copy_node (TYPE_FIELDS (type));
1490 type = copy_type (type);
1491 DECL_CONTEXT (field) = type;
1492 TYPE_FIELDS (type) = field;
1493 TYPE_REVERSE_STORAGE_ORDER (type) = 1;
1494 canonical_pad_type = lookup_and_insert_pad_type (type);
1495 return canonical_pad_type ? canonical_pad_type : type;
1498 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1499 If this is a multi-dimensional array type, do this recursively.
1501 OP may be
1502 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1503 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1504 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1506 void
1507 relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1509 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1510 of a one-dimensional array, since the padding has the same alias set
1511 as the field type, but if it's a multi-dimensional array, we need to
1512 see the inner types. */
1513 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1514 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1515 || TYPE_PADDING_P (gnu_old_type)))
1516 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1518 /* Unconstrained array types are deemed incomplete and would thus be given
1519 alias set 0. Retrieve the underlying array type. */
1520 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1521 gnu_old_type
1522 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1523 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1524 gnu_new_type
1525 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
1527 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1528 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1529 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1530 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1532 switch (op)
1534 case ALIAS_SET_COPY:
1535 /* The alias set shouldn't be copied between array types with different
1536 aliasing settings because this can break the aliasing relationship
1537 between the array type and its element type. */
1538 #ifndef ENABLE_CHECKING
1539 if (flag_strict_aliasing)
1540 #endif
1541 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1542 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1543 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1544 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1546 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1547 break;
1549 case ALIAS_SET_SUBSET:
1550 case ALIAS_SET_SUPERSET:
1552 alias_set_type old_set = get_alias_set (gnu_old_type);
1553 alias_set_type new_set = get_alias_set (gnu_new_type);
1555 /* Do nothing if the alias sets conflict. This ensures that we
1556 never call record_alias_subset several times for the same pair
1557 or at all for alias set 0. */
1558 if (!alias_sets_conflict_p (old_set, new_set))
1560 if (op == ALIAS_SET_SUBSET)
1561 record_alias_subset (old_set, new_set);
1562 else
1563 record_alias_subset (new_set, old_set);
1566 break;
1568 default:
1569 gcc_unreachable ();
1572 record_component_aliases (gnu_new_type);
1575 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1576 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
1578 void
1579 record_builtin_type (const char *name, tree type, bool artificial_p)
1581 tree type_decl = build_decl (input_location,
1582 TYPE_DECL, get_identifier (name), type);
1583 DECL_ARTIFICIAL (type_decl) = artificial_p;
1584 TYPE_ARTIFICIAL (type) = artificial_p;
1585 gnat_pushdecl (type_decl, Empty);
1587 if (debug_hooks->type_decl)
1588 debug_hooks->type_decl (type_decl, false);
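/* A minimal illustrative sketch (hypothetical call site) of registering a
   standard scalar type during gigi initialization.  */
#if 0
record_builtin_type ("integer", integer_type_node, false);
#endif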
1591 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1592 finish constructing the record type as a fat pointer type. */
1594 void
1595 finish_fat_pointer_type (tree record_type, tree field_list)
1597 /* Make sure we can put it into a register. */
1598 if (STRICT_ALIGNMENT)
1599 TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);
1601 /* Show what it really is. */
1602 TYPE_FAT_POINTER_P (record_type) = 1;
1604 /* Do not emit debug info for it since the types of its fields may still be
1605 incomplete at this point. */
1606 finish_record_type (record_type, field_list, 0, false);
1608 /* Force type_contains_placeholder_p to return true on it. Although the
1609 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1610 type but the representation of the unconstrained array. */
1611 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
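/* A minimal illustrative sketch of building a fat pointer: a record with a
   pointer to the array data and a pointer to the bounds template.  The
   pointer types below are assumptions for the example.  */
#if 0
tree fat_type = make_node (RECORD_TYPE);
tree p_array = create_field_decl (get_identifier ("P_ARRAY"),
                                  ptr_array_type,   /* hypothetical */
                                  fat_type, NULL_TREE, NULL_TREE, 0, 0);
tree p_bounds = create_field_decl (get_identifier ("P_BOUNDS"),
                                   ptr_bounds_type, /* hypothetical */
                                   fat_type, NULL_TREE, NULL_TREE, 0, 0);
DECL_CHAIN (p_array) = p_bounds;
finish_fat_pointer_type (fat_type, p_array);
#endif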
1614 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1615 finish constructing the record or union type. If REP_LEVEL is zero, this
1616 record has no representation clause and so will be entirely laid out here.
1617 If REP_LEVEL is one, this record has a representation clause and has been
1618 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1619 this record is derived from a parent record and thus inherits its layout;
1620 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1621 we need to write debug information about this type. */
1623 void
1624 finish_record_type (tree record_type, tree field_list, int rep_level,
1625 bool debug_info_p)
1627 enum tree_code code = TREE_CODE (record_type);
1628 tree name = TYPE_IDENTIFIER (record_type);
1629 tree ada_size = bitsize_zero_node;
1630 tree size = bitsize_zero_node;
1631 bool had_size = TYPE_SIZE (record_type) != 0;
1632 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1633 bool had_align = TYPE_ALIGN (record_type) != 0;
1634 tree field;
1636 TYPE_FIELDS (record_type) = field_list;
1638 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1639 generate debug info and have a parallel type. */
1640 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
1642 /* Globally initialize the record first. If this is a rep'ed record,
1643 that just means some initializations; otherwise, layout the record. */
1644 if (rep_level > 0)
1646 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
1648 if (!had_size_unit)
1649 TYPE_SIZE_UNIT (record_type) = size_zero_node;
1651 if (!had_size)
1652 TYPE_SIZE (record_type) = bitsize_zero_node;
1654 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1655 out just like a UNION_TYPE, since the size will be fixed. */
1656 else if (code == QUAL_UNION_TYPE)
1657 code = UNION_TYPE;
1659 else
1661 /* Ensure there isn't a size already set. There can be in an error
1662 case where there is a rep clause but all fields have errors and
1663 no longer have a position. */
1664 TYPE_SIZE (record_type) = 0;
1666 /* Ensure we use the traditional GCC layout for bitfields when we need
1667 to pack the record type or have a representation clause. The other
1668 possible layout (Microsoft C compiler), if available, would prevent
1669 efficient packing in almost all cases. */
1670 #ifdef TARGET_MS_BITFIELD_LAYOUT
1671 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1672 decl_attributes (&record_type,
1673 tree_cons (get_identifier ("gcc_struct"),
1674 NULL_TREE, NULL_TREE),
1675 ATTR_FLAG_TYPE_IN_PLACE);
1676 #endif
1678 layout_type (record_type);
1681 /* At this point, the position and size of each field is known. It was
1682 either set before entry by a rep clause, or by laying out the type above.
1684 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1685 to compute the Ada size; the GCC size and alignment (for rep'ed records
1686 that are not padding types); and the mode (for rep'ed records). We also
1687 clear the DECL_BIT_FIELD indication for the cases we know have not been
1688 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1690 if (code == QUAL_UNION_TYPE)
1691 field_list = nreverse (field_list);
1693 for (field = field_list; field; field = DECL_CHAIN (field))
1695 tree type = TREE_TYPE (field);
1696 tree pos = bit_position (field);
1697 tree this_size = DECL_SIZE (field);
1698 tree this_ada_size;
1700 if (RECORD_OR_UNION_TYPE_P (type)
1701 && !TYPE_FAT_POINTER_P (type)
1702 && !TYPE_CONTAINS_TEMPLATE_P (type)
1703 && TYPE_ADA_SIZE (type))
1704 this_ada_size = TYPE_ADA_SIZE (type);
1705 else
1706 this_ada_size = this_size;
1708 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1709 if (DECL_BIT_FIELD (field)
1710 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1712 unsigned int align = TYPE_ALIGN (type);
1714 /* In the general case, type alignment is required. */
1715 if (value_factor_p (pos, align))
1717 /* The enclosing record type must be sufficiently aligned.
1718 Otherwise, if no alignment was specified for it and it
1719 has been laid out already, bump its alignment to the
1720 desired one if this is compatible with its size. */
1721 if (TYPE_ALIGN (record_type) >= align)
1723 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1724 DECL_BIT_FIELD (field) = 0;
1726 else if (!had_align
1727 && rep_level == 0
1728 && value_factor_p (TYPE_SIZE (record_type), align))
1730 TYPE_ALIGN (record_type) = align;
1731 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1732 DECL_BIT_FIELD (field) = 0;
1736 /* In the non-strict alignment case, only byte alignment is required. */
1737 if (!STRICT_ALIGNMENT
1738 && DECL_BIT_FIELD (field)
1739 && value_factor_p (pos, BITS_PER_UNIT))
1740 DECL_BIT_FIELD (field) = 0;
1743 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1744 field is technically not addressable. Except that it can actually
1745 be addressed if it is BLKmode and happens to be properly aligned. */
1746 if (DECL_BIT_FIELD (field)
1747 && !(DECL_MODE (field) == BLKmode
1748 && value_factor_p (pos, BITS_PER_UNIT)))
1749 DECL_NONADDRESSABLE_P (field) = 1;
1751 /* A type must be as aligned as its most aligned field that is not
1752 a bit-field. But this is already enforced by layout_type. */
1753 if (rep_level > 0 && !DECL_BIT_FIELD (field))
1754 TYPE_ALIGN (record_type)
1755 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
1757 switch (code)
1759 case UNION_TYPE:
1760 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1761 size = size_binop (MAX_EXPR, size, this_size);
1762 break;
1764 case QUAL_UNION_TYPE:
1765 ada_size
1766 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1767 this_ada_size, ada_size);
1768 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1769 this_size, size);
1770 break;
1772 case RECORD_TYPE:
1773 /* Since we know here that all fields are sorted in order of
1774 increasing bit position, the size of the record is one
1775 higher than the ending bit of the last field processed
1776 unless we have a rep clause, since in that case we might
1777 have a field outside a QUAL_UNION_TYPE that has a higher ending
1778 position. So use a MAX in that case. Also, if this field is a
1779 QUAL_UNION_TYPE, we need to take into account the previous size in
1780 the case of empty variants. */
1781 ada_size
1782 = merge_sizes (ada_size, pos, this_ada_size,
1783 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1784 size
1785 = merge_sizes (size, pos, this_size,
1786 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1787 break;
1789 default:
1790 gcc_unreachable ();
1794 if (code == QUAL_UNION_TYPE)
1795 nreverse (field_list);
1797 if (rep_level < 2)
1799 /* If this is a padding record, we never want to make the size smaller
1800 than what was specified in it, if any. */
1801 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
1802 size = TYPE_SIZE (record_type);
1804 /* Now set any of the values we've just computed that apply. */
1805 if (!TYPE_FAT_POINTER_P (record_type)
1806 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1807 SET_TYPE_ADA_SIZE (record_type, ada_size);
1809 if (rep_level > 0)
1811 tree size_unit = had_size_unit
1812 ? TYPE_SIZE_UNIT (record_type)
1813 : convert (sizetype,
1814 size_binop (CEIL_DIV_EXPR, size,
1815 bitsize_unit_node));
1816 unsigned int align = TYPE_ALIGN (record_type);
1818 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1819 TYPE_SIZE_UNIT (record_type)
1820 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1822 compute_record_mode (record_type);
1826 if (debug_info_p)
1827 rest_of_record_type_compilation (record_type);
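/* A minimal illustrative sketch of the typical calling sequence: create the
   fields, chain them, then let REP_LEVEL 0 lay the record out.  The names
   and types are assumptions for the example.  */
#if 0
tree rec = make_node (RECORD_TYPE);
tree f1 = create_field_decl (get_identifier ("F1"),
                             gnat_type_for_size (32, 0), rec,
                             NULL_TREE, NULL_TREE, 0, 0);
tree f2 = create_field_decl (get_identifier ("F2"),
                             gnat_type_for_size (8, 1), rec,
                             NULL_TREE, NULL_TREE, 0, 0);
DECL_CHAIN (f1) = f2;
TYPE_NAME (rec) = get_identifier ("example_rec");
finish_record_type (rec, f1, 0, true);
#endif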
1830 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
1831 PARALLEL_TYPE has no context and its computation is not deferred yet, also
1832 propagate TYPE's context to PARALLEL_TYPE's or defer its propagation to the
1833 moment TYPE will get a context. */
1835 void
1836 add_parallel_type (tree type, tree parallel_type)
1838 tree decl = TYPE_STUB_DECL (type);
1840 while (DECL_PARALLEL_TYPE (decl))
1841 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1843 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1845 /* If PARALLEL_TYPE already has a context, we are done. */
1846 if (TYPE_CONTEXT (parallel_type) != NULL_TREE)
1847 return;
1849 /* Otherwise, try to get one from TYPE's context. */
1850 if (TYPE_CONTEXT (type) != NULL_TREE)
1851 /* TYPE already has a context, so simply propagate it to PARALLEL_TYPE. */
1852 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
1854 /* ... otherwise TYPE has no context yet. We know it will get one thanks to
1855 gnat_pushdecl, and then its context will be propagated to PARALLEL_TYPE.
1856 So we have nothing to do in this case. */
1859 /* Return true if TYPE has a parallel type. */
1861 static bool
1862 has_parallel_type (tree type)
1864 tree decl = TYPE_STUB_DECL (type);
1866 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1869 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1870 associated with it. It need not be invoked directly in most cases since
1871 finish_record_type takes care of doing so, but this can be necessary if
1872 a parallel type is to be attached to the record type. */
1874 void
1875 rest_of_record_type_compilation (tree record_type)
1877 bool var_size = false;
1878 tree field;
1880 /* If this is a padded type, the bulk of the debug info has already been
1881 generated for the field's type. */
1882 if (TYPE_IS_PADDING_P (record_type))
1883 return;
1885 /* If the type already has a parallel type (XVS type), then we're done. */
1886 if (has_parallel_type (record_type))
1887 return;
1889 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
1891 /* We need to make an XVE/XVU record if any field has variable size,
1892 whether or not the record does. For example, if we have a union,
1893 it may be that all fields, rounded up to the alignment, have the
1894 same size, in which case we'll use that size. But the debug
1895 output routines (except Dwarf2) won't be able to output the fields,
1896 so we need to make the special record. */
1897 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1898 /* If a field has a non-constant qualifier, the record will have
1899 variable size too. */
1900 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
1901 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1903 var_size = true;
1904 break;
1908 /* If this record type is of variable size, make a parallel record type that
1909 will tell the debugger how the former is laid out (see exp_dbug.ads). */
1910 if (var_size)
1912 tree new_record_type
1913 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1914 ? UNION_TYPE : TREE_CODE (record_type));
1915 tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
1916 tree last_pos = bitsize_zero_node;
1917 tree old_field, prev_old_field = NULL_TREE;
1919 new_name
1920 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
1921 ? "XVU" : "XVE");
1922 TYPE_NAME (new_record_type) = new_name;
1923 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1924 TYPE_STUB_DECL (new_record_type)
1925 = create_type_stub_decl (new_name, new_record_type);
1926 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1927 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1928 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1929 TYPE_SIZE_UNIT (new_record_type)
1930 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1932 /* Now scan all the fields, replacing each field with a new field
1933 corresponding to the new encoding. */
1934 for (old_field = TYPE_FIELDS (record_type); old_field;
1935 old_field = DECL_CHAIN (old_field))
1937 tree field_type = TREE_TYPE (old_field);
1938 tree field_name = DECL_NAME (old_field);
1939 tree curpos = bit_position (old_field);
1940 tree pos, new_field;
1941 bool var = false;
1942 unsigned int align = 0;
1944 /* We're going to do some pattern matching below so remove as many
1945 conversions as possible. */
1946 curpos = remove_conversions (curpos, true);
1948 /* See how the position was modified from the last position.
1950 There are two basic cases we support: a value was added
1951 to the last position or the last position was rounded to
1952 a boundary and then something was added. Check for the
1953 first case first. If not, see if there is any evidence
1954 of rounding. If so, round the last position and retry.
1956 If this is a union, the position can be taken as zero. */
1957 if (TREE_CODE (new_record_type) == UNION_TYPE)
1958 pos = bitsize_zero_node;
1959 else
1960 pos = compute_related_constant (curpos, last_pos);
1962 if (!pos
1963 && TREE_CODE (curpos) == MULT_EXPR
1964 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
1966 tree offset = TREE_OPERAND (curpos, 0);
1967 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
1968 align = scale_by_factor_of (offset, align);
1969 last_pos = round_up (last_pos, align);
1970 pos = compute_related_constant (curpos, last_pos);
1972 else if (!pos
1973 && TREE_CODE (curpos) == PLUS_EXPR
1974 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
1975 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1976 && tree_fits_uhwi_p
1977 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
1979 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
1980 unsigned HOST_WIDE_INT addend
1981 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
1982 align
1983 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
1984 align = scale_by_factor_of (offset, align);
1985 align = MIN (align, addend & -addend);
1986 last_pos = round_up (last_pos, align);
1987 pos = compute_related_constant (curpos, last_pos);
1989 else if (potential_alignment_gap (prev_old_field, old_field, pos))
1991 align = TYPE_ALIGN (field_type);
1992 last_pos = round_up (last_pos, align);
1993 pos = compute_related_constant (curpos, last_pos);
1996 /* If we can't compute a position, set it to zero.
1998 ??? We really should abort here, but it's too much work
1999 to get this correct for all cases. */
2000 if (!pos)
2001 pos = bitsize_zero_node;
2003 /* See if this type is variable-sized and make a pointer type
2004 and indicate the indirection if so. Beware that the debug
2005 back-end may adjust the position computed above according
2006 to the alignment of the field type, i.e. the pointer type
2007 in this case, if we don't preventively counter that. */
2008 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
2010 field_type = build_pointer_type (field_type);
2011 if (align != 0 && TYPE_ALIGN (field_type) > align)
2013 field_type = copy_node (field_type);
2014 TYPE_ALIGN (field_type) = align;
2016 var = true;
2019 /* Make a new field name, if necessary. */
2020 if (var || align != 0)
2022 char suffix[16];
2024 if (align != 0)
2025 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
2026 align / BITS_PER_UNIT);
2027 else
2028 strcpy (suffix, "XVL");
2030 field_name = concat_name (field_name, suffix);
2033 new_field
2034 = create_field_decl (field_name, field_type, new_record_type,
2035 DECL_SIZE (old_field), pos, 0, 0);
2036 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
2037 TYPE_FIELDS (new_record_type) = new_field;
2039 /* If old_field is a QUAL_UNION_TYPE, take its size as being
2040 zero. The only time it's not the last field of the record
2041 is when there are other components at fixed positions after
2042 it (meaning there was a rep clause for every field) and we
2043 want to be able to encode them. */
2044 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
2045 (TREE_CODE (TREE_TYPE (old_field))
2046 == QUAL_UNION_TYPE)
2047 ? bitsize_zero_node
2048 : DECL_SIZE (old_field));
2049 prev_old_field = old_field;
2052 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
2054 add_parallel_type (record_type, new_record_type);
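/* A standalone illustrative model (hypothetical, assuming 8-bit storage
   units) of the XV encodings computed above, for a variable-sized record
   named REC; see exp_dbug.ads for the authoritative rules.  */
#if 0
#include <stdio.h>
#include <string.h>

int
main (void)
{
  char suffix[16];
  int var = 1;             /* The field has variable size.  */
  unsigned int align = 0;  /* No extra alignment was applied.  */

  printf ("RECXVE\n");     /* Parallel type name: XVE (XVU for unions).  */

  /* Field name suffix: XVL if variable-sized, XVAn/XVLn if realigned.  */
  if (align != 0)
    sprintf (suffix, "XV%c%u", var ? 'L' : 'A', align / 8);
  else
    strcpy (suffix, "XVL");
  printf ("FIELD%s\n", suffix);  /* -> FIELDXVL */
  return 0;
}
#endif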
2058 /* Utility function of above to merge LAST_SIZE, the previous size of a record
2059 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2060 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2061 replace a value of zero with the old size. If HAS_REP is true, we take the
2062 MAX of the end position of this field with LAST_SIZE. In all other cases,
2063 we use FIRST_BIT plus SIZE. Return an expression for the size. */
2065 static tree
2066 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
2067 bool has_rep)
2069 tree type = TREE_TYPE (last_size);
2070 tree new_size;
2072 if (!special || TREE_CODE (size) != COND_EXPR)
2074 new_size = size_binop (PLUS_EXPR, first_bit, size);
2075 if (has_rep)
2076 new_size = size_binop (MAX_EXPR, last_size, new_size);
2079 else
2080 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
2081 integer_zerop (TREE_OPERAND (size, 1))
2082 ? last_size : merge_sizes (last_size, first_bit,
2083 TREE_OPERAND (size, 1),
2084 1, has_rep),
2085 integer_zerop (TREE_OPERAND (size, 2))
2086 ? last_size : merge_sizes (last_size, first_bit,
2087 TREE_OPERAND (size, 2),
2088 1, has_rep));
2090 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
2091 when fed through substitute_in_expr) into thinking that a constant
2092 size is not constant. */
2093 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
2094 new_size = TREE_OPERAND (new_size, 0);
2096 return new_size;
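/* A standalone illustrative model, on plain integers, of the non-COND_EXPR
   path of merge_sizes, contrasting the rep'ed and non-rep'ed cases.  */
#if 0
#include <stdio.h>

static long
merge_sizes_model (long last_size, long first_bit, long size, int has_rep)
{
  long new_size = first_bit + size;
  /* With a rep clause, the record cannot shrink below its previous size.  */
  if (has_rep && last_size > new_size)
    new_size = last_size;
  return new_size;
}

int
main (void)
{
  /* No rep clause: a 16-bit field appended at bit 32 gives 48 bits.  */
  printf ("%ld\n", merge_sizes_model (32, 32, 16, 0));  /* 48 */
  /* Rep clause: an 8-bit field placed at bit 0 leaves the size at 48.  */
  printf ("%ld\n", merge_sizes_model (48, 0, 8, 1));    /* 48 */
  return 0;
}
#endif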
2099 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2100 related by the addition of a constant. Return that constant if so. */
2102 static tree
2103 compute_related_constant (tree op0, tree op1)
2105 tree op0_var, op1_var;
2106 tree op0_con = split_plus (op0, &op0_var);
2107 tree op1_con = split_plus (op1, &op1_var);
2108 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
2110 if (operand_equal_p (op0_var, op1_var, 0))
2111 return result;
2112 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
2113 return result;
2114 else
2115 return 0;
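/* A standalone illustrative model of compute_related_constant on positions
   of the form "variable part + constant part", as split_plus produces.  */
#if 0
#include <stdio.h>
#include <string.h>

int
main (void)
{
  /* op0 = n + 24 and op1 = n + 8 share the variable part "n", so they
     are related by the constant 24 - 8 = 16.  */
  const char *op0_var = "n", *op1_var = "n";
  long op0_con = 24, op1_con = 8;

  if (strcmp (op0_var, op1_var) == 0)
    printf ("%ld\n", op0_con - op1_con);  /* 16 */
  else
    printf ("unrelated\n");
  return 0;
}
#endif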
2118 /* Utility function of above to split a tree OP which may be a sum, into a
2119 constant part, which is returned, and a variable part, which is stored
2120 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2121 bitsizetype. */
2123 static tree
2124 split_plus (tree in, tree *pvar)
2126 /* Strip conversions in order to ease the tree traversal and maximize the
2127 potential for constant or plus/minus discovery. We need to be careful
2128 to always return and set *pvar to bitsizetype trees, but it's worth
2129 the effort. */
2130 in = remove_conversions (in, false);
2132 *pvar = convert (bitsizetype, in);
2134 if (TREE_CODE (in) == INTEGER_CST)
2136 *pvar = bitsize_zero_node;
2137 return convert (bitsizetype, in);
2139 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2141 tree lhs_var, rhs_var;
2142 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2143 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2145 if (lhs_var == TREE_OPERAND (in, 0)
2146 && rhs_var == TREE_OPERAND (in, 1))
2147 return bitsize_zero_node;
2149 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2150 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2152 else
2153 return bitsize_zero_node;
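/* A standalone illustrative model of split_plus, with expressions encoded
   as a coefficient of a single symbol N plus a constant.  */
#if 0
#include <stdio.h>

struct expr { long n_coeff, con; };  /* Models c*N + con.  */

int
main (void)
{
  /* IN = (N + 8) - 3: recursing on both operands and combining gives the
     variable part N and the constant part 8 - 3 = 5.  */
  struct expr lhs = { 1, 8 }, rhs = { 0, 3 };
  printf ("var = %ld*N, con = %ld\n",
          lhs.n_coeff - rhs.n_coeff, lhs.con - rhs.con);  /* 1*N, 5 */
  return 0;
}
#endif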
2156 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
2157 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
2158 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
2159 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
2160 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
2161 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
2162 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
2163 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
2164 invisible reference. */
2166 tree
2167 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
2168 bool return_unconstrained_p, bool return_by_direct_ref_p,
2169 bool return_by_invisi_ref_p)
2171 /* A list of the data type nodes of the subprogram formal parameters.
2172 This list is generated by traversing the input list of PARM_DECL
2173 nodes. */
2174 vec<tree, va_gc> *param_type_list = NULL;
2175 tree t, type;
2177 for (t = param_decl_list; t; t = DECL_CHAIN (t))
2178 vec_safe_push (param_type_list, TREE_TYPE (t));
2180 type = build_function_type_vec (return_type, param_type_list);
2182 /* TYPE may have been shared since GCC hashes types. If it has a different
2183 CICO_LIST, make a copy. Likewise for the various flags. */
2184 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
2185 return_by_direct_ref_p, return_by_invisi_ref_p))
2187 type = copy_type (type);
2188 TYPE_CI_CO_LIST (type) = cico_list;
2189 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
2190 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
2191 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
2194 return type;
2197 /* Return a copy of TYPE but safe to modify in any way. */
2199 tree
2200 copy_type (tree type)
2202 tree new_type = copy_node (type);
2204 /* Unshare the language-specific data. */
2205 if (TYPE_LANG_SPECIFIC (type))
2207 TYPE_LANG_SPECIFIC (new_type) = NULL;
2208 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2211 /* And the contents of the language-specific slot if needed. */
2212 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2213 && TYPE_RM_VALUES (type))
2215 TYPE_RM_VALUES (new_type) = NULL_TREE;
2216 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2217 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2218 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2221 /* copy_node clears this field instead of copying it, because it is
2222 aliased with TREE_CHAIN. */
2223 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
2225 TYPE_POINTER_TO (new_type) = 0;
2226 TYPE_REFERENCE_TO (new_type) = 0;
2227 TYPE_MAIN_VARIANT (new_type) = new_type;
2228 TYPE_NEXT_VARIANT (new_type) = 0;
2230 return new_type;
2233 /* Return a subtype of sizetype with range MIN to MAX and whose
2234 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2235 of the associated TYPE_DECL. */
2237 tree
2238 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2240 /* First build a type for the desired range. */
2241 tree type = build_nonshared_range_type (sizetype, min, max);
2243 /* Then set the index type. */
2244 SET_TYPE_INDEX_TYPE (type, index);
2245 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2247 return type;
2250 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2251 sizetype is used. */
2253 tree
2254 create_range_type (tree type, tree min, tree max)
2256 tree range_type;
2258 if (type == NULL_TREE)
2259 type = sizetype;
2261 /* First build a type with the base range. */
2262 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2263 TYPE_MAX_VALUE (type));
2265 /* Then set the actual range. */
2266 SET_TYPE_RM_MIN_VALUE (range_type, min);
2267 SET_TYPE_RM_MAX_VALUE (range_type, max);
2269 return range_type;
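/* A minimal illustrative sketch: the GCC type for a hypothetical Ada
   declaration "subtype Small is Integer range 1 .. 10". Only the RM
   bounds are narrowed; the base range remains that of the base type.  */
#if 0
tree base = gnat_type_for_size (32, 0);
tree small = create_range_type (base,
                                build_int_cst (base, 1),
                                build_int_cst (base, 10));
#endif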
2272 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2273 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2274 its data type. */
2276 tree
2277 create_type_stub_decl (tree type_name, tree type)
2279 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2280 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2281 emitted in DWARF. */
2282 tree type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
2283 DECL_ARTIFICIAL (type_decl) = 1;
2284 TYPE_ARTIFICIAL (type) = 1;
2285 return type_decl;
2288 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
2289 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
2290 is a declaration that was generated by the compiler. DEBUG_INFO_P is
2291 true if we need to write debug information about this type. GNAT_NODE
2292 is used for the position of the decl. */
2294 tree
2295 create_type_decl (tree type_name, tree type, bool artificial_p,
2296 bool debug_info_p, Node_Id gnat_node)
2298 enum tree_code code = TREE_CODE (type);
2299 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2300 tree type_decl;
2302 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2303 gcc_assert (!TYPE_IS_DUMMY_P (type));
2305 /* If the type hasn't been named yet, we're naming it; preserve an existing
2306 TYPE_STUB_DECL that has been attached to it for some purpose. */
2307 if (!named && TYPE_STUB_DECL (type))
2309 type_decl = TYPE_STUB_DECL (type);
2310 DECL_NAME (type_decl) = type_name;
2312 else
2313 type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
2315 DECL_ARTIFICIAL (type_decl) = artificial_p;
2316 TYPE_ARTIFICIAL (type) = artificial_p;
2318 /* Add this decl to the current binding level. */
2319 gnat_pushdecl (type_decl, gnat_node);
2321 /* If we're naming the type, equate the TYPE_STUB_DECL to the name. This
2322 causes the name to be also viewed as a "tag" by the debug back-end, with
2323 the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
2324 types in DWARF.
2326 Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
2327 from multiple contexts, and "type_decl" references a copy of it: in such a
2328 case, do not mess with TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
2329 with the mechanism above. */
2330 if (!named && type != DECL_ORIGINAL_TYPE (type_decl))
2331 TYPE_STUB_DECL (type) = type_decl;
2333 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2334 back-end doesn't support, and for others if we don't need to. */
2335 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2336 DECL_IGNORED_P (type_decl) = 1;
2338 return type_decl;
2341 /* Return a VAR_DECL or CONST_DECL node.
2343 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
2344 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
2345 the GCC tree for an optional initial expression; NULL_TREE if none.
2347 CONST_FLAG is true if this variable is constant, in which case we might
2348 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2350 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2351 definition to be made visible outside of the current compilation unit, for
2352 instance variable definitions in a package specification.
2354 EXTERN_FLAG is true when processing an external variable declaration (as
2355 opposed to a definition: no storage is to be allocated for the variable).
2357 STATIC_FLAG is only relevant when not at top level. In that case
2358 it indicates whether to always allocate storage to the variable.
2360 GNAT_NODE is used for the position of the decl. */
2362 tree
2363 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
2364 bool const_flag, bool public_flag, bool extern_flag,
2365 bool static_flag, bool const_decl_allowed_p,
2366 struct attrib *attr_list, Node_Id gnat_node)
2368 /* Whether the object has static storage duration, either explicitly or by
2369 virtue of being declared at the global level. */
2370 const bool static_storage = static_flag || global_bindings_p ();
2372 /* Whether the initializer is constant: for an external object or an object
2373 with static storage duration, we check that the initializer is a valid
2374 constant expression for initializing a static variable; otherwise, we
2375 only check that it is constant. */
2376 const bool init_const
2377 = (var_init
2378 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
2379 && (extern_flag || static_storage
2380 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
2381 != NULL_TREE
2382 : TREE_CONSTANT (var_init)));
2384 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2385 case the initializer may be used in lieu of the DECL node (as done in
2386 Identifier_to_gnu). This is useful to prevent the need of elaboration
2387 code when an identifier for which such a DECL is made is in turn used
2388 as an initializer. We used to rely on CONST_DECL vs VAR_DECL for this,
2389 but extra constraints apply to this choice (see below) and they are not
2390 relevant to the distinction we wish to make. */
2391 const bool constant_p = const_flag && init_const;
2393 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2394 and may be used for scalars in general but not for aggregates. */
2395 tree var_decl
2396 = build_decl (input_location,
2397 (constant_p && const_decl_allowed_p
2398 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
2399 var_name, type);
2401 /* If this is external, throw away any initializations (they will be done
2402 elsewhere) unless this is a constant for which we would like to remain
2403 able to get the initializer. If we are defining a global here, leave a
2404 constant initialization and save any variable elaborations for the
2405 elaboration routine. If we are just annotating types, throw away the
2406 initialization if it isn't a constant. */
2407 if ((extern_flag && !constant_p)
2408 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
2409 var_init = NULL_TREE;
2411 /* At the global level, a non-constant initializer generates elaboration
2412 statements. Check that such statements are allowed, that is to say,
2413 not violating a No_Elaboration_Code restriction. */
2414 if (var_init && !init_const && global_bindings_p ())
2415 Check_Elaboration_Code_Allowed (gnat_node);
2417 DECL_INITIAL (var_decl) = var_init;
2418 TREE_READONLY (var_decl) = const_flag;
2419 DECL_EXTERNAL (var_decl) = extern_flag;
2420 TREE_CONSTANT (var_decl) = constant_p;
2422 /* We need to allocate static storage for an object with static storage
2423 duration if it isn't external. */
2424 TREE_STATIC (var_decl) = !extern_flag && static_storage;
2426 /* The object is public if it is external or if it is declared public
2427 and has static storage duration. */
2428 TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);
2430 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2431 try to fiddle with DECL_COMMON. However, on platforms that don't
2432 support global BSS sections, uninitialized global variables would
2433 go in DATA instead, thus increasing the size of the executable. */
2434 if (!flag_no_common
2435 && TREE_CODE (var_decl) == VAR_DECL
2436 && TREE_PUBLIC (var_decl)
2437 && !have_global_bss_p ())
2438 DECL_COMMON (var_decl) = 1;
2440 /* For an external constant whose initializer is not absolute, do not emit
2441 debug info. In DWARF this would mean a global relocation in a read-only
2442 section which runs afoul of the PE-COFF run-time relocation mechanism. */
2443 if (extern_flag
2444 && constant_p
2445 && var_init
2446 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
2447 != null_pointer_node)
2448 DECL_IGNORED_P (var_decl) = 1;
2450 if (TYPE_VOLATILE (type))
2451 TREE_SIDE_EFFECTS (var_decl) = TREE_THIS_VOLATILE (var_decl) = 1;
2453 if (TREE_SIDE_EFFECTS (var_decl))
2454 TREE_ADDRESSABLE (var_decl) = 1;
2456 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2457 if (TREE_CODE (var_decl) == VAR_DECL)
2458 process_attributes (&var_decl, &attr_list, true, gnat_node);
2460 /* Add this decl to the current binding level. */
2461 gnat_pushdecl (var_decl, gnat_node);
2463 if (TREE_CODE (var_decl) == VAR_DECL)
2465 if (asm_name)
2466 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
2468 if (global_bindings_p ())
2469 rest_of_decl_compilation (var_decl, true, 0);
2472 return var_decl;
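/* A minimal illustrative sketch (hypothetical names) of declaring a public,
   statically allocated constant at library level.  */
#if 0
tree type = gnat_type_for_size (32, 0);
tree v = create_var_decl_1 (get_identifier ("example_var"), NULL_TREE,
                            type, build_int_cst (type, 42),
                            true,   /* const_flag */
                            true,   /* public_flag */
                            false,  /* extern_flag */
                            true,   /* static_flag */
                            true,   /* const_decl_allowed_p */
                            NULL, Empty);
#endif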
2475 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2477 static bool
2478 aggregate_type_contains_array_p (tree type)
2480 switch (TREE_CODE (type))
2482 case RECORD_TYPE:
2483 case UNION_TYPE:
2484 case QUAL_UNION_TYPE:
2486 tree field;
2487 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2488 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2489 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2490 return true;
2491 return false;
2494 case ARRAY_TYPE:
2495 return true;
2497 default:
2498 gcc_unreachable ();
2502 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
2503 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
2504 nonzero, it is the specified size of the field. If POS is nonzero, it is
2505 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
2506 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2507 means we are allowed to take the address of the field; if it is negative,
2508 we should not make a bitfield, which is used by make_aligning_type. */
2510 tree
2511 create_field_decl (tree field_name, tree field_type, tree record_type,
2512 tree size, tree pos, int packed, int addressable)
2514 tree field_decl = build_decl (input_location,
2515 FIELD_DECL, field_name, field_type);
2517 DECL_CONTEXT (field_decl) = record_type;
2518 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
2520 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2521 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2522 Likewise for an aggregate without specified position that contains an
2523 array, because in this case slices of variable length of this array
2524 must be handled by GCC and variable-sized objects need to be aligned
2525 to at least a byte boundary. */
2526 if (packed && (TYPE_MODE (field_type) == BLKmode
2527 || (!pos
2528 && AGGREGATE_TYPE_P (field_type)
2529 && aggregate_type_contains_array_p (field_type))))
2530 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
2532 /* If a size is specified, use it. Otherwise, if the record type is packed
2533 compute a size to use, which may differ from the object's natural size.
2534 We always set a size in this case to trigger the checks for bitfield
2535 creation below, which is typically required when no position has been
2536 specified. */
2537 if (size)
2538 size = convert (bitsizetype, size);
2539 else if (packed == 1)
2541 size = rm_size (field_type);
2542 if (TYPE_MODE (field_type) == BLKmode)
2543 size = round_up (size, BITS_PER_UNIT);
2546 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2547 specified for two reasons: first if the size differs from the natural
2548 size. Second, if the alignment is insufficient. There are a number of
2549 ways the latter can be true.
2551 We never make a bitfield if the type of the field has a nonconstant size,
2552 because no such entity requiring bitfield operations should reach here.
2554 We do *preventively* make a bitfield when there might be the need for it
2555 but we don't have all the necessary information to decide, as is the case
2556 of a field with no specified position in a packed record.
2558 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2559 in layout_decl or finish_record_type to clear the bit_field indication if
2560 it is in fact not needed. */
2561 if (addressable >= 0
2562 && size
2563 && TREE_CODE (size) == INTEGER_CST
2564 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
2565 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
2566 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
2567 || packed
2568 || (TYPE_ALIGN (record_type) != 0
2569 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
2571 DECL_BIT_FIELD (field_decl) = 1;
2572 DECL_SIZE (field_decl) = size;
2573 if (!packed && !pos)
2575 if (TYPE_ALIGN (record_type) != 0
2576 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
2577 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
2578 else
2579 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2583 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2585 /* Bump the alignment if need be, either for bitfield/packing purposes or
2586 to satisfy the type requirements if no such consideration applies. When
2587 we get the alignment from the type, indicate if this is from an explicit
2588 user request, which prevents stor-layout from lowering it later on. */
2590 unsigned int bit_align
2591 = (DECL_BIT_FIELD (field_decl) ? 1
2592 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
2594 if (bit_align > DECL_ALIGN (field_decl))
2595 DECL_ALIGN (field_decl) = bit_align;
2596 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
2598 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2599 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
2603 if (pos)
2605 /* We need to pass in the alignment the DECL is known to have.
2606 This is the lowest-order bit set in POS, but no more than
2607 the alignment of the record, if one is specified. Note
2608 that an alignment of 0 is taken as infinite. */
2609 unsigned int known_align;
2611 if (tree_fits_uhwi_p (pos))
2612 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
2613 else
2614 known_align = BITS_PER_UNIT;
2616 if (TYPE_ALIGN (record_type)
2617 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2618 known_align = TYPE_ALIGN (record_type);
2620 layout_decl (field_decl, known_align);
2621 SET_DECL_OFFSET_ALIGN (field_decl,
2622 tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
2623 : BITS_PER_UNIT);
2624 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2625 &DECL_FIELD_BIT_OFFSET (field_decl),
2626 DECL_OFFSET_ALIGN (field_decl), pos);
2629 /* In addition to what our caller says, claim the field is addressable if we
2630 know that its type is not suitable.
2632 The field may also be "technically" nonaddressable, meaning that even if
2633 we attempt to take the field's address we will actually get the address
2634 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2635 value we have at this point is not accurate enough, so we don't account
2636 for this here and let finish_record_type decide. */
2637 if (!addressable && !type_for_nonaliased_component_p (field_type))
2638 addressable = 1;
2640 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2642 return field_decl;
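/* A standalone illustrative model, on plain integers, of the bitfield
   decision above for a packed record component.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned int size = 5, type_size = 8;  /* Field and natural size, bits.  */
  unsigned int pos = 8, type_align = 8;  /* Position and type alignment.  */
  int packed = 1;

  /* A bitfield is made if the size differs from the natural size, the
     position breaks the type alignment, or the record is packed.  */
  int bit_field = size != type_size
                  || pos % type_align != 0
                  || packed;
  printf ("%d\n", bit_field);  /* 1 */
  return 0;
}
#endif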
2645 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
2646 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
2647 (either an In parameter or an address of a pass-by-ref parameter). */
2649 tree
2650 create_param_decl (tree param_name, tree param_type, bool readonly)
2652 tree param_decl = build_decl (input_location,
2653 PARM_DECL, param_name, param_type);
2655 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2656 can lead to various ABI violations. */
2657 if (targetm.calls.promote_prototypes (NULL_TREE)
2658 && INTEGRAL_TYPE_P (param_type)
2659 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
2661 /* We have to be careful about biased types here. Make a subtype
2662 of integer_type_node with the proper biasing. */
2663 if (TREE_CODE (param_type) == INTEGER_TYPE
2664 && TYPE_BIASED_REPRESENTATION_P (param_type))
2666 tree subtype
2667 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2668 TREE_TYPE (subtype) = integer_type_node;
2669 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2670 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
2671 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
2672 param_type = subtype;
2674 else
2675 param_type = integer_type_node;
2678 DECL_ARG_TYPE (param_decl) = param_type;
2679 TREE_READONLY (param_decl) = readonly;
2680 return param_decl;
2683 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2684 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2685 changed. GNAT_NODE is used for the position of error messages. */
2687 void
2688 process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2689 Node_Id gnat_node)
2691 struct attrib *attr;
2693 for (attr = *attr_list; attr; attr = attr->next)
2694 switch (attr->type)
2696 case ATTR_MACHINE_ATTRIBUTE:
2697 Sloc_to_locus (Sloc (gnat_node), &input_location);
2698 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2699 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
2700 break;
2702 case ATTR_LINK_ALIAS:
2703 if (!DECL_EXTERNAL (*node))
2705 TREE_STATIC (*node) = 1;
2706 assemble_alias (*node, attr->name);
2708 break;
2710 case ATTR_WEAK_EXTERNAL:
2711 if (SUPPORTS_WEAK)
2712 declare_weak (*node);
2713 else
2714 post_error ("?weak declarations not supported on this target",
2715 attr->error_point);
2716 break;
2718 case ATTR_LINK_SECTION:
2719 if (targetm_common.have_named_sections)
2721 set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
2722 DECL_COMMON (*node) = 0;
2724 else
2725 post_error ("?section attributes are not supported for this target",
2726 attr->error_point);
2727 break;
2729 case ATTR_LINK_CONSTRUCTOR:
2730 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2731 TREE_USED (*node) = 1;
2732 break;
2734 case ATTR_LINK_DESTRUCTOR:
2735 DECL_STATIC_DESTRUCTOR (*node) = 1;
2736 TREE_USED (*node) = 1;
2737 break;
2739 case ATTR_THREAD_LOCAL_STORAGE:
2740 set_decl_tls_model (*node, decl_default_tls_model (*node));
2741 DECL_COMMON (*node) = 0;
2742 break;
2745 *attr_list = NULL;
2748 /* Record DECL as a global renaming pointer. */
2750 void
2751 record_global_renaming_pointer (tree decl)
2753 gcc_assert (!DECL_LOOP_PARM_P (decl) && DECL_RENAMED_OBJECT (decl));
2754 vec_safe_push (global_renaming_pointers, decl);
2757 /* Invalidate the global renaming pointers that are not constant, lest their
2758 renamed object contains SAVE_EXPRs tied to an elaboration routine. Note
2759 that we should not blindly invalidate everything here because of the need
2760 to propagate constant values through renaming. */
2762 void
2763 invalidate_global_renaming_pointers (void)
2765 unsigned int i;
2766 tree iter;
2768 if (global_renaming_pointers == NULL)
2769 return;
2771 FOR_EACH_VEC_ELT (*global_renaming_pointers, i, iter)
2772 if (!TREE_CONSTANT (DECL_RENAMED_OBJECT (iter)))
2773 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
2775 vec_free (global_renaming_pointers);
2778 /* Return true if VALUE is known to be a multiple of FACTOR, which must be
2779 a power of 2. */
2781 bool
2782 value_factor_p (tree value, HOST_WIDE_INT factor)
2784 if (tree_fits_uhwi_p (value))
2785 return tree_to_uhwi (value) % factor == 0;
2787 if (TREE_CODE (value) == MULT_EXPR)
2788 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2789 || value_factor_p (TREE_OPERAND (value, 1), factor));
2791 return false;
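/* A standalone illustrative model of value_factor_p on plain integers; for
   a MULT_EXPR it is enough that either operand be a multiple of FACTOR.  */
#if 0
#include <stdio.h>

static int
value_factor_model (long value, long factor)
{
  return value % factor == 0;
}

int
main (void)
{
  printf ("%d\n", value_factor_model (24, 8));  /* 1: 24 = 3 * 8 */
  printf ("%d\n", value_factor_model (20, 8));  /* 0: 20 is not */
  return 0;
}
#endif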
2794 /* Return whether GNAT_NODE is a defining identifier for a renaming that comes
2795 from the parameter association for the instantiation of a generic. We do
2796 not want to emit source location for them: the code generated for their
2797 initialization is likely to disturb debugging. */
2799 bool
2800 renaming_from_generic_instantiation_p (Node_Id gnat_node)
2802 if (Nkind (gnat_node) != N_Defining_Identifier
2803 || !IN (Ekind (gnat_node), Object_Kind)
2804 || Comes_From_Source (gnat_node)
2805 || !Present (Renamed_Object (gnat_node)))
2806 return false;
2808 /* Get the object declaration of the renamed object, if any and if the
2809 renamed object is a mere identifier. */
2810 gnat_node = Renamed_Object (gnat_node);
2811 if (Nkind (gnat_node) != N_Identifier)
2812 return false;
2814 gnat_node = Entity (gnat_node);
2815 if (!Present (Parent (gnat_node)))
2816 return false;
2818 gnat_node = Parent (gnat_node);
2819 return
2820 (Present (gnat_node)
2821 && Nkind (gnat_node) == N_Object_Declaration
2822 && Present (Corresponding_Generic_Association (gnat_node)));
2825 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
2826 feed it with the elaboration of GNAT_SCOPE. */
2828 static struct deferred_decl_context_node *
2829 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2831 struct deferred_decl_context_node *new_node;
2833 new_node
2834 = (struct deferred_decl_context_node *) xmalloc (sizeof (*new_node));
2835 new_node->decl = decl;
2836 new_node->gnat_scope = gnat_scope;
2837 new_node->force_global = force_global;
2838 new_node->types.create (1);
2839 new_node->next = deferred_decl_context_queue;
2840 deferred_decl_context_queue = new_node;
2841 return new_node;
2844 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
2845 feed it with the DECL_CONTEXT computed as part of N as soon as it is
2846 computed. */
2848 static void
2849 add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
2851 n->types.safe_push (type);
2854 /* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
2855 NULL_TREE if it is not available. */
2857 static tree
2858 compute_deferred_decl_context (Entity_Id gnat_scope)
2860 tree context;
2862 if (present_gnu_tree (gnat_scope))
2863 context = get_gnu_tree (gnat_scope);
2864 else
2865 return NULL_TREE;
2867 if (TREE_CODE (context) == TYPE_DECL)
2869 const tree context_type = TREE_TYPE (context);
2871 /* Skip dummy types: only the final ones can appear in the context
2872 chain. */
2873 if (TYPE_DUMMY_P (context_type))
2874 return NULL_TREE;
2876 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
2877 chain. */
2878 else
2879 context = context_type;
2882 return context;
2885 /* Try to process all deferred nodes in the queue. Keep in the queue the ones
2886 that cannot be processed yet and remove the other ones. If FORCE is true,
2887 force the processing for all nodes and use the global context when nodes don't
2888 have a GNU translation. */
2890 void
2891 process_deferred_decl_context (bool force)
2893 struct deferred_decl_context_node **it = &deferred_decl_context_queue;
2894 struct deferred_decl_context_node *node;
2896 while (*it != NULL)
2898 bool processed = false;
2899 tree context = NULL_TREE;
2900 Entity_Id gnat_scope;
2902 node = *it;
2904 /* If FORCE, get the innermost elaborated scope. Otherwise, just try to
2905 get the first scope. */
2906 gnat_scope = node->gnat_scope;
2907 while (Present (gnat_scope))
2909 context = compute_deferred_decl_context (gnat_scope);
2910 if (!force || context != NULL_TREE)
2911 break;
2912 gnat_scope = get_debug_scope (gnat_scope, NULL);
2915 /* Imported declarations must not be in a local context (i.e. not inside
2916 a function). */
2917 if (context != NULL_TREE && node->force_global > 0)
2919 tree ctx = context;
2921 while (ctx != NULL_TREE)
2923 gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
2924 ctx = (DECL_P (ctx))
2925 ? DECL_CONTEXT (ctx)
2926 : TYPE_CONTEXT (ctx);
2930 /* If FORCE, we want to get rid of all nodes in the queue: in case there
2931 was no elaborated scope, use the global context. */
2932 if (force && context == NULL_TREE)
2933 context = get_global_context ();
2935 if (context != NULL_TREE)
2937 tree t;
2938 int i;
2940 DECL_CONTEXT (node->decl) = context;
2942 /* Propagate it to the TYPE_CONTEXT attributes of the requested
2943 ..._TYPE nodes. */
2944 FOR_EACH_VEC_ELT (node->types, i, t)
2946 gnat_set_type_context (t, context);
2948 processed = true;
2951 /* If this node has been successfully processed, remove it from the
2952 queue. Then move to the next node. */
2953 if (processed)
2955 *it = node->next;
2956 node->types.release ();
2957 free (node);
2959 else
2960 it = &node->next;
2965 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
2967 static unsigned int
2968 scale_by_factor_of (tree expr, unsigned int value)
2970 expr = remove_conversions (expr, true);
2972 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
2973 corresponding to the number of trailing zeros of the mask. */
2974 if (TREE_CODE (expr) == BIT_AND_EXPR
2975 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
2977 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
2978 unsigned int i = 0;
2980 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
2982 mask >>= 1;
2983 value *= 2;
2984 i++;
2988 return value;
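/* A standalone illustrative model of scale_by_factor_of: an offset of the
   form "x & ~7" has a mask with 3 trailing zero bits, so a base factor of
   1 bit scales to 8.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned long mask = ~7UL;  /* The INTEGER_CST operand of BIT_AND_EXPR.  */
  unsigned int value = 1;     /* Base factor, in bits.  */

  while ((mask & 1) == 0)
    {
      mask >>= 1;
      value *= 2;
    }
  printf ("%u\n", value);     /* 8 */
  return 0;
}
#endif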
2991 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
2992 unless we can prove these 2 fields are laid out in such a way that no gap
2993 exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
2994 is the distance in bits between the end of PREV_FIELD and the starting
2995 position of CURR_FIELD. It is ignored if null. */
2997 static bool
2998 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
3000 /* If this is the first field of the record, there cannot be any gap. */
3001 if (!prev_field)
3002 return false;
3004 /* If the previous field is a union type, then return false: The only
3005 time when such a field is not the last field of the record is when
3006 there are other components at fixed positions after it (meaning there
3007 was a rep clause for every field), in which case we don't want the
3008 alignment constraint to override them. */
3009 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
3010 return false;
3012 /* If the distance between the end of prev_field and the beginning of
3013 curr_field is constant, then there is a gap if the value of this
3014 constant is not null. */
3015 if (offset && tree_fits_uhwi_p (offset))
3016 return !integer_zerop (offset);
3018 /* If the size and position of the previous field are constant,
3019 then check the sum of this size and position. There will be a gap
3020 iff it is not a multiple of the current field alignment. */
3021 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
3022 && tree_fits_uhwi_p (bit_position (prev_field)))
3023 return ((tree_to_uhwi (bit_position (prev_field))
3024 + tree_to_uhwi (DECL_SIZE (prev_field)))
3025 % DECL_ALIGN (curr_field) != 0);
3027 /* If both the position and size of the previous field are multiples
3028 of the current field alignment, there cannot be any gap. */
3029 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
3030 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
3031 return false;
3033 /* Fallback: return that there may be a potential gap. */
3034 return true;
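/* A standalone illustrative model of the constant case above: a previous
   field ending at bit 24 followed by a 16-bit-aligned field leaves a gap,
   since 24 is not a multiple of 16.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned long prev_pos = 0, prev_size = 24;  /* Bits.  */
  unsigned long curr_align = 16;

  printf ("%d\n", (prev_pos + prev_size) % curr_align != 0);  /* 1 */
  return 0;
}
#endif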
3037 /* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
3038 of the decl. */
3040 tree
3041 create_label_decl (tree label_name, Node_Id gnat_node)
3043 tree label_decl
3044 = build_decl (input_location, LABEL_DECL, label_name, void_type_node);
3046 DECL_MODE (label_decl) = VOIDmode;
3048 /* Add this decl to the current binding level. */
3049 gnat_pushdecl (label_decl, gnat_node);
3051 return label_decl;
3054 /* Return a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
3055 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
3056 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
3057 PARM_DECL nodes chained through the DECL_CHAIN field).
3059 INLINE_STATUS, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
3060 used to set the appropriate fields in the FUNCTION_DECL. GNAT_NODE is
3061 used for the position of the decl. */
3063 tree
3064 create_subprog_decl (tree subprog_name, tree asm_name, tree subprog_type,
3065 tree param_decl_list, enum inline_status_t inline_status,
3066 bool public_flag, bool extern_flag, bool artificial_flag,
3067 struct attrib *attr_list, Node_Id gnat_node)
3069 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
3070 subprog_type);
3071 tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
3072 TREE_TYPE (subprog_type));
3073 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
3075 DECL_ARTIFICIAL (subprog_decl) = artificial_flag;
3076 DECL_EXTERNAL (subprog_decl) = extern_flag;
3078 switch (inline_status)
3080 case is_suppressed:
3081 DECL_UNINLINABLE (subprog_decl) = 1;
3082 break;
3084 case is_disabled:
3085 break;
3087 case is_required:
3088 if (Back_End_Inlining)
3089 decl_attributes (&subprog_decl,
3090 tree_cons (get_identifier ("always_inline"),
3091 NULL_TREE, NULL_TREE),
3092 ATTR_FLAG_TYPE_IN_PLACE);
3094 /* ... fall through ... */
3096 case is_enabled:
3097 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
3098 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_flag;
3099 break;
3101 default:
3102 gcc_unreachable ();
3105 TREE_PUBLIC (subprog_decl) = public_flag;
3106 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
3107 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
3108 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
3110 DECL_ARTIFICIAL (result_decl) = 1;
3111 DECL_IGNORED_P (result_decl) = 1;
3112 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
3113 DECL_RESULT (subprog_decl) = result_decl;
3115 if (asm_name)
3117 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
3119 /* The expand_main_function circuitry expects "main_identifier_node" to
3120 designate the DECL_NAME of the 'main' entry point, in turn expected
3121 to be declared as the "main" function literally by default. Ada
3122 program entry points are typically declared with a different name
3123 within the binder generated file, exported as 'main' to satisfy the
3124 system expectations. Force main_identifier_node in this case. */
3125 if (asm_name == main_identifier_node)
3126 DECL_NAME (subprog_decl) = main_identifier_node;
3129 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
3131 /* Add this decl to the current binding level. */
3132 gnat_pushdecl (subprog_decl, gnat_node);
3134 /* Output the assembler code and/or RTL for the declaration. */
3135 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
3137 return subprog_decl;
3140 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3141 body. This routine needs to be invoked before processing the declarations
3142 appearing in the subprogram. */
3144 void
3145 begin_subprog_body (tree subprog_decl)
3147 tree param_decl;
3149 announce_function (subprog_decl);
3151 /* This function is being defined. */
3152 TREE_STATIC (subprog_decl) = 1;
3154 /* The failure of this assertion will likely come from a wrong context for
3155 the subprogram body, e.g. another procedure for a procedure declared at
3156 library level. */
3157 gcc_assert (current_function_decl == decl_function_context (subprog_decl));
3159 current_function_decl = subprog_decl;
3161 /* Enter a new binding level and show that all the parameters belong to
3162 this function. */
3163 gnat_pushlevel ();
3165 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
3166 param_decl = DECL_CHAIN (param_decl))
3167 DECL_CONTEXT (param_decl) = subprog_decl;
3169 make_decl_rtl (subprog_decl);
3172 /* Finish translating the current subprogram and set its BODY. */
3174 void
3175 end_subprog_body (tree body)
3177 tree fndecl = current_function_decl;
3179 /* Attach the BLOCK for this level to the function and pop the level. */
3180 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
3181 DECL_INITIAL (fndecl) = current_binding_level->block;
3182 gnat_poplevel ();
3184 /* Mark the RESULT_DECL as being in this subprogram. */
3185 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
3187 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
3188 if (TREE_CODE (body) == BIND_EXPR)
3190 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
3191 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
3194 DECL_SAVED_TREE (fndecl) = body;
3196 current_function_decl = decl_function_context (fndecl);
3199 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
3201 void
3202 rest_of_subprog_body_compilation (tree subprog_decl)
3204 /* We cannot track the location of errors past this point. */
3205 error_gnat_node = Empty;
3207 /* If we're only annotating types, don't actually compile this function. */
3208 if (type_annotate_only)
3209 return;
3211 /* Dump functions before gimplification. */
3212 dump_function (TDI_original, subprog_decl);
3214 if (!decl_function_context (subprog_decl))
3215 cgraph_node::finalize_function (subprog_decl, false);
3216 else
3217 /* Register this function with cgraph just far enough to get it
3218 added to our parent's nested function list. */
3219 (void) cgraph_node::get_create (subprog_decl);
3222 tree
3223 gnat_builtin_function (tree decl)
3225 gnat_pushdecl (decl, Empty);
3226 return decl;
3229 /* Return an integer type with the number of bits of precision given by
3230 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3231 it is a signed type. */
3233 tree
3234 gnat_type_for_size (unsigned precision, int unsignedp)
3236 tree t;
3237 char type_name[20];
3239 if (precision <= 2 * MAX_BITS_PER_WORD
3240 && signed_and_unsigned_types[precision][unsignedp])
3241 return signed_and_unsigned_types[precision][unsignedp];
3243 if (unsignedp)
3244 t = make_unsigned_type (precision);
3245 else
3246 t = make_signed_type (precision);
3248 if (precision <= 2 * MAX_BITS_PER_WORD)
3249 signed_and_unsigned_types[precision][unsignedp] = t;
3251 if (!TYPE_NAME (t))
3253 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
3254 TYPE_NAME (t) = get_identifier (type_name);
3257 return t;
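/* Illustrative usage sketch, not part of the original source: assuming
   PRECISION <= 2 * MAX_BITS_PER_WORD, the first call below creates and
   caches the type and the second call returns the very same node:

       tree u8 = gnat_type_for_size (8, 1);    (named "UNSIGNED_8")
       gcc_assert (u8 == gnat_type_for_size (8, 1));  */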
3260 /* Likewise for floating-point types. */
3262 static tree
3263 float_type_for_precision (int precision, machine_mode mode)
3265 tree t;
3266 char type_name[20];
3268 if (float_types[(int) mode])
3269 return float_types[(int) mode];
3271 float_types[(int) mode] = t = make_node (REAL_TYPE);
3272 TYPE_PRECISION (t) = precision;
3273 layout_type (t);
3275 gcc_assert (TYPE_MODE (t) == mode);
3276 if (!TYPE_NAME (t))
3278 sprintf (type_name, "FLOAT_%d", precision);
3279 TYPE_NAME (t) = get_identifier (type_name);
3282 return t;
3285 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
3286 an unsigned type; otherwise a signed type is returned. */
3288 tree
3289 gnat_type_for_mode (machine_mode mode, int unsignedp)
3291 if (mode == BLKmode)
3292 return NULL_TREE;
3294 if (mode == VOIDmode)
3295 return void_type_node;
3297 if (COMPLEX_MODE_P (mode))
3298 return NULL_TREE;
3300 if (SCALAR_FLOAT_MODE_P (mode))
3301 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
3303 if (SCALAR_INT_MODE_P (mode))
3304 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
3306 if (VECTOR_MODE_P (mode))
3308 machine_mode inner_mode = GET_MODE_INNER (mode);
3309 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3310 if (inner_type)
3311 return build_vector_type_for_mode (inner_type, mode);
3314 return NULL_TREE;
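/* For illustration (a hedged sketch, not from the original source): with
   the usual mode definitions, gnat_type_for_mode (SImode, 0) reduces to
   gnat_type_for_size (32, 0), while gnat_type_for_mode (V4SImode, 0)
   recurses on the inner mode SImode and wraps the resulting type with
   build_vector_type_for_mode.  */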
3317 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
3319 tree
3320 gnat_unsigned_type (tree type_node)
3322 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
3324 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3326 type = copy_node (type);
3327 TREE_TYPE (type) = type_node;
3329 else if (TREE_TYPE (type_node)
3330 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3331 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3333 type = copy_node (type);
3334 TREE_TYPE (type) = TREE_TYPE (type_node);
3337 return type;
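/* Hedged example: for an Ada modular type such as "type Byte is mod 2**8",
   an 8-bit INTEGER_TYPE with TYPE_MODULAR_P set, the function above
   returns a copy of the unsigned 8-bit type whose TREE_TYPE points back
   to the modular type, so the link to the original type is preserved.  */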
3340 /* Return the signed version of a TYPE_NODE, a scalar type. */
3342 tree
3343 gnat_signed_type (tree type_node)
3345 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
3347 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3349 type = copy_node (type);
3350 TREE_TYPE (type) = type_node;
3352 else if (TREE_TYPE (type_node)
3353 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3354 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3356 type = copy_node (type);
3357 TREE_TYPE (type) = TREE_TYPE (type_node);
3360 return type;
3363 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
3364 transparently converted to each other. */
3366 int
3367 gnat_types_compatible_p (tree t1, tree t2)
3369 enum tree_code code;
3371 /* This is the default criterion. */
3372 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
3373 return 1;
3375 /* We only check structural equivalence here. */
3376 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
3377 return 0;
3379 /* Vector types are also compatible if they have the same number of subparts
3380 and the same form of (scalar) element type. */
3381 if (code == VECTOR_TYPE
3382 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
3383 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3384 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
3385 return 1;
3387 /* Array types are also compatible if they are constrained and have the same
3388 domain(s), the same component type and the same scalar storage order. */
3389 if (code == ARRAY_TYPE
3390 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
3391 || (TYPE_DOMAIN (t1)
3392 && TYPE_DOMAIN (t2)
3393 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
3394 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
3395 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
3396 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
3397 && (TREE_TYPE (t1) == TREE_TYPE (t2)
3398 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
3399 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
3400 && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
3401 return 1;
3403 return 0;
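/* Illustrative example (not from the original source): two array types
   for String (1 .. 5) built independently have different main variants,
   yet they are compatible here because their domains have equal bounds,
   their component types match and they share the same scalar storage
   order.  */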
3406 /* Return true if EXPR is a useless type conversion. */
3408 bool
3409 gnat_useless_type_conversion (tree expr)
3411 if (CONVERT_EXPR_P (expr)
3412 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3413 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3414 return gnat_types_compatible_p (TREE_TYPE (expr),
3415 TREE_TYPE (TREE_OPERAND (expr, 0)));
3417 return false;
3420 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3422 bool
3423 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3424 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3426 return TYPE_CI_CO_LIST (t) == cico_list
3427 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3428 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3429 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3432 /* EXP is an expression for the size of an object. If this size contains
3433 discriminant references, replace them with the maximum (if MAX_P) or
3434 minimum (if !MAX_P) possible value of the discriminant. */
3436 tree
3437 max_size (tree exp, bool max_p)
3439 enum tree_code code = TREE_CODE (exp);
3440 tree type = TREE_TYPE (exp);
3442 switch (TREE_CODE_CLASS (code))
3444 case tcc_declaration:
3445 case tcc_constant:
3446 return exp;
3448 case tcc_vl_exp:
3449 if (code == CALL_EXPR)
3451 tree t, *argarray;
3452 int n, i;
3454 t = maybe_inline_call_in_expr (exp);
3455 if (t)
3456 return max_size (t, max_p);
3458 n = call_expr_nargs (exp);
3459 gcc_assert (n > 0);
3460 argarray = XALLOCAVEC (tree, n);
3461 for (i = 0; i < n; i++)
3462 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3463 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3465 break;
3467 case tcc_reference:
3468 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3469 modify. Otherwise, we treat it like a variable. */
3470 if (CONTAINS_PLACEHOLDER_P (exp))
3472 tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
3473 tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
3474 return max_size (convert (get_base_type (val_type), val), true);
3477 return exp;
3479 case tcc_comparison:
3480 return max_p ? size_one_node : size_zero_node;
3482 case tcc_unary:
3483 if (code == NON_LVALUE_EXPR)
3484 return max_size (TREE_OPERAND (exp, 0), max_p);
3486 return fold_build1 (code, type,
3487 max_size (TREE_OPERAND (exp, 0),
3488 code == NEGATE_EXPR ? !max_p : max_p));
3490 case tcc_binary:
3492 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
3493 tree rhs = max_size (TREE_OPERAND (exp, 1),
3494 code == MINUS_EXPR ? !max_p : max_p);
3496 /* Special-case wanting the maximum value of a MIN_EXPR.
3497 In that case, if one side overflows, return the other. */
3498 if (max_p && code == MIN_EXPR)
3500 if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
3501 return lhs;
3503 if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
3504 return rhs;
3507 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3508 overflowing and the RHS a variable. */
3509 if ((code == MINUS_EXPR || code == PLUS_EXPR)
3510 && TREE_CODE (lhs) == INTEGER_CST
3511 && TREE_OVERFLOW (lhs)
3512 && !TREE_CONSTANT (rhs))
3513 return lhs;
3515 return size_binop (code, lhs, rhs);
3518 case tcc_expression:
3519 switch (TREE_CODE_LENGTH (code))
3521 case 1:
3522 if (code == SAVE_EXPR)
3523 return exp;
3525 return fold_build1 (code, type,
3526 max_size (TREE_OPERAND (exp, 0), max_p));
3528 case 2:
3529 if (code == COMPOUND_EXPR)
3530 return max_size (TREE_OPERAND (exp, 1), max_p);
3532 return fold_build2 (code, type,
3533 max_size (TREE_OPERAND (exp, 0), max_p),
3534 max_size (TREE_OPERAND (exp, 1), max_p));
3536 case 3:
3537 if (code == COND_EXPR)
3538 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
3539 max_size (TREE_OPERAND (exp, 1), max_p),
3540 max_size (TREE_OPERAND (exp, 2), max_p));
3542 default:
3543 break;
3546 /* Other tree classes cannot happen. */
3547 default:
3548 break;
3551 gcc_unreachable ();
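/* Worked example (illustrative only): for a size expression #D# * 8 whose
   discriminant D ranges over 1 .. 10, the tcc_reference case substitutes
   TYPE_MAX_VALUE of the discriminant's type if MAX_P and TYPE_MIN_VALUE
   otherwise, so the whole expression folds to 80, resp. 8 bits.  */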
3554 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3555 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3556 Return a constructor for the template. */
3558 tree
3559 build_template (tree template_type, tree array_type, tree expr)
3561 vec<constructor_elt, va_gc> *template_elts = NULL;
3562 tree bound_list = NULL_TREE;
3563 tree field;
3565 while (TREE_CODE (array_type) == RECORD_TYPE
3566 && (TYPE_PADDING_P (array_type)
3567 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3568 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3570 if (TREE_CODE (array_type) == ARRAY_TYPE
3571 || (TREE_CODE (array_type) == INTEGER_TYPE
3572 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3573 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3575 /* First make the list for a CONSTRUCTOR for the template. Go down the
3576 field list of the template instead of the type chain because this
3577 array might be an Ada array of arrays and we can't tell where the
3578 nested arrays stop being the underlying object. */
3580 for (field = TYPE_FIELDS (template_type); field;
3581 (bound_list
3582 ? (bound_list = TREE_CHAIN (bound_list))
3583 : (array_type = TREE_TYPE (array_type))),
3584 field = DECL_CHAIN (DECL_CHAIN (field)))
3586 tree bounds, min, max;
3588 /* If we have a bound list, get the bounds from there. Likewise
3589 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3590 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3591 This will give us a maximum range. */
3592 if (bound_list)
3593 bounds = TREE_VALUE (bound_list);
3594 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3595 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3596 else if (expr && TREE_CODE (expr) == PARM_DECL
3597 && DECL_BY_COMPONENT_PTR_P (expr))
3598 bounds = TREE_TYPE (field);
3599 else
3600 gcc_unreachable ();
3602 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
3603 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
3605 /* If either MIN or MAX involves a PLACEHOLDER_EXPR, we must
3606 substitute it from EXPR. */
3607 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3608 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3610 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
3611 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
3614 return gnat_build_constructor (template_type, template_elts);
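/* Hedged example: for a constrained String (2 .. 6), the loop above emits
   the pair of bounds converted to the types of the template fields, i.e.
   a constructor equivalent to (2, 6) for the usual one-dimensional
   template record with a low-bound and a high-bound field.  */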
3617 /* Return true if TYPE is suitable for the element type of a vector. */
3619 static bool
3620 type_for_vector_element_p (tree type)
3622 machine_mode mode;
3624 if (!INTEGRAL_TYPE_P (type)
3625 && !SCALAR_FLOAT_TYPE_P (type)
3626 && !FIXED_POINT_TYPE_P (type))
3627 return false;
3629 mode = TYPE_MODE (type);
3630 if (GET_MODE_CLASS (mode) != MODE_INT
3631 && !SCALAR_FLOAT_MODE_P (mode)
3632 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3633 return false;
3635 return true;
3638 /* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3639 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3640 attribute declaration and want to issue error messages on failure. */
3642 static tree
3643 build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3645 unsigned HOST_WIDE_INT size_int, inner_size_int;
3646 int nunits;
3648 /* Silently punt on variable sizes. We can't make vector types for them,
3649 we need to ignore them on front-end generated subtypes of unconstrained
3650 base types, and this attribute is for binding implementors, not end
3651 users, so we should never get here from legitimate explicit uses. */
3652 if (!tree_fits_uhwi_p (size))
3653 return NULL_TREE;
3654 size_int = tree_to_uhwi (size);
3656 if (!type_for_vector_element_p (inner_type))
3658 if (attribute)
3659 error ("invalid element type for attribute %qs",
3660 IDENTIFIER_POINTER (attribute));
3661 return NULL_TREE;
3663 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3665 if (size_int % inner_size_int)
3667 if (attribute)
3668 error ("vector size not an integral multiple of component size");
3669 return NULL_TREE;
3672 if (size_int == 0)
3674 if (attribute)
3675 error ("zero vector size");
3676 return NULL_TREE;
3679 nunits = size_int / inner_size_int;
3680 if (nunits & (nunits - 1))
3682 if (attribute)
3683 error ("number of components of vector not a power of two");
3684 return NULL_TREE;
3687 return build_vector_type (inner_type, nunits);
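/* For illustration (assumed figures): with a 4-byte FLOAT_32 inner type
   and a SIZE of 16 bytes, nunits is 4, a power of two, so a 4-element
   vector type is returned; a SIZE of 12 bytes gives nunits == 3, fails
   the power-of-two check above and yields NULL_TREE, with an error
   message if ATTRIBUTE is set.  */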
3690 /* Return a vector type whose representative array type is ARRAY_TYPE, or
3691 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3692 processing the attribute and want to issue error messages on failure. */
3694 static tree
3695 build_vector_type_for_array (tree array_type, tree attribute)
3697 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3698 TYPE_SIZE_UNIT (array_type),
3699 attribute);
3700 if (!vector_type)
3701 return NULL_TREE;
3703 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3704 return vector_type;
3707 /* Build a type to be used to represent an aliased object whose nominal type
3708 is an unconstrained array. This consists of a RECORD_TYPE containing a
3709 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3710 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3711 an arbitrary unconstrained object. Use NAME as the name of the record.
3712 DEBUG_INFO_P is true if we need to write debug information for the type. */
3714 tree
3715 build_unc_object_type (tree template_type, tree object_type, tree name,
3716 bool debug_info_p)
3718 tree decl;
3719 tree type = make_node (RECORD_TYPE);
3720 tree template_field
3721 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3722 NULL_TREE, NULL_TREE, 0, 1);
3723 tree array_field
3724 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3725 NULL_TREE, NULL_TREE, 0, 1);
3727 TYPE_NAME (type) = name;
3728 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3729 DECL_CHAIN (template_field) = array_field;
3730 finish_record_type (type, template_field, 0, true);
3732 /* Declare it now since it will never be declared otherwise. This is
3733 necessary to ensure that its subtrees are properly marked. */
3734 decl = create_type_decl (name, type, true, debug_info_p, Empty);
3736 /* template_type will not be used elsewhere than here, so to keep the debug
3737 info clean and in order to avoid scoping issues, make decl its
3738 context. */
3739 gnat_set_type_context (template_type, decl);
3741 return type;
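/* Sketch of the resulting layout (illustrative): for an unconstrained
   String, the record built above is roughly

       record
         BOUNDS : <template with the low and high bounds>;
         ARRAY  : <the array data>;
       end record

   so the bounds of an aliased object sit at a fixed offset in front of
   the array data.  */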
3744 /* Same, taking a thin or fat pointer type instead of a template type. */
3746 tree
3747 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3748 tree name, bool debug_info_p)
3750 tree template_type;
3752 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3754 template_type
3755 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3756 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3757 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3759 return
3760 build_unc_object_type (template_type, object_type, name, debug_info_p);
3763 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3764 In the normal case this is just two adjustments, but we have more to
3765 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3767 void
3768 update_pointer_to (tree old_type, tree new_type)
3770 tree ptr = TYPE_POINTER_TO (old_type);
3771 tree ref = TYPE_REFERENCE_TO (old_type);
3772 tree t;
3774 /* If this is the main variant, process all the other variants first. */
3775 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3776 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
3777 update_pointer_to (t, new_type);
3779 /* If no pointers and no references, we are done. */
3780 if (!ptr && !ref)
3781 return;
3783 /* Merge the old type qualifiers in the new type.
3785 Each old variant has qualifiers for specific reasons, and the new
3786 designated type as well. Each set of qualifiers represents useful
3787 information grabbed at some point, and merging the two simply unifies
3788 these inputs into the final type description.
3790 Consider for instance a volatile type frozen after an access to constant
3791 type designating it; after the designated type's freeze, we get here with
3792 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3793 when the access type was processed. We will make a volatile and readonly
3794 designated type, because that's what it really is.
3796 We might also get here for a non-dummy OLD_TYPE variant with different
3797 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3798 to private record type elaboration (see the comments around the call to
3799 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3800 the qualifiers in those cases too, to avoid accidentally discarding the
3801 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3802 new_type
3803 = build_qualified_type (new_type,
3804 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3806 /* If old type and new type are identical, there is nothing to do. */
3807 if (old_type == new_type)
3808 return;
3810 /* Otherwise, first handle the simple case. */
3811 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3813 tree new_ptr, new_ref;
3815 /* If pointer or reference already points to new type, nothing to do.
3816 This can happen as update_pointer_to can be invoked multiple times
3817 on the same pair of types because of the type variants. */
3818 if ((ptr && TREE_TYPE (ptr) == new_type)
3819 || (ref && TREE_TYPE (ref) == new_type))
3820 return;
3822 /* Chain PTR and its variants at the end. */
3823 new_ptr = TYPE_POINTER_TO (new_type);
3824 if (new_ptr)
3826 while (TYPE_NEXT_PTR_TO (new_ptr))
3827 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
3828 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
3830 else
3831 TYPE_POINTER_TO (new_type) = ptr;
3833 /* Now adjust them. */
3834 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3835 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
3837 TREE_TYPE (t) = new_type;
3838 if (TYPE_NULL_BOUNDS (t))
3839 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
3842 /* Chain REF and its variants at the end. */
3843 new_ref = TYPE_REFERENCE_TO (new_type);
3844 if (new_ref)
3846 while (TYPE_NEXT_REF_TO (new_ref))
3847 new_ref = TYPE_NEXT_REF_TO (new_ref);
3848 TYPE_NEXT_REF_TO (new_ref) = ref;
3850 else
3851 TYPE_REFERENCE_TO (new_type) = ref;
3853 /* Now adjust them. */
3854 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3855 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
3856 TREE_TYPE (t) = new_type;
3858 TYPE_POINTER_TO (old_type) = NULL_TREE;
3859 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
3862 /* Now deal with the unconstrained array case. In this case the pointer
3863 is actually a record where both fields are pointers to dummy nodes.
3864 Turn them into pointers to the correct types using update_pointer_to.
3865 Likewise for the pointer to the object record (thin pointer). */
3866 else
3868 tree new_ptr = TYPE_POINTER_TO (new_type);
3870 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
3872 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
3873 since update_pointer_to can be invoked multiple times on the same
3874 pair of types because of the type variants. */
3875 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
3876 return;
3878 update_pointer_to
3879 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
3880 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
3882 update_pointer_to
3883 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
3884 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
3886 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
3887 TYPE_OBJECT_RECORD_TYPE (new_type));
3889 TYPE_POINTER_TO (old_type) = NULL_TREE;
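/* Illustrative scenario (not from the original source): when an access
   type designating an unconstrained array is elaborated before its
   designated type, PTR is a fat-pointer record whose two fields point
   to dummy nodes; the recursive calls above redirect the pointer to the
   array and the pointer to the template, and the final call fixes up
   thin pointers via TYPE_OBJECT_RECORD_TYPE.  */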
3893 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3894 unconstrained one. This involves making or finding a template. */
3896 static tree
3897 convert_to_fat_pointer (tree type, tree expr)
3899 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
3900 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3901 tree etype = TREE_TYPE (expr);
3902 tree template_addr;
3903 vec<constructor_elt, va_gc> *v;
3904 vec_alloc (v, 2);
3906 /* If EXPR is null, make a fat pointer that contains a null pointer to the
3907 array (compare_fat_pointers ensures that this is the full discriminant)
3908 and a valid pointer to the bounds. This latter property is necessary
3909 since the compiler can hoist the load of the bounds done through it. */
3910 if (integer_zerop (expr))
3912 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
3913 tree null_bounds, t;
3915 if (TYPE_NULL_BOUNDS (ptr_template_type))
3916 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
3917 else
3919 /* The template type can still be dummy at this point so we build an
3920 empty constructor. The middle-end will fill it in with zeros. */
3921 t = build_constructor (template_type, NULL);
3922 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
3923 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
3924 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
3927 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
3928 fold_convert (p_array_type, null_pointer_node));
3929 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
3930 t = build_constructor (type, v);
3931 /* Do not set TREE_CONSTANT so as to force T to static memory. */
3932 TREE_CONSTANT (t) = 0;
3933 TREE_STATIC (t) = 1;
3935 return t;
3938 /* If EXPR is a thin pointer, make template and data from the record. */
3939 if (TYPE_IS_THIN_POINTER_P (etype))
3941 tree field = TYPE_FIELDS (TREE_TYPE (etype));
3943 expr = gnat_protect_expr (expr);
3945 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
3946 the thin pointer value has been shifted so we shift it back to get
3947 the template address. */
3948 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
3950 template_addr
3951 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
3952 fold_build1 (NEGATE_EXPR, sizetype,
3953 byte_position
3954 (DECL_CHAIN (field))));
3955 template_addr
3956 = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
3957 template_addr);
3960 /* Otherwise we explicitly take the address of the fields. */
3961 else
3963 expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
3964 template_addr
3965 = build_unary_op (ADDR_EXPR, NULL_TREE,
3966 build_component_ref (expr, NULL_TREE, field,
3967 false));
3968 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3969 build_component_ref (expr, NULL_TREE,
3970 DECL_CHAIN (field),
3971 false));
3975 /* Otherwise, build the constructor for the template. */
3976 else
3977 template_addr
3978 = build_unary_op (ADDR_EXPR, NULL_TREE,
3979 build_template (template_type, TREE_TYPE (etype),
3980 expr));
3982 /* The final result is a constructor for the fat pointer.
3984 If EXPR is an argument of a foreign convention subprogram, the type it
3985 points to is directly the component type. In this case, the expression
3986 type may not match the corresponding FIELD_DECL type at this point, so we
3987 call "convert" here to fix that up if necessary. This type consistency is
3988 required, for instance because it ensures that possible later folding of
3989 COMPONENT_REFs against this constructor always yields something of the
3990 same type as the initial reference.
3992 Note that the call to "build_template" above is still fine because it
3993 will only refer to the provided TEMPLATE_TYPE in this case. */
3994 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
3995 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
3996 return gnat_build_constructor (type, v);
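/* Hedged summary: for a pointer to a constrained String (1 .. 5), the
   result is a two-field constructor roughly of the form

       (P_ARRAY => convert (p_array_type, expr), P_BOUNDS => template_addr)

   where the template is built on the fly by build_template above unless
   EXPR was a thin pointer already carrying its bounds.  */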
3999 /* Create an expression whose value is that of EXPR,
4000 converted to type TYPE. The TREE_TYPE of the value
4001 is always TYPE. This function implements all reasonable
4002 conversions; callers should filter out those that are
4003 not permitted by the language being compiled. */
4005 tree
4006 convert (tree type, tree expr)
4008 tree etype = TREE_TYPE (expr);
4009 enum tree_code ecode = TREE_CODE (etype);
4010 enum tree_code code = TREE_CODE (type);
4012 /* If the expression is already of the right type, we are done. */
4013 if (etype == type)
4014 return expr;
4016 /* If both input and output have padding and are of variable size, do this
4017 as an unchecked conversion. Likewise if one is a mere variant of the
4018 other, so we avoid a pointless unpad/repad sequence. */
4019 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
4020 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4021 && (!TREE_CONSTANT (TYPE_SIZE (type))
4022 || !TREE_CONSTANT (TYPE_SIZE (etype))
4023 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4024 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4025 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4026 ;
4028 /* If the output type has padding, convert to the inner type and make a
4029 constructor to build the record, unless a variable size is involved. */
4030 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
4032 vec<constructor_elt, va_gc> *v;
4034 /* If we previously converted from another type and our type is
4035 of variable size, remove the conversion to avoid the need for
4036 variable-sized temporaries. Likewise for a conversion between
4037 original and packable version. */
4038 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4039 && (!TREE_CONSTANT (TYPE_SIZE (type))
4040 || (ecode == RECORD_TYPE
4041 && TYPE_NAME (etype)
4042 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4043 expr = TREE_OPERAND (expr, 0);
4045 /* If we are just removing the padding from expr, convert the original
4046 object if we have variable size in order to avoid the need for some
4047 variable-sized temporaries. Likewise if the padding is a variant
4048 of the other, so we avoid a pointless unpad/repad sequence. */
4049 if (TREE_CODE (expr) == COMPONENT_REF
4050 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4051 && (!TREE_CONSTANT (TYPE_SIZE (type))
4052 || TYPE_MAIN_VARIANT (type)
4053 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
4054 || (ecode == RECORD_TYPE
4055 && TYPE_NAME (etype)
4056 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4057 return convert (type, TREE_OPERAND (expr, 0));
4059 /* If the inner type is of self-referential size and the expression type
4060 is a record, do this as an unchecked conversion. But first pad the
4061 expression if possible to have the same size on both sides. */
4062 if (ecode == RECORD_TYPE
4063 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
4065 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
4066 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4067 false, false, false, true),
4068 expr);
4069 return unchecked_convert (type, expr, false);
4072 /* If we are converting between array types with variable size, do the
4073 final conversion as an unchecked conversion, again to avoid the need
4074 for some variable-sized temporaries. If valid, this conversion is
4075 very likely purely technical and without real effects. */
4076 if (ecode == ARRAY_TYPE
4077 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4078 && !TREE_CONSTANT (TYPE_SIZE (etype))
4079 && !TREE_CONSTANT (TYPE_SIZE (type)))
4080 return unchecked_convert (type,
4081 convert (TREE_TYPE (TYPE_FIELDS (type)),
4082 expr),
4083 false);
4085 vec_alloc (v, 1);
4086 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4087 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4088 return gnat_build_constructor (type, v);
4091 /* If the input type has padding, remove it and convert to the output type.
4092 The conditions ordering is arranged to ensure that the output type is not
4093 a padding type here, as it is not clear whether the conversion would
4094 always be correct if this was to happen. */
4095 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4097 tree unpadded;
4099 /* If we have just converted to this padded type, just get the
4100 inner expression. */
4101 if (TREE_CODE (expr) == CONSTRUCTOR
4102 && !vec_safe_is_empty (CONSTRUCTOR_ELTS (expr))
4103 && (*CONSTRUCTOR_ELTS (expr))[0].index == TYPE_FIELDS (etype))
4104 unpadded = (*CONSTRUCTOR_ELTS (expr))[0].value;
4106 /* Otherwise, build an explicit component reference. */
4107 else
4108 unpadded
4109 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4111 return convert (type, unpadded);
4114 /* If the input is a biased type, adjust first. */
4115 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4116 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4117 fold_convert (TREE_TYPE (etype), expr),
4118 fold_convert (TREE_TYPE (etype),
4119 TYPE_MIN_VALUE (etype))));
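/* Worked example for the biased case (illustrative): for a subtype with
   range 100 .. 227 stored on 7 bits with biased representation, a
   stored value of 27 denotes 127; the adjustment above computes
   27 + 100 in the base type before performing the outer conversion.  */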
4121 /* If the input is a justified modular type, we need to extract the actual
4122 object before converting it to any other type with the exceptions of an
4123 unconstrained array or of a mere type variant. It is useful to avoid the
4124 extraction and conversion in the type variant case because it could end
4125 up replacing a VAR_DECL expr by a constructor and we might be about to
4126 take the address of the result. */
4127 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4128 && code != UNCONSTRAINED_ARRAY_TYPE
4129 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4130 return convert (type, build_component_ref (expr, NULL_TREE,
4131 TYPE_FIELDS (etype), false));
4133 /* If converting to a type that contains a template, convert to the data
4134 type and then build the template. */
4135 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4137 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4138 vec<constructor_elt, va_gc> *v;
4139 vec_alloc (v, 2);
4141 /* If the source already has a template, get a reference to the
4142 associated array only, as we are going to rebuild a template
4143 for the target type anyway. */
4144 expr = maybe_unconstrained_array (expr);
4146 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4147 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4148 obj_type, NULL_TREE));
4149 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4150 convert (obj_type, expr));
4151 return gnat_build_constructor (type, v);
4154 /* There are some cases of expressions that we process specially. */
4155 switch (TREE_CODE (expr))
4157 case ERROR_MARK:
4158 return expr;
4160 case NULL_EXPR:
4161 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4162 conversion in gnat_expand_expr. NULL_EXPR does not represent
4163 an actual value, so no conversion is needed. */
4164 expr = copy_node (expr);
4165 TREE_TYPE (expr) = type;
4166 return expr;
4168 case STRING_CST:
4169 /* If we are converting a STRING_CST to another constrained array type,
4170 just make a new one in the proper type. */
4171 if (code == ecode && AGGREGATE_TYPE_P (etype)
4172 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4173 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4175 expr = copy_node (expr);
4176 TREE_TYPE (expr) = type;
4177 return expr;
4179 break;
4181 case VECTOR_CST:
4182 /* If we are converting a VECTOR_CST to a mere type variant, just make
4183 a new one in the proper type. */
4184 if (code == ecode && gnat_types_compatible_p (type, etype))
4186 expr = copy_node (expr);
4187 TREE_TYPE (expr) = type;
4188 return expr;
4191 case CONSTRUCTOR:
4192 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4193 another padding type around the same type, just make a new one in
4194 the proper type. */
4195 if (code == ecode
4196 && (gnat_types_compatible_p (type, etype)
4197 || (code == RECORD_TYPE
4198 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4199 && TREE_TYPE (TYPE_FIELDS (type))
4200 == TREE_TYPE (TYPE_FIELDS (etype)))))
4202 expr = copy_node (expr);
4203 TREE_TYPE (expr) = type;
4204 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4205 return expr;
4208 /* Likewise for a conversion between original and packable version, or
4209 conversion between types of the same size and with the same list of
4210 fields, but we have to work harder to preserve type consistency. */
4211 if (code == ecode
4212 && code == RECORD_TYPE
4213 && (TYPE_NAME (type) == TYPE_NAME (etype)
4214 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4217 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4218 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4219 vec<constructor_elt, va_gc> *v;
4220 vec_alloc (v, len);
4221 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4222 unsigned HOST_WIDE_INT idx;
4223 tree index, value;
4225 /* Whether we need to clear TREE_CONSTANT et al. on the output
4226 constructor when we convert in place. */
4227 bool clear_constant = false;
4229 FOR_EACH_CONSTRUCTOR_ELT (e, idx, index, value)
4231 /* Skip the missing fields in the CONSTRUCTOR. */
4232 while (efield && field && !SAME_FIELD_P (efield, index))
4234 efield = DECL_CHAIN (efield);
4235 field = DECL_CHAIN (field);
4237 /* The field must be the same. */
4238 if (!(efield && field && SAME_FIELD_P (efield, field)))
4239 break;
4240 constructor_elt elt
4241 = {field, convert (TREE_TYPE (field), value)};
4242 v->quick_push (elt);
4244 /* If packing has made this field a bitfield and the input
4245 value couldn't be emitted statically any more, we need to
4246 clear TREE_CONSTANT on our output. */
4247 if (!clear_constant
4248 && TREE_CONSTANT (expr)
4249 && !CONSTRUCTOR_BITFIELD_P (efield)
4250 && CONSTRUCTOR_BITFIELD_P (field)
4251 && !initializer_constant_valid_for_bitfield_p (value))
4252 clear_constant = true;
4254 efield = DECL_CHAIN (efield);
4255 field = DECL_CHAIN (field);
4258 /* If we have been able to match and convert all the input fields
4259 to their output type, convert in place now. We'll fall back to a
4260 view conversion downstream otherwise. */
4261 if (idx == len)
4263 expr = copy_node (expr);
4264 TREE_TYPE (expr) = type;
4265 CONSTRUCTOR_ELTS (expr) = v;
4266 if (clear_constant)
4267 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4268 return expr;
4272 /* Likewise for a conversion between array type and vector type with a
4273 compatible representative array. */
4274 else if (code == VECTOR_TYPE
4275 && ecode == ARRAY_TYPE
4276 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4277 etype))
4279 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4280 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4281 vec<constructor_elt, va_gc> *v;
4282 unsigned HOST_WIDE_INT ix;
4283 tree value;
4285 /* Build a VECTOR_CST from a *constant* array constructor. */
4286 if (TREE_CONSTANT (expr))
4288 bool constant_p = true;
4290 /* Iterate through elements and check if all constructor
4291 elements are *_CSTs. */
4292 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4293 if (!CONSTANT_CLASS_P (value))
4295 constant_p = false;
4296 break;
4299 if (constant_p)
4300 return build_vector_from_ctor (type,
4301 CONSTRUCTOR_ELTS (expr));
4304 /* Otherwise, build a regular vector constructor. */
4305 vec_alloc (v, len);
4306 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4308 constructor_elt elt = {NULL_TREE, value};
4309 v->quick_push (elt);
4311 expr = copy_node (expr);
4312 TREE_TYPE (expr) = type;
4313 CONSTRUCTOR_ELTS (expr) = v;
4314 return expr;
4316 break;
4318 case UNCONSTRAINED_ARRAY_REF:
4319 /* First retrieve the underlying array. */
4320 expr = maybe_unconstrained_array (expr);
4321 etype = TREE_TYPE (expr);
4322 ecode = TREE_CODE (etype);
4323 break;
4325 case VIEW_CONVERT_EXPR:
4327 /* GCC 4.x is very sensitive to type consistency overall, and view
4328 conversions thus are very frequent. Even though just "convert"ing
4329 the inner operand to the output type is fine in most cases, it
4330 might expose unexpected input/output type mismatches in special
4331 circumstances so we avoid such recursive calls when we can. */
4332 tree op0 = TREE_OPERAND (expr, 0);
4334 /* If we are converting back to the original type, we can just
4335 lift the input conversion. This commonly occurs when switching
4336 back and forth amongst type variants. */
4337 if (type == TREE_TYPE (op0))
4338 return op0;
4340 /* Otherwise, if we're converting between two aggregate or vector
4341 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4342 target type in place or to just convert the inner expression. */
4343 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4344 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4346 /* If we are converting between mere variants, we can just
4347 substitute the VIEW_CONVERT_EXPR in place. */
4348 if (gnat_types_compatible_p (type, etype))
4349 return build1 (VIEW_CONVERT_EXPR, type, op0);
4351 /* Otherwise, we may just bypass the input view conversion unless
4352 one of the types is a fat pointer, which is handled by
4353 specialized code below which relies on exact type matching. */
4354 else if (!TYPE_IS_FAT_POINTER_P (type)
4355 && !TYPE_IS_FAT_POINTER_P (etype))
4356 return convert (type, op0);
4359 break;
4362 default:
4363 break;
4366 /* Check for converting to a pointer to an unconstrained array. */
4367 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4368 return convert_to_fat_pointer (type, expr);
4370 /* If we are converting between two aggregate or vector types that are mere
4371 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4372 to a vector type from its representative array type. */
4373 else if ((code == ecode
4374 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4375 && gnat_types_compatible_p (type, etype))
4376 || (code == VECTOR_TYPE
4377 && ecode == ARRAY_TYPE
4378 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4379 etype)))
4380 return build1 (VIEW_CONVERT_EXPR, type, expr);
4382 /* If we are converting between tagged types, try to upcast properly. */
4383 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4384 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4386 tree child_etype = etype;
4387 do {
4388 tree field = TYPE_FIELDS (child_etype);
4389 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4390 return build_component_ref (expr, NULL_TREE, field, false);
4391 child_etype = TREE_TYPE (field);
4392 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4395 /* If we are converting from a smaller form of record type back to it, just
4396 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4397 size on both sides. */
4398 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4399 && smaller_form_type_p (etype, type))
4401 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4402 false, false, false, true),
4403 expr);
4404 return build1 (VIEW_CONVERT_EXPR, type, expr);
4407 /* In all other cases of related types, make a NOP_EXPR. */
4408 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4409 return fold_convert (type, expr);
4411 switch (code)
4413 case VOID_TYPE:
4414 return fold_build1 (CONVERT_EXPR, type, expr);
4416 case INTEGER_TYPE:
4417 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4418 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4419 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4420 return unchecked_convert (type, expr, false);
4421 else if (TYPE_BIASED_REPRESENTATION_P (type))
4422 return fold_convert (type,
4423 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4424 convert (TREE_TYPE (type), expr),
4425 convert (TREE_TYPE (type),
4426 TYPE_MIN_VALUE (type))));
4428 /* ... fall through ... */
4430 case ENUMERAL_TYPE:
4431 case BOOLEAN_TYPE:
4432 /* If we are converting an additive expression to an integer type
4433 with lower precision, be wary of the optimization that can be
4434 applied by convert_to_integer. There are 2 problematic cases:
4435 - if the first operand was originally of a biased type,
4436 because we could be recursively called to convert it
4437 to an intermediate type and thus rematerialize the
4438 additive operator endlessly,
4439 - if the expression contains a placeholder, because an
4440 intermediate conversion that changes the sign could
4441 be inserted and thus introduce an artificial overflow
4442 at compile time when the placeholder is substituted. */
4443 if (code == INTEGER_TYPE
4444 && ecode == INTEGER_TYPE
4445 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4446 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4448 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4450 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4451 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4452 || CONTAINS_PLACEHOLDER_P (expr))
4453 return build1 (NOP_EXPR, type, expr);
4456 return fold (convert_to_integer (type, expr));
4458 case POINTER_TYPE:
4459 case REFERENCE_TYPE:
4460 /* If converting between two thin pointers, adjust if needed to account
4461 for differing offsets from the base pointer, depending on whether
4462 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4463 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4465 tree etype_pos
4466 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)) != NULL_TREE
4467 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4468 : size_zero_node;
4469 tree type_pos
4470 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != NULL_TREE
4471 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4472 : size_zero_node;
4473 tree byte_diff = size_diffop (type_pos, etype_pos);
4475 expr = build1 (NOP_EXPR, type, expr);
4476 if (integer_zerop (byte_diff))
4477 return expr;
4479 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4480 fold_convert (sizetype, byte_diff));
4483 /* If converting fat pointer to normal or thin pointer, get the pointer
4484 to the array and then convert it. */
4485 if (TYPE_IS_FAT_POINTER_P (etype))
4486 expr
4487 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4489 return fold (convert_to_pointer (type, expr));
4491 case REAL_TYPE:
4492 return fold (convert_to_real (type, expr));
4494 case RECORD_TYPE:
4495 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4497 vec<constructor_elt, va_gc> *v;
4498 vec_alloc (v, 1);
4500 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4501 convert (TREE_TYPE (TYPE_FIELDS (type)),
4502 expr));
4503 return gnat_build_constructor (type, v);
4506 /* ... fall through ... */
4508 case ARRAY_TYPE:
4509 /* In these cases, assume the front-end has validated the conversion.
4510 If the conversion is valid, it will be a bit-wise conversion, so
4511 it can be viewed as an unchecked conversion. */
4512 return unchecked_convert (type, expr, false);
4514 case UNION_TYPE:
4515 /* This is either a conversion between a tagged type and some
4516 subtype, which we have to mark as a UNION_TYPE because of
4517 overlapping fields, or a conversion of an Unchecked_Union. */
4518 return unchecked_convert (type, expr, false);
4520 case UNCONSTRAINED_ARRAY_TYPE:
4521 /* If the input is a VECTOR_TYPE, convert to the representative
4522 array type first. */
4523 if (ecode == VECTOR_TYPE)
4525 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4526 etype = TREE_TYPE (expr);
4527 ecode = TREE_CODE (etype);
4530 /* If EXPR is a constrained array, take its address, convert it to a
4531 fat pointer, and then dereference it. Likewise if EXPR is a
4532 record containing both a template and a constrained array.
4533 Note that a record representing a justified modular type
4534 always represents a packed constrained array. */
4535 if (ecode == ARRAY_TYPE
4536 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4537 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4538 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4539 return
4540 build_unary_op
4541 (INDIRECT_REF, NULL_TREE,
4542 convert_to_fat_pointer (TREE_TYPE (type),
4543 build_unary_op (ADDR_EXPR,
4544 NULL_TREE, expr)));
4546 /* Do something very similar for converting one unconstrained
4547 array to another. */
4548 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4549 return
4550 build_unary_op (INDIRECT_REF, NULL_TREE,
4551 convert (TREE_TYPE (type),
4552 build_unary_op (ADDR_EXPR,
4553 NULL_TREE, expr)));
4554 else
4555 gcc_unreachable ();
4557 case COMPLEX_TYPE:
4558 return fold (convert_to_complex (type, expr));
4560 default:
4561 gcc_unreachable ();
4565 /* Create an expression whose value is that of EXPR converted to the common
4566 index type, which is sizetype. EXPR is supposed to be in the base type
4567 of the GNAT index type. Calling it is equivalent to doing
4569 convert (sizetype, expr)
4571 but we try to distribute the type conversion with the knowledge that EXPR
4572 cannot overflow in its type. This is a best-effort approach and we fall
4573 back to the above expression as soon as difficulties are encountered.
4575 This is necessary to overcome issues that arise when the GNAT base index
4576 type and the GCC common index type (sizetype) don't have the same size,
4577 which is quite frequent on 64-bit architectures. In this case, and if
4578 the GNAT base index type is signed but the iteration type of the loop has
4579 been forced to unsigned, the loop scalar evolution engine cannot compute
4580 a simple evolution for the general induction variables associated with the
4581 array indices, because it will preserve the wrap-around semantics in the
4582 unsigned type of their "inner" part. As a result, many loop optimizations
4583 are blocked.
4585 The solution is to use a special (basic) induction variable that is at
4586 least as large as sizetype, and to express the aforementioned general
4587 induction variables in terms of this induction variable, eliminating
4588 the problematic intermediate truncation to the GNAT base index type.
4589 This is possible as long as the original expression doesn't overflow
4590 and if the middle-end hasn't introduced artificial overflows in the
4591 course of the various simplifications it can make to the expression. */
4593 tree
4594 convert_to_index_type (tree expr)
4596 enum tree_code code = TREE_CODE (expr);
4597 tree type = TREE_TYPE (expr);
4599 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4600 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4601 if (TYPE_UNSIGNED (type) || !optimize)
4602 return convert (sizetype, expr);
4604 switch (code)
4606 case VAR_DECL:
4607 /* The main effect of the function: replace a loop parameter with its
4608 associated special induction variable. */
4609 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
4610 expr = DECL_INDUCTION_VAR (expr);
4611 break;
4613 CASE_CONVERT:
4615 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
4616 /* Bail out as soon as we suspect some sort of type frobbing. */
4617 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
4618 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
4619 break;
4622 /* ... fall through ... */
4624 case NON_LVALUE_EXPR:
4625 return fold_build1 (code, sizetype,
4626 convert_to_index_type (TREE_OPERAND (expr, 0)));
4628 case PLUS_EXPR:
4629 case MINUS_EXPR:
4630 case MULT_EXPR:
4631 return fold_build2 (code, sizetype,
4632 convert_to_index_type (TREE_OPERAND (expr, 0)),
4633 convert_to_index_type (TREE_OPERAND (expr, 1)));
4635 case COMPOUND_EXPR:
4636 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
4637 convert_to_index_type (TREE_OPERAND (expr, 1)));
4639 case COND_EXPR:
4640 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
4641 convert_to_index_type (TREE_OPERAND (expr, 1)),
4642 convert_to_index_type (TREE_OPERAND (expr, 2)));
4644 default:
4645 break;
4648 return convert (sizetype, expr);
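/* Illustrative scenario (assumptions flagged): on a 64-bit target with a
   32-bit signed GNAT index type, an index expression such as J + 1 for
   a loop parameter J is rewritten by the recursion above into sizetype
   arithmetic on the special induction variable attached to J by
   DECL_INDUCTION_VAR, rather than being computed in the narrow type and
   then converted, which would hide its evolution from the loop
   optimizer.  */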
4651 /* Remove all conversions that are done in EXP. This includes converting
4652 from a padded type or to a justified modular type. If TRUE_ADDRESS
4653 is true, always return the address of the containing object even if
4654 the address is not bit-aligned. */
4656 tree
4657 remove_conversions (tree exp, bool true_address)
4659 switch (TREE_CODE (exp))
4661 case CONSTRUCTOR:
4662 if (true_address
4663 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4664 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4665 return
4666 remove_conversions ((*CONSTRUCTOR_ELTS (exp))[0].value, true);
4667 break;
4669 case COMPONENT_REF:
4670 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4671 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4672 break;
4674 CASE_CONVERT:
4675 case VIEW_CONVERT_EXPR:
4676 case NON_LVALUE_EXPR:
4677 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4679 default:
4680 break;
4683 return exp;
4686 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4687 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4688 likewise return an expression pointing to the underlying array. */
4690 tree
4691 maybe_unconstrained_array (tree exp)
4693 enum tree_code code = TREE_CODE (exp);
4694 tree type = TREE_TYPE (exp);
4696 switch (TREE_CODE (type))
4698 case UNCONSTRAINED_ARRAY_TYPE:
4699 if (code == UNCONSTRAINED_ARRAY_REF)
4701 const bool read_only = TREE_READONLY (exp);
4702 const bool no_trap = TREE_THIS_NOTRAP (exp);
4704 exp = TREE_OPERAND (exp, 0);
4705 type = TREE_TYPE (exp);
4707 if (TREE_CODE (exp) == COND_EXPR)
4709 tree op1
4710 = build_unary_op (INDIRECT_REF, NULL_TREE,
4711 build_component_ref (TREE_OPERAND (exp, 1),
4712 NULL_TREE,
4713 TYPE_FIELDS (type),
4714 false));
4715 tree op2
4716 = build_unary_op (INDIRECT_REF, NULL_TREE,
4717 build_component_ref (TREE_OPERAND (exp, 2),
4718 NULL_TREE,
4719 TYPE_FIELDS (type),
4720 false));
4722 exp = build3 (COND_EXPR,
4723 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
4724 TREE_OPERAND (exp, 0), op1, op2);
4726 else
4728 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
4729 build_component_ref (exp, NULL_TREE,
4730 TYPE_FIELDS (type),
4731 false));
4732 TREE_READONLY (exp) = read_only;
4733 TREE_THIS_NOTRAP (exp) = no_trap;
4737 else if (code == NULL_EXPR)
4738 exp = build1 (NULL_EXPR,
4739 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
4740 TREE_OPERAND (exp, 0));
4741 break;
4743 case RECORD_TYPE:
4744 /* If this is a padded type and it contains a template, convert to the
4745 unpadded type first. */
4746 if (TYPE_PADDING_P (type)
4747 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
4748 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
4750 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4751 type = TREE_TYPE (exp);
4754 if (TYPE_CONTAINS_TEMPLATE_P (type))
4756 exp = build_component_ref (exp, NULL_TREE,
4757 DECL_CHAIN (TYPE_FIELDS (type)),
4758 false);
4759 type = TREE_TYPE (exp);
4761 /* If the array type is padded, convert to the unpadded type. */
4762 if (TYPE_IS_PADDING_P (type))
4763 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4765 break;
4767 default:
4768 break;
4771 return exp;
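/* For illustration: given an UNCONSTRAINED_ARRAY_REF, the code above
   dereferences the first field of the underlying fat pointer to reach
   the array data; given a record containing a template, it selects the
   second field, the data, and strips any padding around the array.  */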
4774 /* Return true if EXPR is an expression that can be folded as an operand
4775 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4777 static bool
4778 can_fold_for_view_convert_p (tree expr)
4780 tree t1, t2;
4782 /* The folder will fold NOP_EXPRs between integral types with the same
4783 precision (in the middle-end's sense). We cannot allow it if the
4784 types don't have the same precision in the Ada sense as well. */
4785 if (TREE_CODE (expr) != NOP_EXPR)
4786 return true;
4788 t1 = TREE_TYPE (expr);
4789 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4791 /* Defer to the folder for non-integral conversions. */
4792 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4793 return true;
4795 /* Only fold conversions that preserve both precisions. */
4796 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4797 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4798 return true;
4800 return false;
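/* Hedged example: a NOP_EXPR between two 32-bit integer types where one
   side has an RM size of 8 bits has equal precisions in the middle-end's
   sense but not in the Ada sense, so false is returned and the caller
   keeps the VIEW_CONVERT_EXPR rather than letting the folder merge the
   conversion away.  */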
4803 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4804 If NOTRUNC_P is true, truncation operations should be suppressed.
4806 Special care is required with (source or target) integral types whose
4807 precision is not equal to their size, to make sure we fetch or assign
4808 the value bits whose location might depend on the endianness, e.g.
4810 Rmsize : constant := 8;
4811 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4813 type Bit_Array is array (1 .. Rmsize) of Boolean;
4814 pragma Pack (Bit_Array);
4816 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4818 Value : Int := 2#1000_0001#;
4819 Vbits : Bit_Array := To_Bit_Array (Value);
4821 we expect the 8 bits at Vbits'Address to always contain Value, while
4822 their original location depends on the endianness, at Value'Address
4823 on a little-endian architecture but not on a big-endian one. */
4825 tree
4826 unchecked_convert (tree type, tree expr, bool notrunc_p)
4828 tree etype = TREE_TYPE (expr);
4829 enum tree_code ecode = TREE_CODE (etype);
4830 enum tree_code code = TREE_CODE (type);
4831 tree tem;
4832 int c;
4834 /* If the expression is already of the right type, we are done. */
4835 if (etype == type)
4836 return expr;
4838 /* If both types are integral, just do a normal conversion.
4839 Likewise for a conversion to an unconstrained array. */
4840 if (((INTEGRAL_TYPE_P (type)
4841 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
4842 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
4843 && (INTEGRAL_TYPE_P (etype)
4844 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
4845 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
4846 || code == UNCONSTRAINED_ARRAY_TYPE)
4848 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4850 tree ntype = copy_type (etype);
4851 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4852 TYPE_MAIN_VARIANT (ntype) = ntype;
4853 expr = build1 (NOP_EXPR, ntype, expr);
4856 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4858 tree rtype = copy_type (type);
4859 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4860 TYPE_MAIN_VARIANT (rtype) = rtype;
4861 expr = convert (rtype, expr);
4862 expr = build1 (NOP_EXPR, type, expr);
4864 else
4865 expr = convert (type, expr);
  /* If we are converting to an integral type whose precision is not equal
     to its size, first unchecked convert to a record type that contains a
     field of the given precision.  Then extract the result from the field.

     There is a subtlety if the source type is an aggregate type with reverse
     storage order because its representation is not contiguous in the native
     storage order, i.e. a direct unchecked conversion to an integral type
     with N bits of precision cannot read the first N bits of the aggregate
     type.  To overcome it, we do an unchecked conversion to an integral type
     with reverse storage order and return the resulting value.  This also
     ensures that the result of the unchecked conversion doesn't depend on
     the endianness of the target machine, but only on the storage order of
     the aggregate type.

     Finally, for the sake of consistency, we do the unchecked conversion
     to an integral type with reverse storage order as soon as the source
     type is an aggregate type with reverse storage order, even if there
     are no considerations of precision or size involved.  */
  else if (INTEGRAL_TYPE_P (type)
           && TYPE_RM_SIZE (type)
           && (0 != compare_tree_int (TYPE_RM_SIZE (type),
                                      GET_MODE_BITSIZE (TYPE_MODE (type)))
               || (AGGREGATE_TYPE_P (etype)
                   && TYPE_REVERSE_STORAGE_ORDER (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
      tree field_type, field;

      if (AGGREGATE_TYPE_P (etype))
        TYPE_REVERSE_STORAGE_ORDER (rec_type)
          = TYPE_REVERSE_STORAGE_ORDER (etype);

      if (TYPE_UNSIGNED (type))
        field_type = make_unsigned_type (prec);
      else
        field_type = make_signed_type (prec);
      SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
                                 NULL_TREE, bitsize_zero_node, 1, 0);

      finish_record_type (rec_type, field, 1, false);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, NULL_TREE, field, false);
      expr = fold_build1 (NOP_EXPR, type, expr);
    }

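  /* For illustration (hypothetical values): a conversion to a 10-bit
     modular type whose mode is HImode goes through a wrapper record
     roughly equivalent to the C bit-field

       struct { unsigned OBJ : 10; } rec;

     laid out at offset 0, so exactly the 10 value bits are fetched from
     the source, wherever the 6 padding bits of the mode happen to sit.  */
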
  /* Similarly if we are converting from an integral type whose precision is
     not equal to its size, first copy into a field of the given precision
     and unchecked convert the record type.

     The same considerations as above apply if the target type is an aggregate
     type with reverse storage order and we also proceed similarly.  */
  else if (INTEGRAL_TYPE_P (etype)
           && TYPE_RM_SIZE (etype)
           && (0 != compare_tree_int (TYPE_RM_SIZE (etype),
                                      GET_MODE_BITSIZE (TYPE_MODE (etype)))
               || (AGGREGATE_TYPE_P (type)
                   && TYPE_REVERSE_STORAGE_ORDER (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, 1);
      tree field_type, field;

      if (AGGREGATE_TYPE_P (type))
        TYPE_REVERSE_STORAGE_ORDER (rec_type)
          = TYPE_REVERSE_STORAGE_ORDER (type);

      if (TYPE_UNSIGNED (etype))
        field_type = make_unsigned_type (prec);
      else
        field_type = make_signed_type (prec);
      SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
                                 NULL_TREE, bitsize_zero_node, 1, 0);

      finish_record_type (rec_type, field, 1, false);

      expr = fold_build1 (NOP_EXPR, field_type, expr);
      CONSTRUCTOR_APPEND_ELT (v, field, expr);
      expr = gnat_build_constructor (rec_type, v);
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting from a scalar type to a type with a different size,
     we need to pad to have the same size on both sides.

     ??? We cannot do it unconditionally because unchecked conversions are
     used liberally by the front-end to implement polymorphism, e.g. in:

       S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
       return p___size__4 (p__object!(S191s.all));

     so we skip all expressions that are references.  */
  else if (!REFERENCE_CLASS_P (expr)
           && !AGGREGATE_TYPE_P (etype)
           && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
           && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
    {
      if (c < 0)
        {
          expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
                                          false, false, false, true),
                          expr);
          expr = unchecked_convert (type, expr, notrunc_p);
        }
      else
        {
          tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
                                          false, false, false, true);
          expr = unchecked_convert (rec_type, expr, notrunc_p);
          expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
                                      false);
        }
    }

  /* We have a special case when we are converting between two unconstrained
     array types.  In that case, take the address, convert the fat pointer
     types, and dereference.  */
  else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
                           build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
                                   build_unary_op (ADDR_EXPR, NULL_TREE,
                                                   expr)));

  /* Another special case is when we are converting to a vector type from its
     representative array type; this is a regular conversion.  */
  else if (code == VECTOR_TYPE
           && ecode == ARRAY_TYPE
           && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
                                       etype))
    expr = convert (type, expr);

  /* And, if the array type is not the representative, we try to build an
     intermediate vector type of which the array type is the representative
     and to do the unchecked conversion between the vector types, in order
     to enable further simplifications in the middle-end.  */
  else if (code == VECTOR_TYPE
           && ecode == ARRAY_TYPE
           && (tem = build_vector_type_for_array (etype, NULL_TREE)))
    {
      expr = convert (tem, expr);
      return unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
     the alignment of the CONSTRUCTOR to speed up the copy operation.  */
  else if (TREE_CODE (expr) == CONSTRUCTOR
           && code == RECORD_TYPE
           && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
    {
      expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
                                      Empty, false, false, false, true),
                      expr);
      return unchecked_convert (type, expr, notrunc_p);
    }

  /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression.  */
  else
    {
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      if (can_fold_for_view_convert_p (expr))
        expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
      else
        expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose precision is not equal to its
     size, sign- or zero-extend the result.  We need not do this if the input
     is an integral type of the same precision and signedness, if the output
     is a biased type, or if both the input and output are unsigned.  */
  if (!notrunc_p
      && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
      && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
                                GET_MODE_BITSIZE (TYPE_MODE (type)))
      && !(INTEGRAL_TYPE_P (etype)
           && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
           && operand_equal_p (TYPE_RM_SIZE (type),
                               (TYPE_RM_SIZE (etype) != 0
                                ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
                               0))
      && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
    {
      tree base_type
        = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
      tree shift_expr
        = convert (base_type,
                   size_binop (MINUS_EXPR,
                               bitsize_int
                               (GET_MODE_BITSIZE (TYPE_MODE (type))),
                               TYPE_RM_SIZE (type)));
      expr
        = convert (type,
                   build_binary_op (RSHIFT_EXPR, base_type,
                                    build_binary_op (LSHIFT_EXPR, base_type,
                                                     convert (base_type, expr),
                                                     shift_expr),
                                    shift_expr));
    }

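  /* For illustration (hypothetical values): for a signed type with RM size
     24 in SImode, shift_expr is 32 - 24 = 8, so the result is computed as

       (type) (((base_type) expr << 8) >> 8)

     where the arithmetic right shift propagates bit 23 into the upper
     8 bits; with an unsigned base type, the same pair of shifts
     zero-extends instead.  */
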
  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR, show
     that the expression is no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
                           OEP_ONLY_CONST))
    TREE_CONSTANT (expr) = 0;

  return expr;
}

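/* A sketch of typical usage, for illustration: the translation of an Ada
   N_Unchecked_Type_Conversion node in gigi eventually boils down to
   something like

     gnu_result = unchecked_convert (gnu_result_type, gnu_result,
                                     No_Truncation (gnat_node));

   with NOTRUNC_P taken from the front-end's No_Truncation flag.  */
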
/* Return the appropriate GCC tree code for the specified GNAT_TYPE,
   the latter being a record type as predicated by Is_Record_Type.  */

enum tree_code
tree_code_for_record_type (Entity_Id gnat_type)
{
  Node_Id component_list, component;

  /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
     fields are all in the variant part.  Otherwise, return RECORD_TYPE.  */
  if (!Is_Unchecked_Union (gnat_type))
    return RECORD_TYPE;

  gnat_type = Implementation_Base_Type (gnat_type);
  component_list
    = Component_List (Type_Definition (Declaration_Node (gnat_type)));

  for (component = First_Non_Pragma (Component_Items (component_list));
       Present (component);
       component = Next_Non_Pragma (component))
    if (Ekind (Defining_Entity (component)) == E_Component)
      return RECORD_TYPE;

  return UNION_TYPE;
}

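/* For illustration, a hypothetical Ada type that gets UNION_TYPE because
   its only non-discriminant components live in the variant part:

     type Rec (D : Boolean := False) is record
        case D is
           when False => X : Integer;
           when True  => Y : Float;
        end case;
     end record;
     pragma Unchecked_Union (Rec);

   Adding a component before "case D is" would yield RECORD_TYPE instead.  */
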
/* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
   size is equal to 64 bits, or an array of such a type.  Set ALIGN_CLAUSE
   according to the presence of an alignment clause on the type or, if it
   is an array, on the component type.  */

bool
is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
{
  gnat_type = Underlying_Type (gnat_type);

  *align_clause = Present (Alignment_Clause (gnat_type));

  if (Is_Array_Type (gnat_type))
    {
      gnat_type = Underlying_Type (Component_Type (gnat_type));
      if (Present (Alignment_Clause (gnat_type)))
        *align_clause = true;
    }

  if (!Is_Floating_Point_Type (gnat_type))
    return false;

  if (UI_To_Int (Esize (gnat_type)) != 64)
    return false;

  return true;
}

/* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
   size is greater or equal to 64 bits, or an array of such a type.  Set
   ALIGN_CLAUSE according to the presence of an alignment clause on the
   type or, if it is an array, on the component type.  */

bool
is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
{
  gnat_type = Underlying_Type (gnat_type);

  *align_clause = Present (Alignment_Clause (gnat_type));

  if (Is_Array_Type (gnat_type))
    {
      gnat_type = Underlying_Type (Component_Type (gnat_type));
      if (Present (Alignment_Clause (gnat_type)))
        *align_clause = true;
    }

  if (!Is_Scalar_Type (gnat_type))
    return false;

  if (UI_To_Int (Esize (gnat_type)) < 64)
    return false;

  return true;
}

/* Return true if GNU_TYPE is suitable as the type of a non-aliased
   component of an aggregate type.  */

bool
type_for_nonaliased_component_p (tree gnu_type)
{
  /* If the type is passed by reference, we may have pointers to the
     component so it cannot be made non-aliased.  */
  if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
    return false;

  /* We used to say that any component of aggregate type is aliased
     because the front-end may take 'Reference of it.  The front-end
     has been enhanced in the meantime so as to use a renaming instead
     in most cases, but the back-end can probably take the address of
     such a component too so we go for the conservative stance.

     For instance, we might need the address of any array type, even
     if normally passed by copy, to construct a fat pointer if the
     component is used as an actual for an unconstrained formal.

     Likewise for record types: even if a specific record subtype is
     passed by copy, the parent type might be passed by ref (e.g. if
     it's of variable size) and we might take the address of a child
     component to pass to a parent formal.  We have no way to check
     for such conditions here.  */
  if (AGGREGATE_TYPE_P (gnu_type))
    return false;

  return true;
}

/* Return true if TYPE is a smaller form of ORIG_TYPE.  */

bool
smaller_form_type_p (tree type, tree orig_type)
{
  tree size, osize;

  /* We're not interested in variants here.  */
  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
    return false;

  /* Like a variant, a packable version keeps the original TYPE_NAME.  */
  if (TYPE_NAME (type) != TYPE_NAME (orig_type))
    return false;

  size = TYPE_SIZE (type);
  osize = TYPE_SIZE (orig_type);

  if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
    return false;

  return tree_int_cst_lt (size, osize) != 0;
}

/* Perform final processing on global variables.  */

static GTY (()) tree dummy_global;

void
gnat_write_global_declarations (void)
{
  unsigned int i;
  tree iter;

  /* If we have declared types as used at the global level, insert them in
     the global hash table.  We use a dummy variable for this purpose.  */
  if (types_used_by_cur_var_decl && !types_used_by_cur_var_decl->is_empty ())
    {
      struct varpool_node *node;
      char *label;

      ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
      dummy_global
        = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
                      void_type_node);
      DECL_HARD_REGISTER (dummy_global) = 1;
      TREE_STATIC (dummy_global) = 1;
      node = varpool_node::get_create (dummy_global);
      node->definition = 1;
      node->force_output = 1;

      while (!types_used_by_cur_var_decl->is_empty ())
        {
          tree t = types_used_by_cur_var_decl->pop ();
          types_used_by_var_decl_insert (t, dummy_global);
        }
    }

  /* Output debug information for all global type declarations first.  This
     ensures that global types whose compilation hasn't been finalized yet,
     for example pointers to Taft amendment types, have their compilation
     finalized in the right context.  */
  FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
    if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
      debug_hooks->global_decl (iter);

  /* Proceed to optimize and emit assembly.  */
  symtab->finalize_compilation_unit ();

  /* After cgraph has had a chance to emit everything that's going to
     be emitted, output debug information for the rest of globals.  */
  if (!seen_error ())
    {
      timevar_push (TV_SYMOUT);
      FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
        if (TREE_CODE (iter) != TYPE_DECL && !DECL_IGNORED_P (iter))
          debug_hooks->global_decl (iter);
      timevar_pop (TV_SYMOUT);
    }
}

/* ************************************************************************
 * *                          GCC builtins support                        *
 * ************************************************************************ */

/* The general scheme is fairly simple:

   For each builtin function/type to be declared, gnat_install_builtins calls
   internal facilities which eventually get to gnat_pushdecl, which in turn
   tracks the so-declared builtin function decls in the 'builtin_decls' global
   data structure.  When an Intrinsic subprogram declaration is processed, we
   search this global data structure to retrieve the associated BUILT_IN DECL
   node.  */

/* Search the chain of currently available builtin declarations for a node
   corresponding to function NAME (an IDENTIFIER_NODE).  Return the first node
   found, if any, or NULL_TREE otherwise.  */

tree
builtin_decl_for (tree name)
{
  unsigned i;
  tree decl;

  FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
    if (DECL_NAME (decl) == name)
      return decl;

  return NULL_TREE;
}

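/* For illustration (hypothetical usage): binding an Intrinsic subprogram
   to a GCC builtin amounts to a lookup such as

     tree decl = builtin_decl_for (get_identifier ("__builtin_memcpy"));

   which yields NULL_TREE if no builtin with that name was declared.  */
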
/* The code below eventually exposes gnat_install_builtins, which declares
   the builtin types and functions we might need, either internally or as
   user-accessible facilities.

   ??? This is a first implementation shot, still in rough shape.  It is
   heavily inspired by the "C" family implementation, with chunks copied
   verbatim from there.

   Two obvious TODO candidates are
   o Use a more efficient name/decl mapping scheme
   o Devise a middle-end infrastructure to avoid having to copy
     pieces between front-ends.  */

/* ----------------------------------------------------------------------- *
 *                       BUILTIN ELEMENTARY TYPES                          *
 * ----------------------------------------------------------------------- */

/* Standard data types to be used in builtin argument declarations.  */

enum c_tree_index
{
  CTI_SIGNED_SIZE_TYPE, /* For format checking only.  */
  CTI_STRING_TYPE,
  CTI_CONST_STRING_TYPE,

  CTI_MAX
};

static tree c_global_trees[CTI_MAX];

#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
#define string_type_node c_global_trees[CTI_STRING_TYPE]
#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]

/* ??? In addition to some attribute handlers, we currently don't support a
   (small) number of builtin types, which in turn inhibits support for a
   number of builtin functions.  */
#define wint_type_node    void_type_node
#define intmax_type_node  void_type_node
#define uintmax_type_node void_type_node

/* Build the void_list_node (void_type_node having been created).  */

static tree
build_void_list_node (void)
{
  tree t = build_tree_list (NULL_TREE, void_type_node);
  return t;
}

/* Used to help initialize the builtin-types.def table.  When a type of
   the correct size doesn't exist, use error_mark_node instead of NULL.
   The latter results in segfaults even when a decl using the type doesn't
   get invoked.  */

static tree
builtin_type_for_size (int size, bool unsignedp)
{
  tree type = gnat_type_for_size (size, unsignedp);
  return type ? type : error_mark_node;
}

/* Build/push the elementary type decls that builtin functions/types
   will need.  */

static void
install_builtin_elementary_types (void)
{
  signed_size_type_node = gnat_signed_type (size_type_node);
  pid_type_node = integer_type_node;
  void_list_node = build_void_list_node ();

  string_type_node = build_pointer_type (char_type_node);
  const_string_type_node
    = build_pointer_type (build_qualified_type
                          (char_type_node, TYPE_QUAL_CONST));
}

/* ----------------------------------------------------------------------- *
 *                         BUILTIN FUNCTION TYPES                          *
 * ----------------------------------------------------------------------- */

/* Now, builtin function types per se.  */

enum c_builtin_type
{
#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
                            ARG6) NAME,
#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
                            ARG6, ARG7) NAME,
#define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
                            ARG6, ARG7, ARG8) NAME,
#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
                                NAME,
#define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
                                ARG6, ARG7) NAME,
#define DEF_FUNCTION_TYPE_VAR_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
                                 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
#include "builtin-types.def"
#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_8
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_FUNCTION_TYPE_VAR_7
#undef DEF_FUNCTION_TYPE_VAR_11
#undef DEF_POINTER_TYPE
  BT_LAST
};

typedef enum c_builtin_type builtin_type;

/* A temporary array used in communication with def_fn_type.  */
static GTY(()) tree builtin_types[(int) BT_LAST + 1];

/* A helper function for install_builtin_types.  Build function type
   for DEF with return type RET and N arguments.  If VAR is true, then the
   function should be variadic after those N arguments.

   Takes special care not to ICE if any of the types involved are
   error_mark_node, which indicates that said type is not in fact available
   (see builtin_type_for_size).  In which case the function type as a whole
   should be error_mark_node.  */

static void
def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
{
  tree t;
  tree *args = XALLOCAVEC (tree, n);
  va_list list;
  int i;

  va_start (list, n);
  for (i = 0; i < n; ++i)
    {
      builtin_type a = (builtin_type) va_arg (list, int);
      t = builtin_types[a];
      if (t == error_mark_node)
        goto egress;
      args[i] = t;
    }

  t = builtin_types[ret];
  if (t == error_mark_node)
    goto egress;
  if (var)
    t = build_varargs_function_type_array (t, n, args);
  else
    t = build_function_type_array (t, n, args);

 egress:
  builtin_types[def] = t;
  va_end (list);
}

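/* For illustration: assuming builtin-types.def contains the entry

     DEF_FUNCTION_TYPE_1 (BT_FN_INT_INT, BT_INT, BT_INT)

   the macros below expand it to "def_fn_type (BT_FN_INT_INT, BT_INT, 0, 1,
   BT_INT);", which stores the GCC type for "int (int)" into the
   builtin_types array.  */
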
/* Build the builtin function types and install them in the builtin_types
   array for later use in builtin function decls.  */

static void
install_builtin_function_types (void)
{
  tree va_list_ref_type_node;
  tree va_list_arg_type_node;

  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
    {
      va_list_arg_type_node = va_list_ref_type_node =
        build_pointer_type (TREE_TYPE (va_list_type_node));
    }
  else
    {
      va_list_arg_type_node = va_list_type_node;
      va_list_ref_type_node = build_reference_type (va_list_type_node);
    }

#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
  builtin_types[ENUM] = VALUE;
#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
  def_fn_type (ENUM, RETURN, 0, 0);
#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
  def_fn_type (ENUM, RETURN, 0, 1, ARG1);
#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
  def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
  def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
  def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
                            ARG6) \
  def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
                            ARG6, ARG7) \
  def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
#define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
                            ARG6, ARG7, ARG8) \
  def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
               ARG7, ARG8);
#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
  def_fn_type (ENUM, RETURN, 1, 0);
#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
  def_fn_type (ENUM, RETURN, 1, 1, ARG1);
#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
  def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
  def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
  def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
                                ARG6, ARG7) \
  def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
#define DEF_FUNCTION_TYPE_VAR_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
                                 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
  def_fn_type (ENUM, RETURN, 1, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
               ARG7, ARG8, ARG9, ARG10, ARG11);
#define DEF_POINTER_TYPE(ENUM, TYPE) \
  builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);

#include "builtin-types.def"

#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_8
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_FUNCTION_TYPE_VAR_7
#undef DEF_FUNCTION_TYPE_VAR_11
#undef DEF_POINTER_TYPE
  builtin_types[(int) BT_LAST] = NULL_TREE;
}

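/* For illustration: with the macros above in effect, a builtin-types.def
   entry such as

     DEF_PRIMITIVE_TYPE (BT_INT, integer_type_node)

   simply records the node in builtin_types, while e.g.

     DEF_POINTER_TYPE (BT_PTR_FN_VOID_PTR, BT_FN_VOID_PTR)

   builds a pointer type to a previously recorded function type.  */
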
/* ----------------------------------------------------------------------- *
 *                           BUILTIN ATTRIBUTES                            *
 * ----------------------------------------------------------------------- */

enum built_in_attribute
{
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
  ATTR_LAST
};

static GTY(()) tree built_in_attributes[(int) ATTR_LAST];

static void
install_builtin_attributes (void)
{
  /* Fill in the built_in_attributes array.  */
#define DEF_ATTR_NULL_TREE(ENUM) \
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE) \
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_STRING(ENUM, VALUE) \
  built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING) \
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
  built_in_attributes[(int) ENUM] \
    = tree_cons (built_in_attributes[(int) PURPOSE], \
                 built_in_attributes[(int) VALUE], \
                 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
}

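/* For illustration: a builtin-attrs.def entry such as

     DEF_ATTR_TREE_LIST (ATTR_NOTHROW_LIST, ATTR_NOTHROW, ATTR_NULL, ATTR_NULL)

   expands above to a tree_cons building the one-element attribute list
   "nothrow", which install_builtin_functions can then attach to builtin
   function decls.  */
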
/* Handle a "const" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_const_attribute (tree *node, tree ARG_UNUSED (name),
                        tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                        bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_READONLY (*node) = 1;
  else
    *no_add_attrs = true;

  return NULL_TREE;
}

/* Handle a "nothrow" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
                          tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                          bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_NOTHROW (*node) = 1;
  else
    *no_add_attrs = true;

  return NULL_TREE;
}

/* Handle a "pure" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    DECL_PURE_P (*node) = 1;
  /* ??? TODO: Support types.  */
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Handle a "no vops" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
                         tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                         bool *ARG_UNUSED (no_add_attrs))
{
  gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
  DECL_IS_NOVOPS (*node) = 1;
  return NULL_TREE;
}

/* Helper for nonnull attribute handling; fetch the operand number
   from the attribute argument list.  */

static bool
get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
{
  /* Verify the arg number is a constant.  */
  if (!tree_fits_uhwi_p (arg_num_expr))
    return false;

  *valp = TREE_INT_CST_LOW (arg_num_expr);
  return true;
}

/* Handle the "nonnull" attribute.  */

static tree
handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
                          tree args, int ARG_UNUSED (flags),
                          bool *no_add_attrs)
{
  tree type = *node;
  unsigned HOST_WIDE_INT attr_arg_num;

  /* If no arguments are specified, all pointer arguments should be
     non-null.  Verify a full prototype is given so that the arguments
     will have the correct types when we actually check them later.  */
  if (!args)
    {
      if (!prototype_p (type))
        {
          error ("nonnull attribute without arguments on a non-prototype");
          *no_add_attrs = true;
        }
      return NULL_TREE;
    }

  /* Argument list specified.  Verify that each argument number references
     a pointer argument.  */
  for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
    {
      unsigned HOST_WIDE_INT arg_num = 0, ck_num;

      if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
        {
          error ("nonnull argument has invalid operand number (argument %lu)",
                 (unsigned long) attr_arg_num);
          *no_add_attrs = true;
          return NULL_TREE;
        }

      if (prototype_p (type))
        {
          function_args_iterator iter;
          tree argument;

          function_args_iter_init (&iter, type);
          for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
            {
              argument = function_args_iter_cond (&iter);
              if (!argument || ck_num == arg_num)
                break;
            }

          if (!argument
              || TREE_CODE (argument) == VOID_TYPE)
            {
              error ("nonnull argument with out-of-range operand number "
                     "(argument %lu, operand %lu)",
                     (unsigned long) attr_arg_num, (unsigned long) arg_num);
              *no_add_attrs = true;
              return NULL_TREE;
            }

          if (TREE_CODE (argument) != POINTER_TYPE)
            {
              error ("nonnull argument references non-pointer operand "
                     "(argument %lu, operand %lu)",
                     (unsigned long) attr_arg_num, (unsigned long) arg_num);
              *no_add_attrs = true;
              return NULL_TREE;
            }
        }
    }

  return NULL_TREE;
}

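/* For illustration, the C-style spelling of the attribute checked above:

     extern void copy (void *dst, const void *src)
       __attribute__ ((nonnull (1, 2)));

   operand numbers are 1-based and must designate pointer parameters of a
   prototyped function.  */
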
/* Handle a "sentinel" attribute.  */

static tree
handle_sentinel_attribute (tree *node, tree name, tree args,
                           int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (!prototype_p (*node))
    {
      warning (OPT_Wattributes,
               "%qs attribute requires prototypes with named arguments",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }
  else
    {
      if (!stdarg_p (*node))
        {
          warning (OPT_Wattributes,
                   "%qs attribute only applies to variadic functions",
                   IDENTIFIER_POINTER (name));
          *no_add_attrs = true;
        }
    }

  if (args)
    {
      tree position = TREE_VALUE (args);

      if (TREE_CODE (position) != INTEGER_CST)
        {
          warning (0, "requested position is not an integer constant");
          *no_add_attrs = true;
        }
      else
        {
          if (tree_int_cst_lt (position, integer_zero_node))
            {
              warning (0, "requested position is less than zero");
              *no_add_attrs = true;
            }
        }
    }

  return NULL_TREE;
}

/* Handle a "noreturn" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                           int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = TREE_TYPE (*node);

  /* See FIXME comment in c_common_attribute_table.  */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_THIS_VOLATILE (*node) = 1;
  else if (TREE_CODE (type) == POINTER_TYPE
           && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
    TREE_TYPE (*node)
      = build_pointer_type
        (build_type_variant (TREE_TYPE (type),
                             TYPE_READONLY (TREE_TYPE (type)), 1));
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Handle a "leaf" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }
  if (!TREE_PUBLIC (*node))
    {
      warning (OPT_Wattributes, "%qE attribute has no effect", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Handle an "always_inline" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                                int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    {
      /* Set the attribute and mark it for disregarding inline limits.  */
      DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
    }
  else
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Handle a "malloc" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                         int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL
      && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
    DECL_IS_MALLOC (*node) = 1;
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Fake handler for attributes we don't properly support.  */

tree
fake_attribute_handler (tree * ARG_UNUSED (node),
                        tree ARG_UNUSED (name),
                        tree ARG_UNUSED (args),
                        int ARG_UNUSED (flags),
                        bool * ARG_UNUSED (no_add_attrs))
{
  return NULL_TREE;
}

/* Handle a "type_generic" attribute.  */

static tree
handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
                               tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                               bool * ARG_UNUSED (no_add_attrs))
{
  /* Ensure we have a function type.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);

  /* Ensure we have a variadic function.  */
  gcc_assert (!prototype_p (*node) || stdarg_p (*node));

  return NULL_TREE;
}

/* Handle a "vector_size" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_vector_size_attribute (tree *node, tree name, tree args,
                              int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = *node;
  tree vector_type;

  *no_add_attrs = true;

  /* We need to provide for vector pointers, vector arrays, and
     functions returning vectors.  For example:

       __attribute__((vector_size(16))) short *foo;

     In this case, the mode is SI, but the type being modified is
     HI, so we need to look further.  */
  while (POINTER_TYPE_P (type)
         || TREE_CODE (type) == FUNCTION_TYPE
         || TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
  if (!vector_type)
    return NULL_TREE;

  /* Build back pointers if needed.  */
  *node = reconstruct_complex_type (*node, vector_type);

  return NULL_TREE;
}

/* Handle a "vector_type" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                              int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = *node;
  tree vector_type;

  *no_add_attrs = true;

  if (TREE_CODE (type) != ARRAY_TYPE)
    {
      error ("attribute %qs applies to array types only",
             IDENTIFIER_POINTER (name));
      return NULL_TREE;
    }

  vector_type = build_vector_type_for_array (type, name);
  if (!vector_type)
    return NULL_TREE;

  TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
  *node = vector_type;

  return NULL_TREE;
}

/* ----------------------------------------------------------------------- *
 *                           BUILTIN FUNCTIONS                             *
 * ----------------------------------------------------------------------- */

/* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
   names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
   if nonansi_p and flag_no_nonansi_builtin.  */

static void
def_builtin_1 (enum built_in_function fncode,
               const char *name,
               enum built_in_class fnclass,
               tree fntype, tree libtype,
               bool both_p, bool fallback_p,
               bool nonansi_p ATTRIBUTE_UNUSED,
               tree fnattrs, bool implicit_p)
{
  tree decl;
  const char *libname;

  /* Preserve an already installed decl.  It most likely was set up in advance
     (e.g. as part of the internal builtins) for specific reasons.  */
  if (builtin_decl_explicit (fncode) != NULL_TREE)
    return;

  gcc_assert ((!both_p && !fallback_p)
              || !strncmp (name, "__builtin_",
                           strlen ("__builtin_")));

  libname = name + strlen ("__builtin_");
  decl = add_builtin_function (name, fntype, fncode, fnclass,
                               (fallback_p ? libname : NULL),
                               fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada.  */
    add_builtin_function (libname, libtype, fncode, fnclass,
                          NULL, fnattrs);

  set_builtin_decl (fncode, decl, implicit_p);
}

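/* For illustration: for a builtins.def entry declared with both names, e.g.
   memcpy, BOTH_P is true and the above declares both "__builtin_memcpy" and
   plain "memcpy", the latter name also serving as the fallback library name
   when FALLBACK_P is true.  */
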
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
static int flag_isoc11 = 0;

/* Install what the common builtins.def offers.  */

static void
install_builtin_functions (void)
{
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
                    NONANSI_P, ATTRS, IMPLICIT, COND) \
  if (NAME && COND) \
    def_builtin_1 (ENUM, NAME, CLASS, \
                   builtin_types[(int) TYPE], \
                   builtin_types[(int) LIBTYPE], \
                   BOTH_P, FALLBACK_P, NONANSI_P, \
                   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
#undef DEF_BUILTIN
}

/* ----------------------------------------------------------------------- *
 *                           BUILTIN FUNCTIONS                             *
 * ----------------------------------------------------------------------- */

/* Install the builtin functions we might need.  */

void
gnat_install_builtins (void)
{
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}

#include "gt-ada-utils.h"
#include "gtype-ada.h"