* exp_dbug.adb (Debug_Renaming_Declaration): Process underlying types.
[official-gcc.git] / gcc / ada / gcc-interface / utils.c
blob21e12658380a5a36862f63e038986777944e2c5c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2016, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "target.h"
30 #include "function.h"
31 #include "tree.h"
32 #include "stringpool.h"
33 #include "cgraph.h"
34 #include "diagnostic.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "toplev.h"
41 #include "output.h"
42 #include "debug.h"
43 #include "convert.h"
44 #include "common/common-target.h"
45 #include "langhooks.h"
46 #include "tree-dump.h"
47 #include "tree-inline.h"
49 #include "ada.h"
50 #include "types.h"
51 #include "atree.h"
52 #include "nlists.h"
53 #include "uintp.h"
54 #include "fe.h"
55 #include "sinfo.h"
56 #include "einfo.h"
57 #include "ada-tree.h"
58 #include "gigi.h"
60 /* If nonzero, pretend we are allocating at global level. */
61 int force_global;
63 /* The default alignment of "double" floating-point types, i.e. floating
64 point types whose size is equal to 64 bits, or 0 if this alignment is
65 not specifically capped. */
66 int double_float_alignment;
68 /* The default alignment of "double" or larger scalar types, i.e. scalar
69 types whose size is greater or equal to 64 bits, or 0 if this alignment
70 is not specifically capped. */
71 int double_scalar_alignment;
73 /* True if floating-point arithmetics may use wider intermediate results. */
74 bool fp_arith_may_widen = true;
76 /* Tree nodes for the various types and decls we create. */
77 tree gnat_std_decls[(int) ADT_LAST];
79 /* Functions to call for each of the possible raise reasons. */
80 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
82 /* Likewise, but with extra info for each of the possible raise reasons. */
83 tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
85 /* Forward declarations for handlers of attributes. */
86 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_noinline_attribute (tree *, tree, tree, int, bool *);
94 static tree handle_noclone_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
97 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
98 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
99 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
100 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
102 /* Fake handler for attributes we don't properly support, typically because
103 they'd require dragging a lot of the common-c front-end circuitry. */
104 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
106 /* Table of machine-independent internal attributes for Ada. We support
107 this minimal set of attributes to accommodate the needs of builtins. */
108 const struct attribute_spec gnat_internal_attribute_table[] =
110 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
111 affects_type_identity } */
112 { "const", 0, 0, true, false, false, handle_const_attribute,
113 false },
114 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute,
115 false },
116 { "pure", 0, 0, true, false, false, handle_pure_attribute,
117 false },
118 { "no vops", 0, 0, true, false, false, handle_novops_attribute,
119 false },
120 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute,
121 false },
122 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute,
123 false },
124 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute,
125 false },
126 { "noinline", 0, 0, true, false, false, handle_noinline_attribute,
127 false },
128 { "noclone", 0, 0, true, false, false, handle_noclone_attribute,
129 false },
130 { "leaf", 0, 0, true, false, false, handle_leaf_attribute,
131 false },
132 { "always_inline",0, 0, true, false, false, handle_always_inline_attribute,
133 false },
134 { "malloc", 0, 0, true, false, false, handle_malloc_attribute,
135 false },
136 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute,
137 false },
139 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute,
140 false },
141 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute,
142 false },
143 { "may_alias", 0, 0, false, true, false, NULL, false },
145 /* ??? format and format_arg are heavy and not supported, which actually
146 prevents support for stdio builtins, which we however declare as part
147 of the common builtins.def contents. */
148 { "format", 3, 3, false, true, true, fake_attribute_handler, false },
149 { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },
151 { NULL, 0, 0, false, false, false, NULL, false }
154 /* Associates a GNAT tree node to a GCC tree node. It is used in
155 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
156 of `save_gnu_tree' for more info. */
157 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
159 #define GET_GNU_TREE(GNAT_ENTITY) \
160 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
162 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
163 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
165 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
166 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
168 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
169 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
171 #define GET_DUMMY_NODE(GNAT_ENTITY) \
172 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
174 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
175 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
177 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
178 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
180 /* This variable keeps a table for types for each precision so that we only
181 allocate each of them once. Signed and unsigned types are kept separate.
183 Note that these types are only used when fold-const requests something
184 special. Perhaps we should NOT share these types; we'll see how it
185 goes later. */
186 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
188 /* Likewise for float types, but record these by mode. */
189 static GTY(()) tree float_types[NUM_MACHINE_MODES];
191 /* For each binding contour we allocate a binding_level structure to indicate
192 the binding depth. */
194 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
195 /* The binding level containing this one (the enclosing binding level). */
196 struct gnat_binding_level *chain;
197 /* The BLOCK node for this level. */
198 tree block;
199 /* If nonzero, the setjmp buffer that needs to be updated for any
200 variable-sized definition within this context. */
201 tree jmpbuf_decl;
204 /* The binding level currently in effect. */
205 static GTY(()) struct gnat_binding_level *current_binding_level;
207 /* A chain of gnat_binding_level structures awaiting reuse. */
208 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
210 /* The context to be used for global declarations. */
211 static GTY(()) tree global_context;
213 /* An array of global declarations. */
214 static GTY(()) vec<tree, va_gc> *global_decls;
216 /* An array of builtin function declarations. */
217 static GTY(()) vec<tree, va_gc> *builtin_decls;
219 /* A chain of unused BLOCK nodes. */
220 static GTY((deletable)) tree free_block_chain;
222 /* A hash table of padded types. It is modelled on the generic type
223 hash table in tree.c, which must thus be used as a reference. */
225 struct GTY((for_user)) pad_type_hash {
226 unsigned long hash;
227 tree type;
230 struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
232 static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
233 static bool equal (pad_type_hash *a, pad_type_hash *b);
234 static int keep_cache_entry (pad_type_hash *&);
237 static GTY ((cache))
238 hash_table<pad_type_hasher> *pad_type_hash_table;
240 static tree merge_sizes (tree, tree, tree, bool, bool);
241 static tree compute_related_constant (tree, tree);
242 static tree split_plus (tree, tree *);
243 static tree float_type_for_precision (int, machine_mode);
244 static tree convert_to_fat_pointer (tree, tree);
245 static unsigned int scale_by_factor_of (tree, unsigned int);
246 static bool potential_alignment_gap (tree, tree, tree);
248 /* Linked list used as a queue to defer the initialization of the DECL_CONTEXT
249 of ..._DECL nodes and of the TYPE_CONTEXT of ..._TYPE nodes. */
250 struct deferred_decl_context_node
252 /* The ..._DECL node to work on. */
253 tree decl;
255 /* The corresponding entity's Scope. */
256 Entity_Id gnat_scope;
258 /* The value of force_global when DECL was pushed. */
259 int force_global;
261 /* The list of ..._TYPE nodes to propagate the context to. */
262 vec<tree> types;
264 /* The next queue item. */
265 struct deferred_decl_context_node *next;
268 static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;
270 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
271 feed it with the elaboration of GNAT_SCOPE. */
272 static struct deferred_decl_context_node *
273 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);
275 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
276 feed it with the DECL_CONTEXT computed as part of N as soon as it is
277 computed. */
278 static void add_deferred_type_context (struct deferred_decl_context_node *n,
279 tree type);
281 /* Initialize data structures of the utils.c module. */
283 void
284 init_gnat_utils (void)
286 /* Initialize the association of GNAT nodes to GCC trees. */
287 associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
289 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
290 dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
292 /* Initialize the hash table of padded types. */
293 pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
296 /* Destroy data structures of the utils.c module. */
298 void
299 destroy_gnat_utils (void)
301 /* Destroy the association of GNAT nodes to GCC trees. */
302 ggc_free (associate_gnat_to_gnu);
303 associate_gnat_to_gnu = NULL;
305 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
306 ggc_free (dummy_node_table);
307 dummy_node_table = NULL;
309 /* Destroy the hash table of padded types. */
310 pad_type_hash_table->empty ();
311 pad_type_hash_table = NULL;
314 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
315 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
316 If NO_CHECK is true, the latter check is suppressed.
318 If GNU_DECL is zero, reset a previous association. */
320 void
321 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
323 /* Check that GNAT_ENTITY is not already defined and that it is being set
324 to something which is a decl. If that is not the case, this usually
325 means GNAT_ENTITY is defined twice, but occasionally is due to some
326 Gigi problem. */
327 gcc_assert (!(gnu_decl
328 && (PRESENT_GNU_TREE (gnat_entity)
329 || (!no_check && !DECL_P (gnu_decl)))));
331 SET_GNU_TREE (gnat_entity, gnu_decl);
334 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
335 that was associated with it. If there is no such tree node, abort.
337 In some cases, such as delayed elaboration or expressions that need to
338 be elaborated only once, GNAT_ENTITY is really not an entity. */
340 tree
341 get_gnu_tree (Entity_Id gnat_entity)
343 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
344 return GET_GNU_TREE (gnat_entity);
347 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
349 bool
350 present_gnu_tree (Entity_Id gnat_entity)
352 return PRESENT_GNU_TREE (gnat_entity);
355 /* Make a dummy type corresponding to GNAT_TYPE. */
357 tree
358 make_dummy_type (Entity_Id gnat_type)
360 Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
361 tree gnu_type;
363 /* If there was no equivalent type (can only happen when just annotating
364 types) or underlying type, go back to the original type. */
365 if (No (gnat_equiv))
366 gnat_equiv = gnat_type;
368 /* If it there already a dummy type, use that one. Else make one. */
369 if (PRESENT_DUMMY_NODE (gnat_equiv))
370 return GET_DUMMY_NODE (gnat_equiv);
372 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
373 an ENUMERAL_TYPE. */
374 gnu_type = make_node (Is_Record_Type (gnat_equiv)
375 ? tree_code_for_record_type (gnat_equiv)
376 : ENUMERAL_TYPE);
377 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
378 TYPE_DUMMY_P (gnu_type) = 1;
379 TYPE_STUB_DECL (gnu_type)
380 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
381 if (Is_By_Reference_Type (gnat_equiv))
382 TYPE_BY_REFERENCE_P (gnu_type) = 1;
384 SET_DUMMY_NODE (gnat_equiv, gnu_type);
386 return gnu_type;
389 /* Return the dummy type that was made for GNAT_TYPE, if any. */
391 tree
392 get_dummy_type (Entity_Id gnat_type)
394 return GET_DUMMY_NODE (gnat_type);
397 /* Build dummy fat and thin pointer types whose designated type is specified
398 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
400 void
401 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
403 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
404 tree gnu_fat_type, fields, gnu_object_type;
406 gnu_template_type = make_node (RECORD_TYPE);
407 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
408 TYPE_DUMMY_P (gnu_template_type) = 1;
409 gnu_ptr_template = build_pointer_type (gnu_template_type);
411 gnu_array_type = make_node (ENUMERAL_TYPE);
412 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
413 TYPE_DUMMY_P (gnu_array_type) = 1;
414 gnu_ptr_array = build_pointer_type (gnu_array_type);
416 gnu_fat_type = make_node (RECORD_TYPE);
417 /* Build a stub DECL to trigger the special processing for fat pointer types
418 in gnat_pushdecl. */
419 TYPE_NAME (gnu_fat_type)
420 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
421 gnu_fat_type);
422 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
423 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
424 DECL_CHAIN (fields)
425 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
426 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
427 finish_fat_pointer_type (gnu_fat_type, fields);
428 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
429 /* Suppress debug info until after the type is completed. */
430 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
432 gnu_object_type = make_node (RECORD_TYPE);
433 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
434 TYPE_DUMMY_P (gnu_object_type) = 1;
436 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
437 TYPE_REFERENCE_TO (gnu_desig_type) = gnu_fat_type;
438 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
441 /* Return true if we are in the global binding level. */
443 bool
444 global_bindings_p (void)
446 return force_global || !current_function_decl;
449 /* Enter a new binding level. */
451 void
452 gnat_pushlevel (void)
454 struct gnat_binding_level *newlevel = NULL;
456 /* Reuse a struct for this binding level, if there is one. */
457 if (free_binding_level)
459 newlevel = free_binding_level;
460 free_binding_level = free_binding_level->chain;
462 else
463 newlevel = ggc_alloc<gnat_binding_level> ();
465 /* Use a free BLOCK, if any; otherwise, allocate one. */
466 if (free_block_chain)
468 newlevel->block = free_block_chain;
469 free_block_chain = BLOCK_CHAIN (free_block_chain);
470 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
472 else
473 newlevel->block = make_node (BLOCK);
475 /* Point the BLOCK we just made to its parent. */
476 if (current_binding_level)
477 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
479 BLOCK_VARS (newlevel->block) = NULL_TREE;
480 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
481 TREE_USED (newlevel->block) = 1;
483 /* Add this level to the front of the chain (stack) of active levels. */
484 newlevel->chain = current_binding_level;
485 newlevel->jmpbuf_decl = NULL_TREE;
486 current_binding_level = newlevel;
489 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
490 and point FNDECL to this BLOCK. */
492 void
493 set_current_block_context (tree fndecl)
495 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
496 DECL_INITIAL (fndecl) = current_binding_level->block;
497 set_block_for_group (current_binding_level->block);
500 /* Set the jmpbuf_decl for the current binding level to DECL. */
502 void
503 set_block_jmpbuf_decl (tree decl)
505 current_binding_level->jmpbuf_decl = decl;
508 /* Get the jmpbuf_decl, if any, for the current binding level. */
510 tree
511 get_block_jmpbuf_decl (void)
513 return current_binding_level->jmpbuf_decl;
516 /* Exit a binding level. Set any BLOCK into the current code group. */
518 void
519 gnat_poplevel (void)
521 struct gnat_binding_level *level = current_binding_level;
522 tree block = level->block;
524 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
525 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
527 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
528 are no variables free the block and merge its subblocks into those of its
529 parent block. Otherwise, add it to the list of its parent. */
530 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
532 else if (!BLOCK_VARS (block))
534 BLOCK_SUBBLOCKS (level->chain->block)
535 = block_chainon (BLOCK_SUBBLOCKS (block),
536 BLOCK_SUBBLOCKS (level->chain->block));
537 BLOCK_CHAIN (block) = free_block_chain;
538 free_block_chain = block;
540 else
542 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
543 BLOCK_SUBBLOCKS (level->chain->block) = block;
544 TREE_USED (block) = 1;
545 set_block_for_group (block);
548 /* Free this binding structure. */
549 current_binding_level = level->chain;
550 level->chain = free_binding_level;
551 free_binding_level = level;
554 /* Exit a binding level and discard the associated BLOCK. */
556 void
557 gnat_zaplevel (void)
559 struct gnat_binding_level *level = current_binding_level;
560 tree block = level->block;
562 BLOCK_CHAIN (block) = free_block_chain;
563 free_block_chain = block;
565 /* Free this binding structure. */
566 current_binding_level = level->chain;
567 level->chain = free_binding_level;
568 free_binding_level = level;
571 /* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
573 static void
574 gnat_set_type_context (tree type, tree context)
576 tree decl = TYPE_STUB_DECL (type);
578 TYPE_CONTEXT (type) = context;
580 while (decl && DECL_PARALLEL_TYPE (decl))
582 tree parallel_type = DECL_PARALLEL_TYPE (decl);
584 /* Give a context to the parallel types and their stub decl, if any.
585 Some parallel types seems to be present in multiple parallel type
586 chains, so don't mess with their context if they already have one. */
587 if (!TYPE_CONTEXT (parallel_type))
589 if (TYPE_STUB_DECL (parallel_type))
590 DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
591 TYPE_CONTEXT (parallel_type) = context;
594 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
598 /* Return the innermost scope, starting at GNAT_NODE, we are be interested in
599 the debug info, or Empty if there is no such scope. If not NULL, set
600 IS_SUBPROGRAM to whether the returned entity is a subprogram. */
602 Entity_Id
603 get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
605 Entity_Id gnat_entity;
607 if (is_subprogram)
608 *is_subprogram = false;
610 if (Nkind (gnat_node) == N_Defining_Identifier
611 || Nkind (gnat_node) == N_Defining_Operator_Symbol)
612 gnat_entity = Scope (gnat_node);
613 else
614 return Empty;
616 while (Present (gnat_entity))
618 switch (Ekind (gnat_entity))
620 case E_Function:
621 case E_Procedure:
622 if (Present (Protected_Body_Subprogram (gnat_entity)))
623 gnat_entity = Protected_Body_Subprogram (gnat_entity);
625 /* If the scope is a subprogram, then just rely on
626 current_function_decl, so that we don't have to defer
627 anything. This is needed because other places rely on the
628 validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes. */
629 if (is_subprogram)
630 *is_subprogram = true;
631 return gnat_entity;
633 case E_Record_Type:
634 case E_Record_Subtype:
635 return gnat_entity;
637 default:
638 /* By default, we are not interested in this particular scope: go to
639 the outer one. */
640 break;
643 gnat_entity = Scope (gnat_entity);
646 return Empty;
649 /* If N is NULL, set TYPE's context to CONTEXT. Defer this to the processing
650 of N otherwise. */
652 static void
653 defer_or_set_type_context (tree type, tree context,
654 struct deferred_decl_context_node *n)
656 if (n)
657 add_deferred_type_context (n, type);
658 else
659 gnat_set_type_context (type, context);
662 /* Return global_context, but create it first if need be. */
664 static tree
665 get_global_context (void)
667 if (!global_context)
669 global_context = build_translation_unit_decl (NULL_TREE);
670 debug_hooks->register_main_translation_unit (global_context);
673 return global_context;
676 /* Record DECL as belonging to the current lexical scope and use GNAT_NODE
677 for location information and flag propagation. */
679 void
680 gnat_pushdecl (tree decl, Node_Id gnat_node)
682 tree context = NULL_TREE;
683 struct deferred_decl_context_node *deferred_decl_context = NULL;
685 /* If explicitely asked to make DECL global or if it's an imported nested
686 object, short-circuit the regular Scope-based context computation. */
687 if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
689 /* Rely on the GNAT scope, or fallback to the current_function_decl if
690 the GNAT scope reached the global scope, if it reached a subprogram
691 or the declaration is a subprogram or a variable (for them we skip
692 intermediate context types because the subprogram body elaboration
693 machinery and the inliner both expect a subprogram context).
695 Falling back to current_function_decl is necessary for implicit
696 subprograms created by gigi, such as the elaboration subprograms. */
697 bool context_is_subprogram = false;
698 const Entity_Id gnat_scope
699 = get_debug_scope (gnat_node, &context_is_subprogram);
701 if (Present (gnat_scope)
702 && !context_is_subprogram
703 && TREE_CODE (decl) != FUNCTION_DECL
704 && TREE_CODE (decl) != VAR_DECL)
705 /* Always assume the scope has not been elaborated, thus defer the
706 context propagation to the time its elaboration will be
707 available. */
708 deferred_decl_context
709 = add_deferred_decl_context (decl, gnat_scope, force_global);
711 /* External declarations (when force_global > 0) may not be in a
712 local context. */
713 else if (current_function_decl && force_global == 0)
714 context = current_function_decl;
717 /* If either we are forced to be in global mode or if both the GNAT scope and
718 the current_function_decl did not help in determining the context, use the
719 global scope. */
720 if (!deferred_decl_context && !context)
721 context = get_global_context ();
723 /* Functions imported in another function are not really nested.
724 For really nested functions mark them initially as needing
725 a static chain for uses of that flag before unnesting;
726 lower_nested_functions will then recompute it. */
727 if (TREE_CODE (decl) == FUNCTION_DECL
728 && !TREE_PUBLIC (decl)
729 && context
730 && (TREE_CODE (context) == FUNCTION_DECL
731 || decl_function_context (context)))
732 DECL_STATIC_CHAIN (decl) = 1;
734 if (!deferred_decl_context)
735 DECL_CONTEXT (decl) = context;
737 TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));
739 /* Set the location of DECL and emit a declaration for it. */
740 if (Present (gnat_node) && !renaming_from_generic_instantiation_p (gnat_node))
741 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
743 add_decl_expr (decl, gnat_node);
745 /* Put the declaration on the list. The list of declarations is in reverse
746 order. The list will be reversed later. Put global declarations in the
747 globals list and local ones in the current block. But skip TYPE_DECLs
748 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
749 with the debugger and aren't needed anyway. */
750 if (!(TREE_CODE (decl) == TYPE_DECL
751 && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
753 if (DECL_EXTERNAL (decl))
755 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
756 vec_safe_push (builtin_decls, decl);
758 else if (global_bindings_p ())
759 vec_safe_push (global_decls, decl);
760 else
762 DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
763 BLOCK_VARS (current_binding_level->block) = decl;
767 /* For the declaration of a type, set its name either if it isn't already
768 set or if the previous type name was not derived from a source name.
769 We'd rather have the type named with a real name and all the pointer
770 types to the same object have the same node, except when the names are
771 both derived from source names. */
772 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
774 tree t = TREE_TYPE (decl);
776 /* Array and pointer types aren't tagged types in the C sense so we need
777 to generate a typedef in DWARF for them and make sure it is preserved,
778 unless the type is artificial. */
779 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
780 && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
781 || DECL_ARTIFICIAL (decl)))
783 /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
784 generate the typedef in DWARF. Also do that for fat pointer types
785 because, even though they are tagged types in the C sense, they are
786 still XUP types attached to the base array type at this point. */
787 else if (!DECL_ARTIFICIAL (decl)
788 && (TREE_CODE (t) == ARRAY_TYPE
789 || TREE_CODE (t) == POINTER_TYPE
790 || TYPE_IS_FAT_POINTER_P (t)))
792 tree tt = build_variant_type_copy (t);
793 TYPE_NAME (tt) = decl;
794 defer_or_set_type_context (tt,
795 DECL_CONTEXT (decl),
796 deferred_decl_context);
797 TREE_TYPE (decl) = tt;
798 if (TYPE_NAME (t)
799 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
800 && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
801 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
802 else
803 DECL_ORIGINAL_TYPE (decl) = t;
804 /* Array types need to have a name so that they can be related to
805 their GNAT encodings. */
806 if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
807 TYPE_NAME (t) = DECL_NAME (decl);
808 t = NULL_TREE;
810 else if (TYPE_NAME (t)
811 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
812 && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
814 else
815 t = NULL_TREE;
817 /* Propagate the name to all the variants, this is needed for the type
818 qualifiers machinery to work properly (see check_qualified_type).
819 Also propagate the context to them. Note that it will be propagated
820 to all parallel types too thanks to gnat_set_type_context. */
821 if (t)
822 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
823 /* ??? Because of the previous kludge, we can have variants of fat
824 pointer types with different names. */
825 if (!(TYPE_IS_FAT_POINTER_P (t)
826 && TYPE_NAME (t)
827 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
829 TYPE_NAME (t) = decl;
830 defer_or_set_type_context (t,
831 DECL_CONTEXT (decl),
832 deferred_decl_context);
837 /* Create a record type that contains a SIZE bytes long field of TYPE with a
838 starting bit position so that it is aligned to ALIGN bits, and leaving at
839 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
840 record is guaranteed to get. GNAT_NODE is used for the position of the
841 associated TYPE_DECL. */
843 tree
844 make_aligning_type (tree type, unsigned int align, tree size,
845 unsigned int base_align, int room, Node_Id gnat_node)
847 /* We will be crafting a record type with one field at a position set to be
848 the next multiple of ALIGN past record'address + room bytes. We use a
849 record placeholder to express record'address. */
850 tree record_type = make_node (RECORD_TYPE);
851 tree record = build0 (PLACEHOLDER_EXPR, record_type);
853 tree record_addr_st
854 = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));
856 /* The diagram below summarizes the shape of what we manipulate:
858 <--------- pos ---------->
859 { +------------+-------------+-----------------+
860 record =>{ |############| ... | field (type) |
861 { +------------+-------------+-----------------+
862 |<-- room -->|<- voffset ->|<---- size ----->|
865 record_addr vblock_addr
867 Every length is in sizetype bytes there, except "pos" which has to be
868 set as a bit position in the GCC tree for the record. */
869 tree room_st = size_int (room);
870 tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
871 tree voffset_st, pos, field;
873 tree name = TYPE_IDENTIFIER (type);
875 name = concat_name (name, "ALIGN");
876 TYPE_NAME (record_type) = name;
878 /* Compute VOFFSET and then POS. The next byte position multiple of some
879 alignment after some address is obtained by "and"ing the alignment minus
880 1 with the two's complement of the address. */
881 voffset_st = size_binop (BIT_AND_EXPR,
882 fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
883 size_int ((align / BITS_PER_UNIT) - 1));
885 /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype. */
886 pos = size_binop (MULT_EXPR,
887 convert (bitsizetype,
888 size_binop (PLUS_EXPR, room_st, voffset_st)),
889 bitsize_unit_node);
891 /* Craft the GCC record representation. We exceptionally do everything
892 manually here because 1) our generic circuitry is not quite ready to
893 handle the complex position/size expressions we are setting up, 2) we
894 have a strong simplifying factor at hand: we know the maximum possible
895 value of voffset, and 3) we have to set/reset at least the sizes in
896 accordance with this maximum value anyway, as we need them to convey
897 what should be "alloc"ated for this type.
899 Use -1 as the 'addressable' indication for the field to prevent the
900 creation of a bitfield. We don't need one, it would have damaging
901 consequences on the alignment computation, and create_field_decl would
902 make one without this special argument, for instance because of the
903 complex position expression. */
904 field = create_field_decl (get_identifier ("F"), type, record_type, size,
905 pos, 1, -1);
906 TYPE_FIELDS (record_type) = field;
908 SET_TYPE_ALIGN (record_type, base_align);
909 TYPE_USER_ALIGN (record_type) = 1;
911 TYPE_SIZE (record_type)
912 = size_binop (PLUS_EXPR,
913 size_binop (MULT_EXPR, convert (bitsizetype, size),
914 bitsize_unit_node),
915 bitsize_int (align + room * BITS_PER_UNIT));
916 TYPE_SIZE_UNIT (record_type)
917 = size_binop (PLUS_EXPR, size,
918 size_int (room + align / BITS_PER_UNIT));
920 SET_TYPE_MODE (record_type, BLKmode);
921 relate_alias_sets (record_type, type, ALIAS_SET_COPY);
923 /* Declare it now since it will never be declared otherwise. This is
924 necessary to ensure that its subtrees are properly marked. */
925 create_type_decl (name, record_type, true, false, gnat_node);
927 return record_type;
/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
   as the field type of a packed record if IN_RECORD is true, or as the
   component type of a packed array if IN_RECORD is false.  See if we can
   rewrite it either as a type that has non-BLKmode, which we can pack
   tighter in the packed record case, or as a smaller type with at most
   MAX_ALIGN alignment if the value is non-zero.  If so, return the new
   type; if not, return the original type.  */

tree
make_packable_type (tree type, bool in_record, unsigned int max_align)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
  unsigned HOST_WIDE_INT new_size;
  unsigned int align = TYPE_ALIGN (type);
  unsigned int new_align;

  /* No point in doing anything if the size is zero.  */
  if (size == 0)
    return type;

  tree new_type = make_node (TREE_CODE (type));

  /* Copy the name and flags from the old type to that of the new.
     Note that we rely on the pointer equality created here for
     TYPE_NAME to look through conversions in various places.  */
  TYPE_NAME (new_type) = TYPE_NAME (type);
  TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
  TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
  TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
  if (TREE_CODE (type) == RECORD_TYPE)
    TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);

  /* If we are in a record and have a small size, set the alignment to
     try for an integral mode.  Otherwise set it to try for a smaller
     type with BLKmode.  */
  if (in_record && size <= MAX_FIXED_MODE_SIZE)
    {
      /* Round the size up to the next power of 2 so that an integral
	 mode of that precision can exist for the type.  */
      new_size = ceil_pow2 (size);
      new_align = MIN (new_size, BIGGEST_ALIGNMENT);
      SET_TYPE_ALIGN (new_type, new_align);
    }
  else
    {
      /* Do not try to shrink the size if the RM size is not constant.  */
      if (TYPE_CONTAINS_TEMPLATE_P (type)
	  || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
	return type;

      /* Round the RM size up to a unit boundary to get the minimal size
	 for a BLKmode record.  Give up if it's already the size and we
	 don't need to lower the alignment.  */
      new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
      new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
      if (new_size == size && (max_align == 0 || align <= max_align))
	return type;

      /* NEW_SIZE & -NEW_SIZE isolates the lowest set bit, i.e. the
	 largest power of 2 dividing the new size.  */
      new_align = MIN (new_size & -new_size, BIGGEST_ALIGNMENT);
      if (max_align > 0 && new_align > max_align)
	new_align = max_align;
      SET_TYPE_ALIGN (new_type, MIN (align, new_align));
    }

  TYPE_USER_ALIGN (new_type) = 1;

  /* Now copy the fields, keeping the position and size as we don't want
     to change the layout by propagating the packedness downwards.  */
  tree new_field_list = NULL_TREE;
  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    {
      tree new_field_type = TREE_TYPE (field);
      /* Note: this NEW_SIZE shadows the function-level variable of the
	 same name; it only holds the size of the current field.  */
      tree new_field, new_size;

      if (RECORD_OR_UNION_TYPE_P (new_field_type)
	  && !TYPE_FAT_POINTER_P (new_field_type)
	  && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
	new_field_type = make_packable_type (new_field_type, true, max_align);

      /* However, for the last field in a not already packed record type
	 that is of an aggregate type, we need to use the RM size in the
	 packable version of the record type, see finish_record_type.  */
      if (!DECL_CHAIN (field)
	  && !TYPE_PACKED (type)
	  && RECORD_OR_UNION_TYPE_P (new_field_type)
	  && !TYPE_FAT_POINTER_P (new_field_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
	  && TYPE_ADA_SIZE (new_field_type))
	new_size = TYPE_ADA_SIZE (new_field_type);
      else
	new_size = DECL_SIZE (field);

      new_field
	= create_field_decl (DECL_NAME (field), new_field_type, new_type,
			     new_size, bit_position (field),
			     TYPE_PACKED (type),
			     !DECL_NONADDRESSABLE_P (field));

      DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (field);
      SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, field);
      if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
	DECL_QUALIFIER (new_field) = DECL_QUALIFIER (field);

      /* Prepend to the list; the whole list is reversed below.  */
      DECL_CHAIN (new_field) = new_field_list;
      new_field_list = new_field;
    }

  /* Rep level 2: inherit the layout computed above, do not lay out.  */
  finish_record_type (new_type, nreverse (new_field_list), 2, false);
  relate_alias_sets (new_type, type, ALIAS_SET_COPY);
  if (TYPE_STUB_DECL (type))
    SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
			    DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));

  /* If this is a padding record, we never want to make the size smaller
     than what was specified.  For QUAL_UNION_TYPE, also copy the size.  */
  if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      TYPE_SIZE (new_type) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
      new_size = size;
    }
  else
    {
      TYPE_SIZE (new_type) = bitsize_int (new_size);
      TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);
    }

  if (!TYPE_CONTAINS_TEMPLATE_P (type))
    SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));

  compute_record_mode (new_type);

  /* Try harder to get a packable type if necessary, for example
     in case the record itself contains a BLKmode field.  */
  if (in_record && TYPE_MODE (new_type) == BLKmode)
    SET_TYPE_MODE (new_type,
		   mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));

  /* If neither mode nor size nor alignment shrunk, return the old type.  */
  if (TYPE_MODE (new_type) == BLKmode && new_size >= size && max_align == 0)
    return type;

  return new_type;
}
1073 /* Return true if TYPE has an unsigned representation. This needs to be used
1074 when the representation of types whose precision is not equal to their size
1075 is manipulated based on the RM size. */
1077 static inline bool
1078 type_unsigned_for_rm (tree type)
1080 /* This is the common case. */
1081 if (TYPE_UNSIGNED (type))
1082 return true;
1084 /* See the E_Signed_Integer_Subtype case of gnat_to_gnu_entity. */
1085 if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
1086 && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
1087 return true;
1089 return false;
/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
   If TYPE is the best type, return it.  Otherwise, make a new type.  We
   only support new integral and pointer types.  FOR_BIASED is true if
   we are making a biased type.  */

tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error.  Likewise if it's too large to represent.  */
  if (!size_tree || !tree_fits_uhwi_p (size_tree))
    return type;

  size = tree_to_uhwi (size_tree);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
		  && TYPE_BIASED_REPRESENTATION_P (type));

      /* Integer types with precision 0 are forbidden.  */
      if (size == 0)
	size = 1;

      /* Only do something if the type isn't a packed array type and doesn't
	 already have the proper size and the size isn't too large.  */
      if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
	  || (TYPE_PRECISION (type) == size && biased_p == for_biased)
	  || size > LONG_LONG_TYPE_SIZE)
	break;

      biased_p |= for_biased;

      /* The type should be an unsigned type if the original type is unsigned
	 or if the lower bound is constant and non-negative or if the type is
	 biased, see E_Signed_Integer_Subtype case of gnat_to_gnu_entity.  */
      if (type_unsigned_for_rm (type) || biased_p)
	new_type = make_unsigned_type (size);
      else
	new_type = make_signed_type (size);
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      /* Keep the original bounds as the RM bounds: only the precision of
	 the GCC type changes, not the logical range.  */
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
      /* Copy the name to show that it's essentially the same type and
	 not a subrange type.  */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Do something if this is a fat pointer, in which case we
	 may need to return the thin pointer.  */
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
	{
	  /* Fall back to the generic pointer mode if no integral mode of
	     the requested size is a valid pointer mode on the target.  */
	  machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
	  if (!targetm.valid_pointer_mode (p_mode))
	    p_mode = ptr_mode;
	  return
	    build_pointer_type_for_mode
	    (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
	     p_mode, 0);
	}
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
	 may need to return the fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
	return
	  build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  /* No change applicable: hand back the original type.  */
  return type;
}
1179 /* See if the data pointed to by the hash table slot is marked. */
1182 pad_type_hasher::keep_cache_entry (pad_type_hash *&t)
1184 return ggc_marked_p (t->type);
1187 /* Return true iff the padded types are equivalent. */
1189 bool
1190 pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
1192 tree type1, type2;
1194 if (t1->hash != t2->hash)
1195 return 0;
1197 type1 = t1->type;
1198 type2 = t2->type;
1200 /* We consider that the padded types are equivalent if they pad the same type
1201 and have the same size, alignment, RM size and storage order. Taking the
1202 mode into account is redundant since it is determined by the others. */
1203 return
1204 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1205 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1206 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1207 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
1208 && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
1211 /* Look up the padded TYPE in the hash table and return its canonical version
1212 if it exists; otherwise, insert it into the hash table. */
1214 static tree
1215 lookup_and_insert_pad_type (tree type)
1217 hashval_t hashcode;
1218 struct pad_type_hash in, *h;
1220 hashcode
1221 = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
1222 hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
1223 hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
1224 hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);
1226 in.hash = hashcode;
1227 in.type = type;
1228 h = pad_type_hash_table->find_with_hash (&in, hashcode);
1229 if (h)
1230 return h->type;
1232 h = ggc_alloc<pad_type_hash> ();
1233 h->hash = hashcode;
1234 h->type = type;
1235 *pad_type_hash_table->find_slot_with_hash (h, hashcode, INSERT) = h;
1236 return NULL_TREE;
/* Ensure that TYPE has SIZE and ALIGN.  Make and return a new padded type
   if needed.  We have already verified that SIZE and ALIGN are large enough.
   GNAT_ENTITY is used to name the resulting record and to issue a warning.
   IS_COMPONENT_TYPE is true if this is being done for the component type of
   an array.  IS_USER_TYPE is true if the original type needs to be completed.
   DEFINITION is true if this type is being defined.  SET_RM_SIZE is true if
   the RM size of the resulting type is to be set to SIZE too; in this case,
   the padded type is canonicalized before being returned.  */

tree
maybe_pad_type (tree type, tree size, unsigned int align,
		Entity_Id gnat_entity, bool is_component_type,
		bool is_user_type, bool definition, bool set_rm_size)
{
  tree orig_size = TYPE_SIZE (type);
  unsigned int orig_align = TYPE_ALIGN (type);
  tree record, field;

  /* If TYPE is a padded type, see if it agrees with any size and alignment
     we were given.  If so, return the original type.  Otherwise, strip
     off the padding, since we will either be returning the inner type
     or repadding it.  If no size or alignment is specified, use that of
     the original padded type.  */
  if (TYPE_IS_PADDING_P (type))
    {
      if ((!size
	   || operand_equal_p (round_up (size, orig_align), orig_size, 0))
	  && (align == 0 || align == orig_align))
	return type;

      if (!size)
	size = orig_size;
      if (align == 0)
	align = orig_align;

      /* Strip the padding: work on the inner type from now on.  */
      type = TREE_TYPE (TYPE_FIELDS (type));
      orig_size = TYPE_SIZE (type);
      orig_align = TYPE_ALIGN (type);
    }

  /* If the size is either not being changed or is being made smaller (which
     is not done here and is only valid for bitfields anyway), show the size
     isn't changing.  Likewise, clear the alignment if it isn't being
     changed.  Then return if we aren't doing anything.  */
  if (size
      && (operand_equal_p (size, orig_size, 0)
	  || (TREE_CODE (orig_size) == INTEGER_CST
	      && tree_int_cst_lt (size, orig_size))))
    size = NULL_TREE;

  if (align == orig_align)
    align = 0;

  if (align == 0 && !size)
    return type;

  /* If requested, complete the original type and give it a name.  */
  if (is_user_type)
    create_type_decl (get_entity_name (gnat_entity), type,
		      !Comes_From_Source (gnat_entity),
		      !(TYPE_NAME (type)
			&& TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
			&& DECL_IGNORED_P (TYPE_NAME (type))),
		      gnat_entity);

  /* We used to modify the record in place in some cases, but that could
     generate incorrect debugging information.  So make a new record
     type and name.  */
  record = make_node (RECORD_TYPE);
  TYPE_PADDING_P (record) = 1;

  /* ??? Padding types around packed array implementation types will be
     considered as root types in the array descriptor language hook (see
     gnat_get_array_descr_info).  Give them the original packed array type
     name so that the one coming from sources appears in the debugging
     information.  */
  if (TYPE_IMPL_PACKED_ARRAY_P (type)
      && TYPE_ORIGINAL_PACKED_ARRAY (type)
      && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    TYPE_NAME (record) = TYPE_NAME (TYPE_ORIGINAL_PACKED_ARRAY (type));
  else if (Present (gnat_entity))
    TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");

  SET_TYPE_ALIGN (record, align ? align : orig_align);
  TYPE_SIZE (record) = size ? size : orig_size;
  /* TYPE_SIZE is in bits; derive the byte size by rounding up.  */
  TYPE_SIZE_UNIT (record)
    = convert (sizetype,
	       size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
			   bitsize_unit_node));

  /* If we are changing the alignment and the input type is a record with
     BLKmode and a small constant size, try to make a form that has an
     integral mode.  This might allow the padding record to also have an
     integral mode, which will be much more efficient.  There is no point
     in doing so if a size is specified unless it is also a small constant
     size and it is incorrect to do so if we cannot guarantee that the mode
     will be naturally aligned since the field must always be addressable.

     ??? This might not always be a win when done for a stand-alone object:
     since the nominal and the effective type of the object will now have
     different modes, a VIEW_CONVERT_EXPR will be required for converting
     between them and it might be hard to overcome afterwards, including
     at the RTL level when the stand-alone object is accessed as a whole.  */
  if (align != 0
      && RECORD_OR_UNION_TYPE_P (type)
      && TYPE_MODE (type) == BLKmode
      && !TYPE_BY_REFERENCE_P (type)
      && TREE_CODE (orig_size) == INTEGER_CST
      && !TREE_OVERFLOW (orig_size)
      && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
      && (!size
	  || (TREE_CODE (size) == INTEGER_CST
	      && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
    {
      tree packable_type = make_packable_type (type, true);
      if (TYPE_MODE (packable_type) != BLKmode
	  && align >= TYPE_ALIGN (packable_type))
	type = packable_type;
    }

  /* Now create the field with the original size.  */
  field = create_field_decl (get_identifier ("F"), type, record, orig_size,
			     bitsize_zero_node, 0, 1);
  DECL_INTERNAL_P (field) = 1;

  /* We will output additional debug info manually below.  */
  finish_record_type (record, field, 1, false);

  if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    SET_TYPE_DEBUG_TYPE (record, type);

  /* Set the RM size if requested.  */
  if (set_rm_size)
    {
      tree canonical_pad_type;

      SET_TYPE_ADA_SIZE (record, size ? size : orig_size);

      /* If the padded type is complete and has constant size, we canonicalize
	 it by means of the hash table.  This is consistent with the language
	 semantics and ensures that gigi and the middle-end have a common view
	 of these padded types.  */
      if (TREE_CONSTANT (TYPE_SIZE (record))
	  && (canonical_pad_type = lookup_and_insert_pad_type (record)))
	{
	  /* An equivalent padded type already exists: reuse it and skip
	     the debug-info machinery below.  */
	  record = canonical_pad_type;
	  goto built;
	}
    }

  /* Unless debugging information isn't being written for the input type,
     write a record that shows what we are a subtype of and also make a
     variable that indicates our size, if still variable.  */
  if (TREE_CODE (orig_size) != INTEGER_CST
      && TYPE_NAME (record)
      && TYPE_NAME (type)
      && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	   && DECL_IGNORED_P (TYPE_NAME (type))))
    {
      tree name = TYPE_IDENTIFIER (record);
      tree size_unit = TYPE_SIZE_UNIT (record);

      /* A variable that holds the size is required even with no encoding since
	 it will be referenced by debugging information attributes.  At global
	 level, we need a single variable across all translation units.  */
      if (size
	  && TREE_CODE (size) != INTEGER_CST
	  && (definition || global_bindings_p ()))
	{
	  /* Whether or not gnat_entity comes from source, this XVZ variable
	     is a compilation artifact.  */
	  size_unit
	    = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
			       size_unit, true, global_bindings_p (),
			       !definition && global_bindings_p (), false,
			       false, true, true, NULL, gnat_entity);
	  TYPE_SIZE_UNIT (record) = size_unit;
	}

      /* There is no need to show what we are a subtype of when outputting as
	 few encodings as possible: regular debugging infomation makes this
	 redundant.  */
      if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
	{
	  /* The XVS parallel record carries a reference to the inner type
	     for the GNAT encodings consumed by the debugger.  */
	  tree marker = make_node (RECORD_TYPE);
	  tree orig_name = TYPE_IDENTIFIER (type);

	  TYPE_NAME (marker) = concat_name (name, "XVS");
	  finish_record_type (marker,
			      create_field_decl (orig_name,
						 build_reference_type (type),
						 marker, NULL_TREE, NULL_TREE,
						 0, 0),
			      0, true);
	  TYPE_SIZE_UNIT (marker) = size_unit;

	  add_parallel_type (record, marker);
	}
    }

 built:
  /* If a simple size was explicitly given, maybe issue a warning.  */
  if (!size
      || TREE_CODE (size) == COND_EXPR
      || TREE_CODE (size) == MAX_EXPR
      || No (gnat_entity))
    return record;

  /* But don't do it if we are just annotating types and the type is tagged or
     concurrent, since these types aren't fully laid out in this mode.  */
  if (type_annotate_only)
    {
      Entity_Id gnat_type
	= is_component_type
	  ? Component_Type (gnat_entity) : Etype (gnat_entity);

      if (Is_Tagged_Type (gnat_type) || Is_Concurrent_Type (gnat_type))
	return record;
    }

  /* Take the original size as the maximum size of the input if there was an
     unconstrained record involved and round it up to the specified alignment,
     if one was specified, but only for aggregate types.  */
  if (CONTAINS_PLACEHOLDER_P (orig_size))
    orig_size = max_size (orig_size, true);

  if (align && AGGREGATE_TYPE_P (type))
    orig_size = round_up (orig_size, align);

  /* Warn only when SIZE genuinely exceeds the original size, ignoring
     overflowed constants.  */
  if (!operand_equal_p (size, orig_size, 0)
      && !(TREE_CODE (size) == INTEGER_CST
	   && TREE_CODE (orig_size) == INTEGER_CST
	   && (TREE_OVERFLOW (size)
	       || TREE_OVERFLOW (orig_size)
	       || tree_int_cst_lt (size, orig_size))))
    {
      Node_Id gnat_error_node = Empty;

      /* For a packed array, post the message on the original array type.  */
      if (Is_Packed_Array_Impl_Type (gnat_entity))
	gnat_entity = Original_Array_Type (gnat_entity);

      if ((Ekind (gnat_entity) == E_Component
	   || Ekind (gnat_entity) == E_Discriminant)
	  && Present (Component_Clause (gnat_entity)))
	gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
      else if (Present (Size_Clause (gnat_entity)))
	gnat_error_node = Expression (Size_Clause (gnat_entity));

      /* Generate message only for entities that come from source, since
	 if we have an entity created by expansion, the message will be
	 generated for some other corresponding source entity.  */
      if (Comes_From_Source (gnat_entity))
	{
	  if (Present (gnat_error_node))
	    post_error_ne_tree ("{^ }bits of & unused?",
				gnat_error_node, gnat_entity,
				size_diffop (size, orig_size));
	  else if (is_component_type)
	    post_error_ne_tree ("component of& padded{ by ^ bits}?",
				gnat_entity, gnat_entity,
				size_diffop (size, orig_size));
	}
    }

  return record;
}
1507 /* Return a copy of the padded TYPE but with reverse storage order. */
1509 tree
1510 set_reverse_storage_order_on_pad_type (tree type)
1512 tree field, canonical_pad_type;
1514 if (flag_checking)
1516 /* If the inner type is not scalar then the function does nothing. */
1517 tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
1518 gcc_assert (!AGGREGATE_TYPE_P (inner_type)
1519 && !VECTOR_TYPE_P (inner_type));
1522 /* This is required for the canonicalization. */
1523 gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));
1525 field = copy_node (TYPE_FIELDS (type));
1526 type = copy_type (type);
1527 DECL_CONTEXT (field) = type;
1528 TYPE_FIELDS (type) = field;
1529 TYPE_REVERSE_STORAGE_ORDER (type) = 1;
1530 canonical_pad_type = lookup_and_insert_pad_type (type);
1531 return canonical_pad_type ? canonical_pad_type : type;
/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
   If this is a multi-dimensional array type, do this recursively.

   OP may be
   - ALIAS_SET_COPY:     the new set is made a copy of the old one.
   - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
   - ALIAS_SET_SUBSET:   the new set is made a subset of the old one.  */

void
relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
{
  /* Remove any padding from GNU_OLD_TYPE.  It doesn't matter in the case
     of a one-dimensional array, since the padding has the same alias set
     as the field type, but if it's a multi-dimensional array, we need to
     see the inner types.  */
  while (TREE_CODE (gnu_old_type) == RECORD_TYPE
	 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
	     || TYPE_PADDING_P (gnu_old_type)))
    gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));

  /* Unconstrained array types are deemed incomplete and would thus be given
     alias set 0.  Retrieve the underlying array type.  */
  if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_old_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
  if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_new_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));

  /* For a multi-dimensional array, recurse so that every pair of inner
     dimensions gets related as well.  */
  if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
      && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
    relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);

  switch (op)
    {
    case ALIAS_SET_COPY:
      /* The alias set shouldn't be copied between array types with different
	 aliasing settings because this can break the aliasing relationship
	 between the array type and its element type.  */
      if (flag_checking || flag_strict_aliasing)
	gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
		      && TREE_CODE (gnu_old_type) == ARRAY_TYPE
		      && TYPE_NONALIASED_COMPONENT (gnu_new_type)
			 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));

      TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
      break;

    case ALIAS_SET_SUBSET:
    case ALIAS_SET_SUPERSET:
      {
	alias_set_type old_set = get_alias_set (gnu_old_type);
	alias_set_type new_set = get_alias_set (gnu_new_type);

	/* Do nothing if the alias sets conflict.  This ensures that we
	   never call record_alias_subset several times for the same pair
	   or at all for alias set 0.  */
	if (!alias_sets_conflict_p (old_set, new_set))
	  {
	    if (op == ALIAS_SET_SUBSET)
	      record_alias_subset (old_set, new_set);
	    else
	      record_alias_subset (new_set, old_set);
	  }
      }
      break;

    default:
      gcc_unreachable ();
    }

  record_component_aliases (gnu_new_type);
}
1609 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1610 ARTIFICIAL_P is true if the type was generated by the compiler. */
1612 void
1613 record_builtin_type (const char *name, tree type, bool artificial_p)
1615 tree type_decl = build_decl (input_location,
1616 TYPE_DECL, get_identifier (name), type);
1617 DECL_ARTIFICIAL (type_decl) = artificial_p;
1618 TYPE_ARTIFICIAL (type) = artificial_p;
1619 gnat_pushdecl (type_decl, Empty);
1621 if (debug_hooks->type_decl)
1622 debug_hooks->type_decl (type_decl, false);
1625 /* Finish constructing the character type CHAR_TYPE.
1627 In Ada character types are enumeration types and, as a consequence, are
1628 represented in the front-end by integral types holding the positions of
1629 the enumeration values as defined by the language, which means that the
1630 integral types are unsigned.
1632 Unfortunately the signedness of 'char' in C is implementation-defined
1633 and GCC even has the option -fsigned-char to toggle it at run time.
1634 Since GNAT's philosophy is to be compatible with C by default, to wit
1635 Interfaces.C.char is defined as a mere copy of Character, we may need
1636 to declare character types as signed types in GENERIC and generate the
1637 necessary adjustments to make them behave as unsigned types.
1639 The overall strategy is as follows: if 'char' is unsigned, do nothing;
1640 if 'char' is signed, translate character types of CHAR_TYPE_SIZE and
1641 character subtypes with RM_Size = Esize = CHAR_TYPE_SIZE into signed
1642 types. The idea is to ensure that the bit pattern contained in the
1643 Esize'd objects is not changed, even though the numerical value will
1644 be interpreted differently depending on the signedness.
1646 For character types, the bounds are implicit and, therefore, need to
1647 be adjusted. Morever, the debug info needs the unsigned version. */
1649 void
1650 finish_character_type (tree char_type)
1652 if (TYPE_UNSIGNED (char_type))
1653 return;
1655 /* Make a copy of a generic unsigned version since we'll modify it. */
1656 tree unsigned_char_type
1657 = (char_type == char_type_node
1658 ? unsigned_char_type_node
1659 : copy_type (gnat_unsigned_type_for (char_type)));
1661 TYPE_NAME (unsigned_char_type) = TYPE_NAME (char_type);
1662 TYPE_STRING_FLAG (unsigned_char_type) = TYPE_STRING_FLAG (char_type);
1663 TYPE_ARTIFICIAL (unsigned_char_type) = TYPE_ARTIFICIAL (char_type);
1665 SET_TYPE_DEBUG_TYPE (char_type, unsigned_char_type);
1666 SET_TYPE_RM_MIN_VALUE (char_type, TYPE_MIN_VALUE (unsigned_char_type));
1667 SET_TYPE_RM_MAX_VALUE (char_type, TYPE_MAX_VALUE (unsigned_char_type));
/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record type as a fat pointer type.  */

void
finish_fat_pointer_type (tree record_type, tree field_list)
{
  /* Make sure we can put it into a register.  */
  if (STRICT_ALIGNMENT)
    SET_TYPE_ALIGN (record_type, MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE));

  /* Show what it really is.  */
  TYPE_FAT_POINTER_P (record_type) = 1;

  /* Do not emit debug info for it since the types of its fields may still be
     incomplete at this point.  */
  finish_record_type (record_type, field_list, 0, false);

  /* Force type_contains_placeholder_p to return true on it.  Although the
     PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
     type but the representation of the unconstrained array.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;

  /* NOTE(review): the alignment and fat-pointer flag are set before
     finish_record_type lays the record out — presumably the layout relies
     on them; confirm before reordering these statements.  */
}
1693 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1694 finish constructing the record or union type. If REP_LEVEL is zero, this
1695 record has no representation clause and so will be entirely laid out here.
1696 If REP_LEVEL is one, this record has a representation clause and has been
1697 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1698 this record is derived from a parent record and thus inherits its layout;
1699 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1700 additional debug info needs to be output for this type. */
void
finish_record_type (tree record_type, tree field_list, int rep_level,
		    bool debug_info_p)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_IDENTIFIER (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  /* Remember which of the size/alignment fields were already set on entry,
     typically by a representation clause.  */
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  bool had_align = TYPE_ALIGN (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      SET_TYPE_ALIGN (record_type, MAX (BITS_PER_UNIT,
					TYPE_ALIGN (record_type)));

      if (!had_size_unit)
	TYPE_SIZE_UNIT (record_type) = size_zero_node;

      if (!had_size)
	TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
	 out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
	code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
	 case where there is a rep clause but all fields have errors and
	 no longer have a position.  */
      TYPE_SIZE (record_type) = 0;

      /* Ensure we use the traditional GCC layout for bitfields when we need
	 to pack the record type or have a representation clause.  The other
	 possible layout (Microsoft C compiler), if available, would prevent
	 efficient packing in almost all cases.  */
#ifdef TARGET_MS_BITFIELD_LAYOUT
      if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
	decl_attributes (&record_type,
			 tree_cons (get_identifier ("gcc_struct"),
				    NULL_TREE, NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
#endif

      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      /* The Ada size of a record field is its RM size when available,
	 i.e. without trailing padding; otherwise fall back to DECL_SIZE.  */
      if (RECORD_OR_UNION_TYPE_P (type)
	  && !TYPE_FAT_POINTER_P (type)
	  && !TYPE_CONTAINS_TEMPLATE_P (type)
	  && TYPE_ADA_SIZE (type))
	this_ada_size = TYPE_ADA_SIZE (type);
      else
	this_ada_size = this_size;

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
	{
	  unsigned int align = TYPE_ALIGN (type);

	  /* In the general case, type alignment is required.  */
	  if (value_factor_p (pos, align))
	    {
	      /* The enclosing record type must be sufficiently aligned.
		 Otherwise, if no alignment was specified for it and it
		 has been laid out already, bump its alignment to the
		 desired one if this is compatible with its size and
		 maximum alignment, if any.  */
	      if (TYPE_ALIGN (record_type) >= align)
		{
		  SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
		  DECL_BIT_FIELD (field) = 0;
		}
	      else if (!had_align
		       && rep_level == 0
		       && value_factor_p (TYPE_SIZE (record_type), align)
		       && (!TYPE_MAX_ALIGN (record_type)
			   || TYPE_MAX_ALIGN (record_type) >= align))
		{
		  SET_TYPE_ALIGN (record_type, align);
		  SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
		  DECL_BIT_FIELD (field) = 0;
		}
	    }

	  /* In the non-strict alignment case, only byte alignment is
	     required.  */
	  if (!STRICT_ALIGNMENT
	      && DECL_BIT_FIELD (field)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    DECL_BIT_FIELD (field) = 0;
	}

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
	 field is technically not addressable.  Except that it can actually
	 be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
	  && !(DECL_MODE (field) == BLKmode
	       && value_factor_p (pos, BITS_PER_UNIT)))
	DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
	 a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
	SET_TYPE_ALIGN (record_type,
			MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field)));

      switch (code)
	{
	case UNION_TYPE:
	  /* A union is as large as its largest member.  */
	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
	  size = size_binop (MAX_EXPR, size, this_size);
	  break;

	case QUAL_UNION_TYPE:
	  /* Build a COND_EXPR selecting the size of the active variant.  */
	  ada_size
	    = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			   this_ada_size, ada_size);
	  size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			      this_size, size);
	  break;

	case RECORD_TYPE:
	  /* Since we know here that all fields are sorted in order of
	     increasing bit position, the size of the record is one
	     higher than the ending bit of the last field processed
	     unless we have a rep clause, since in that case we might
	     have a field outside a QUAL_UNION_TYPE that has a higher ending
	     position.  So use a MAX in that case.  Also, if this field is a
	     QUAL_UNION_TYPE, we need to take into account the previous size in
	     the case of empty variants.  */
	  ada_size
	    = merge_sizes (ada_size, pos, this_ada_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  size
	    = merge_sizes (size, pos, this_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  if (code == QUAL_UNION_TYPE)
    nreverse (field_list);

  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
	 than what was specified in it, if any.  */
      if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
	size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_FAT_POINTER_P (record_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (record_type))
	SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
	{
	  tree size_unit = had_size_unit
			   ? TYPE_SIZE_UNIT (record_type)
			   : convert (sizetype,
				      size_binop (CEIL_DIV_EXPR, size,
						  bitsize_unit_node));
	  unsigned int align = TYPE_ALIGN (record_type);

	  TYPE_SIZE (record_type) = variable_size (round_up (size, align));
	  TYPE_SIZE_UNIT (record_type)
	    = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

	  compute_record_mode (record_type);
	}
    }

  /* Reset the TYPE_MAX_ALIGN field since it's private to gigi.  */
  TYPE_MAX_ALIGN (record_type) = 0;

  if (debug_info_p)
    rest_of_record_type_compilation (record_type);
}
1916 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
1917 PARRALEL_TYPE has no context and its computation is not deferred yet, also
1918 propagate TYPE's context to PARALLEL_TYPE's or defer its propagation to the
1919 moment TYPE will get a context. */
1921 void
1922 add_parallel_type (tree type, tree parallel_type)
1924 tree decl = TYPE_STUB_DECL (type);
1926 while (DECL_PARALLEL_TYPE (decl))
1927 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1929 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1931 /* If PARALLEL_TYPE already has a context, we are done. */
1932 if (TYPE_CONTEXT (parallel_type))
1933 return;
1935 /* Otherwise, try to get one from TYPE's context. If so, simply propagate
1936 it to PARALLEL_TYPE. */
1937 if (TYPE_CONTEXT (type))
1938 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
1940 /* Otherwise TYPE has not context yet. We know it will have one thanks to
1941 gnat_pushdecl and then its context will be propagated to PARALLEL_TYPE,
1942 so we have nothing to do in this case. */
1945 /* Return true if TYPE has a parallel type. */
1947 static bool
1948 has_parallel_type (tree type)
1950 tree decl = TYPE_STUB_DECL (type);
1952 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
/* Wrap up compilation of RECORD_TYPE, i.e. output additional debug info
   associated with it.  It need not be invoked directly in most cases as
   finish_record_type takes care of doing so.  */

void
rest_of_record_type_compilation (tree record_type)
{
  bool var_size = false;
  tree field;

  /* If this is a padded type, the bulk of the debug info has already been
     generated for the field's type.  */
  if (TYPE_IS_PADDING_P (record_type))
    return;

  /* If the type already has a parallel type (XVS type), then we're done.  */
  if (has_parallel_type (record_type))
    return;

  for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
	 whether or not the record does.  For example, if we have a union,
	 it may be that all fields, rounded up to the alignment, have the
	 same size, in which case we'll use that size.  But the debug
	 output routines (except Dwarf2) won't be able to output the fields,
	 so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
	  /* If a field has a non-constant qualifier, the record will have
	     variable size too.  */
	  || (TREE_CODE (record_type) == QUAL_UNION_TYPE
	      && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
	{
	  var_size = true;
	  break;
	}
    }

  /* If this record type is of variable size, make a parallel record type that
     will tell the debugger how the former is laid out (see exp_dbug.ads).  */
  if (var_size && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
    {
      tree new_record_type
	= make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
		     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
      tree last_pos = bitsize_zero_node;
      tree old_field, prev_old_field = NULL_TREE;

      /* The parallel type is named after the original with an XVU (variant
	 part) or XVE suffix, per the encodings in exp_dbug.ads.  */
      new_name
	= concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
		       ? "XVU" : "XVE");
      TYPE_NAME (new_record_type) = new_name;
      SET_TYPE_ALIGN (new_record_type, BIGGEST_ALIGNMENT);
      TYPE_STUB_DECL (new_record_type)
	= create_type_stub_decl (new_name, new_record_type);
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
	= DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      gnat_pushdecl (TYPE_STUB_DECL (new_record_type), Empty);
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
	= size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      /* Now scan all the fields, replacing each field with a new field
	 corresponding to the new encoding.  */
      for (old_field = TYPE_FIELDS (record_type); old_field;
	   old_field = DECL_CHAIN (old_field))
	{
	  tree field_type = TREE_TYPE (old_field);
	  tree field_name = DECL_NAME (old_field);
	  tree curpos = bit_position (old_field);
	  tree pos, new_field;
	  bool var = false;
	  unsigned int align = 0;

	  /* We're going to do some pattern matching below so remove as many
	     conversions as possible.  */
	  curpos = remove_conversions (curpos, true);

	  /* See how the position was modified from the last position.

	     There are two basic cases we support: a value was added
	     to the last position or the last position was rounded to
	     a boundary and then something was added.  Check for the
	     first case first.  If not, see if there is any evidence
	     of rounding.  If so, round the last position and retry.

	     If this is a union, the position can be taken as zero.  */
	  if (TREE_CODE (new_record_type) == UNION_TYPE)
	    pos = bitsize_zero_node;
	  else
	    pos = compute_related_constant (curpos, last_pos);

	  if (!pos
	      && TREE_CODE (curpos) == MULT_EXPR
	      && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
	    {
	      tree offset = TREE_OPERAND (curpos, 0);
	      align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align = scale_by_factor_of (offset, align);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  else if (!pos
		   && TREE_CODE (curpos) == PLUS_EXPR
		   && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
		   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
		   && tree_fits_uhwi_p
		      (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
	    {
	      /* Position is offset * factor + addend: the usable alignment
		 is bounded both by the factor and by the low bits of the
		 addend (addend & -addend isolates its lowest set bit).  */
	      tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
	      unsigned HOST_WIDE_INT addend
		= tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align
		= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
	      align = scale_by_factor_of (offset, align);
	      align = MIN (align, addend & -addend);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  else if (potential_alignment_gap (prev_old_field, old_field, pos))
	    {
	      align = TYPE_ALIGN (field_type);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }

	  /* If we can't compute a position, set it to zero.

	     ??? We really should abort here, but it's too much work
	     to get this correct for all cases.  */
	  if (!pos)
	    pos = bitsize_zero_node;

	  /* See if this type is variable-sized and make a pointer type
	     and indicate the indirection if so.  Beware that the debug
	     back-end may adjust the position computed above according
	     to the alignment of the field type, i.e. the pointer type
	     in this case, if we don't preventively counter that.  */
	  if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
	    {
	      field_type = build_pointer_type (field_type);
	      if (align != 0 && TYPE_ALIGN (field_type) > align)
		{
		  field_type = copy_type (field_type);
		  SET_TYPE_ALIGN (field_type, align);
		}
	      var = true;
	    }

	  /* Make a new field name, if necessary.  */
	  if (var || align != 0)
	    {
	      char suffix[16];

	      if (align != 0)
		sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
			 align / BITS_PER_UNIT);
	      else
		strcpy (suffix, "XVL");

	      field_name = concat_name (field_name, suffix);
	    }

	  new_field
	    = create_field_decl (field_name, field_type, new_record_type,
				 DECL_SIZE (old_field), pos, 0, 0);
	  /* Fields are prepended and the whole list reversed afterwards.  */
	  DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
	  TYPE_FIELDS (new_record_type) = new_field;

	  /* If old_field is a QUAL_UNION_TYPE, take its size as being
	     zero.  The only time it's not the last field of the record
	     is when there are other components at fixed positions after
	     it (meaning there was a rep clause for every field) and we
	     want to be able to encode them.  */
	  last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
				 (TREE_CODE (TREE_TYPE (old_field))
				  == QUAL_UNION_TYPE)
				 ? bitsize_zero_node
				 : DECL_SIZE (old_field));
	  prev_old_field = old_field;
	}

      TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));

      add_parallel_type (record_type, new_record_type);
    }
}
/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is true if this
   represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
   replace a value of zero with the old size.  If HAS_REP is true, we take the
   MAX of the end position of this field with LAST_SIZE.  In all other cases,
   we use FIRST_BIT plus SIZE.  Return an expression for the size.  */

static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool special,
	     bool has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new_size;

  if (!special || TREE_CODE (size) != COND_EXPR)
    {
      /* Ordinary field: its end position is FIRST_BIT + SIZE, possibly
	 MAXed with the previous size when a rep clause may place fields
	 out of order.  */
      new_size = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
	new_size = size_binop (MAX_EXPR, last_size, new_size);
    }

  /* QUAL_UNION_TYPE variant: recurse into each arm of the COND_EXPR,
     replacing a zero-sized (empty) variant with the previous size.  */
  else
    new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
			    integer_zerop (TREE_OPERAND (size, 1))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 1),
						       1, has_rep),
			    integer_zerop (TREE_OPERAND (size, 2))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 2),
						       1, has_rep));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
    new_size = TREE_OPERAND (new_size, 0);

  return new_size;
}
2185 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2186 related by the addition of a constant. Return that constant if so. */
2188 static tree
2189 compute_related_constant (tree op0, tree op1)
2191 tree op0_var, op1_var;
2192 tree op0_con = split_plus (op0, &op0_var);
2193 tree op1_con = split_plus (op1, &op1_var);
2194 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
2196 if (operand_equal_p (op0_var, op1_var, 0))
2197 return result;
2198 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
2199 return result;
2200 else
2201 return 0;
2204 /* Utility function of above to split a tree OP which may be a sum, into a
2205 constant part, which is returned, and a variable part, which is stored
2206 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2207 bitsizetype. */
2209 static tree
2210 split_plus (tree in, tree *pvar)
2212 /* Strip conversions in order to ease the tree traversal and maximize the
2213 potential for constant or plus/minus discovery. We need to be careful
2214 to always return and set *pvar to bitsizetype trees, but it's worth
2215 the effort. */
2216 in = remove_conversions (in, false);
2218 *pvar = convert (bitsizetype, in);
2220 if (TREE_CODE (in) == INTEGER_CST)
2222 *pvar = bitsize_zero_node;
2223 return convert (bitsizetype, in);
2225 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2227 tree lhs_var, rhs_var;
2228 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2229 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2231 if (lhs_var == TREE_OPERAND (in, 0)
2232 && rhs_var == TREE_OPERAND (in, 1))
2233 return bitsize_zero_node;
2235 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2236 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2238 else
2239 return bitsize_zero_node;
/* Return a copy of TYPE but safe to modify in any way.  */

tree
copy_type (tree type)
{
  tree new_type = copy_node (type);

  /* Unshare the language-specific data.  The slot must be cleared first so
     that the SET_* macro allocates a fresh structure instead of writing
     through the shared one.  */
  if (TYPE_LANG_SPECIFIC (type))
    {
      TYPE_LANG_SPECIFIC (new_type) = NULL;
      SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
    }

  /* And the contents of the language-specific slot if needed.  */
  if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
      && TYPE_RM_VALUES (type))
    {
      TYPE_RM_VALUES (new_type) = NULL_TREE;
      SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
    }

  /* copy_node clears this field instead of copying it, because it is
     aliased with TREE_CHAIN.  */
  TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);

  /* Detach the copy from the variant/pointer machinery of the original so
     that modifying it cannot affect TYPE.  */
  TYPE_POINTER_TO (new_type) = NULL_TREE;
  TYPE_REFERENCE_TO (new_type) = NULL_TREE;
  TYPE_MAIN_VARIANT (new_type) = new_type;
  TYPE_NEXT_VARIANT (new_type) = NULL_TREE;
  TYPE_CANONICAL (new_type) = new_type;

  return new_type;
}
2279 /* Return a subtype of sizetype with range MIN to MAX and whose
2280 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2281 of the associated TYPE_DECL. */
2283 tree
2284 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2286 /* First build a type for the desired range. */
2287 tree type = build_nonshared_range_type (sizetype, min, max);
2289 /* Then set the index type. */
2290 SET_TYPE_INDEX_TYPE (type, index);
2291 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2293 return type;
2296 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2297 sizetype is used. */
2299 tree
2300 create_range_type (tree type, tree min, tree max)
2302 tree range_type;
2304 if (!type)
2305 type = sizetype;
2307 /* First build a type with the base range. */
2308 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2309 TYPE_MAX_VALUE (type));
2311 /* Then set the actual range. */
2312 SET_TYPE_RM_MIN_VALUE (range_type, min);
2313 SET_TYPE_RM_MAX_VALUE (range_type, max);
2315 return range_type;
2318 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
2319 NAME gives the name of the type to be used in the declaration. */
2321 tree
2322 create_type_stub_decl (tree name, tree type)
2324 tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
2325 DECL_ARTIFICIAL (type_decl) = 1;
2326 TYPE_ARTIFICIAL (type) = 1;
2327 return type_decl;
/* Return a TYPE_DECL node for TYPE.  NAME gives the name of the type to be
   used in the declaration.  ARTIFICIAL_P is true if the declaration was
   generated by the compiler.  DEBUG_INFO_P is true if we need to write
   debug information about this type.  GNAT_NODE is used for the position
   of the decl.  */

tree
create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
		  Node_Id gnat_node)
{
  enum tree_code code = TREE_CODE (type);
  /* Whether TYPE is already named by a TYPE_DECL.  */
  bool is_named
    = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
  tree type_decl;

  /* Only the builtin TYPE_STUB_DECL should be used for dummy types.  */
  gcc_assert (!TYPE_IS_DUMMY_P (type));

  /* If the type hasn't been named yet, we're naming it; preserve an existing
     TYPE_STUB_DECL that has been attached to it for some purpose.  */
  if (!is_named && TYPE_STUB_DECL (type))
    {
      type_decl = TYPE_STUB_DECL (type);
      DECL_NAME (type_decl) = name;
    }
  else
    type_decl = build_decl (input_location, TYPE_DECL, name, type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;
  TYPE_ARTIFICIAL (type) = artificial_p;

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (type_decl, gnat_node);

  /* If we're naming the type, equate the TYPE_STUB_DECL to the name.  This
     causes the name to be also viewed as a "tag" by the debug back-end, with
     the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
     types in DWARF.

     Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
     from multiple contexts, and "type_decl" references a copy of it: in such a
     case, do not mess TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
     with the mechanism above.  */
  if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
    TYPE_STUB_DECL (type) = type_decl;

  /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
     back-end doesn't support, and for others if we don't need to.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;

  return type_decl;
}
/* Return a VAR_DECL or CONST_DECL node.

   NAME gives the name of the variable.  ASM_NAME is its assembler name
   (if provided).  TYPE is its data type (a GCC ..._TYPE node).  INIT is
   the GCC tree for an optional initial expression; NULL_TREE if none.

   CONST_FLAG is true if this variable is constant, in which case we might
   return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.

   PUBLIC_FLAG is true if this is for a reference to a public entity or for a
   definition to be made visible outside of the current compilation unit, for
   instance variable definitions in a package specification.

   EXTERN_FLAG is true when processing an external variable declaration (as
   opposed to a definition: no storage is to be allocated for the variable).

   STATIC_FLAG is only relevant when not at top level and indicates whether
   to always allocate storage to the variable.

   VOLATILE_FLAG is true if this variable is declared as volatile.

   ARTIFICIAL_P is true if the variable was generated by the compiler.

   DEBUG_INFO_P is true if we need to write debug information for it.

   ATTR_LIST is the list of attributes to be attached to the variable.

   GNAT_NODE is used for the position of the decl.  */

tree
create_var_decl (tree name, tree asm_name, tree type, tree init,
		 bool const_flag, bool public_flag, bool extern_flag,
		 bool static_flag, bool volatile_flag, bool artificial_p,
		 bool debug_info_p, struct attrib *attr_list,
		 Node_Id gnat_node, bool const_decl_allowed_p)
{
  /* Whether the object has static storage duration, either explicitly or by
     virtue of being declared at the global level.  */
  const bool static_storage = static_flag || global_bindings_p ();

  /* Whether the initializer is constant: for an external object or an object
     with static storage duration, we check that the initializer is a valid
     constant expression for initializing a static variable; otherwise, we
     only check that it is constant.  */
  const bool init_const
    = (init
       && gnat_types_compatible_p (type, TREE_TYPE (init))
       && (extern_flag || static_storage
	   ? initializer_constant_valid_p (init, TREE_TYPE (init))
	     != NULL_TREE
	   : TREE_CONSTANT (init)));

  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
     case the initializer may be used in lieu of the DECL node (as done in
     Identifier_to_gnu).  This is useful to prevent the need of elaboration
     code when an identifier for which such a DECL is made is in turn used
     as an initializer.  We used to rely on CONST_DECL vs VAR_DECL for this,
     but extra constraints apply to this choice (see below) and they are not
     relevant to the distinction we wish to make.  */
  const bool constant_p = const_flag && init_const;

  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
     and may be used for scalars in general but not for aggregates.  */
  tree var_decl
    = build_decl (input_location,
		  (constant_p
		   && const_decl_allowed_p
		   && !AGGREGATE_TYPE_P (type) ? CONST_DECL : VAR_DECL),
		  name, type);

  /* Detect constants created by the front-end to hold 'reference to function
     calls for stabilization purposes.  This is needed for renaming.  */
  if (const_flag && init && POINTER_TYPE_P (type))
    {
      tree inner = init;
      if (TREE_CODE (inner) == COMPOUND_EXPR)
	inner = TREE_OPERAND (inner, 1);
      inner = remove_conversions (inner, true);
      if (TREE_CODE (inner) == ADDR_EXPR
	  && ((TREE_CODE (TREE_OPERAND (inner, 0)) == CALL_EXPR
	       && !call_is_atomic_load (TREE_OPERAND (inner, 0)))
	      || (TREE_CODE (TREE_OPERAND (inner, 0)) == VAR_DECL
		  && DECL_RETURN_VALUE_P (TREE_OPERAND (inner, 0)))))
	DECL_RETURN_VALUE_P (var_decl) = 1;
    }

  /* If this is external, throw away any initializations (they will be done
     elsewhere) unless this is a constant for which we would like to remain
     able to get the initializer.  If we are defining a global here, leave a
     constant initialization and save any variable elaborations for the
     elaboration routine.  If we are just annotating types, throw away the
     initialization if it isn't a constant.  */
  if ((extern_flag && init && !constant_p)
      || (type_annotate_only && init && !TREE_CONSTANT (init)))
    {
      init = NULL_TREE;

      /* In LTO mode, also clear TREE_READONLY the same way add_decl_expr
	 would do it if the initializer was not thrown away here, as the
	 WPA phase requires a consistent view across compilation units.  */
      if (const_flag && flag_generate_lto)
	{
	  const_flag = false;
	  DECL_READONLY_ONCE_ELAB (var_decl) = 1;
	}
    }

  /* At the global level, a non-constant initializer generates elaboration
     statements.  Check that such statements are allowed, that is to say,
     not violating a No_Elaboration_Code restriction.  */
  if (init && !init_const && global_bindings_p ())
    Check_Elaboration_Code_Allowed (gnat_node);

  /* Attach the initializer, if any.  */
  DECL_INITIAL (var_decl) = init;

  /* Directly set some flags.  */
  DECL_ARTIFICIAL (var_decl) = artificial_p;
  DECL_EXTERNAL (var_decl) = extern_flag;

  TREE_CONSTANT (var_decl) = constant_p;
  TREE_READONLY (var_decl) = const_flag;

  /* The object is public if it is external or if it is declared public
     and has static storage duration.  */
  TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);

  /* We need to allocate static storage for an object with static storage
     duration if it isn't external.  */
  TREE_STATIC (var_decl) = !extern_flag && static_storage;

  TREE_SIDE_EFFECTS (var_decl)
    = TREE_THIS_VOLATILE (var_decl)
    = TYPE_VOLATILE (type) | volatile_flag;

  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && TREE_PUBLIC (var_decl)
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;

  /* Do not emit debug info for a CONST_DECL if optimization isn't enabled,
     since we will create an associated variable.  Likewise for an external
     constant whose initializer is not absolute, because this would mean a
     global relocation in a read-only section which runs afoul of the PE-COFF
     run-time relocation mechanism.  */
  if (!debug_info_p
      || (TREE_CODE (var_decl) == CONST_DECL && !optimize)
      || (extern_flag
	  && constant_p
	  && init
	  && initializer_constant_valid_p (init, TREE_TYPE (init))
	     != null_pointer_node))
    DECL_IGNORED_P (var_decl) = 1;

  /* ??? Some attributes cannot be applied to CONST_DECLs.  */
  if (TREE_CODE (var_decl) == VAR_DECL)
    process_attributes (&var_decl, &attr_list, true, gnat_node);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
    {
      /* Let the target mangle the name if this isn't a verbatim asm.  */
      if (*IDENTIFIER_POINTER (asm_name) != '*')
	asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);

      SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
    }

  return var_decl;
}
2565 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2567 static bool
2568 aggregate_type_contains_array_p (tree type)
2570 switch (TREE_CODE (type))
2572 case RECORD_TYPE:
2573 case UNION_TYPE:
2574 case QUAL_UNION_TYPE:
2576 tree field;
2577 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2578 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2579 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2580 return true;
2581 return false;
2584 case ARRAY_TYPE:
2585 return true;
2587 default:
2588 gcc_unreachable ();
/* Return a FIELD_DECL node.  NAME is the field's name, TYPE is its type and
   RECORD_TYPE is the type of the enclosing record.  If SIZE is nonzero, it
   is the specified size of the field.  If POS is nonzero, it is the bit
   position.  PACKED is 1 if the enclosing record is packed, -1 if it has
   Component_Alignment of Storage_Unit.  If ADDRESSABLE is nonzero, it
   means we are allowed to take the address of the field; if it is negative,
   we should not make a bitfield, which is used by make_aligning_type.  */

tree
create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
		   int packed, int addressable)
{
  tree field_decl = build_decl (input_location, FIELD_DECL, name, type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
     Likewise for an aggregate without specified position that contains an
     array, because in this case slices of variable length of this array
     must be handled by GCC and variable-sized objects need to be aligned
     to at least a byte boundary.  */
  if (packed && (TYPE_MODE (type) == BLKmode
		 || (!pos
		     && AGGREGATE_TYPE_P (type)
		     && aggregate_type_contains_array_p (type))))
    SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (type);
      /* A BLKmode rm_size need not be a whole number of bytes; round it up
	 so the bitfield checks below see a byte-aligned size.  */
      if (TYPE_MODE (type) == BLKmode)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* If we may, according to ADDRESSABLE, make a bitfield if a size is
     specified for two reasons: first if the size differs from the natural
     size.  Second, if the alignment is insufficient.  There are a number of
     ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     because no such entity requiring bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (addressable >= 0
      && size
      && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
      && (!tree_int_cst_equal (size, TYPE_SIZE (type))
	  || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (!packed && !pos)
	{
	  if (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
	    SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type));
	  else
	    SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
	}
    }

  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;

  /* Bump the alignment if need be, either for bitfield/packing purposes or
     to satisfy the type requirements if no such consideration applies.  When
     we get the alignment from the type, indicate if this is from an explicit
     user request, which prevents stor-layout from lowering it later on.  */
  unsigned int bit_align
    = (DECL_BIT_FIELD (field_decl) ? 1
       : packed && TYPE_MODE (type) != BLKmode ? BITS_PER_UNIT : 0);

  if (bit_align > DECL_ALIGN (field_decl))
    SET_DECL_ALIGN (field_decl, bit_align);
  else if (!bit_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
    {
      SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
      DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
    }

  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (tree_fits_uhwi_p (pos))
	known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);
    }

  /* In addition to what our caller says, claim the field is addressable if we
     know that its type is not suitable.

     The field may also be "technically" nonaddressable, meaning that even if
     we attempt to take the field's address we will actually get the address
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
     value we have at this point is not accurate enough, so we don't account
     for this here and let finish_record_type decide.  */
  if (!addressable && !type_for_nonaliased_component_p (type))
    addressable = 1;

  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}
/* Return a PARM_DECL node with NAME and TYPE.  */

tree
create_param_decl (tree name, tree type)
{
  tree param_decl = build_decl (input_location, PARM_DECL, name, type);

  /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
     can lead to various ABI violations.  */
  if (targetm.calls.promote_prototypes (NULL_TREE)
      && INTEGRAL_TYPE_P (type)
      && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
    {
      /* We have to be careful about biased types here.  Make a subtype
	 of integer_type_node with the proper biasing.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  tree subtype
	    = make_unsigned_type (TYPE_PRECISION (integer_type_node));
	  /* Keep the base type and the Ada (RM) bounds of the original
	     biased type so conversions are still performed correctly.  */
	  TREE_TYPE (subtype) = integer_type_node;
	  TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
	  SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
	  SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
	  type = subtype;
	}
      else
	type = integer_type_node;
    }

  /* DECL_ARG_TYPE is the promoted type actually passed by the caller.  */
  DECL_ARG_TYPE (param_decl) = type;
  return param_decl;
}
/* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
   a TYPE.  If IN_PLACE is true, the tree pointed to by NODE should not be
   changed.  GNAT_NODE is used for the position of error messages.  */

void
process_attributes (tree *node, struct attrib **attr_list, bool in_place,
		    Node_Id gnat_node)
{
  struct attrib *attr;

  for (attr = *attr_list; attr; attr = attr->next)
    switch (attr->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
	/* Point error reporting at the Ada source construct while the
	   generic attribute machinery runs.  */
	Sloc_to_locus (Sloc (gnat_node), &input_location);
	decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
			 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
	break;

      case ATTR_LINK_ALIAS:
	if (!DECL_EXTERNAL (*node))
	  {
	    TREE_STATIC (*node) = 1;
	    assemble_alias (*node, attr->name);
	  }
	break;

      case ATTR_WEAK_EXTERNAL:
	if (SUPPORTS_WEAK)
	  declare_weak (*node);
	else
	  post_error ("?weak declarations not supported on this target",
		      attr->error_point);
	break;

      case ATTR_LINK_SECTION:
	if (targetm_common.have_named_sections)
	  {
	    set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
	    /* A decl with an explicit section cannot be a common symbol.  */
	    DECL_COMMON (*node) = 0;
	  }
	else
	  post_error ("?section attributes are not supported for this target",
		      attr->error_point);
	break;

      case ATTR_LINK_CONSTRUCTOR:
	DECL_STATIC_CONSTRUCTOR (*node) = 1;
	TREE_USED (*node) = 1;
	break;

      case ATTR_LINK_DESTRUCTOR:
	DECL_STATIC_DESTRUCTOR (*node) = 1;
	TREE_USED (*node) = 1;
	break;

      case ATTR_THREAD_LOCAL_STORAGE:
	set_decl_tls_model (*node, decl_default_tls_model (*node));
	/* TLS variables cannot be common symbols.  */
	DECL_COMMON (*node) = 0;
	break;
      }

  /* The whole list has been consumed; clear it for the caller.  */
  *attr_list = NULL;
}
2833 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2834 a power of 2. */
2836 bool
2837 value_factor_p (tree value, HOST_WIDE_INT factor)
2839 if (tree_fits_uhwi_p (value))
2840 return tree_to_uhwi (value) % factor == 0;
2842 if (TREE_CODE (value) == MULT_EXPR)
2843 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2844 || value_factor_p (TREE_OPERAND (value, 1), factor));
2846 return false;
/* Return whether GNAT_NODE is a defining identifier for a renaming that comes
   from the parameter association for the instantiation of a generic.  We do
   not want to emit source location for them: the code generated for their
   initialization is likely to disturb debugging.  */

bool
renaming_from_generic_instantiation_p (Node_Id gnat_node)
{
  /* Filter out anything that is not a compiler-generated object renaming.  */
  if (Nkind (gnat_node) != N_Defining_Identifier
      || !IN (Ekind (gnat_node), Object_Kind)
      || Comes_From_Source (gnat_node)
      || !Present (Renamed_Object (gnat_node)))
    return false;

  /* Get the object declaration of the renamed object, if any and if the
     renamed object is a mere identifier.  */
  gnat_node = Renamed_Object (gnat_node);
  if (Nkind (gnat_node) != N_Identifier)
    return false;

  gnat_node = Entity (gnat_node);
  if (!Present (Parent (gnat_node)))
    return false;

  /* The renaming comes from a generic instantiation iff the declaration of
     the renamed entity carries a generic association.  */
  gnat_node = Parent (gnat_node);
  return
    (Present (gnat_node)
     && Nkind (gnat_node) == N_Object_Declaration
     && Present (Corresponding_Generic_Association (gnat_node)));
}
/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
   feed it with the elaboration of GNAT_SCOPE.  Return the queue node so the
   caller can attach types to it (see add_deferred_type_context).  The node
   is freed by process_deferred_decl_context once processed.  */

static struct deferred_decl_context_node *
add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
{
  struct deferred_decl_context_node *new_node;

  new_node
    = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
  new_node->decl = decl;
  new_node->gnat_scope = gnat_scope;
  new_node->force_global = force_global;
  new_node->types.create (1);
  /* Push onto the front of the global queue.  */
  new_node->next = deferred_decl_context_queue;
  deferred_decl_context_queue = new_node;
  return new_node;
}
/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed.  */

static void
add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
{
  /* TYPE receives its TYPE_CONTEXT when N is resolved in
     process_deferred_decl_context.  */
  n->types.safe_push (type);
}
/* Get the GENERIC node corresponding to GNAT_SCOPE, if available.  Return
   NULL_TREE if it is not available.  */

static tree
compute_deferred_decl_context (Entity_Id gnat_scope)
{
  tree context;

  /* The scope must already have a GNU translation.  */
  if (present_gnu_tree (gnat_scope))
    context = get_gnu_tree (gnat_scope);
  else
    return NULL_TREE;

  if (TREE_CODE (context) == TYPE_DECL)
    {
      const tree context_type = TREE_TYPE (context);

      /* Skip dummy types: only the final ones can appear in the context
	 chain.  */
      if (TYPE_DUMMY_P (context_type))
	return NULL_TREE;

      /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
	 chain.  */
      else
	context = context_type;
    }

  return context;
}
/* Try to process all deferred nodes in the queue.  Keep in the queue the ones
   that cannot be processed yet, remove the other ones.  If FORCE is true,
   force the processing for all nodes, use the global context when nodes don't
   have a GNU translation.  */

void
process_deferred_decl_context (bool force)
{
  /* IT is a pointer to the link being examined, so unlinking a node is a
     simple pointer rewrite.  */
  struct deferred_decl_context_node **it = &deferred_decl_context_queue;
  struct deferred_decl_context_node *node;

  while (*it != NULL)
    {
      bool processed = false;
      tree context = NULL_TREE;
      Entity_Id gnat_scope;

      node = *it;

      /* If FORCE, get the innermost elaborated scope.  Otherwise, just try to
	 get the first scope.  */
      gnat_scope = node->gnat_scope;
      while (Present (gnat_scope))
	{
	  context = compute_deferred_decl_context (gnat_scope);
	  if (!force || context)
	    break;
	  gnat_scope = get_debug_scope (gnat_scope, NULL);
	}

      /* Imported declarations must not be in a local context (i.e. not inside
	 a function).  */
      if (context && node->force_global > 0)
	{
	  tree ctx = context;

	  /* Walk up the context chain to assert no enclosing function.  */
	  while (ctx)
	    {
	      gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
	      ctx = DECL_P (ctx) ? DECL_CONTEXT (ctx) : TYPE_CONTEXT (ctx);
	    }
	}

      /* If FORCE, we want to get rid of all nodes in the queue: in case there
	 was no elaborated scope, use the global context.  */
      if (force && !context)
	context = get_global_context ();

      if (context)
	{
	  tree t;
	  int i;

	  DECL_CONTEXT (node->decl) = context;

	  /* Propagate it to the TYPE_CONTEXT attributes of the requested
	     ..._TYPE nodes.  */
	  FOR_EACH_VEC_ELT (node->types, i, t)
	    {
	      gnat_set_type_context (t, context);
	    }
	  processed = true;
	}

      /* If this node has been successfully processed, remove it from the
	 queue.  Then move to the next node.  */
      if (processed)
	{
	  *it = node->next;
	  node->types.release ();
	  free (node);
	}
      else
	it = &node->next;
    }
}
/* Return VALUE scaled by the biggest power-of-2 factor of EXPR.  */

static unsigned int
scale_by_factor_of (tree expr, unsigned int value)
{
  unsigned HOST_WIDE_INT addend = 0;
  unsigned HOST_WIDE_INT factor = 1;

  /* Peel conversions around EXPR and try to extract bodies from function
     calls: it is possible to get the scale factor from size functions.  */
  expr = remove_conversions (expr, true);
  if (TREE_CODE (expr) == CALL_EXPR)
    expr = maybe_inline_call_in_expr (expr);

  /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
     multiple of the scale factor we are looking for.  */
  if (TREE_CODE (expr) == PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
      && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
    {
      addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
      expr = TREE_OPERAND (expr, 0);
    }

  /* An expression which is a bitwise AND with a mask has a power-of-2 factor
     corresponding to the number of trailing zeros of the mask.  */
  if (TREE_CODE (expr) == BIT_AND_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
      unsigned int i = 0;

      /* Count trailing zero bits; I bounds the loop for an all-zero mask.  */
      while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
	{
	  mask >>= 1;
	  factor *= 2;
	  i++;
	}
    }

  /* If the addend is not a multiple of the factor we found, give up.  In
     theory we could find a smaller common factor but it's useless for our
     needs.  This situation arises when dealing with a field F1 with no
     alignment requirement but that is following a field F2 with such
     requirements.  As long as we have F2's offset, we don't need alignment
     information to compute F1's.  */
  if (addend % factor != 0)
    factor = 1;

  return factor * value;
}
/* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these 2 fields are laid out in such a way that no gap
   exist between the end of PREV_FIELD and the beginning of CURR_FIELD.  OFFSET
   is the distance in bits between the end of PREV_FIELD and the starting
   position of CURR_FIELD.  It is ignored if null.  */

static bool
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap */
  if (!prev_field)
    return false;

  /* If the previous field is a union type, then return false: The only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return false;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not null.  */
  if (offset && tree_fits_uhwi_p (offset))
    return !integer_zerop (offset);

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not multiple of the current field alignment.  */
  if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
      && tree_fits_uhwi_p (bit_position (prev_field)))
    return ((tree_to_uhwi (bit_position (prev_field))
	     + tree_to_uhwi (DECL_SIZE (prev_field)))
	    % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return false;

  /* Fallback, return that there may be a potential gap */
  return true;
}
3116 /* Return a LABEL_DECL with NAME. GNAT_NODE is used for the position of
3117 the decl. */
3119 tree
3120 create_label_decl (tree name, Node_Id gnat_node)
3122 tree label_decl
3123 = build_decl (input_location, LABEL_DECL, name, void_type_node);
3125 DECL_MODE (label_decl) = VOIDmode;
3127 /* Add this decl to the current binding level. */
3128 gnat_pushdecl (label_decl, gnat_node);
3130 return label_decl;
/* Return a FUNCTION_DECL node.  NAME is the name of the subprogram, ASM_NAME
   its assembler name, TYPE its type (a FUNCTION_TYPE node), PARAM_DECL_LIST
   the list of its parameters (a list of PARM_DECL nodes chained through the
   DECL_CHAIN field).

   INLINE_STATUS describes the inline flags to be set on the FUNCTION_DECL.

   PUBLIC_FLAG is true if this is for a reference to a public entity or for a
   definition to be made visible outside of the current compilation unit.

   EXTERN_FLAG is true when processing an external subprogram declaration.

   ARTIFICIAL_P is true if the subprogram was generated by the compiler.

   DEBUG_INFO_P is true if we need to write debug information for it.

   ATTR_LIST is the list of attributes to be attached to the subprogram.

   GNAT_NODE is used for the position of the decl.  */

tree
create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
		     enum inline_status_t inline_status, bool public_flag,
		     bool extern_flag, bool artificial_p, bool debug_info_p,
		     struct attrib *attr_list, Node_Id gnat_node)
{
  tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;

  DECL_ARTIFICIAL (subprog_decl) = artificial_p;
  DECL_EXTERNAL (subprog_decl) = extern_flag;
  TREE_PUBLIC (subprog_decl) = public_flag;

  /* No debug info requested: hide the decl from the debug back-ends.  */
  if (!debug_info_p)
    DECL_IGNORED_P (subprog_decl) = 1;

  switch (inline_status)
    {
    case is_suppressed:
      DECL_UNINLINABLE (subprog_decl) = 1;
      break;

    case is_disabled:
      break;

    case is_required:
      if (Back_End_Inlining)
	decl_attributes (&subprog_decl,
			 tree_cons (get_identifier ("always_inline"),
				    NULL_TREE, NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);

      /* ... fall through ... */

    case is_enabled:
      DECL_DECLARED_INLINE_P (subprog_decl) = 1;
      /* Don't warn when a compiler-generated subprogram can't be inlined.  */
      DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
      break;

    default:
      gcc_unreachable ();
    }

  process_attributes (&subprog_decl, &attr_list, true, gnat_node);

  /* Once everything is processed, finish the subprogram declaration.  */
  finish_subprog_decl (subprog_decl, asm_name, type);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
}
/* Given a subprogram declaration DECL, its assembler name and its type,
   finish constructing the subprogram declaration from ASM_NAME and TYPE.  */

void
finish_subprog_decl (tree decl, tree asm_name, tree type)
{
  /* A RESULT_DECL holds the function's return value.  */
  tree result_decl
    = build_decl (DECL_SOURCE_LOCATION (decl), RESULT_DECL, NULL_TREE,
		  TREE_TYPE (type));

  DECL_ARTIFICIAL (result_decl) = 1;
  DECL_IGNORED_P (result_decl) = 1;
  DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
  DECL_RESULT (decl) = result_decl;

  /* Propagate the readonly/volatile qualities of the function type.  */
  TREE_READONLY (decl) = TYPE_READONLY (type);
  TREE_SIDE_EFFECTS (decl) = TREE_THIS_VOLATILE (decl) = TYPE_VOLATILE (type);

  if (asm_name)
    {
      /* Let the target mangle the name if this isn't a verbatim asm.  */
      if (*IDENTIFIER_POINTER (asm_name) != '*')
	asm_name = targetm.mangle_decl_assembler_name (decl, asm_name);

      SET_DECL_ASSEMBLER_NAME (decl, asm_name);

      /* The expand_main_function circuitry expects "main_identifier_node" to
	 designate the DECL_NAME of the 'main' entry point, in turn expected
	 to be declared as the "main" function literally by default.  Ada
	 program entry points are typically declared with a different name
	 within the binder generated file, exported as 'main' to satisfy the
	 system expectations.  Force main_identifier_node in this case.  */
      if (asm_name == main_identifier_node)
	DECL_NAME (decl) = main_identifier_node;
    }
}
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

void
begin_subprog_body (tree subprog_decl)
{
  tree param_decl;

  announce_function (subprog_decl);

  /* This function is being defined.  */
  TREE_STATIC (subprog_decl) = 1;

  /* The failure of this assertion will likely come from a wrong context for
     the subprogram body, e.g. another procedure for a procedure declared at
     library level.  */
  gcc_assert (current_function_decl == decl_function_context (subprog_decl));

  current_function_decl = subprog_decl;

  /* Enter a new binding level and show that all the parameters belong to
     this function.  */
  gnat_pushlevel ();

  for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
       param_decl = DECL_CHAIN (param_decl))
    DECL_CONTEXT (param_decl) = subprog_decl;

  make_decl_rtl (subprog_decl);
}
/* Finish translating the current subprogram and set its BODY.  */

void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Attach the BLOCK for this level to the function and pop the level.  */
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* Mark the RESULT_DECL as being in this subprogram.  */
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  /* The body should be a BIND_EXPR whose BLOCK is the top-level one.  */
  if (TREE_CODE (body) == BIND_EXPR)
    {
      BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
      DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
    }

  DECL_SAVED_TREE (fndecl) = body;

  /* Restore the enclosing function (or NULL at library level).  */
  current_function_decl = decl_function_context (fndecl);
}
/* Wrap up compilation of SUBPROG_DECL, a subprogram body.  */

void
rest_of_subprog_body_compilation (tree subprog_decl)
{
  /* We cannot track the location of errors past this point.  */
  error_gnat_node = Empty;

  /* If we're only annotating types, don't actually compile this function.  */
  if (type_annotate_only)
    return;

  /* Dump functions before gimplification.  */
  dump_function (TDI_original, subprog_decl);

  if (!decl_function_context (subprog_decl))
    cgraph_node::finalize_function (subprog_decl, false);
  else
    /* Register this function with cgraph just far enough to get it
       added to our parent's nested function list.  */
    (void) cgraph_node::get_create (subprog_decl);
}
/* Record DECL at the global binding level and return it.  NOTE(review):
   presumably this implements the builtin_function langhook for the Ada
   front-end — confirm against gnat hooks registration.  */

tree
gnat_builtin_function (tree decl)
{
  gnat_pushdecl (decl, Empty);
  return decl;
}
/* Return an integer type with the number of bits of precision given by
   PRECISION.  UNSIGNEDP is nonzero if the type is unsigned; otherwise
   it is a signed type.  Types are cached in signed_and_unsigned_types
   for precisions up to 2 * MAX_BITS_PER_WORD.  */

tree
gnat_type_for_size (unsigned precision, int unsignedp)
{
  tree t;
  char type_name[20];

  /* Reuse a previously-built type when possible.  */
  if (precision <= 2 * MAX_BITS_PER_WORD
      && signed_and_unsigned_types[precision][unsignedp])
    return signed_and_unsigned_types[precision][unsignedp];

  if (unsignedp)
    t = make_unsigned_type (precision);
  else
    t = make_signed_type (precision);

  if (precision <= 2 * MAX_BITS_PER_WORD)
    signed_and_unsigned_types[precision][unsignedp] = t;

  /* Give the type a synthetic name, e.g. "UNSIGNED_32" or "SIGNED_7",
     mainly for the sake of debug info.  */
  if (!TYPE_NAME (t))
    {
      sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
/* Likewise for floating-point types: return a REAL_TYPE with the given
   PRECISION and machine MODE, caching the result per mode.  */

static tree
float_type_for_precision (int precision, machine_mode mode)
{
  tree t;
  char type_name[20];

  if (float_types[(int) mode])
    return float_types[(int) mode];

  float_types[(int) mode] = t = make_node (REAL_TYPE);
  TYPE_PRECISION (t) = precision;
  layout_type (t);

  /* Layout must have selected exactly the requested mode.  */
  gcc_assert (TYPE_MODE (t) == mode);
  if (!TYPE_NAME (t))
    {
      sprintf (type_name, "FLOAT_%d", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
/* Return a data type that has machine mode MODE.  UNSIGNEDP selects
   an unsigned type; otherwise a signed type is returned.  Return
   NULL_TREE for modes with no suitable type (BLKmode, complex modes,
   vector modes with unsupported element types).  */

tree
gnat_type_for_mode (machine_mode mode, int unsignedp)
{
  if (mode == BLKmode)
    return NULL_TREE;

  if (mode == VOIDmode)
    return void_type_node;

  if (COMPLEX_MODE_P (mode))
    return NULL_TREE;

  if (SCALAR_FLOAT_MODE_P (mode))
    return float_type_for_precision (GET_MODE_PRECISION (mode), mode);

  if (SCALAR_INT_MODE_P (mode))
    return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);

  if (VECTOR_MODE_P (mode))
    {
      machine_mode inner_mode = GET_MODE_INNER (mode);
      tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
      if (inner_type)
	return build_vector_type_for_mode (inner_type, mode);
    }

  return NULL_TREE;
}
/* Return the signed or unsigned version of TYPE_NODE, a scalar type, the
   signedness being specified by UNSIGNEDP.  */

tree
gnat_signed_or_unsigned_type_for (int unsignedp, tree type_node)
{
  /* "char" maps directly to its explicitly signed/unsigned variants.  */
  if (type_node == char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;

  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), unsignedp);

  /* Preserve the modular semantics of the input (or of its base type) by
     pointing TREE_TYPE of a copy at the modular type.  */
  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
    {
      type = copy_type (type);
      TREE_TYPE (type) = type_node;
    }
  else if (TREE_TYPE (type_node)
	   && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
	   && TYPE_MODULAR_P (TREE_TYPE (type_node)))
    {
      type = copy_type (type);
      TREE_TYPE (type) = TREE_TYPE (type_node);
    }

  return type;
}
/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
   transparently converted to each other.  */

int
gnat_types_compatible_p (tree t1, tree t2)
{
  enum tree_code code;

  /* This is the default criterion.  */
  if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
    return 1;

  /* We only check structural equivalence here.  */
  if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
    return 0;

  /* Vector types are also compatible if they have the same number of subparts
     and the same form of (scalar) element type.  */
  if (code == VECTOR_TYPE
      && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
      && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
      && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
    return 1;

  /* Array types are also compatible if they are constrained and have the same
     domain(s), the same component type and the same scalar storage order.  */
  if (code == ARRAY_TYPE
      && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
	  || (TYPE_DOMAIN (t1)
	      && TYPE_DOMAIN (t2)
	      && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
	      && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
      && (TREE_TYPE (t1) == TREE_TYPE (t2)
	  || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
	      && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
      && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
    return 1;

  return 0;
}
3494 /* Return true if EXPR is a useless type conversion. */
3496 bool
3497 gnat_useless_type_conversion (tree expr)
3499 if (CONVERT_EXPR_P (expr)
3500 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3501 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3502 return gnat_types_compatible_p (TREE_TYPE (expr),
3503 TREE_TYPE (TREE_OPERAND (expr, 0)));
3505 return false;
3508 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3510 bool
3511 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3512 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3514 return TYPE_CI_CO_LIST (t) == cico_list
3515 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3516 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3517 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
	{
	  tree t, *argarray;
	  int n, i;

	  /* Try inlining the call so its body can be processed instead.  */
	  t = maybe_inline_call_in_expr (exp);
	  if (t)
	    return max_size (t, max_p);

	  /* Otherwise rebuild the call with max'ed/min'ed arguments.  */
	  n = call_expr_nargs (exp);
	  gcc_assert (n > 0);
	  argarray = XALLOCAVEC (tree, n);
	  for (i = 0; i < n; i++)
	    argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
	  return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
	}
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (CONTAINS_PLACEHOLDER_P (exp))
	{
	  tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
	  tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
	  return max_size (convert (get_base_type (val_type), val), true);
	}

      return exp;

    case tcc_comparison:
      /* A comparison's value is at most 1 and at least 0.  */
      return max_p ? size_one_node : size_zero_node;

    case tcc_unary:
      if (code == NON_LVALUE_EXPR)
	return max_size (TREE_OPERAND (exp, 0), max_p);

      /* Negation flips which extremum of the operand we want.  */
      return fold_build1 (code, type,
			  max_size (TREE_OPERAND (exp, 0),
				    code == NEGATE_EXPR ? !max_p : max_p));

    case tcc_binary:
      {
	tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	tree rhs = max_size (TREE_OPERAND (exp, 1),
			     code == MINUS_EXPR ? !max_p : max_p);

	/* Special-case wanting the maximum value of a MIN_EXPR.
	   In that case, if one side overflows, return the other.  */
	if (max_p && code == MIN_EXPR)
	  {
	    if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
	      return lhs;

	    if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
	      return rhs;
	  }

	/* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	   overflowing and the RHS a variable.  */
	if ((code == MINUS_EXPR || code == PLUS_EXPR)
	    && TREE_CODE (lhs) == INTEGER_CST
	    && TREE_OVERFLOW (lhs)
	    && TREE_CODE (rhs) != INTEGER_CST)
	  return lhs;

	/* If we are going to subtract a "negative" value in an unsigned type,
	   do the operation as an addition of the negated value, in order to
	   avoid creating a spurious overflow below.  */
	if (code == MINUS_EXPR
	    && TYPE_UNSIGNED (type)
	    && TREE_CODE (rhs) == INTEGER_CST
	    && !TREE_OVERFLOW (rhs)
	    && tree_int_cst_sign_bit (rhs) != 0)
	  {
	    rhs = fold_build1 (NEGATE_EXPR, type, rhs);
	    code = PLUS_EXPR;
	  }

	/* We need to detect overflows so we call size_binop here.  */
	return size_binop (code, lhs, rhs);
      }

    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == SAVE_EXPR)
	    return exp;

	  return fold_build1 (code, type,
			      max_size (TREE_OPERAND (exp, 0), max_p));

	case 2:
	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  return fold_build2 (code, type,
			      max_size (TREE_OPERAND (exp, 0), max_p),
			      max_size (TREE_OPERAND (exp, 1), max_p));

	case 3:
	  if (code == COND_EXPR)
	    return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p));

	default:
	  break;
	}

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  vec<constructor_elt, va_gc> *template_elts = NULL;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip padding and justified-modular wrappers to get at the actual
     array (or array-representing) type.  */
  while (TREE_CODE (array_type) == RECORD_TYPE
	 && (TYPE_PADDING_P (array_type)
	     || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  /* If the type carries an explicit list of actual bounds, prefer it to
     walking the array type chain below.  */
  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.

     The template stores each dimension's bounds in a pair of consecutive
     fields (low, high), hence the double DECL_CHAIN stride; per iteration
     we advance either the bound list or the array type chain, whichever
     is driving the walk.  */

  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = DECL_CHAIN (DECL_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gcc_unreachable ();

      /* Convert the low bound to the type of the low-bound field and the
	 high bound to the type of the high-bound field (the next field).  */
      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
      CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
    }

  return gnat_build_constructor (template_type, template_elts);
}
3719 /* Return true if TYPE is suitable for the element type of a vector. */
3721 static bool
3722 type_for_vector_element_p (tree type)
3724 machine_mode mode;
3726 if (!INTEGRAL_TYPE_P (type)
3727 && !SCALAR_FLOAT_TYPE_P (type)
3728 && !FIXED_POINT_TYPE_P (type))
3729 return false;
3731 mode = TYPE_MODE (type);
3732 if (GET_MODE_CLASS (mode) != MODE_INT
3733 && !SCALAR_FLOAT_MODE_P (mode)
3734 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3735 return false;
3737 return true;
3740 /* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3741 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3742 attribute declaration and want to issue error messages on failure. */
3744 static tree
3745 build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3747 unsigned HOST_WIDE_INT size_int, inner_size_int;
3748 int nunits;
3750 /* Silently punt on variable sizes. We can't make vector types for them,
3751 need to ignore them on front-end generated subtypes of unconstrained
3752 base types, and this attribute is for binding implementors, not end
3753 users, so we should never get there from legitimate explicit uses. */
3754 if (!tree_fits_uhwi_p (size))
3755 return NULL_TREE;
3756 size_int = tree_to_uhwi (size);
3758 if (!type_for_vector_element_p (inner_type))
3760 if (attribute)
3761 error ("invalid element type for attribute %qs",
3762 IDENTIFIER_POINTER (attribute));
3763 return NULL_TREE;
3765 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3767 if (size_int % inner_size_int)
3769 if (attribute)
3770 error ("vector size not an integral multiple of component size");
3771 return NULL_TREE;
3774 if (size_int == 0)
3776 if (attribute)
3777 error ("zero vector size");
3778 return NULL_TREE;
3781 nunits = size_int / inner_size_int;
3782 if (nunits & (nunits - 1))
3784 if (attribute)
3785 error ("number of components of vector not a power of two");
3786 return NULL_TREE;
3789 return build_vector_type (inner_type, nunits);
3792 /* Return a vector type whose representative array type is ARRAY_TYPE, or
3793 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3794 processing the attribute and want to issue error messages on failure. */
3796 static tree
3797 build_vector_type_for_array (tree array_type, tree attribute)
3799 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3800 TYPE_SIZE_UNIT (array_type),
3801 attribute);
3802 if (!vector_type)
3803 return NULL_TREE;
3805 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3806 return vector_type;
3809 /* Build a type to be used to represent an aliased object whose nominal type
3810 is an unconstrained array. This consists of a RECORD_TYPE containing a
3811 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3812 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3813 an arbitrary unconstrained object. Use NAME as the name of the record.
3814 DEBUG_INFO_P is true if we need to write debug information for the type. */
3816 tree
3817 build_unc_object_type (tree template_type, tree object_type, tree name,
3818 bool debug_info_p)
3820 tree decl;
3821 tree type = make_node (RECORD_TYPE);
3822 tree template_field
3823 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3824 NULL_TREE, NULL_TREE, 0, 1);
3825 tree array_field
3826 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3827 NULL_TREE, NULL_TREE, 0, 1);
3829 TYPE_NAME (type) = name;
3830 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3831 DECL_CHAIN (template_field) = array_field;
3832 finish_record_type (type, template_field, 0, true);
3834 /* Declare it now since it will never be declared otherwise. This is
3835 necessary to ensure that its subtrees are properly marked. */
3836 decl = create_type_decl (name, type, true, debug_info_p, Empty);
3838 /* template_type will not be used elsewhere than here, so to keep the debug
3839 info clean and in order to avoid scoping issues, make decl its
3840 context. */
3841 gnat_set_type_context (template_type, decl);
3843 return type;
3846 /* Same, taking a thin or fat pointer type instead of a template type. */
3848 tree
3849 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3850 tree name, bool debug_info_p)
3852 tree template_type;
3854 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3856 template_type
3857 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3858 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3859 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3861 return
3862 build_unc_object_type (template_type, object_type, name, debug_info_p);
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
   In the normal case this is just two adjustments, but we have more to
   do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree t;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
      update_pointer_to (t, new_type);

  /* If no pointers and no references, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it; after the designated type's freeze, we get here with
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
     when the access type was processed.  We will make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy OLD_TYPE variant with different
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
     the qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
  new_type
    = build_qualified_type (new_type,
			    TYPE_QUALS (old_type) | TYPE_QUALS (new_type));

  /* If old type and new type are identical, there is nothing to do.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      tree new_ptr, new_ref;

      /* If pointer or reference already points to new type, nothing to do.
	 This can happen as update_pointer_to can be invoked multiple times
	 on the same couple of types because of the type variants.  */
      if ((ptr && TREE_TYPE (ptr) == new_type)
	  || (ref && TREE_TYPE (ref) == new_type))
	return;

      /* Chain PTR and its variants at the end.  */
      new_ptr = TYPE_POINTER_TO (new_type);
      if (new_ptr)
	{
	  while (TYPE_NEXT_PTR_TO (new_ptr))
	    new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
	  TYPE_NEXT_PTR_TO (new_ptr) = ptr;
	}
      else
	TYPE_POINTER_TO (new_type) = ptr;

      /* Now adjust them: retarget every pointer (and each of its variants)
	 at NEW_TYPE, including the pointer inside cached null fat-pointer
	 bounds if any.  */
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
	  {
	    TREE_TYPE (t) = new_type;
	    if (TYPE_NULL_BOUNDS (t))
	      TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
	  }

      /* Chain REF and its variants at the end.  */
      new_ref = TYPE_REFERENCE_TO (new_type);
      if (new_ref)
	{
	  while (TYPE_NEXT_REF_TO (new_ref))
	    new_ref = TYPE_NEXT_REF_TO (new_ref);
	  TYPE_NEXT_REF_TO (new_ref) = ref;
	}
      else
	TYPE_REFERENCE_TO (new_type) = ref;

      /* Now adjust them.  */
      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
	for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
	  TREE_TYPE (t) = new_type;

      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }

  /* Now deal with the unconstrained array case.  In this case the pointer
     is actually a record where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.
     Likewise for the pointer to the object record (thin pointer).  */
  else
    {
      tree new_ptr = TYPE_POINTER_TO (new_type);

      gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));

      /* If PTR already points to NEW_TYPE, nothing to do.  This can happen
	 since update_pointer_to can be invoked multiple times on the same
	 couple of types because of the type variants.  */
      if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
	return;

      /* First field of the fat pointer: pointer to the array data.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));

      /* Second field of the fat pointer: pointer to the bounds template.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
	 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
			 TYPE_OBJECT_RECORD_TYPE (new_type));

      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }
}
/* Convert EXPR, a pointer to a constrained array, into a pointer to an
   unconstrained one.  This involves making or finding a template.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  /* TYPE is a fat pointer record: first field points to the array data,
     second field points to the bounds template.  */
  tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
  tree etype = TREE_TYPE (expr);
  tree template_addr;
  vec<constructor_elt, va_gc> *v;
  vec_alloc (v, 2);

  /* If EXPR is null, make a fat pointer that contains a null pointer to the
     array (compare_fat_pointers ensures that this is the full discriminant)
     and a valid pointer to the bounds.  This latter property is necessary
     since the compiler can hoist the load of the bounds done through it.  */
  if (integer_zerop (expr))
    {
      tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      tree null_bounds, t;

      /* Reuse the null bounds cached on the pointer-to-template type,
	 creating and caching them on first use.  */
      if (TYPE_NULL_BOUNDS (ptr_template_type))
	null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
      else
	{
	  /* The template type can still be dummy at this point so we build an
	     empty constructor.  The middle-end will fill it in with zeros.  */
	  t = build_constructor (template_type, NULL);
	  TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
	  null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
	  SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
	}

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      fold_convert (p_array_type, null_pointer_node));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
      t = build_constructor (type, v);
      /* Do not set TREE_CONSTANT so as to force T to static memory.  */
      TREE_CONSTANT (t) = 0;
      TREE_STATIC (t) = 1;

      return t;
    }

  /* If EXPR is a thin pointer, make template and data from the record.  */
  if (TYPE_IS_THIN_POINTER_P (etype))
    {
      tree field = TYPE_FIELDS (TREE_TYPE (etype));

      /* EXPR is used twice below, so protect it against multiple
	 evaluation.  */
      expr = gnat_protect_expr (expr);

      /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
	 the thin pointer value has been shifted so we shift it back to get
	 the template address.  */
      if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
	{
	  template_addr
	    = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
			       fold_build1 (NEGATE_EXPR, sizetype,
					    byte_position
					    (DECL_CHAIN (field))));
	  template_addr
	    = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
			    template_addr);
	}

      /* Otherwise we explicitly take the address of the fields.  */
      else
	{
	  expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
	  template_addr
	    = build_unary_op (ADDR_EXPR, NULL_TREE,
			      build_component_ref (expr, field, false));
	  expr = build_unary_op (ADDR_EXPR, NULL_TREE,
				 build_component_ref (expr, DECL_CHAIN (field),
						      false));
	}
    }

  /* Otherwise, build the constructor for the template.  */
  else
    template_addr
      = build_unary_op (ADDR_EXPR, NULL_TREE,
			build_template (template_type, TREE_TYPE (etype),
					expr));

  /* The final result is a constructor for the fat pointer.

     If EXPR is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     COMPONENT_REFs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine because it
     will only refer to the provided TEMPLATE_TYPE in this case.  */
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
  return gnat_build_constructor (type, v);
}
4100 /* Create an expression whose value is that of EXPR,
4101 converted to type TYPE. The TREE_TYPE of the value
4102 is always TYPE. This function implements all reasonable
4103 conversions; callers should filter out those that are
4104 not permitted by the language being compiled. */
4106 tree
4107 convert (tree type, tree expr)
4109 tree etype = TREE_TYPE (expr);
4110 enum tree_code ecode = TREE_CODE (etype);
4111 enum tree_code code = TREE_CODE (type);
4113 /* If the expression is already of the right type, we are done. */
4114 if (etype == type)
4115 return expr;
4117 /* If both input and output have padding and are of variable size, do this
4118 as an unchecked conversion. Likewise if one is a mere variant of the
4119 other, so we avoid a pointless unpad/repad sequence. */
4120 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
4121 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4122 && (!TREE_CONSTANT (TYPE_SIZE (type))
4123 || !TREE_CONSTANT (TYPE_SIZE (etype))
4124 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4125 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4126 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4129 /* If the output type has padding, convert to the inner type and make a
4130 constructor to build the record, unless a variable size is involved. */
4131 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
4133 vec<constructor_elt, va_gc> *v;
4135 /* If we previously converted from another type and our type is
4136 of variable size, remove the conversion to avoid the need for
4137 variable-sized temporaries. Likewise for a conversion between
4138 original and packable version. */
4139 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4140 && (!TREE_CONSTANT (TYPE_SIZE (type))
4141 || (ecode == RECORD_TYPE
4142 && TYPE_NAME (etype)
4143 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4144 expr = TREE_OPERAND (expr, 0);
4146 /* If we are just removing the padding from expr, convert the original
4147 object if we have variable size in order to avoid the need for some
4148 variable-sized temporaries. Likewise if the padding is a variant
4149 of the other, so we avoid a pointless unpad/repad sequence. */
4150 if (TREE_CODE (expr) == COMPONENT_REF
4151 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4152 && (!TREE_CONSTANT (TYPE_SIZE (type))
4153 || TYPE_MAIN_VARIANT (type)
4154 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
4155 || (ecode == RECORD_TYPE
4156 && TYPE_NAME (etype)
4157 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4158 return convert (type, TREE_OPERAND (expr, 0));
4160 /* If the inner type is of self-referential size and the expression type
4161 is a record, do this as an unchecked conversion. But first pad the
4162 expression if possible to have the same size on both sides. */
4163 if (ecode == RECORD_TYPE
4164 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
4166 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
4167 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4168 false, false, false, true),
4169 expr);
4170 return unchecked_convert (type, expr, false);
4173 /* If we are converting between array types with variable size, do the
4174 final conversion as an unchecked conversion, again to avoid the need
4175 for some variable-sized temporaries. If valid, this conversion is
4176 very likely purely technical and without real effects. */
4177 if (ecode == ARRAY_TYPE
4178 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4179 && !TREE_CONSTANT (TYPE_SIZE (etype))
4180 && !TREE_CONSTANT (TYPE_SIZE (type)))
4181 return unchecked_convert (type,
4182 convert (TREE_TYPE (TYPE_FIELDS (type)),
4183 expr),
4184 false);
4186 vec_alloc (v, 1);
4187 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4188 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4189 return gnat_build_constructor (type, v);
4192 /* If the input type has padding, remove it and convert to the output type.
4193 The conditions ordering is arranged to ensure that the output type is not
4194 a padding type here, as it is not clear whether the conversion would
4195 always be correct if this was to happen. */
4196 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4198 tree unpadded;
4200 /* If we have just converted to this padded type, just get the
4201 inner expression. */
4202 if (TREE_CODE (expr) == CONSTRUCTOR)
4203 unpadded = CONSTRUCTOR_ELT (expr, 0)->value;
4205 /* Otherwise, build an explicit component reference. */
4206 else
4207 unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);
4209 return convert (type, unpadded);
4212 /* If the input is a biased type, convert first to the base type and add
4213 the bias. Note that the bias must go through a full conversion to the
4214 base type, lest it is itself a biased value; this happens for subtypes
4215 of biased types. */
4216 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4217 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4218 fold_convert (TREE_TYPE (etype), expr),
4219 convert (TREE_TYPE (etype),
4220 TYPE_MIN_VALUE (etype))));
4222 /* If the input is a justified modular type, we need to extract the actual
4223 object before converting it to any other type with the exceptions of an
4224 unconstrained array or of a mere type variant. It is useful to avoid the
4225 extraction and conversion in the type variant case because it could end
4226 up replacing a VAR_DECL expr by a constructor and we might be about the
4227 take the address of the result. */
4228 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4229 && code != UNCONSTRAINED_ARRAY_TYPE
4230 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4231 return
4232 convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));
4234 /* If converting to a type that contains a template, convert to the data
4235 type and then build the template. */
4236 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4238 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4239 vec<constructor_elt, va_gc> *v;
4240 vec_alloc (v, 2);
4242 /* If the source already has a template, get a reference to the
4243 associated array only, as we are going to rebuild a template
4244 for the target type anyway. */
4245 expr = maybe_unconstrained_array (expr);
4247 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4248 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4249 obj_type, NULL_TREE));
4250 if (expr)
4251 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4252 convert (obj_type, expr));
4253 return gnat_build_constructor (type, v);
4256 /* There are some cases of expressions that we process specially. */
4257 switch (TREE_CODE (expr))
4259 case ERROR_MARK:
4260 return expr;
4262 case NULL_EXPR:
4263 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4264 conversion in gnat_expand_expr. NULL_EXPR does not represent
4265 and actual value, so no conversion is needed. */
4266 expr = copy_node (expr);
4267 TREE_TYPE (expr) = type;
4268 return expr;
4270 case STRING_CST:
4271 /* If we are converting a STRING_CST to another constrained array type,
4272 just make a new one in the proper type. */
4273 if (code == ecode && AGGREGATE_TYPE_P (etype)
4274 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4275 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4277 expr = copy_node (expr);
4278 TREE_TYPE (expr) = type;
4279 return expr;
4281 break;
4283 case VECTOR_CST:
4284 /* If we are converting a VECTOR_CST to a mere type variant, just make
4285 a new one in the proper type. */
4286 if (code == ecode && gnat_types_compatible_p (type, etype))
4288 expr = copy_node (expr);
4289 TREE_TYPE (expr) = type;
4290 return expr;
4292 break;
4294 case CONSTRUCTOR:
4295 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4296 another padding type around the same type, just make a new one in
4297 the proper type. */
4298 if (code == ecode
4299 && (gnat_types_compatible_p (type, etype)
4300 || (code == RECORD_TYPE
4301 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4302 && TREE_TYPE (TYPE_FIELDS (type))
4303 == TREE_TYPE (TYPE_FIELDS (etype)))))
4305 expr = copy_node (expr);
4306 TREE_TYPE (expr) = type;
4307 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4308 return expr;
4311 /* Likewise for a conversion between original and packable version, or
4312 conversion between types of the same size and with the same list of
4313 fields, but we have to work harder to preserve type consistency. */
4314 if (code == ecode
4315 && code == RECORD_TYPE
4316 && (TYPE_NAME (type) == TYPE_NAME (etype)
4317 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4320 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4321 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4322 vec<constructor_elt, va_gc> *v;
4323 vec_alloc (v, len);
4324 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4325 unsigned HOST_WIDE_INT idx;
4326 tree index, value;
4328 /* Whether we need to clear TREE_CONSTANT et al. on the output
4329 constructor when we convert in place. */
4330 bool clear_constant = false;
4332 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4334 /* Skip the missing fields in the CONSTRUCTOR. */
4335 while (efield && field && !SAME_FIELD_P (efield, index))
4337 efield = DECL_CHAIN (efield);
4338 field = DECL_CHAIN (field);
4340 /* The field must be the same. */
4341 if (!(efield && field && SAME_FIELD_P (efield, field)))
4342 break;
4343 constructor_elt elt
4344 = {field, convert (TREE_TYPE (field), value)};
4345 v->quick_push (elt);
4347 /* If packing has made this field a bitfield and the input
4348 value couldn't be emitted statically any more, we need to
4349 clear TREE_CONSTANT on our output. */
4350 if (!clear_constant
4351 && TREE_CONSTANT (expr)
4352 && !CONSTRUCTOR_BITFIELD_P (efield)
4353 && CONSTRUCTOR_BITFIELD_P (field)
4354 && !initializer_constant_valid_for_bitfield_p (value))
4355 clear_constant = true;
4357 efield = DECL_CHAIN (efield);
4358 field = DECL_CHAIN (field);
4361 /* If we have been able to match and convert all the input fields
4362 to their output type, convert in place now. We'll fallback to a
4363 view conversion downstream otherwise. */
4364 if (idx == len)
4366 expr = copy_node (expr);
4367 TREE_TYPE (expr) = type;
4368 CONSTRUCTOR_ELTS (expr) = v;
4369 if (clear_constant)
4370 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4371 return expr;
4375 /* Likewise for a conversion between array type and vector type with a
4376 compatible representative array. */
4377 else if (code == VECTOR_TYPE
4378 && ecode == ARRAY_TYPE
4379 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4380 etype))
4382 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4383 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4384 vec<constructor_elt, va_gc> *v;
4385 unsigned HOST_WIDE_INT ix;
4386 tree value;
4388 /* Build a VECTOR_CST from a *constant* array constructor. */
4389 if (TREE_CONSTANT (expr))
4391 bool constant_p = true;
4393 /* Iterate through elements and check if all constructor
4394 elements are *_CSTs. */
4395 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4396 if (!CONSTANT_CLASS_P (value))
4398 constant_p = false;
4399 break;
4402 if (constant_p)
4403 return build_vector_from_ctor (type,
4404 CONSTRUCTOR_ELTS (expr));
4407 /* Otherwise, build a regular vector constructor. */
4408 vec_alloc (v, len);
4409 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4411 constructor_elt elt = {NULL_TREE, value};
4412 v->quick_push (elt);
4414 expr = copy_node (expr);
4415 TREE_TYPE (expr) = type;
4416 CONSTRUCTOR_ELTS (expr) = v;
4417 return expr;
4419 break;
4421 case UNCONSTRAINED_ARRAY_REF:
4422 /* First retrieve the underlying array. */
4423 expr = maybe_unconstrained_array (expr);
4424 etype = TREE_TYPE (expr);
4425 ecode = TREE_CODE (etype);
4426 break;
4428 case VIEW_CONVERT_EXPR:
4430 /* GCC 4.x is very sensitive to type consistency overall, and view
4431 conversions thus are very frequent. Even though just "convert"ing
4432 the inner operand to the output type is fine in most cases, it
4433 might expose unexpected input/output type mismatches in special
4434 circumstances so we avoid such recursive calls when we can. */
4435 tree op0 = TREE_OPERAND (expr, 0);
4437 /* If we are converting back to the original type, we can just
4438 lift the input conversion. This is a common occurrence with
4439 switches back-and-forth amongst type variants. */
4440 if (type == TREE_TYPE (op0))
4441 return op0;
4443 /* Otherwise, if we're converting between two aggregate or vector
4444 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4445 target type in place or to just convert the inner expression. */
4446 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4447 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4449 /* If we are converting between mere variants, we can just
4450 substitute the VIEW_CONVERT_EXPR in place. */
4451 if (gnat_types_compatible_p (type, etype))
4452 return build1 (VIEW_CONVERT_EXPR, type, op0);
4454 /* Otherwise, we may just bypass the input view conversion unless
4455 one of the types is a fat pointer, which is handled by
4456 specialized code below which relies on exact type matching. */
4457 else if (!TYPE_IS_FAT_POINTER_P (type)
4458 && !TYPE_IS_FAT_POINTER_P (etype))
4459 return convert (type, op0);
4462 break;
4465 default:
4466 break;
4469 /* Check for converting to a pointer to an unconstrained array. */
4470 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4471 return convert_to_fat_pointer (type, expr);
4473 /* If we are converting between two aggregate or vector types that are mere
4474 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4475 to a vector type from its representative array type. */
4476 else if ((code == ecode
4477 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4478 && gnat_types_compatible_p (type, etype))
4479 || (code == VECTOR_TYPE
4480 && ecode == ARRAY_TYPE
4481 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4482 etype)))
4483 return build1 (VIEW_CONVERT_EXPR, type, expr);
4485 /* If we are converting between tagged types, try to upcast properly. */
4486 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4487 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4489 tree child_etype = etype;
4490 do {
4491 tree field = TYPE_FIELDS (child_etype);
4492 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4493 return build_component_ref (expr, field, false);
4494 child_etype = TREE_TYPE (field);
4495 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4498 /* If we are converting from a smaller form of record type back to it, just
4499 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4500 size on both sides. */
4501 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4502 && smaller_form_type_p (etype, type))
4504 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4505 false, false, false, true),
4506 expr);
4507 return build1 (VIEW_CONVERT_EXPR, type, expr);
4510 /* In all other cases of related types, make a NOP_EXPR. */
4511 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4512 return fold_convert (type, expr);
4514 switch (code)
4516 case VOID_TYPE:
4517 return fold_build1 (CONVERT_EXPR, type, expr);
4519 case INTEGER_TYPE:
4520 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4521 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4522 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4523 return unchecked_convert (type, expr, false);
4525 /* If the output is a biased type, convert first to the base type and
4526 subtract the bias. Note that the bias itself must go through a full
4527 conversion to the base type, lest it is a biased value; this happens
4528 for subtypes of biased types. */
4529 if (TYPE_BIASED_REPRESENTATION_P (type))
4530 return fold_convert (type,
4531 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4532 convert (TREE_TYPE (type), expr),
4533 convert (TREE_TYPE (type),
4534 TYPE_MIN_VALUE (type))));
4536 /* ... fall through ... */
4538 case ENUMERAL_TYPE:
4539 case BOOLEAN_TYPE:
4540 /* If we are converting an additive expression to an integer type
4541 with lower precision, be wary of the optimization that can be
4542 applied by convert_to_integer. There are 2 problematic cases:
4543 - if the first operand was originally of a biased type,
4544 because we could be recursively called to convert it
4545 to an intermediate type and thus rematerialize the
4546 additive operator endlessly,
4547 - if the expression contains a placeholder, because an
4548 intermediate conversion that changes the sign could
4549 be inserted and thus introduce an artificial overflow
4550 at compile time when the placeholder is substituted. */
4551 if (code == INTEGER_TYPE
4552 && ecode == INTEGER_TYPE
4553 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4554 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4556 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4558 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4559 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4560 || CONTAINS_PLACEHOLDER_P (expr))
4561 return build1 (NOP_EXPR, type, expr);
4564 return fold (convert_to_integer (type, expr));
4566 case POINTER_TYPE:
4567 case REFERENCE_TYPE:
4568 /* If converting between two thin pointers, adjust if needed to account
4569 for differing offsets from the base pointer, depending on whether
4570 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4571 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4573 tree etype_pos
4574 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype))
4575 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4576 : size_zero_node;
4577 tree type_pos
4578 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))
4579 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4580 : size_zero_node;
4581 tree byte_diff = size_diffop (type_pos, etype_pos);
4583 expr = build1 (NOP_EXPR, type, expr);
4584 if (integer_zerop (byte_diff))
4585 return expr;
4587 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4588 fold_convert (sizetype, byte_diff));
4591 /* If converting fat pointer to normal or thin pointer, get the pointer
4592 to the array and then convert it. */
4593 if (TYPE_IS_FAT_POINTER_P (etype))
4594 expr = build_component_ref (expr, TYPE_FIELDS (etype), false);
4596 return fold (convert_to_pointer (type, expr));
4598 case REAL_TYPE:
4599 return fold (convert_to_real (type, expr));
4601 case RECORD_TYPE:
4602 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4604 vec<constructor_elt, va_gc> *v;
4605 vec_alloc (v, 1);
4607 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4608 convert (TREE_TYPE (TYPE_FIELDS (type)),
4609 expr));
4610 return gnat_build_constructor (type, v);
4613 /* ... fall through ... */
4615 case ARRAY_TYPE:
4616 /* In these cases, assume the front-end has validated the conversion.
4617 If the conversion is valid, it will be a bit-wise conversion, so
4618 it can be viewed as an unchecked conversion. */
4619 return unchecked_convert (type, expr, false);
4621 case UNION_TYPE:
 4622      /* This is either a conversion between a tagged type and some
 4623	  subtype, which we have to mark as a UNION_TYPE because of
4624 overlapping fields or a conversion of an Unchecked_Union. */
4625 return unchecked_convert (type, expr, false);
4627 case UNCONSTRAINED_ARRAY_TYPE:
4628 /* If the input is a VECTOR_TYPE, convert to the representative
4629 array type first. */
4630 if (ecode == VECTOR_TYPE)
4632 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4633 etype = TREE_TYPE (expr);
4634 ecode = TREE_CODE (etype);
4637 /* If EXPR is a constrained array, take its address, convert it to a
4638 fat pointer, and then dereference it. Likewise if EXPR is a
4639 record containing both a template and a constrained array.
4640 Note that a record representing a justified modular type
4641 always represents a packed constrained array. */
4642 if (ecode == ARRAY_TYPE
4643 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4644 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4645 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4646 return
4647 build_unary_op
4648 (INDIRECT_REF, NULL_TREE,
4649 convert_to_fat_pointer (TREE_TYPE (type),
4650 build_unary_op (ADDR_EXPR,
4651 NULL_TREE, expr)));
4653 /* Do something very similar for converting one unconstrained
4654 array to another. */
4655 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4656 return
4657 build_unary_op (INDIRECT_REF, NULL_TREE,
4658 convert (TREE_TYPE (type),
4659 build_unary_op (ADDR_EXPR,
4660 NULL_TREE, expr)));
4661 else
4662 gcc_unreachable ();
4664 case COMPLEX_TYPE:
4665 return fold (convert_to_complex (type, expr));
4667 default:
4668 gcc_unreachable ();
4672 /* Create an expression whose value is that of EXPR converted to the common
4673 index type, which is sizetype. EXPR is supposed to be in the base type
4674 of the GNAT index type. Calling it is equivalent to doing
4676 convert (sizetype, expr)
4678 but we try to distribute the type conversion with the knowledge that EXPR
4679 cannot overflow in its type. This is a best-effort approach and we fall
4680 back to the above expression as soon as difficulties are encountered.
4682 This is necessary to overcome issues that arise when the GNAT base index
4683 type and the GCC common index type (sizetype) don't have the same size,
4684 which is quite frequent on 64-bit architectures. In this case, and if
4685 the GNAT base index type is signed but the iteration type of the loop has
4686 been forced to unsigned, the loop scalar evolution engine cannot compute
4687 a simple evolution for the general induction variables associated with the
4688 array indices, because it will preserve the wrap-around semantics in the
4689 unsigned type of their "inner" part. As a result, many loop optimizations
4690 are blocked.
4692 The solution is to use a special (basic) induction variable that is at
4693 least as large as sizetype, and to express the aforementioned general
4694 induction variables in terms of this induction variable, eliminating
4695 the problematic intermediate truncation to the GNAT base index type.
4696 This is possible as long as the original expression doesn't overflow
4697 and if the middle-end hasn't introduced artificial overflows in the
4698 course of the various simplification it can make to the expression. */
tree
convert_to_index_type (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  tree type = TREE_TYPE (expr);

  /* If the type is unsigned, overflow is allowed so we cannot be sure that
     EXPR doesn't overflow.  Keep it simple if optimization is disabled.  */
  if (TYPE_UNSIGNED (type) || !optimize)
    return convert (sizetype, expr);

  switch (code)
    {
    case VAR_DECL:
      /* The main effect of the function: replace a loop parameter with its
	 associated special induction variable.  */
      if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
	expr = DECL_INDUCTION_VAR (expr);
      break;

    CASE_CONVERT:
      {
	tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
	/* Bail out as soon as we suspect some sort of type frobbing.  */
	if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
	    || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
	  break;
      }

      /* ... fall through ... */

    case NON_LVALUE_EXPR:
      /* Distribute the conversion into the single operand.  */
      return fold_build1 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Distribute the conversion into both operands of the arithmetic
	 operation; this is valid because EXPR cannot overflow here.  */
      return fold_build2 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COMPOUND_EXPR:
      /* Only the value operand (the second one) needs converting.  */
      return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COND_EXPR:
      /* Convert both arms, leave the condition alone.  */
      return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)),
			  convert_to_index_type (TREE_OPERAND (expr, 2)));

    default:
      break;
    }

  /* Fall back to a plain conversion when nothing better can be done.  */
  return convert (sizetype, expr);
}
4758 /* Remove all conversions that are done in EXP. This includes converting
4759 from a padded type or to a justified modular type. If TRUE_ADDRESS
4760 is true, always return the address of the containing object even if
4761 the address is not bit-aligned. */
tree
remove_conversions (tree exp, bool true_address)
{
  switch (TREE_CODE (exp))
    {
    case CONSTRUCTOR:
      /* A constructor of a justified modular type wraps the real value in
	 its single field; look through it when TRUE_ADDRESS.  */
      if (true_address
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
	  && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
	return
	  remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
      break;

    case COMPONENT_REF:
      /* Fetching the field of a padding record is a conversion from the
	 padded type; strip it and continue on the record itself.  */
      if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
	return remove_conversions (TREE_OPERAND (exp, 0), true_address);
      break;

    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Genuine conversion nodes: simply look through them.  */
      return remove_conversions (TREE_OPERAND (exp, 0), true_address);

    default:
      break;
    }

  return exp;
}
4793 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4794 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4795 likewise return an expression pointing to the underlying array. */
tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE (type))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  const bool read_only = TREE_READONLY (exp);
	  const bool no_trap = TREE_THIS_NOTRAP (exp);

	  /* Peel off the UNCONSTRAINED_ARRAY_REF to get at the underlying
	     fat pointer, whose first field points to the array.  */
	  exp = TREE_OPERAND (exp, 0);
	  type = TREE_TYPE (exp);

	  if (TREE_CODE (exp) == COND_EXPR)
	    {
	      /* Distribute the dereference into both arms so each arm
		 yields the underlying array directly.  */
	      tree op1
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 1),
						       TYPE_FIELDS (type),
						       false));
	      tree op2
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 2),
						       TYPE_FIELDS (type),
						       false));

	      exp = build3 (COND_EXPR,
			    TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
			    TREE_OPERAND (exp, 0), op1, op2);
	    }
	  else
	    {
	      /* Dereference the array pointer field of the fat pointer and
		 carry over the flags saved from the original reference.  */
	      exp = build_unary_op (INDIRECT_REF, NULL_TREE,
				    build_component_ref (exp,
							 TYPE_FIELDS (type),
							 false));
	      TREE_READONLY (exp) = read_only;
	      TREE_THIS_NOTRAP (exp) = no_trap;
	    }
	}

      /* Propagate a NULL_EXPR down to the underlying array type.  */
      else if (code == NULL_EXPR)
	exp = build1 (NULL_EXPR,
		      TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
		      TREE_OPERAND (exp, 0));
      break;

    case RECORD_TYPE:
      /* If this is a padded type and it contains a template, convert to the
	 unpadded type first.  */
      if (TYPE_PADDING_P (type)
	  && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
	  && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
	{
	  exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	  code = TREE_CODE (exp);
	  type = TREE_TYPE (exp);
	}

      if (TYPE_CONTAINS_TEMPLATE_P (type))
	{
	  /* If the array initializer is a box, return NULL_TREE.  */
	  if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
	    return NULL_TREE;

	  /* The array is the second field, after the template.  */
	  exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
				     false);
	  type = TREE_TYPE (exp);

	  /* If the array type is padded, convert to the unpadded type.  */
	  if (TYPE_IS_PADDING_P (type))
	    exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	}
      break;

    default:
      break;
    }

  return exp;
}
4883 /* Return true if EXPR is an expression that can be folded as an operand
4884 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4886 static bool
4887 can_fold_for_view_convert_p (tree expr)
4889 tree t1, t2;
4891 /* The folder will fold NOP_EXPRs between integral types with the same
4892 precision (in the middle-end's sense). We cannot allow it if the
4893 types don't have the same precision in the Ada sense as well. */
4894 if (TREE_CODE (expr) != NOP_EXPR)
4895 return true;
4897 t1 = TREE_TYPE (expr);
4898 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4900 /* Defer to the folder for non-integral conversions. */
4901 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4902 return true;
4904 /* Only fold conversions that preserve both precisions. */
4905 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4906 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4907 return true;
4909 return false;
4912 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4913 If NOTRUNC_P is true, truncation operations should be suppressed.
4915 Special care is required with (source or target) integral types whose
4916 precision is not equal to their size, to make sure we fetch or assign
4917 the value bits whose location might depend on the endianness, e.g.
4919 Rmsize : constant := 8;
4920 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4922 type Bit_Array is array (1 .. Rmsize) of Boolean;
4923 pragma Pack (Bit_Array);
4925 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4927 Value : Int := 2#1000_0001#;
4928 Vbits : Bit_Array := To_Bit_Array (Value);
4930 we expect the 8 bits at Vbits'Address to always contain Value, while
4931 their original location depends on the endianness, at Value'Address
4932 on a little-endian architecture but not on a big-endian one.
4934 One pitfall is that we cannot use TYPE_UNSIGNED directly to decide how
4935 the bits between the precision and the size are filled, because of the
4936 trick used in the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.
4937 So we use the special predicate type_unsigned_for_rm above. */
tree
unchecked_convert (tree type, tree expr, bool notrunc_p)
{
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);
  tree tem;
  int c;

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if (((INTEGRAL_TYPE_P (type)
	|| (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
	|| (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
       && (INTEGRAL_TYPE_P (etype)
	   || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
	   || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
      || code == UNCONSTRAINED_ARRAY_TYPE)
    {
      /* If the input is biased, first view it as the same type without
	 the biased representation, so the value bits are reinterpreted
	 rather than converted.  */
      if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
	{
	  tree ntype = copy_type (etype);
	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (NOP_EXPR, ntype, expr);
	}

      /* Symmetrically, if the output is biased, convert to the unbiased
	 variant first, then reinterpret the result as the biased type.  */
      if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  tree rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	  expr = convert (rtype, expr);
	  expr = build1 (NOP_EXPR, type, expr);
	}
      else
	expr = convert (type, expr);
    }

  /* If we are converting to an integral type whose precision is not equal
     to its size, first unchecked convert to a record type that contains a
     field of the given precision.  Then extract the result from the field.

     There is a subtlety if the source type is an aggregate type with reverse
     storage order because its representation is not contiguous in the native
     storage order, i.e. a direct unchecked conversion to an integral type
     with N bits of precision cannot read the first N bits of the aggregate
     type.  To overcome it, we do an unchecked conversion to an integral type
     with reverse storage order and return the resulting value.  This also
     ensures that the result of the unchecked conversion doesn't depend on
     the endianness of the target machine, but only on the storage order of
     the aggregate type.

     Finally, for the sake of consistency, we do the unchecked conversion
     to an integral type with reverse storage order as soon as the source
     type is an aggregate type with reverse storage order, even if there
     are no considerations of precision or size involved.  */
  else if (INTEGRAL_TYPE_P (type)
	   && TYPE_RM_SIZE (type)
	   && (tree_int_cst_compare (TYPE_RM_SIZE (type),
				     TYPE_SIZE (type)) < 0
	       || (AGGREGATE_TYPE_P (etype)
		   && TYPE_REVERSE_STORAGE_ORDER (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
      tree field_type, field;

      if (AGGREGATE_TYPE_P (etype))
	TYPE_REVERSE_STORAGE_ORDER (rec_type)
	  = TYPE_REVERSE_STORAGE_ORDER (etype);

      /* The field's signedness must follow type_unsigned_for_rm, not
	 TYPE_UNSIGNED, because of the trick used for biased subtypes in
	 gnat_to_gnu_entity (see the function's header comment).  */
      if (type_unsigned_for_rm (type))
	field_type = make_unsigned_type (prec);
      else
	field_type = make_signed_type (prec);
      SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
				 NULL_TREE, bitsize_zero_node, 1, 0);

      finish_record_type (rec_type, field, 1, false);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, field, false);
      expr = fold_build1 (NOP_EXPR, type, expr);
    }

  /* Similarly if we are converting from an integral type whose precision is
     not equal to its size, first copy into a field of the given precision
     and unchecked convert the record type.

     The same considerations as above apply if the target type is an aggregate
     type with reverse storage order and we also proceed similarly.  */
  else if (INTEGRAL_TYPE_P (etype)
	   && TYPE_RM_SIZE (etype)
	   && (tree_int_cst_compare (TYPE_RM_SIZE (etype),
				     TYPE_SIZE (etype)) < 0
	       || (AGGREGATE_TYPE_P (type)
		   && TYPE_REVERSE_STORAGE_ORDER (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, 1);
      tree field_type, field;

      if (AGGREGATE_TYPE_P (type))
	TYPE_REVERSE_STORAGE_ORDER (rec_type)
	  = TYPE_REVERSE_STORAGE_ORDER (type);

      if (type_unsigned_for_rm (etype))
	field_type = make_unsigned_type (prec);
      else
	field_type = make_signed_type (prec);
      SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
				 NULL_TREE, bitsize_zero_node, 1, 0);

      finish_record_type (rec_type, field, 1, false);

      /* Build a one-field constructor holding the value, then convert the
	 whole record to the target type.  */
      expr = fold_build1 (NOP_EXPR, field_type, expr);
      CONSTRUCTOR_APPEND_ELT (v, field, expr);
      expr = gnat_build_constructor (rec_type, v);
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting from a scalar type to a type with a different size,
     we need to pad to have the same size on both sides.

     ??? We cannot do it unconditionally because unchecked conversions are
     used liberally by the front-end to implement polymorphism, e.g. in:

       S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
       return p___size__4 (p__object!(S191s.all));

     so we skip all expressions that are references.  */
  else if (!REFERENCE_CLASS_P (expr)
	   && !AGGREGATE_TYPE_P (etype)
	   && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	   && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
    {
      if (c < 0)
	{
	  /* Source is smaller: pad it up to the target size first.  */
	  expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
					  false, false, false, true),
			  expr);
	  expr = unchecked_convert (type, expr, notrunc_p);
	}
      else
	{
	  /* Source is larger: convert to a padded version of the target
	     and extract the field holding the actual value.  */
	  tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
					  false, false, false, true);
	  expr = unchecked_convert (rec_type, expr, notrunc_p);
	  expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
	}
    }

  /* We have a special case when we are converting between two unconstrained
     array types.  In that case, take the address, convert the fat pointer
     types, and dereference.  */
  else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));

  /* Another special case is when we are converting to a vector type from its
     representative array type; this is a regular conversion.  */
  else if (code == VECTOR_TYPE
	   && ecode == ARRAY_TYPE
	   && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
				       etype))
    expr = convert (type, expr);

  /* And, if the array type is not the representative, we try to build an
     intermediate vector type of which the array type is the representative
     and to do the unchecked conversion between the vector types, in order
     to enable further simplifications in the middle-end.  */
  else if (code == VECTOR_TYPE
	   && ecode == ARRAY_TYPE
	   && (tem = build_vector_type_for_array (etype, NULL_TREE)))
    {
      expr = convert (tem, expr);
      return unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
     the alignment of the CONSTRUCTOR to speed up the copy operation.  */
  else if (TREE_CODE (expr) == CONSTRUCTOR
	   && code == RECORD_TYPE
	   && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
    {
      expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
				      Empty, false, false, false, true),
		      expr);
      return unchecked_convert (type, expr, notrunc_p);
    }

  /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression.  */
  else
    {
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      if (can_fold_for_view_convert_p (expr))
	expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
      else
	expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is a non-biased integral type whose precision is not equal
     to its size, sign- or zero-extend the result.  But we need not do this
     if the input is also an integral type and both are unsigned or both are
     signed and have the same precision.  */
  if (!notrunc_p
      && INTEGRAL_TYPE_P (type)
      && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
      && TYPE_RM_SIZE (type)
      && tree_int_cst_compare (TYPE_RM_SIZE (type), TYPE_SIZE (type)) < 0
      && !(INTEGRAL_TYPE_P (etype)
	   && type_unsigned_for_rm (type) == type_unsigned_for_rm (etype)
	   && (type_unsigned_for_rm (type)
	       || tree_int_cst_compare (TYPE_RM_SIZE (type),
					TYPE_RM_SIZE (etype)
					? TYPE_RM_SIZE (etype)
					: TYPE_SIZE (etype)) == 0)))
    {
      /* Extend via a left-then-right shift pair in a type of full size,
	 which moves the value bits into place regardless of endianness.  */
      tree base_type
	= gnat_type_for_size (TREE_INT_CST_LOW (TYPE_SIZE (type)),
			      type_unsigned_for_rm (type));
      tree shift_expr
	= convert (base_type,
		   size_binop (MINUS_EXPR,
			       TYPE_SIZE (type), TYPE_RM_SIZE (type)));
      expr
	= convert (type,
		   build_binary_op (RSHIFT_EXPR, base_type,
				    build_binary_op (LSHIFT_EXPR, base_type,
						     convert (base_type, expr),
						     shift_expr),
				    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
			   OEP_ONLY_CONST))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
5206 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5207 the latter being a record type as predicated by Is_Record_Type. */
enum tree_code
tree_code_for_record_type (Entity_Id gnat_type)
{
  Node_Id component_list, component;

  /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
     fields are all in the variant part.  Otherwise, return RECORD_TYPE.  */
  if (!Is_Unchecked_Union (gnat_type))
    return RECORD_TYPE;

  gnat_type = Implementation_Base_Type (gnat_type);
  component_list
    = Component_List (Type_Definition (Declaration_Node (gnat_type)));

  /* Any regular component outside the variant part forces RECORD_TYPE,
     since a UNION_TYPE could not represent its fixed position.  */
  for (component = First_Non_Pragma (Component_Items (component_list));
       Present (component);
       component = Next_Non_Pragma (component))
    if (Ekind (Defining_Entity (component)) == E_Component)
      return RECORD_TYPE;

  return UNION_TYPE;
}
5232 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5233 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5234 according to the presence of an alignment clause on the type or, if it
5235 is an array, on the component type. */
5237 bool
5238 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5240 gnat_type = Underlying_Type (gnat_type);
5242 *align_clause = Present (Alignment_Clause (gnat_type));
5244 if (Is_Array_Type (gnat_type))
5246 gnat_type = Underlying_Type (Component_Type (gnat_type));
5247 if (Present (Alignment_Clause (gnat_type)))
5248 *align_clause = true;
5251 if (!Is_Floating_Point_Type (gnat_type))
5252 return false;
5254 if (UI_To_Int (Esize (gnat_type)) != 64)
5255 return false;
5257 return true;
5260 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5261 size is greater or equal to 64 bits, or an array of such a type. Set
5262 ALIGN_CLAUSE according to the presence of an alignment clause on the
5263 type or, if it is an array, on the component type. */
5265 bool
5266 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5268 gnat_type = Underlying_Type (gnat_type);
5270 *align_clause = Present (Alignment_Clause (gnat_type));
5272 if (Is_Array_Type (gnat_type))
5274 gnat_type = Underlying_Type (Component_Type (gnat_type));
5275 if (Present (Alignment_Clause (gnat_type)))
5276 *align_clause = true;
5279 if (!Is_Scalar_Type (gnat_type))
5280 return false;
5282 if (UI_To_Int (Esize (gnat_type)) < 64)
5283 return false;
5285 return true;
5288 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5289 component of an aggregate type. */
5291 bool
5292 type_for_nonaliased_component_p (tree gnu_type)
5294 /* If the type is passed by reference, we may have pointers to the
5295 component so it cannot be made non-aliased. */
5296 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5297 return false;
5299 /* We used to say that any component of aggregate type is aliased
5300 because the front-end may take 'Reference of it. The front-end
5301 has been enhanced in the meantime so as to use a renaming instead
5302 in most cases, but the back-end can probably take the address of
5303 such a component too so we go for the conservative stance.
5305 For instance, we might need the address of any array type, even
5306 if normally passed by copy, to construct a fat pointer if the
5307 component is used as an actual for an unconstrained formal.
5309 Likewise for record types: even if a specific record subtype is
5310 passed by copy, the parent type might be passed by ref (e.g. if
5311 it's of variable size) and we might take the address of a child
5312 component to pass to a parent formal. We have no way to check
5313 for such conditions here. */
5314 if (AGGREGATE_TYPE_P (gnu_type))
5315 return false;
5317 return true;
5320 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5322 bool
5323 smaller_form_type_p (tree type, tree orig_type)
5325 tree size, osize;
5327 /* We're not interested in variants here. */
5328 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5329 return false;
5331 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5332 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5333 return false;
5335 size = TYPE_SIZE (type);
5336 osize = TYPE_SIZE (orig_type);
5338 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5339 return false;
5341 return tree_int_cst_lt (size, osize) != 0;
5344 /* Perform final processing on global declarations. */
/* Dummy global variable used to register the types declared as used at
   the global level in the types_used_by_vars hash table.  */
static GTY (()) tree dummy_global;

void
gnat_write_global_declarations (void)
{
  unsigned int i;
  tree iter;

  /* If we have declared types as used at the global level, insert them in
     the global hash table.  We use a dummy variable for this purpose, but
     we need to build it unconditionally to avoid -fcompare-debug issues.  */
  if (first_global_object_name)
    {
      struct varpool_node *node;
      char *label;

      ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
      dummy_global
	= build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
		      void_type_node);
      /* Make the dummy a forced static hard-register definition so the
	 varpool keeps it without actually emitting storage for it.  */
      DECL_HARD_REGISTER (dummy_global) = 1;
      TREE_STATIC (dummy_global) = 1;
      node = varpool_node::get_create (dummy_global);
      node->definition = 1;
      node->force_output = 1;

      /* Drain the pending list of used types, attaching each of them to
	 the dummy variable in the types_used_by_vars table.  */
      if (types_used_by_cur_var_decl)
	while (!types_used_by_cur_var_decl->is_empty ())
	  {
	    tree t = types_used_by_cur_var_decl->pop ();
	    types_used_by_var_decl_insert (t, dummy_global);
	  }
    }

  /* Output debug information for all global type declarations first.  This
     ensures that global types whose compilation hasn't been finalized yet,
     for example pointers to Taft amendment types, have their compilation
     finalized in the right context.  */
  FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
    if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
      debug_hooks->type_decl (iter, false);

  /* Then output the global variables.  We need to do that after the debug
     information for global types is emitted so that they are finalized.  */
  FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
    if (TREE_CODE (iter) == VAR_DECL)
      rest_of_decl_compilation (iter, true, 0);

  /* Output the imported modules/declarations.  In GNAT, these only
     materialize subprograms.  */
  FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
    if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
      debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
					    DECL_CONTEXT (iter), 0);
}
5402 /* ************************************************************************
5403 * * GCC builtins support *
5404 * ************************************************************************ */
5406 /* The general scheme is fairly simple:
5408 For each builtin function/type to be declared, gnat_install_builtins calls
5409 internal facilities which eventually get to gnat_pushdecl, which in turn
5410 tracks the so declared builtin function decls in the 'builtin_decls' global
5411 datastructure. When an Intrinsic subprogram declaration is processed, we
5412 search this global datastructure to retrieve the associated BUILT_IN DECL
5413 node. */
5415 /* Search the chain of currently available builtin declarations for a node
5416 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5417 found, if any, or NULL_TREE otherwise. */
5418 tree
5419 builtin_decl_for (tree name)
5421 unsigned i;
5422 tree decl;
5424 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5425 if (DECL_NAME (decl) == name)
5426 return decl;
5428 return NULL_TREE;
5431 /* The code below eventually exposes gnat_install_builtins, which declares
5432 the builtin types and functions we might need, either internally or as
5433 user accessible facilities.
5435 ??? This is a first implementation shot, still in rough shape. It is
5436 heavily inspired from the "C" family implementation, with chunks copied
5437 verbatim from there.
5439 Two obvious improvement candidates are:
5440 o Use a more efficient name/decl mapping scheme
5441 o Devise a middle-end infrastructure to avoid having to copy
5442 pieces between front-ends. */
5444 /* ----------------------------------------------------------------------- *
5445 * BUILTIN ELEMENTARY TYPES *
5446 * ----------------------------------------------------------------------- */
/* Standard data types to be used in builtin argument declarations.  */

enum c_tree_index
{
    CTI_SIGNED_SIZE_TYPE, /* For format checking only.  */
    CTI_STRING_TYPE,
    CTI_CONST_STRING_TYPE,

    CTI_MAX
};

/* Trees indexed by c_tree_index, accessed through the macros below.  */
static tree c_global_trees[CTI_MAX];

#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
#define string_type_node c_global_trees[CTI_STRING_TYPE]
#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]

/* ??? In addition to some attribute handlers, we currently don't support a
   (small) number of builtin-types, which in turn inhibits support for a
   number of builtin functions.  Mapping them to void_type_node makes the
   corresponding builtin-types.def entries harmlessly degenerate.  */
#define wint_type_node    void_type_node
#define intmax_type_node  void_type_node
#define uintmax_type_node void_type_node
5472 /* Used to help initialize the builtin-types.def table. When a type of
5473 the correct size doesn't exist, use error_mark_node instead of NULL.
5474 The later results in segfaults even when a decl using the type doesn't
5475 get invoked. */
5477 static tree
5478 builtin_type_for_size (int size, bool unsignedp)
5480 tree type = gnat_type_for_size (size, unsignedp);
5481 return type ? type : error_mark_node;
/* Build/push the elementary type decls that builtin functions/types
   will need.  */

static void
install_builtin_elementary_types (void)
{
  /* Signed counterpart of size_t; per the enum above, used for format
     checking only.  */
  signed_size_type_node = gnat_signed_type_for (size_type_node);
  /* Plain int stands in for pid_type_node (presumably pid_t in builtin
     prototypes — confirm against builtin-types.def).  */
  pid_type_node = integer_type_node;

  /* "char *" and "const char *" for string arguments in prototypes.  */
  string_type_node = build_pointer_type (char_type_node);
  const_string_type_node
    = build_pointer_type (build_qualified_type
			  (char_type_node, TYPE_QUAL_CONST));
}
5499 /* ----------------------------------------------------------------------- *
5500 * BUILTIN FUNCTION TYPES *
5501 * ----------------------------------------------------------------------- */
/* Now, builtin function types per se.  */

enum c_builtin_type
{
  /* X-macro scheme: each DEF_* macro expands to just the enumerator NAME,
     so the #include of builtin-types.def below enumerates every builtin
     type known to the middle-end.  */
#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6) NAME,
#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7) NAME,
#define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7, ARG8) NAME,
#define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7, ARG8, ARG9) NAME,
#define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			     ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
#define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			     ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
				NAME,
#define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				ARG6) NAME,
#define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				ARG6, ARG7) NAME,
#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
#include "builtin-types.def"
#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_8
#undef DEF_FUNCTION_TYPE_9
#undef DEF_FUNCTION_TYPE_10
#undef DEF_FUNCTION_TYPE_11
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_FUNCTION_TYPE_VAR_6
#undef DEF_FUNCTION_TYPE_VAR_7
#undef DEF_POINTER_TYPE
  BT_LAST
};

typedef enum c_builtin_type builtin_type;

/* A temporary array used in communication with def_fn_type.  */
static GTY(()) tree builtin_types[(int) BT_LAST + 1];
/* A helper function for install_builtin_types.  Build function type
   for DEF with return type RET and N arguments.  If VAR is true, then the
   function should be variadic after those N arguments.

   Takes special care not to ICE if any of the types involved are
   error_mark_node, which indicates that said type is not in fact available
   (see builtin_type_for_size).  In which case the function type as a whole
   should be error_mark_node.  */

static void
def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
{
  tree t;
  tree *args = XALLOCAVEC (tree, n);
  va_list list;
  int i;

  va_start (list, n);
  for (i = 0; i < n; ++i)
    {
      /* Enum arguments are promoted to int through "...", hence the
	 va_arg (list, int) with a cast back to builtin_type.  */
      builtin_type a = (builtin_type) va_arg (list, int);
      t = builtin_types[a];
      if (t == error_mark_node)
	goto egress;
      args[i] = t;
    }

  t = builtin_types[ret];
  if (t == error_mark_node)
    goto egress;
  if (var)
    t = build_varargs_function_type_array (t, n, args);
  else
    t = build_function_type_array (t, n, args);

 egress:
  /* On the error path t is error_mark_node, so the whole entry is marked
     unavailable rather than crashing later.  */
  builtin_types[def] = t;
  va_end (list);
}
/* Build the builtin function types and install them in the builtin_types
   array for later use in builtin function decls.  */

static void
install_builtin_function_types (void)
{
  /* Set up before the #include since entries in builtin-types.def may
     refer to these locals — TODO confirm against that file.  */
  tree va_list_ref_type_node;
  tree va_list_arg_type_node;

  /* When va_list is an array, a parameter of that type decays to a pointer
     to its element type; otherwise pass it by value and take references
     explicitly.  */
  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
    {
      va_list_arg_type_node = va_list_ref_type_node =
	build_pointer_type (TREE_TYPE (va_list_type_node));
    }
  else
    {
      va_list_arg_type_node = va_list_type_node;
      va_list_ref_type_node = build_reference_type (va_list_type_node);
    }

  /* Each DEF_* macro below builds one entry of builtin_types, delegating
     to def_fn_type for the function types (third argument: variadic).  */
#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
  builtin_types[ENUM] = VALUE;
#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
  def_fn_type (ENUM, RETURN, 0, 0);
#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
  def_fn_type (ENUM, RETURN, 0, 1, ARG1);
#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
  def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
  def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
  def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6)					\
  def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7)					\
  def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
#define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7, ARG8)				\
  def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6,	\
	       ARG7, ARG8);
#define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7, ARG8, ARG9)			\
  def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6,	\
	       ARG7, ARG8, ARG9);
#define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
			     ARG6, ARG7, ARG8, ARG9, ARG10)		\
  def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
	       ARG7, ARG8, ARG9, ARG10);
#define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
			     ARG6, ARG7, ARG8, ARG9, ARG10, ARG11)	\
  def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
	       ARG7, ARG8, ARG9, ARG10, ARG11);
#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
  def_fn_type (ENUM, RETURN, 1, 0);
#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
  def_fn_type (ENUM, RETURN, 1, 1, ARG1);
#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
  def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
  def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
  def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				ARG6)					\
  def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
#define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				ARG6, ARG7)				\
  def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
#define DEF_POINTER_TYPE(ENUM, TYPE) \
  builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);

#include "builtin-types.def"

#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_8
#undef DEF_FUNCTION_TYPE_9
#undef DEF_FUNCTION_TYPE_10
#undef DEF_FUNCTION_TYPE_11
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_FUNCTION_TYPE_VAR_6
#undef DEF_FUNCTION_TYPE_VAR_7
#undef DEF_POINTER_TYPE
  builtin_types[(int) BT_LAST] = NULL_TREE;
}
5713 /* ----------------------------------------------------------------------- *
5714 * BUILTIN ATTRIBUTES *
5715 * ----------------------------------------------------------------------- */
enum built_in_attribute
{
  /* Same X-macro scheme as for builtin types: each DEF_ATTR_* macro yields
     only the enumerator, and builtin-attrs.def provides the list.  */
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
  ATTR_LAST
};

/* Attribute trees, indexed by built_in_attribute, built by
   install_builtin_attributes.  */
static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
/* Fill in the built_in_attributes array from builtin-attrs.def: integers,
   strings and identifiers become the corresponding trees, and TREE_LIST
   entries chain previously-built elements together.  */

static void
install_builtin_attributes (void)
{
  /* Fill in the built_in_attributes array.  */
#define DEF_ATTR_NULL_TREE(ENUM)				\
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_STRING(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING)				\
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN)	\
  built_in_attributes[(int) ENUM]			\
    = tree_cons (built_in_attributes[(int) PURPOSE],	\
		 built_in_attributes[(int) VALUE],	\
		 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
}
5760 /* Handle a "const" attribute; arguments as in
5761 struct attribute_spec.handler. */
5763 static tree
5764 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5765 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5766 bool *no_add_attrs)
5768 if (TREE_CODE (*node) == FUNCTION_DECL)
5769 TREE_READONLY (*node) = 1;
5770 else
5771 *no_add_attrs = true;
5773 return NULL_TREE;
5776 /* Handle a "nothrow" attribute; arguments as in
5777 struct attribute_spec.handler. */
5779 static tree
5780 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5781 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5782 bool *no_add_attrs)
5784 if (TREE_CODE (*node) == FUNCTION_DECL)
5785 TREE_NOTHROW (*node) = 1;
5786 else
5787 *no_add_attrs = true;
5789 return NULL_TREE;
5792 /* Handle a "pure" attribute; arguments as in
5793 struct attribute_spec.handler. */
5795 static tree
5796 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5797 int ARG_UNUSED (flags), bool *no_add_attrs)
5799 if (TREE_CODE (*node) == FUNCTION_DECL)
5800 DECL_PURE_P (*node) = 1;
5801 /* TODO: support types. */
5802 else
5804 warning (OPT_Wattributes, "%qs attribute ignored",
5805 IDENTIFIER_POINTER (name));
5806 *no_add_attrs = true;
5809 return NULL_TREE;
/* Handle a "no vops" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
			 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			 bool *ARG_UNUSED (no_add_attrs))
{
  /* Unlike the other handlers, a non-function here is treated as an
     internal error (hard assert) rather than a user-level warning.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
  DECL_IS_NOVOPS (*node) = 1;
  return NULL_TREE;
}
5825 /* Helper for nonnull attribute handling; fetch the operand number
5826 from the attribute argument list. */
5828 static bool
5829 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5831 /* Verify the arg number is a constant. */
5832 if (!tree_fits_uhwi_p (arg_num_expr))
5833 return false;
5835 *valp = TREE_INT_CST_LOW (arg_num_expr);
5836 return true;
5839 /* Handle the "nonnull" attribute. */
5840 static tree
5841 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5842 tree args, int ARG_UNUSED (flags),
5843 bool *no_add_attrs)
5845 tree type = *node;
5846 unsigned HOST_WIDE_INT attr_arg_num;
5848 /* If no arguments are specified, all pointer arguments should be
5849 non-null. Verify a full prototype is given so that the arguments
5850 will have the correct types when we actually check them later.
5851 Avoid diagnosing type-generic built-ins since those have no
5852 prototype. */
5853 if (!args)
5855 if (!prototype_p (type)
5856 && (!TYPE_ATTRIBUTES (type)
5857 || !lookup_attribute ("type generic", TYPE_ATTRIBUTES (type))))
5859 error ("nonnull attribute without arguments on a non-prototype");
5860 *no_add_attrs = true;
5862 return NULL_TREE;
5865 /* Argument list specified. Verify that each argument number references
5866 a pointer argument. */
5867 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5869 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5871 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5873 error ("nonnull argument has invalid operand number (argument %lu)",
5874 (unsigned long) attr_arg_num);
5875 *no_add_attrs = true;
5876 return NULL_TREE;
5879 if (prototype_p (type))
5881 function_args_iterator iter;
5882 tree argument;
5884 function_args_iter_init (&iter, type);
5885 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
5887 argument = function_args_iter_cond (&iter);
5888 if (!argument || ck_num == arg_num)
5889 break;
5892 if (!argument
5893 || TREE_CODE (argument) == VOID_TYPE)
5895 error ("nonnull argument with out-of-range operand number "
5896 "(argument %lu, operand %lu)",
5897 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5898 *no_add_attrs = true;
5899 return NULL_TREE;
5902 if (TREE_CODE (argument) != POINTER_TYPE)
5904 error ("nonnull argument references non-pointer operand "
5905 "(argument %lu, operand %lu)",
5906 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5907 *no_add_attrs = true;
5908 return NULL_TREE;
5913 return NULL_TREE;
/* Handle a "sentinel" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_sentinel_attribute (tree *node, tree name, tree args,
			   int ARG_UNUSED (flags), bool *no_add_attrs)
{
  /* A sentinel only makes sense on a variadic prototype.  */
  if (!prototype_p (*node))
    {
      warning (OPT_Wattributes,
	       "%qs attribute requires prototypes with named arguments",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }
  else
    {
      if (!stdarg_p (*node))
	{
	  warning (OPT_Wattributes,
		   "%qs attribute only applies to variadic functions",
		   IDENTIFIER_POINTER (name));
	  *no_add_attrs = true;
	}
    }

  /* An optional argument gives the position of the sentinel counted
     backwards from the end; it must be a non-negative integer constant.  */
  if (args)
    {
      tree position = TREE_VALUE (args);

      if (TREE_CODE (position) != INTEGER_CST)
	{
	  warning (0, "requested position is not an integer constant");
	  *no_add_attrs = true;
	}
      else
	{
	  if (tree_int_cst_lt (position, integer_zero_node))
	    {
	      warning (0, "requested position is less than zero");
	      *no_add_attrs = true;
	    }
	}
    }

  return NULL_TREE;
}
/* Handle a "noreturn" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			   int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = TREE_TYPE (*node);

  /* See FIXME comment in c_common_attribute_table.  */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_THIS_VOLATILE (*node) = 1;
  /* For a pointer-to-function, rebuild the pointee type with the
     "volatile" qualifier, preserving its existing "const" qualifier.  */
  else if (TREE_CODE (type) == POINTER_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
    TREE_TYPE (*node)
      = build_pointer_type
	(build_type_variant (TREE_TYPE (type),
			     TYPE_READONLY (TREE_TYPE (type)), 1));
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
5990 /* Handle a "noinline" attribute; arguments as in
5991 struct attribute_spec.handler. */
5993 static tree
5994 handle_noinline_attribute (tree *node, tree name,
5995 tree ARG_UNUSED (args),
5996 int ARG_UNUSED (flags), bool *no_add_attrs)
5998 if (TREE_CODE (*node) == FUNCTION_DECL)
6000 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
6002 warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6003 "with attribute %qs", name, "always_inline");
6004 *no_add_attrs = true;
6006 else
6007 DECL_UNINLINABLE (*node) = 1;
6009 else
6011 warning (OPT_Wattributes, "%qE attribute ignored", name);
6012 *no_add_attrs = true;
6015 return NULL_TREE;
6018 /* Handle a "noclone" attribute; arguments as in
6019 struct attribute_spec.handler. */
6021 static tree
6022 handle_noclone_attribute (tree *node, tree name,
6023 tree ARG_UNUSED (args),
6024 int ARG_UNUSED (flags), bool *no_add_attrs)
6026 if (TREE_CODE (*node) != FUNCTION_DECL)
6028 warning (OPT_Wattributes, "%qE attribute ignored", name);
6029 *no_add_attrs = true;
6032 return NULL_TREE;
/* Handle a "leaf" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
		       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }
  /* Deliberately not "else if": a non-public function also gets the
     "no effect" diagnostic below.  */
  if (!TREE_PUBLIC (*node))
    {
      warning (OPT_Wattributes, "%qE attribute has no effect", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
6056 /* Handle a "always_inline" attribute; arguments as in
6057 struct attribute_spec.handler. */
6059 static tree
6060 handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6061 int ARG_UNUSED (flags), bool *no_add_attrs)
6063 if (TREE_CODE (*node) == FUNCTION_DECL)
6065 /* Set the attribute and mark it for disregarding inline limits. */
6066 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
6068 else
6070 warning (OPT_Wattributes, "%qE attribute ignored", name);
6071 *no_add_attrs = true;
6074 return NULL_TREE;
6077 /* Handle a "malloc" attribute; arguments as in
6078 struct attribute_spec.handler. */
6080 static tree
6081 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6082 int ARG_UNUSED (flags), bool *no_add_attrs)
6084 if (TREE_CODE (*node) == FUNCTION_DECL
6085 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6086 DECL_IS_MALLOC (*node) = 1;
6087 else
6089 warning (OPT_Wattributes, "%qs attribute ignored",
6090 IDENTIFIER_POINTER (name));
6091 *no_add_attrs = true;
6094 return NULL_TREE;
/* Fake handler for attributes we don't properly support.  Accepts the
   attribute silently without recording anything.  */

tree
fake_attribute_handler (tree * ARG_UNUSED (node),
			tree ARG_UNUSED (name),
			tree ARG_UNUSED (args),
			int ARG_UNUSED (flags),
			bool * ARG_UNUSED (no_add_attrs))
{
  return NULL_TREE;
}
/* Handle a "type_generic" attribute; arguments as in
   struct attribute_spec.handler.  Only sanity checks are performed: the
   attribute is applied internally, never by user code.  */

static tree
handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
			       tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			       bool * ARG_UNUSED (no_add_attrs))
{
  /* Ensure we have a function type.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);

  /* Ensure we have a variadic function.  */
  gcc_assert (!prototype_p (*node) || stdarg_p (*node));

  return NULL_TREE;
}
/* Handle a "vector_size" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_vector_size_attribute (tree *node, tree name, tree args,
			      int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = *node;
  tree vector_type;

  /* The vector type replaces *NODE directly; the attribute itself is
     never recorded.  */
  *no_add_attrs = true;

  /* We need to provide for vector pointers, vector arrays, and
     functions returning vectors.  For example:

       __attribute__((vector_size(16))) short *foo;

     In this case, the mode is SI, but the type being modified is
     HI, so we need to look further.  */
  while (POINTER_TYPE_P (type)
	 || TREE_CODE (type) == FUNCTION_TYPE
	 || TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
  if (!vector_type)
    return NULL_TREE;

  /* Build back pointers if needed.  */
  *node = reconstruct_complex_type (*node, vector_type);

  return NULL_TREE;
}
/* Handle a "vector_type" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			      int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = *node;
  tree vector_type;

  /* The vector type replaces *NODE directly; the attribute itself is
     never recorded.  */
  *no_add_attrs = true;

  if (TREE_CODE (type) != ARRAY_TYPE)
    {
      error ("attribute %qs applies to array types only",
	     IDENTIFIER_POINTER (name));
      return NULL_TREE;
    }

  vector_type = build_vector_type_for_array (type, name);
  if (!vector_type)
    return NULL_TREE;

  /* Remember the original array type so debug info can refer back to it.
     (NOTE(review): inferred from the accessor name — confirm.)  */
  TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
  *node = vector_type;

  return NULL_TREE;
}
/* Return whether EXPR, which is the renamed object in an object renaming
   declaration, can be materialized as a reference (REFERENCE_TYPE).  This
   should be synchronized with Exp_Dbug.Debug_Renaming_Declaration.  */

bool
can_materialize_object_renaming_p (Node_Id expr)
{
  /* Walk down the prefix chain of the renamed object, rejecting any
     construct whose address cannot be taken directly.  */
  while (true)
    switch (Nkind (expr))
      {
      case N_Identifier:
      case N_Expanded_Name:
	/* A plain name can always be materialized.  */
	return true;

      case N_Selected_Component:
	{
	  /* No reference into a packed record.  */
	  if (Is_Packed (Underlying_Type (Etype (Prefix (expr)))))
	    return false;

	  /* Reject a component whose normalized first bit is not
	     representable as an Int, or is known and nonzero (i.e. the
	     component does not start on a storage-unit boundary).  */
	  const Uint bitpos
	    = Normalized_First_Bit (Entity (Selector_Name (expr)));
	  if (!UI_Is_In_Int_Range (bitpos)
	      || (bitpos != UI_No_Uint && bitpos != UI_From_Int (0)))
	    return false;

	  expr = Prefix (expr);
	  break;
	}

      case N_Indexed_Component:
      case N_Slice:
	{
	  const Entity_Id t = Underlying_Type (Etype (Prefix (expr)));

	  /* No reference into an array with a packed implementation type.  */
	  if (Is_Array_Type (t) && Present (Packed_Array_Impl_Type (t)))
	    return false;

	  expr = Prefix (expr);
	  break;
	}

      case N_Explicit_Dereference:
	expr = Prefix (expr);
	break;

      default:
	return true;
      }
}
6240 /* ----------------------------------------------------------------------- *
6241 * BUILTIN FUNCTIONS *
6242 * ----------------------------------------------------------------------- */
/* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
   names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
   if nonansi_p and flag_no_nonansi_builtin.  */

static void
def_builtin_1 (enum built_in_function fncode,
	       const char *name,
	       enum built_in_class fnclass,
	       tree fntype, tree libtype,
	       bool both_p, bool fallback_p,
	       bool nonansi_p ATTRIBUTE_UNUSED,
	       tree fnattrs, bool implicit_p)
{
  tree decl;
  const char *libname;

  /* Preserve an already installed decl.  It most likely was setup in advance
     (e.g. as part of the internal builtins) for specific reasons.  */
  if (builtin_decl_explicit (fncode))
    return;

  /* The libname manipulation below only makes sense for names carrying
     the "__builtin_" prefix.  */
  gcc_assert ((!both_p && !fallback_p)
	      || !strncmp (name, "__builtin_",
			   strlen ("__builtin_")));

  /* LIBNAME is NAME with the "__builtin_" prefix stripped; with FALLBACK_P
     it also serves as the library fallback name of the builtin decl.  */
  libname = name + strlen ("__builtin_");
  decl = add_builtin_function (name, fntype, fncode, fnclass,
			       (fallback_p ? libname : NULL),
			       fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada.  */
    add_builtin_function (libname, libtype, fncode, fnclass,
			  NULL, fnattrs);

  set_builtin_decl (fncode, decl, implicit_p);
}
/* C-dialect flags, presumably referenced by COND expressions coming from
   builtins.def (included below) — confirm against that file.  Ada has no
   notion of C standard levels, so they stay 0.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
static int flag_isoc11 = 0;
/* Install what the common builtins.def offers.  Each DEF_BUILTIN entry
   with a non-null NAME and a true COND gets declared via def_builtin_1,
   using the previously built builtin_types and built_in_attributes.  */

static void
install_builtin_functions (void)
{
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
		    NONANSI_P, ATTRS, IMPLICIT, COND)			\
  if (NAME && COND)							\
    def_builtin_1 (ENUM, NAME, CLASS,					\
		   builtin_types[(int) TYPE],				\
		   builtin_types[(int) LIBTYPE],			\
		   BOTH_P, FALLBACK_P, NONANSI_P,			\
		   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
}
6302 /* ----------------------------------------------------------------------- *
6303 * BUILTIN FUNCTIONS *
6304 * ----------------------------------------------------------------------- */
/* Install the builtin functions we might need.  Entry point called by the
   front-end; sequencing matters: types and attributes must exist before
   any builtin decl is created.  */

void
gnat_install_builtins (void)
{
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}
6327 #include "gt-ada-utils.h"
6328 #include "gtype-ada.h"