Fix fallthrough comments for Ada, Solaris/x86, SPARC
[official-gcc.git] / gcc / ada / gcc-interface / utils.c
blob4d61f52e0e46e082de6ac158576c67639c5db630
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2016, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "target.h"
30 #include "function.h"
31 #include "tree.h"
32 #include "stringpool.h"
33 #include "cgraph.h"
34 #include "diagnostic.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "toplev.h"
41 #include "output.h"
42 #include "debug.h"
43 #include "convert.h"
44 #include "common/common-target.h"
45 #include "langhooks.h"
46 #include "tree-dump.h"
47 #include "tree-inline.h"
49 #include "ada.h"
50 #include "types.h"
51 #include "atree.h"
52 #include "nlists.h"
53 #include "uintp.h"
54 #include "fe.h"
55 #include "sinfo.h"
56 #include "einfo.h"
57 #include "ada-tree.h"
58 #include "gigi.h"
60 /* If nonzero, pretend we are allocating at global level. */
61 int force_global;
63 /* The default alignment of "double" floating-point types, i.e. floating
64 point types whose size is equal to 64 bits, or 0 if this alignment is
65 not specifically capped. */
66 int double_float_alignment;
68 /* The default alignment of "double" or larger scalar types, i.e. scalar
69 types whose size is greater or equal to 64 bits, or 0 if this alignment
70 is not specifically capped. */
71 int double_scalar_alignment;
73 /* True if floating-point arithmetics may use wider intermediate results. */
74 bool fp_arith_may_widen = true;
76 /* Tree nodes for the various types and decls we create. */
77 tree gnat_std_decls[(int) ADT_LAST];
79 /* Functions to call for each of the possible raise reasons. */
80 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
82 /* Likewise, but with extra info for each of the possible raise reasons. */
83 tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
85 /* Forward declarations for handlers of attributes. */
86 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_noinline_attribute (tree *, tree, tree, int, bool *);
94 static tree handle_noclone_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
97 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
98 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
99 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
100 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
102 /* Fake handler for attributes we don't properly support, typically because
103 they'd require dragging a lot of the common-c front-end circuitry. */
104 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
106 /* Table of machine-independent internal attributes for Ada. We support
107 this minimal set of attributes to accommodate the needs of builtins. */
108 const struct attribute_spec gnat_internal_attribute_table[] =
110 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
111 affects_type_identity } */
112 { "const", 0, 0, true, false, false, handle_const_attribute,
113 false },
114 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute,
115 false },
116 { "pure", 0, 0, true, false, false, handle_pure_attribute,
117 false },
118 { "no vops", 0, 0, true, false, false, handle_novops_attribute,
119 false },
120 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute,
121 false },
122 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute,
123 false },
124 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute,
125 false },
126 { "noinline", 0, 0, true, false, false, handle_noinline_attribute,
127 false },
128 { "noclone", 0, 0, true, false, false, handle_noclone_attribute,
129 false },
130 { "leaf", 0, 0, true, false, false, handle_leaf_attribute,
131 false },
132 { "always_inline",0, 0, true, false, false, handle_always_inline_attribute,
133 false },
134 { "malloc", 0, 0, true, false, false, handle_malloc_attribute,
135 false },
136 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute,
137 false },
139 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute,
140 false },
141 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute,
142 false },
143 { "may_alias", 0, 0, false, true, false, NULL, false },
145 /* ??? format and format_arg are heavy and not supported, which actually
146 prevents support for stdio builtins, which we however declare as part
147 of the common builtins.def contents. */
148 { "format", 3, 3, false, true, true, fake_attribute_handler, false },
149 { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },
151 { NULL, 0, 0, false, false, false, NULL, false }
154 /* Associates a GNAT tree node to a GCC tree node. It is used in
155 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
156 of `save_gnu_tree' for more info. */
157 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
159 #define GET_GNU_TREE(GNAT_ENTITY) \
160 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
162 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
163 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
165 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
166 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
168 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
169 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
171 #define GET_DUMMY_NODE(GNAT_ENTITY) \
172 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
174 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
175 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
177 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
178 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
180 /* This variable keeps a table for types for each precision so that we only
181 allocate each of them once. Signed and unsigned types are kept separate.
183 Note that these types are only used when fold-const requests something
184 special. Perhaps we should NOT share these types; we'll see how it
185 goes later. */
186 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
188 /* Likewise for float types, but record these by mode. */
189 static GTY(()) tree float_types[NUM_MACHINE_MODES];
191 /* For each binding contour we allocate a binding_level structure to indicate
192 the binding depth. */
194 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
195 /* The binding level containing this one (the enclosing binding level). */
196 struct gnat_binding_level *chain;
197 /* The BLOCK node for this level. */
198 tree block;
199 /* If nonzero, the setjmp buffer that needs to be updated for any
200 variable-sized definition within this context. */
201 tree jmpbuf_decl;
204 /* The binding level currently in effect. */
205 static GTY(()) struct gnat_binding_level *current_binding_level;
207 /* A chain of gnat_binding_level structures awaiting reuse. */
208 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
210 /* The context to be used for global declarations. */
211 static GTY(()) tree global_context;
213 /* An array of global declarations. */
214 static GTY(()) vec<tree, va_gc> *global_decls;
216 /* An array of builtin function declarations. */
217 static GTY(()) vec<tree, va_gc> *builtin_decls;
219 /* A chain of unused BLOCK nodes. */
220 static GTY((deletable)) tree free_block_chain;
222 /* A hash table of padded types. It is modelled on the generic type
223 hash table in tree.c, which must thus be used as a reference. */
225 struct GTY((for_user)) pad_type_hash {
226 unsigned long hash;
227 tree type;
230 struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
232 static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
233 static bool equal (pad_type_hash *a, pad_type_hash *b);
234 static int keep_cache_entry (pad_type_hash *&);
237 static GTY ((cache))
238 hash_table<pad_type_hasher> *pad_type_hash_table;
240 static tree merge_sizes (tree, tree, tree, bool, bool);
241 static tree compute_related_constant (tree, tree);
242 static tree split_plus (tree, tree *);
243 static tree float_type_for_precision (int, machine_mode);
244 static tree convert_to_fat_pointer (tree, tree);
245 static unsigned int scale_by_factor_of (tree, unsigned int);
246 static bool potential_alignment_gap (tree, tree, tree);
248 /* Linked list used as a queue to defer the initialization of the DECL_CONTEXT
249 of ..._DECL nodes and of the TYPE_CONTEXT of ..._TYPE nodes. */
250 struct deferred_decl_context_node
252 /* The ..._DECL node to work on. */
253 tree decl;
255 /* The corresponding entity's Scope. */
256 Entity_Id gnat_scope;
258 /* The value of force_global when DECL was pushed. */
259 int force_global;
261 /* The list of ..._TYPE nodes to propagate the context to. */
262 vec<tree> types;
264 /* The next queue item. */
265 struct deferred_decl_context_node *next;
268 static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;
270 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
271 feed it with the elaboration of GNAT_SCOPE. */
272 static struct deferred_decl_context_node *
273 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);
275 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
276 feed it with the DECL_CONTEXT computed as part of N as soon as it is
277 computed. */
278 static void add_deferred_type_context (struct deferred_decl_context_node *n,
279 tree type);
281 /* Initialize data structures of the utils.c module. */
283 void
284 init_gnat_utils (void)
286 /* Initialize the association of GNAT nodes to GCC trees. */
287 associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
289 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
290 dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
292 /* Initialize the hash table of padded types. */
293 pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
296 /* Destroy data structures of the utils.c module. */
298 void
299 destroy_gnat_utils (void)
301 /* Destroy the association of GNAT nodes to GCC trees. */
302 ggc_free (associate_gnat_to_gnu);
303 associate_gnat_to_gnu = NULL;
305 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
306 ggc_free (dummy_node_table);
307 dummy_node_table = NULL;
309 /* Destroy the hash table of padded types. */
310 pad_type_hash_table->empty ();
311 pad_type_hash_table = NULL;
314 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
315 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
316 If NO_CHECK is true, the latter check is suppressed.
318 If GNU_DECL is zero, reset a previous association. */
320 void
321 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
323 /* Check that GNAT_ENTITY is not already defined and that it is being set
324 to something which is a decl. If that is not the case, this usually
325 means GNAT_ENTITY is defined twice, but occasionally is due to some
326 Gigi problem. */
327 gcc_assert (!(gnu_decl
328 && (PRESENT_GNU_TREE (gnat_entity)
329 || (!no_check && !DECL_P (gnu_decl)))));
331 SET_GNU_TREE (gnat_entity, gnu_decl);
334 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
335 that was associated with it. If there is no such tree node, abort.
337 In some cases, such as delayed elaboration or expressions that need to
338 be elaborated only once, GNAT_ENTITY is really not an entity. */
340 tree
341 get_gnu_tree (Entity_Id gnat_entity)
343 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
344 return GET_GNU_TREE (gnat_entity);
347 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
349 bool
350 present_gnu_tree (Entity_Id gnat_entity)
352 return PRESENT_GNU_TREE (gnat_entity);
355 /* Make a dummy type corresponding to GNAT_TYPE. */
357 tree
358 make_dummy_type (Entity_Id gnat_type)
360 Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
361 tree gnu_type;
363 /* If there was no equivalent type (can only happen when just annotating
364 types) or underlying type, go back to the original type. */
365 if (No (gnat_equiv))
366 gnat_equiv = gnat_type;
368 /* If it there already a dummy type, use that one. Else make one. */
369 if (PRESENT_DUMMY_NODE (gnat_equiv))
370 return GET_DUMMY_NODE (gnat_equiv);
372 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
373 an ENUMERAL_TYPE. */
374 gnu_type = make_node (Is_Record_Type (gnat_equiv)
375 ? tree_code_for_record_type (gnat_equiv)
376 : ENUMERAL_TYPE);
377 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
378 TYPE_DUMMY_P (gnu_type) = 1;
379 TYPE_STUB_DECL (gnu_type)
380 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
381 if (Is_By_Reference_Type (gnat_equiv))
382 TYPE_BY_REFERENCE_P (gnu_type) = 1;
384 SET_DUMMY_NODE (gnat_equiv, gnu_type);
386 return gnu_type;
389 /* Return the dummy type that was made for GNAT_TYPE, if any. */
391 tree
392 get_dummy_type (Entity_Id gnat_type)
394 return GET_DUMMY_NODE (gnat_type);
397 /* Build dummy fat and thin pointer types whose designated type is specified
398 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
400 void
401 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
403 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
404 tree gnu_fat_type, fields, gnu_object_type;
406 gnu_template_type = make_node (RECORD_TYPE);
407 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
408 TYPE_DUMMY_P (gnu_template_type) = 1;
409 gnu_ptr_template = build_pointer_type (gnu_template_type);
411 gnu_array_type = make_node (ENUMERAL_TYPE);
412 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
413 TYPE_DUMMY_P (gnu_array_type) = 1;
414 gnu_ptr_array = build_pointer_type (gnu_array_type);
416 gnu_fat_type = make_node (RECORD_TYPE);
417 /* Build a stub DECL to trigger the special processing for fat pointer types
418 in gnat_pushdecl. */
419 TYPE_NAME (gnu_fat_type)
420 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
421 gnu_fat_type);
422 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
423 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
424 DECL_CHAIN (fields)
425 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
426 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
427 finish_fat_pointer_type (gnu_fat_type, fields);
428 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
429 /* Suppress debug info until after the type is completed. */
430 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
432 gnu_object_type = make_node (RECORD_TYPE);
433 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
434 TYPE_DUMMY_P (gnu_object_type) = 1;
436 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
437 TYPE_REFERENCE_TO (gnu_desig_type) = gnu_fat_type;
438 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
441 /* Return true if we are in the global binding level. */
443 bool
444 global_bindings_p (void)
446 return force_global || !current_function_decl;
449 /* Enter a new binding level. */
451 void
452 gnat_pushlevel (void)
454 struct gnat_binding_level *newlevel = NULL;
456 /* Reuse a struct for this binding level, if there is one. */
457 if (free_binding_level)
459 newlevel = free_binding_level;
460 free_binding_level = free_binding_level->chain;
462 else
463 newlevel = ggc_alloc<gnat_binding_level> ();
465 /* Use a free BLOCK, if any; otherwise, allocate one. */
466 if (free_block_chain)
468 newlevel->block = free_block_chain;
469 free_block_chain = BLOCK_CHAIN (free_block_chain);
470 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
472 else
473 newlevel->block = make_node (BLOCK);
475 /* Point the BLOCK we just made to its parent. */
476 if (current_binding_level)
477 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
479 BLOCK_VARS (newlevel->block) = NULL_TREE;
480 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
481 TREE_USED (newlevel->block) = 1;
483 /* Add this level to the front of the chain (stack) of active levels. */
484 newlevel->chain = current_binding_level;
485 newlevel->jmpbuf_decl = NULL_TREE;
486 current_binding_level = newlevel;
489 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
490 and point FNDECL to this BLOCK. */
492 void
493 set_current_block_context (tree fndecl)
495 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
496 DECL_INITIAL (fndecl) = current_binding_level->block;
497 set_block_for_group (current_binding_level->block);
500 /* Set the jmpbuf_decl for the current binding level to DECL. */
502 void
503 set_block_jmpbuf_decl (tree decl)
505 current_binding_level->jmpbuf_decl = decl;
508 /* Get the jmpbuf_decl, if any, for the current binding level. */
510 tree
511 get_block_jmpbuf_decl (void)
513 return current_binding_level->jmpbuf_decl;
516 /* Exit a binding level. Set any BLOCK into the current code group. */
518 void
519 gnat_poplevel (void)
521 struct gnat_binding_level *level = current_binding_level;
522 tree block = level->block;
524 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
525 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
527 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
528 are no variables free the block and merge its subblocks into those of its
529 parent block. Otherwise, add it to the list of its parent. */
530 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
532 else if (!BLOCK_VARS (block))
534 BLOCK_SUBBLOCKS (level->chain->block)
535 = block_chainon (BLOCK_SUBBLOCKS (block),
536 BLOCK_SUBBLOCKS (level->chain->block));
537 BLOCK_CHAIN (block) = free_block_chain;
538 free_block_chain = block;
540 else
542 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
543 BLOCK_SUBBLOCKS (level->chain->block) = block;
544 TREE_USED (block) = 1;
545 set_block_for_group (block);
548 /* Free this binding structure. */
549 current_binding_level = level->chain;
550 level->chain = free_binding_level;
551 free_binding_level = level;
554 /* Exit a binding level and discard the associated BLOCK. */
556 void
557 gnat_zaplevel (void)
559 struct gnat_binding_level *level = current_binding_level;
560 tree block = level->block;
562 BLOCK_CHAIN (block) = free_block_chain;
563 free_block_chain = block;
565 /* Free this binding structure. */
566 current_binding_level = level->chain;
567 level->chain = free_binding_level;
568 free_binding_level = level;
571 /* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
573 static void
574 gnat_set_type_context (tree type, tree context)
576 tree decl = TYPE_STUB_DECL (type);
578 TYPE_CONTEXT (type) = context;
580 while (decl && DECL_PARALLEL_TYPE (decl))
582 tree parallel_type = DECL_PARALLEL_TYPE (decl);
584 /* Give a context to the parallel types and their stub decl, if any.
585 Some parallel types seems to be present in multiple parallel type
586 chains, so don't mess with their context if they already have one. */
587 if (!TYPE_CONTEXT (parallel_type))
589 if (TYPE_STUB_DECL (parallel_type))
590 DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
591 TYPE_CONTEXT (parallel_type) = context;
594 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
598 /* Return the innermost scope, starting at GNAT_NODE, we are be interested in
599 the debug info, or Empty if there is no such scope. If not NULL, set
600 IS_SUBPROGRAM to whether the returned entity is a subprogram. */
602 Entity_Id
603 get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
605 Entity_Id gnat_entity;
607 if (is_subprogram)
608 *is_subprogram = false;
610 if (Nkind (gnat_node) == N_Defining_Identifier
611 || Nkind (gnat_node) == N_Defining_Operator_Symbol)
612 gnat_entity = Scope (gnat_node);
613 else
614 return Empty;
616 while (Present (gnat_entity))
618 switch (Ekind (gnat_entity))
620 case E_Function:
621 case E_Procedure:
622 if (Present (Protected_Body_Subprogram (gnat_entity)))
623 gnat_entity = Protected_Body_Subprogram (gnat_entity);
625 /* If the scope is a subprogram, then just rely on
626 current_function_decl, so that we don't have to defer
627 anything. This is needed because other places rely on the
628 validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes. */
629 if (is_subprogram)
630 *is_subprogram = true;
631 return gnat_entity;
633 case E_Record_Type:
634 case E_Record_Subtype:
635 return gnat_entity;
637 default:
638 /* By default, we are not interested in this particular scope: go to
639 the outer one. */
640 break;
643 gnat_entity = Scope (gnat_entity);
646 return Empty;
649 /* If N is NULL, set TYPE's context to CONTEXT. Defer this to the processing
650 of N otherwise. */
652 static void
653 defer_or_set_type_context (tree type, tree context,
654 struct deferred_decl_context_node *n)
656 if (n)
657 add_deferred_type_context (n, type);
658 else
659 gnat_set_type_context (type, context);
662 /* Return global_context, but create it first if need be. */
664 static tree
665 get_global_context (void)
667 if (!global_context)
669 global_context = build_translation_unit_decl (NULL_TREE);
670 debug_hooks->register_main_translation_unit (global_context);
673 return global_context;
676 /* Record DECL as belonging to the current lexical scope and use GNAT_NODE
677 for location information and flag propagation. */
679 void
680 gnat_pushdecl (tree decl, Node_Id gnat_node)
682 tree context = NULL_TREE;
683 struct deferred_decl_context_node *deferred_decl_context = NULL;
685 /* If explicitely asked to make DECL global or if it's an imported nested
686 object, short-circuit the regular Scope-based context computation. */
687 if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
689 /* Rely on the GNAT scope, or fallback to the current_function_decl if
690 the GNAT scope reached the global scope, if it reached a subprogram
691 or the declaration is a subprogram or a variable (for them we skip
692 intermediate context types because the subprogram body elaboration
693 machinery and the inliner both expect a subprogram context).
695 Falling back to current_function_decl is necessary for implicit
696 subprograms created by gigi, such as the elaboration subprograms. */
697 bool context_is_subprogram = false;
698 const Entity_Id gnat_scope
699 = get_debug_scope (gnat_node, &context_is_subprogram);
701 if (Present (gnat_scope)
702 && !context_is_subprogram
703 && TREE_CODE (decl) != FUNCTION_DECL
704 && TREE_CODE (decl) != VAR_DECL)
705 /* Always assume the scope has not been elaborated, thus defer the
706 context propagation to the time its elaboration will be
707 available. */
708 deferred_decl_context
709 = add_deferred_decl_context (decl, gnat_scope, force_global);
711 /* External declarations (when force_global > 0) may not be in a
712 local context. */
713 else if (current_function_decl && force_global == 0)
714 context = current_function_decl;
717 /* If either we are forced to be in global mode or if both the GNAT scope and
718 the current_function_decl did not help in determining the context, use the
719 global scope. */
720 if (!deferred_decl_context && !context)
721 context = get_global_context ();
723 /* Functions imported in another function are not really nested.
724 For really nested functions mark them initially as needing
725 a static chain for uses of that flag before unnesting;
726 lower_nested_functions will then recompute it. */
727 if (TREE_CODE (decl) == FUNCTION_DECL
728 && !TREE_PUBLIC (decl)
729 && context
730 && (TREE_CODE (context) == FUNCTION_DECL
731 || decl_function_context (context)))
732 DECL_STATIC_CHAIN (decl) = 1;
734 if (!deferred_decl_context)
735 DECL_CONTEXT (decl) = context;
737 TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));
739 /* Set the location of DECL and emit a declaration for it. */
740 if (Present (gnat_node) && !renaming_from_generic_instantiation_p (gnat_node))
741 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
743 add_decl_expr (decl, gnat_node);
745 /* Put the declaration on the list. The list of declarations is in reverse
746 order. The list will be reversed later. Put global declarations in the
747 globals list and local ones in the current block. But skip TYPE_DECLs
748 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
749 with the debugger and aren't needed anyway. */
750 if (!(TREE_CODE (decl) == TYPE_DECL
751 && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
753 if (DECL_EXTERNAL (decl))
755 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
756 vec_safe_push (builtin_decls, decl);
758 else if (global_bindings_p ())
759 vec_safe_push (global_decls, decl);
760 else
762 DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
763 BLOCK_VARS (current_binding_level->block) = decl;
767 /* For the declaration of a type, set its name either if it isn't already
768 set or if the previous type name was not derived from a source name.
769 We'd rather have the type named with a real name and all the pointer
770 types to the same object have the same node, except when the names are
771 both derived from source names. */
772 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
774 tree t = TREE_TYPE (decl);
776 /* Array and pointer types aren't tagged types in the C sense so we need
777 to generate a typedef in DWARF for them and make sure it is preserved,
778 unless the type is artificial. */
779 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
780 && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
781 || DECL_ARTIFICIAL (decl)))
783 /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
784 generate the typedef in DWARF. Also do that for fat pointer types
785 because, even though they are tagged types in the C sense, they are
786 still XUP types attached to the base array type at this point. */
787 else if (!DECL_ARTIFICIAL (decl)
788 && (TREE_CODE (t) == ARRAY_TYPE
789 || TREE_CODE (t) == POINTER_TYPE
790 || TYPE_IS_FAT_POINTER_P (t)))
792 tree tt = build_variant_type_copy (t);
793 TYPE_NAME (tt) = decl;
794 defer_or_set_type_context (tt,
795 DECL_CONTEXT (decl),
796 deferred_decl_context);
797 TREE_TYPE (decl) = tt;
798 if (TYPE_NAME (t)
799 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
800 && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
801 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
802 else
803 DECL_ORIGINAL_TYPE (decl) = t;
804 /* Array types need to have a name so that they can be related to
805 their GNAT encodings. */
806 if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
807 TYPE_NAME (t) = DECL_NAME (decl);
808 t = NULL_TREE;
810 else if (TYPE_NAME (t)
811 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
812 && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
814 else
815 t = NULL_TREE;
817 /* Propagate the name to all the variants, this is needed for the type
818 qualifiers machinery to work properly (see check_qualified_type).
819 Also propagate the context to them. Note that it will be propagated
820 to all parallel types too thanks to gnat_set_type_context. */
821 if (t)
822 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
823 /* ??? Because of the previous kludge, we can have variants of fat
824 pointer types with different names. */
825 if (!(TYPE_IS_FAT_POINTER_P (t)
826 && TYPE_NAME (t)
827 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
829 TYPE_NAME (t) = decl;
830 defer_or_set_type_context (t,
831 DECL_CONTEXT (decl),
832 deferred_decl_context);
837 /* Create a record type that contains a SIZE bytes long field of TYPE with a
838 starting bit position so that it is aligned to ALIGN bits, and leaving at
839 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
840 record is guaranteed to get. GNAT_NODE is used for the position of the
841 associated TYPE_DECL. */
843 tree
844 make_aligning_type (tree type, unsigned int align, tree size,
845 unsigned int base_align, int room, Node_Id gnat_node)
847 /* We will be crafting a record type with one field at a position set to be
848 the next multiple of ALIGN past record'address + room bytes. We use a
849 record placeholder to express record'address. */
850 tree record_type = make_node (RECORD_TYPE);
851 tree record = build0 (PLACEHOLDER_EXPR, record_type);
853 tree record_addr_st
854 = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));
856 /* The diagram below summarizes the shape of what we manipulate:
858 <--------- pos ---------->
859 { +------------+-------------+-----------------+
860 record =>{ |############| ... | field (type) |
861 { +------------+-------------+-----------------+
862 |<-- room -->|<- voffset ->|<---- size ----->|
865 record_addr vblock_addr
867 Every length is in sizetype bytes there, except "pos" which has to be
868 set as a bit position in the GCC tree for the record. */
869 tree room_st = size_int (room);
870 tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
871 tree voffset_st, pos, field;
873 tree name = TYPE_IDENTIFIER (type);
875 name = concat_name (name, "ALIGN");
876 TYPE_NAME (record_type) = name;
878 /* Compute VOFFSET and then POS. The next byte position multiple of some
879 alignment after some address is obtained by "and"ing the alignment minus
880 1 with the two's complement of the address. */
881 voffset_st = size_binop (BIT_AND_EXPR,
882 fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
883 size_int ((align / BITS_PER_UNIT) - 1));
885 /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype. */
886 pos = size_binop (MULT_EXPR,
887 convert (bitsizetype,
888 size_binop (PLUS_EXPR, room_st, voffset_st)),
889 bitsize_unit_node);
891 /* Craft the GCC record representation. We exceptionally do everything
892 manually here because 1) our generic circuitry is not quite ready to
893 handle the complex position/size expressions we are setting up, 2) we
894 have a strong simplifying factor at hand: we know the maximum possible
895 value of voffset, and 3) we have to set/reset at least the sizes in
896 accordance with this maximum value anyway, as we need them to convey
897 what should be "alloc"ated for this type.
899 Use -1 as the 'addressable' indication for the field to prevent the
900 creation of a bitfield. We don't need one, it would have damaging
901 consequences on the alignment computation, and create_field_decl would
902 make one without this special argument, for instance because of the
903 complex position expression. */
904 field = create_field_decl (get_identifier ("F"), type, record_type, size,
905 pos, 1, -1);
906 TYPE_FIELDS (record_type) = field;
908 SET_TYPE_ALIGN (record_type, base_align);
909 TYPE_USER_ALIGN (record_type) = 1;
911 TYPE_SIZE (record_type)
912 = size_binop (PLUS_EXPR,
913 size_binop (MULT_EXPR, convert (bitsizetype, size),
914 bitsize_unit_node),
915 bitsize_int (align + room * BITS_PER_UNIT));
916 TYPE_SIZE_UNIT (record_type)
917 = size_binop (PLUS_EXPR, size,
918 size_int (room + align / BITS_PER_UNIT));
920 SET_TYPE_MODE (record_type, BLKmode);
921 relate_alias_sets (record_type, type, ALIAS_SET_COPY);
923 /* Declare it now since it will never be declared otherwise. This is
924 necessary to ensure that its subtrees are properly marked. */
925 create_type_decl (name, record_type, true, false, gnat_node);
927 return record_type;
930 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
931 as the field type of a packed record if IN_RECORD is true, or as the
932 component type of a packed array if IN_RECORD is false. See if we can
933 rewrite it either as a type that has non-BLKmode, which we can pack
934 tighter in the packed record case, or as a smaller type with at most
935 MAX_ALIGN alignment if the value is non-zero. If so, return the new
936 type; if not, return the original type. */
938 tree
939 make_packable_type (tree type, bool in_record, unsigned int max_align)
941 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
942 unsigned HOST_WIDE_INT new_size;
943 unsigned int align = TYPE_ALIGN (type);
944 unsigned int new_align;
946 /* No point in doing anything if the size is zero. */
947 if (size == 0)
948 return type;
950 tree new_type = make_node (TREE_CODE (type));
952 /* Copy the name and flags from the old type to that of the new.
953 Note that we rely on the pointer equality created here for
954 TYPE_NAME to look through conversions in various places. */
955 TYPE_NAME (new_type) = TYPE_NAME (type);
956 TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
957 TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
958 TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
959 if (TREE_CODE (type) == RECORD_TYPE)
960 TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
962 /* If we are in a record and have a small size, set the alignment to
963 try for an integral mode. Otherwise set it to try for a smaller
964 type with BLKmode. */
965 if (in_record && size <= MAX_FIXED_MODE_SIZE)
967 new_size = ceil_pow2 (size);
968 new_align = MIN (new_size, BIGGEST_ALIGNMENT);
969 SET_TYPE_ALIGN (new_type, new_align);
971 else
973 /* Do not try to shrink the size if the RM size is not constant. */
974 if (TYPE_CONTAINS_TEMPLATE_P (type)
975 || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
976 return type;
978 /* Round the RM size up to a unit boundary to get the minimal size
979 for a BLKmode record. Give up if it's already the size and we
980 don't need to lower the alignment. */
981 new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
982 new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
983 if (new_size == size && (max_align == 0 || align <= max_align))
984 return type;
986 new_align = MIN (new_size & -new_size, BIGGEST_ALIGNMENT);
987 if (max_align > 0 && new_align > max_align)
988 new_align = max_align;
989 SET_TYPE_ALIGN (new_type, MIN (align, new_align));
992 TYPE_USER_ALIGN (new_type) = 1;
994 /* Now copy the fields, keeping the position and size as we don't want
995 to change the layout by propagating the packedness downwards. */
996 tree new_field_list = NULL_TREE;
997 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
999 tree new_field_type = TREE_TYPE (field);
1000 tree new_field, new_size;
1002 if (RECORD_OR_UNION_TYPE_P (new_field_type)
1003 && !TYPE_FAT_POINTER_P (new_field_type)
1004 && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
1005 new_field_type = make_packable_type (new_field_type, true, max_align);
1007 /* However, for the last field in a not already packed record type
1008 that is of an aggregate type, we need to use the RM size in the
1009 packable version of the record type, see finish_record_type. */
1010 if (!DECL_CHAIN (field)
1011 && !TYPE_PACKED (type)
1012 && RECORD_OR_UNION_TYPE_P (new_field_type)
1013 && !TYPE_FAT_POINTER_P (new_field_type)
1014 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
1015 && TYPE_ADA_SIZE (new_field_type))
1016 new_size = TYPE_ADA_SIZE (new_field_type);
1017 else
1018 new_size = DECL_SIZE (field);
1020 new_field
1021 = create_field_decl (DECL_NAME (field), new_field_type, new_type,
1022 new_size, bit_position (field),
1023 TYPE_PACKED (type),
1024 !DECL_NONADDRESSABLE_P (field));
1026 DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (field);
1027 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, field);
1028 if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
1029 DECL_QUALIFIER (new_field) = DECL_QUALIFIER (field);
1031 DECL_CHAIN (new_field) = new_field_list;
1032 new_field_list = new_field;
1035 finish_record_type (new_type, nreverse (new_field_list), 2, false);
1036 relate_alias_sets (new_type, type, ALIAS_SET_COPY);
1037 if (TYPE_STUB_DECL (type))
1038 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
1039 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
1041 /* If this is a padding record, we never want to make the size smaller
1042 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
1043 if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
1045 TYPE_SIZE (new_type) = TYPE_SIZE (type);
1046 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
1047 new_size = size;
1049 else
1051 TYPE_SIZE (new_type) = bitsize_int (new_size);
1052 TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);
1055 if (!TYPE_CONTAINS_TEMPLATE_P (type))
1056 SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
1058 compute_record_mode (new_type);
1060 /* Try harder to get a packable type if necessary, for example
1061 in case the record itself contains a BLKmode field. */
1062 if (in_record && TYPE_MODE (new_type) == BLKmode)
1063 SET_TYPE_MODE (new_type,
1064 mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));
1066 /* If neither mode nor size nor alignment shrunk, return the old type. */
1067 if (TYPE_MODE (new_type) == BLKmode && new_size >= size && max_align == 0)
1068 return type;
1070 return new_type;
1073 /* Given a type TYPE, return a new type whose size is appropriate for SIZE.
1074 If TYPE is the best type, return it. Otherwise, make a new type. We
1075 only support new integral and pointer types. FOR_BIASED is true if
1076 we are making a biased type. */
1078 tree
1079 make_type_from_size (tree type, tree size_tree, bool for_biased)
1081 unsigned HOST_WIDE_INT size;
1082 bool biased_p;
1083 tree new_type;
1085 /* If size indicates an error, just return TYPE to avoid propagating
1086 the error. Likewise if it's too large to represent. */
1087 if (!size_tree || !tree_fits_uhwi_p (size_tree))
1088 return type;
1090 size = tree_to_uhwi (size_tree);
1092 switch (TREE_CODE (type))
1094 case INTEGER_TYPE:
1095 case ENUMERAL_TYPE:
1096 case BOOLEAN_TYPE:
1097 biased_p = (TREE_CODE (type) == INTEGER_TYPE
1098 && TYPE_BIASED_REPRESENTATION_P (type));
1100 /* Integer types with precision 0 are forbidden. */
1101 if (size == 0)
1102 size = 1;
1104 /* Only do something if the type isn't a packed array type and doesn't
1105 already have the proper size and the size isn't too large. */
1106 if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
1107 || (TYPE_PRECISION (type) == size && biased_p == for_biased)
1108 || size > LONG_LONG_TYPE_SIZE)
1109 break;
1111 biased_p |= for_biased;
1113 /* The type should be an unsigned type if the original type is unsigned
1114 or if the lower bound is constant and non-negative or if the type is
1115 biased, see E_Signed_Integer_Subtype case of gnat_to_gnu_entity. */
1116 if (TYPE_UNSIGNED (type)
1117 || (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
1118 && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
1119 || biased_p)
1120 new_type = make_unsigned_type (size);
1121 else
1122 new_type = make_signed_type (size);
1123 TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
1124 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
1125 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
1126 /* Copy the name to show that it's essentially the same type and
1127 not a subrange type. */
1128 TYPE_NAME (new_type) = TYPE_NAME (type);
1129 TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
1130 SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
1131 return new_type;
1133 case RECORD_TYPE:
1134 /* Do something if this is a fat pointer, in which case we
1135 may need to return the thin pointer. */
1136 if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
1138 machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
1139 if (!targetm.valid_pointer_mode (p_mode))
1140 p_mode = ptr_mode;
1141 return
1142 build_pointer_type_for_mode
1143 (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
1144 p_mode, 0);
1146 break;
1148 case POINTER_TYPE:
1149 /* Only do something if this is a thin pointer, in which case we
1150 may need to return the fat pointer. */
1151 if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
1152 return
1153 build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
1154 break;
1156 default:
1157 break;
1160 return type;
1163 /* See if the data pointed to by the hash table slot is marked. */
1166 pad_type_hasher::keep_cache_entry (pad_type_hash *&t)
1168 return ggc_marked_p (t->type);
1171 /* Return true iff the padded types are equivalent. */
1173 bool
1174 pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
1176 tree type1, type2;
1178 if (t1->hash != t2->hash)
1179 return 0;
1181 type1 = t1->type;
1182 type2 = t2->type;
1184 /* We consider that the padded types are equivalent if they pad the same type
1185 and have the same size, alignment, RM size and storage order. Taking the
1186 mode into account is redundant since it is determined by the others. */
1187 return
1188 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1189 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1190 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1191 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
1192 && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
1195 /* Look up the padded TYPE in the hash table and return its canonical version
1196 if it exists; otherwise, insert it into the hash table. */
1198 static tree
1199 lookup_and_insert_pad_type (tree type)
1201 hashval_t hashcode;
1202 struct pad_type_hash in, *h;
1204 hashcode
1205 = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
1206 hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
1207 hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
1208 hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);
1210 in.hash = hashcode;
1211 in.type = type;
1212 h = pad_type_hash_table->find_with_hash (&in, hashcode);
1213 if (h)
1214 return h->type;
1216 h = ggc_alloc<pad_type_hash> ();
1217 h->hash = hashcode;
1218 h->type = type;
1219 *pad_type_hash_table->find_slot_with_hash (h, hashcode, INSERT) = h;
1220 return NULL_TREE;
1223 /* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
1224 if needed. We have already verified that SIZE and ALIGN are large enough.
1225 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1226 IS_COMPONENT_TYPE is true if this is being done for the component type of
1227 an array. IS_USER_TYPE is true if the original type needs to be completed.
1228 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
1229 the RM size of the resulting type is to be set to SIZE too; in this case,
1230 the padded type is canonicalized before being returned. */
1232 tree
1233 maybe_pad_type (tree type, tree size, unsigned int align,
1234 Entity_Id gnat_entity, bool is_component_type,
1235 bool is_user_type, bool definition, bool set_rm_size)
1237 tree orig_size = TYPE_SIZE (type);
1238 unsigned int orig_align = TYPE_ALIGN (type);
1239 tree record, field;
1241 /* If TYPE is a padded type, see if it agrees with any size and alignment
1242 we were given. If so, return the original type. Otherwise, strip
1243 off the padding, since we will either be returning the inner type
1244 or repadding it. If no size or alignment is specified, use that of
1245 the original padded type. */
1246 if (TYPE_IS_PADDING_P (type))
1248 if ((!size
1249 || operand_equal_p (round_up (size, orig_align), orig_size, 0))
1250 && (align == 0 || align == orig_align))
1251 return type;
1253 if (!size)
1254 size = orig_size;
1255 if (align == 0)
1256 align = orig_align;
1258 type = TREE_TYPE (TYPE_FIELDS (type));
1259 orig_size = TYPE_SIZE (type);
1260 orig_align = TYPE_ALIGN (type);
1263 /* If the size is either not being changed or is being made smaller (which
1264 is not done here and is only valid for bitfields anyway), show the size
1265 isn't changing. Likewise, clear the alignment if it isn't being
1266 changed. Then return if we aren't doing anything. */
1267 if (size
1268 && (operand_equal_p (size, orig_size, 0)
1269 || (TREE_CODE (orig_size) == INTEGER_CST
1270 && tree_int_cst_lt (size, orig_size))))
1271 size = NULL_TREE;
1273 if (align == orig_align)
1274 align = 0;
1276 if (align == 0 && !size)
1277 return type;
1279 /* If requested, complete the original type and give it a name. */
1280 if (is_user_type)
1281 create_type_decl (get_entity_name (gnat_entity), type,
1282 !Comes_From_Source (gnat_entity),
1283 !(TYPE_NAME (type)
1284 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1285 && DECL_IGNORED_P (TYPE_NAME (type))),
1286 gnat_entity);
1288 /* We used to modify the record in place in some cases, but that could
1289 generate incorrect debugging information. So make a new record
1290 type and name. */
1291 record = make_node (RECORD_TYPE);
1292 TYPE_PADDING_P (record) = 1;
1294 /* ??? Padding types around packed array implementation types will be
1295 considered as root types in the array descriptor language hook (see
1296 gnat_get_array_descr_info). Give them the original packed array type
1297 name so that the one coming from sources appears in the debugging
1298 information. */
1299 if (TYPE_IMPL_PACKED_ARRAY_P (type)
1300 && TYPE_ORIGINAL_PACKED_ARRAY (type)
1301 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
1302 TYPE_NAME (record) = TYPE_NAME (TYPE_ORIGINAL_PACKED_ARRAY (type));
1303 else if (Present (gnat_entity))
1304 TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");
1306 SET_TYPE_ALIGN (record, align ? align : orig_align);
1307 TYPE_SIZE (record) = size ? size : orig_size;
1308 TYPE_SIZE_UNIT (record)
1309 = convert (sizetype,
1310 size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
1311 bitsize_unit_node));
1313 /* If we are changing the alignment and the input type is a record with
1314 BLKmode and a small constant size, try to make a form that has an
1315 integral mode. This might allow the padding record to also have an
1316 integral mode, which will be much more efficient. There is no point
1317 in doing so if a size is specified unless it is also a small constant
1318 size and it is incorrect to do so if we cannot guarantee that the mode
1319 will be naturally aligned since the field must always be addressable.
1321 ??? This might not always be a win when done for a stand-alone object:
1322 since the nominal and the effective type of the object will now have
1323 different modes, a VIEW_CONVERT_EXPR will be required for converting
1324 between them and it might be hard to overcome afterwards, including
1325 at the RTL level when the stand-alone object is accessed as a whole. */
1326 if (align != 0
1327 && RECORD_OR_UNION_TYPE_P (type)
1328 && TYPE_MODE (type) == BLKmode
1329 && !TYPE_BY_REFERENCE_P (type)
1330 && TREE_CODE (orig_size) == INTEGER_CST
1331 && !TREE_OVERFLOW (orig_size)
1332 && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
1333 && (!size
1334 || (TREE_CODE (size) == INTEGER_CST
1335 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
1337 tree packable_type = make_packable_type (type, true);
1338 if (TYPE_MODE (packable_type) != BLKmode
1339 && align >= TYPE_ALIGN (packable_type))
1340 type = packable_type;
1343 /* Now create the field with the original size. */
1344 field = create_field_decl (get_identifier ("F"), type, record, orig_size,
1345 bitsize_zero_node, 0, 1);
1346 DECL_INTERNAL_P (field) = 1;
1348 /* We will output additional debug info manually below. */
1349 finish_record_type (record, field, 1, false);
1351 if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
1352 SET_TYPE_DEBUG_TYPE (record, type);
1354 /* Set the RM size if requested. */
1355 if (set_rm_size)
1357 tree canonical_pad_type;
1359 SET_TYPE_ADA_SIZE (record, size ? size : orig_size);
1361 /* If the padded type is complete and has constant size, we canonicalize
1362 it by means of the hash table. This is consistent with the language
1363 semantics and ensures that gigi and the middle-end have a common view
1364 of these padded types. */
1365 if (TREE_CONSTANT (TYPE_SIZE (record))
1366 && (canonical_pad_type = lookup_and_insert_pad_type (record)))
1368 record = canonical_pad_type;
1369 goto built;
1373 /* Unless debugging information isn't being written for the input type,
1374 write a record that shows what we are a subtype of and also make a
1375 variable that indicates our size, if still variable. */
1376 if (TREE_CODE (orig_size) != INTEGER_CST
1377 && TYPE_NAME (record)
1378 && TYPE_NAME (type)
1379 && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1380 && DECL_IGNORED_P (TYPE_NAME (type))))
1382 tree name = TYPE_IDENTIFIER (record);
1383 tree size_unit = TYPE_SIZE_UNIT (record);
1385 /* A variable that holds the size is required even with no encoding since
1386 it will be referenced by debugging information attributes. At global
1387 level, we need a single variable across all translation units. */
1388 if (size
1389 && TREE_CODE (size) != INTEGER_CST
1390 && (definition || global_bindings_p ()))
1392 /* Whether or not gnat_entity comes from source, this XVZ variable is
1393 is a compilation artifact. */
1394 size_unit
1395 = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
1396 size_unit, true, global_bindings_p (),
1397 !definition && global_bindings_p (), false,
1398 false, true, true, NULL, gnat_entity);
1399 TYPE_SIZE_UNIT (record) = size_unit;
1402 /* There is no need to show what we are a subtype of when outputting as
1403 few encodings as possible: regular debugging infomation makes this
1404 redundant. */
1405 if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
1407 tree marker = make_node (RECORD_TYPE);
1408 tree orig_name = TYPE_IDENTIFIER (type);
1410 TYPE_NAME (marker) = concat_name (name, "XVS");
1411 finish_record_type (marker,
1412 create_field_decl (orig_name,
1413 build_reference_type (type),
1414 marker, NULL_TREE, NULL_TREE,
1415 0, 0),
1416 0, true);
1417 TYPE_SIZE_UNIT (marker) = size_unit;
1419 add_parallel_type (record, marker);
1423 built:
1424 /* If a simple size was explicitly given, maybe issue a warning. */
1425 if (!size
1426 || TREE_CODE (size) == COND_EXPR
1427 || TREE_CODE (size) == MAX_EXPR
1428 || No (gnat_entity))
1429 return record;
1431 /* But don't do it if we are just annotating types and the type is tagged or
1432 concurrent, since these types aren't fully laid out in this mode. */
1433 if (type_annotate_only)
1435 Entity_Id gnat_type
1436 = is_component_type
1437 ? Component_Type (gnat_entity) : Etype (gnat_entity);
1439 if (Is_Tagged_Type (gnat_type) || Is_Concurrent_Type (gnat_type))
1440 return record;
1443 /* Take the original size as the maximum size of the input if there was an
1444 unconstrained record involved and round it up to the specified alignment,
1445 if one was specified, but only for aggregate types. */
1446 if (CONTAINS_PLACEHOLDER_P (orig_size))
1447 orig_size = max_size (orig_size, true);
1449 if (align && AGGREGATE_TYPE_P (type))
1450 orig_size = round_up (orig_size, align);
1452 if (!operand_equal_p (size, orig_size, 0)
1453 && !(TREE_CODE (size) == INTEGER_CST
1454 && TREE_CODE (orig_size) == INTEGER_CST
1455 && (TREE_OVERFLOW (size)
1456 || TREE_OVERFLOW (orig_size)
1457 || tree_int_cst_lt (size, orig_size))))
1459 Node_Id gnat_error_node = Empty;
1461 /* For a packed array, post the message on the original array type. */
1462 if (Is_Packed_Array_Impl_Type (gnat_entity))
1463 gnat_entity = Original_Array_Type (gnat_entity);
1465 if ((Ekind (gnat_entity) == E_Component
1466 || Ekind (gnat_entity) == E_Discriminant)
1467 && Present (Component_Clause (gnat_entity)))
1468 gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
1469 else if (Present (Size_Clause (gnat_entity)))
1470 gnat_error_node = Expression (Size_Clause (gnat_entity));
1472 /* Generate message only for entities that come from source, since
1473 if we have an entity created by expansion, the message will be
1474 generated for some other corresponding source entity. */
1475 if (Comes_From_Source (gnat_entity))
1477 if (Present (gnat_error_node))
1478 post_error_ne_tree ("{^ }bits of & unused?",
1479 gnat_error_node, gnat_entity,
1480 size_diffop (size, orig_size));
1481 else if (is_component_type)
1482 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1483 gnat_entity, gnat_entity,
1484 size_diffop (size, orig_size));
1488 return record;
1491 /* Return a copy of the padded TYPE but with reverse storage order. */
1493 tree
1494 set_reverse_storage_order_on_pad_type (tree type)
1496 tree field, canonical_pad_type;
1498 if (flag_checking)
1500 /* If the inner type is not scalar then the function does nothing. */
1501 tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
1502 gcc_assert (!AGGREGATE_TYPE_P (inner_type)
1503 && !VECTOR_TYPE_P (inner_type));
1506 /* This is required for the canonicalization. */
1507 gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));
1509 field = copy_node (TYPE_FIELDS (type));
1510 type = copy_type (type);
1511 DECL_CONTEXT (field) = type;
1512 TYPE_FIELDS (type) = field;
1513 TYPE_REVERSE_STORAGE_ORDER (type) = 1;
1514 canonical_pad_type = lookup_and_insert_pad_type (type);
1515 return canonical_pad_type ? canonical_pad_type : type;
1518 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1519 If this is a multi-dimensional array type, do this recursively.
1521 OP may be
1522 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1523 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1524 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1526 void
1527 relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1529 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1530 of a one-dimensional array, since the padding has the same alias set
1531 as the field type, but if it's a multi-dimensional array, we need to
1532 see the inner types. */
1533 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1534 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1535 || TYPE_PADDING_P (gnu_old_type)))
1536 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1538 /* Unconstrained array types are deemed incomplete and would thus be given
1539 alias set 0. Retrieve the underlying array type. */
1540 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1541 gnu_old_type
1542 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1543 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1544 gnu_new_type
1545 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
1547 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1548 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1549 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1550 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1552 switch (op)
1554 case ALIAS_SET_COPY:
1555 /* The alias set shouldn't be copied between array types with different
1556 aliasing settings because this can break the aliasing relationship
1557 between the array type and its element type. */
1558 if (flag_checking || flag_strict_aliasing)
1559 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1560 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1561 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1562 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1564 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1565 break;
1567 case ALIAS_SET_SUBSET:
1568 case ALIAS_SET_SUPERSET:
1570 alias_set_type old_set = get_alias_set (gnu_old_type);
1571 alias_set_type new_set = get_alias_set (gnu_new_type);
1573 /* Do nothing if the alias sets conflict. This ensures that we
1574 never call record_alias_subset several times for the same pair
1575 or at all for alias set 0. */
1576 if (!alias_sets_conflict_p (old_set, new_set))
1578 if (op == ALIAS_SET_SUBSET)
1579 record_alias_subset (old_set, new_set);
1580 else
1581 record_alias_subset (new_set, old_set);
1584 break;
1586 default:
1587 gcc_unreachable ();
1590 record_component_aliases (gnu_new_type);
1593 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1594 ARTIFICIAL_P is true if the type was generated by the compiler. */
1596 void
1597 record_builtin_type (const char *name, tree type, bool artificial_p)
1599 tree type_decl = build_decl (input_location,
1600 TYPE_DECL, get_identifier (name), type);
1601 DECL_ARTIFICIAL (type_decl) = artificial_p;
1602 TYPE_ARTIFICIAL (type) = artificial_p;
1603 gnat_pushdecl (type_decl, Empty);
1605 if (debug_hooks->type_decl)
1606 debug_hooks->type_decl (type_decl, false);
1609 /* Finish constructing the character type CHAR_TYPE.
1611 In Ada character types are enumeration types and, as a consequence, are
1612 represented in the front-end by integral types holding the positions of
1613 the enumeration values as defined by the language, which means that the
1614 integral types are unsigned.
1616 Unfortunately the signedness of 'char' in C is implementation-defined
1617 and GCC even has the option -fsigned-char to toggle it at run time.
1618 Since GNAT's philosophy is to be compatible with C by default, to wit
1619 Interfaces.C.char is defined as a mere copy of Character, we may need
1620 to declare character types as signed types in GENERIC and generate the
1621 necessary adjustments to make them behave as unsigned types.
1623 The overall strategy is as follows: if 'char' is unsigned, do nothing;
1624 if 'char' is signed, translate character types of CHAR_TYPE_SIZE and
1625 character subtypes with RM_Size = Esize = CHAR_TYPE_SIZE into signed
1626 types. The idea is to ensure that the bit pattern contained in the
1627 Esize'd objects is not changed, even though the numerical value will
1628 be interpreted differently depending on the signedness.
1630 For character types, the bounds are implicit and, therefore, need to
1631 be adjusted. Morever, the debug info needs the unsigned version. */
1633 void
1634 finish_character_type (tree char_type)
1636 if (TYPE_UNSIGNED (char_type))
1637 return;
1639 /* Make a copy of a generic unsigned version since we'll modify it. */
1640 tree unsigned_char_type
1641 = (char_type == char_type_node
1642 ? unsigned_char_type_node
1643 : copy_type (gnat_unsigned_type_for (char_type)));
1645 TYPE_NAME (unsigned_char_type) = TYPE_NAME (char_type);
1646 TYPE_STRING_FLAG (unsigned_char_type) = TYPE_STRING_FLAG (char_type);
1647 TYPE_ARTIFICIAL (unsigned_char_type) = TYPE_ARTIFICIAL (char_type);
1649 SET_TYPE_DEBUG_TYPE (char_type, unsigned_char_type);
1650 SET_TYPE_RM_MIN_VALUE (char_type, TYPE_MIN_VALUE (unsigned_char_type));
1651 SET_TYPE_RM_MAX_VALUE (char_type, TYPE_MAX_VALUE (unsigned_char_type));
1654 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1655 finish constructing the record type as a fat pointer type. */
1657 void
1658 finish_fat_pointer_type (tree record_type, tree field_list)
1660 /* Make sure we can put it into a register. */
1661 if (STRICT_ALIGNMENT)
1662 SET_TYPE_ALIGN (record_type, MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE));
1664 /* Show what it really is. */
1665 TYPE_FAT_POINTER_P (record_type) = 1;
1667 /* Do not emit debug info for it since the types of its fields may still be
1668 incomplete at this point. */
1669 finish_record_type (record_type, field_list, 0, false);
1671 /* Force type_contains_placeholder_p to return true on it. Although the
1672 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1673 type but the representation of the unconstrained array. */
1674 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1677 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1678 finish constructing the record or union type. If REP_LEVEL is zero, this
1679 record has no representation clause and so will be entirely laid out here.
1680 If REP_LEVEL is one, this record has a representation clause and has been
1681 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1682 this record is derived from a parent record and thus inherits its layout;
1683 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1684 additional debug info needs to be output for this type. */
1686 void
1687 finish_record_type (tree record_type, tree field_list, int rep_level,
1688 bool debug_info_p)
1690 enum tree_code code = TREE_CODE (record_type);
1691 tree name = TYPE_IDENTIFIER (record_type);
1692 tree ada_size = bitsize_zero_node;
1693 tree size = bitsize_zero_node;
1694 bool had_size = TYPE_SIZE (record_type) != 0;
1695 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1696 bool had_align = TYPE_ALIGN (record_type) != 0;
1697 tree field;
1699 TYPE_FIELDS (record_type) = field_list;
1701 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1702 generate debug info and have a parallel type. */
1703 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
1705 /* Globally initialize the record first. If this is a rep'ed record,
1706 that just means some initializations; otherwise, layout the record. */
1707 if (rep_level > 0)
1709 SET_TYPE_ALIGN (record_type, MAX (BITS_PER_UNIT,
1710 TYPE_ALIGN (record_type)));
1712 if (!had_size_unit)
1713 TYPE_SIZE_UNIT (record_type) = size_zero_node;
1715 if (!had_size)
1716 TYPE_SIZE (record_type) = bitsize_zero_node;
1718 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1719 out just like a UNION_TYPE, since the size will be fixed. */
1720 else if (code == QUAL_UNION_TYPE)
1721 code = UNION_TYPE;
1723 else
1725 /* Ensure there isn't a size already set. There can be in an error
1726 case where there is a rep clause but all fields have errors and
1727 no longer have a position. */
1728 TYPE_SIZE (record_type) = 0;
1730 /* Ensure we use the traditional GCC layout for bitfields when we need
1731 to pack the record type or have a representation clause. The other
1732 possible layout (Microsoft C compiler), if available, would prevent
1733 efficient packing in almost all cases. */
1734 #ifdef TARGET_MS_BITFIELD_LAYOUT
1735 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1736 decl_attributes (&record_type,
1737 tree_cons (get_identifier ("gcc_struct"),
1738 NULL_TREE, NULL_TREE),
1739 ATTR_FLAG_TYPE_IN_PLACE);
1740 #endif
1742 layout_type (record_type);
1745 /* At this point, the position and size of each field is known. It was
1746 either set before entry by a rep clause, or by laying out the type above.
1748 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1749 to compute the Ada size; the GCC size and alignment (for rep'ed records
1750 that are not padding types); and the mode (for rep'ed records). We also
1751 clear the DECL_BIT_FIELD indication for the cases we know have not been
1752 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1754 if (code == QUAL_UNION_TYPE)
1755 field_list = nreverse (field_list);
1757 for (field = field_list; field; field = DECL_CHAIN (field))
1759 tree type = TREE_TYPE (field);
1760 tree pos = bit_position (field);
1761 tree this_size = DECL_SIZE (field);
1762 tree this_ada_size;
1764 if (RECORD_OR_UNION_TYPE_P (type)
1765 && !TYPE_FAT_POINTER_P (type)
1766 && !TYPE_CONTAINS_TEMPLATE_P (type)
1767 && TYPE_ADA_SIZE (type))
1768 this_ada_size = TYPE_ADA_SIZE (type);
1769 else
1770 this_ada_size = this_size;
1772 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1773 if (DECL_BIT_FIELD (field)
1774 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1776 unsigned int align = TYPE_ALIGN (type);
1778 /* In the general case, type alignment is required. */
1779 if (value_factor_p (pos, align))
1781 /* The enclosing record type must be sufficiently aligned.
1782 Otherwise, if no alignment was specified for it and it
1783 has been laid out already, bump its alignment to the
1784 desired one if this is compatible with its size and
1785 maximum alignment, if any. */
1786 if (TYPE_ALIGN (record_type) >= align)
1788 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
1789 DECL_BIT_FIELD (field) = 0;
1791 else if (!had_align
1792 && rep_level == 0
1793 && value_factor_p (TYPE_SIZE (record_type), align)
1794 && (!TYPE_MAX_ALIGN (record_type)
1795 || TYPE_MAX_ALIGN (record_type) >= align))
1797 SET_TYPE_ALIGN (record_type, align);
1798 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
1799 DECL_BIT_FIELD (field) = 0;
1803 /* In the non-strict alignment case, only byte alignment is. */
1804 if (!STRICT_ALIGNMENT
1805 && DECL_BIT_FIELD (field)
1806 && value_factor_p (pos, BITS_PER_UNIT))
1807 DECL_BIT_FIELD (field) = 0;
1810 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1811 field is technically not addressable. Except that it can actually
1812 be addressed if it is BLKmode and happens to be properly aligned. */
1813 if (DECL_BIT_FIELD (field)
1814 && !(DECL_MODE (field) == BLKmode
1815 && value_factor_p (pos, BITS_PER_UNIT)))
1816 DECL_NONADDRESSABLE_P (field) = 1;
1818 /* A type must be as aligned as its most aligned field that is not
1819 a bit-field. But this is already enforced by layout_type. */
1820 if (rep_level > 0 && !DECL_BIT_FIELD (field))
1821 SET_TYPE_ALIGN (record_type,
1822 MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field)));
1824 switch (code)
1826 case UNION_TYPE:
1827 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1828 size = size_binop (MAX_EXPR, size, this_size);
1829 break;
1831 case QUAL_UNION_TYPE:
1832 ada_size
1833 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1834 this_ada_size, ada_size);
1835 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1836 this_size, size);
1837 break;
1839 case RECORD_TYPE:
1840 /* Since we know here that all fields are sorted in order of
1841 increasing bit position, the size of the record is one
1842 higher than the ending bit of the last field processed
1843 unless we have a rep clause, since in that case we might
1844 have a field outside a QUAL_UNION_TYPE that has a higher ending
1845 position. So use a MAX in that case. Also, if this field is a
1846 QUAL_UNION_TYPE, we need to take into account the previous size in
1847 the case of empty variants. */
1848 ada_size
1849 = merge_sizes (ada_size, pos, this_ada_size,
1850 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1851 size
1852 = merge_sizes (size, pos, this_size,
1853 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1854 break;
1856 default:
1857 gcc_unreachable ();
1861 if (code == QUAL_UNION_TYPE)
1862 nreverse (field_list);
1864 if (rep_level < 2)
1866 /* If this is a padding record, we never want to make the size smaller
1867 than what was specified in it, if any. */
1868 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
1869 size = TYPE_SIZE (record_type);
1871 /* Now set any of the values we've just computed that apply. */
1872 if (!TYPE_FAT_POINTER_P (record_type)
1873 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1874 SET_TYPE_ADA_SIZE (record_type, ada_size);
1876 if (rep_level > 0)
1878 tree size_unit = had_size_unit
1879 ? TYPE_SIZE_UNIT (record_type)
1880 : convert (sizetype,
1881 size_binop (CEIL_DIV_EXPR, size,
1882 bitsize_unit_node));
1883 unsigned int align = TYPE_ALIGN (record_type);
1885 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1886 TYPE_SIZE_UNIT (record_type)
1887 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1889 compute_record_mode (record_type);
1893 /* Reset the TYPE_MAX_ALIGN field since it's private to gigi. */
1894 TYPE_MAX_ALIGN (record_type) = 0;
1896 if (debug_info_p)
1897 rest_of_record_type_compilation (record_type);
1900 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
1901 PARRALEL_TYPE has no context and its computation is not deferred yet, also
1902 propagate TYPE's context to PARALLEL_TYPE's or defer its propagation to the
1903 moment TYPE will get a context. */
1905 void
1906 add_parallel_type (tree type, tree parallel_type)
1908 tree decl = TYPE_STUB_DECL (type);
1910 while (DECL_PARALLEL_TYPE (decl))
1911 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1913 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1915 /* If PARALLEL_TYPE already has a context, we are done. */
1916 if (TYPE_CONTEXT (parallel_type))
1917 return;
1919 /* Otherwise, try to get one from TYPE's context. If so, simply propagate
1920 it to PARALLEL_TYPE. */
1921 if (TYPE_CONTEXT (type))
1922 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
1924 /* Otherwise TYPE has not context yet. We know it will have one thanks to
1925 gnat_pushdecl and then its context will be propagated to PARALLEL_TYPE,
1926 so we have nothing to do in this case. */
1929 /* Return true if TYPE has a parallel type. */
1931 static bool
1932 has_parallel_type (tree type)
1934 tree decl = TYPE_STUB_DECL (type);
1936 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1939 /* Wrap up compilation of RECORD_TYPE, i.e. output additional debug info
1940 associated with it. It need not be invoked directly in most cases as
1941 finish_record_type takes care of doing so. */
1943 void
1944 rest_of_record_type_compilation (tree record_type)
1946 bool var_size = false;
1947 tree field;
1949 /* If this is a padded type, the bulk of the debug info has already been
1950 generated for the field's type. */
1951 if (TYPE_IS_PADDING_P (record_type))
1952 return;
1954 /* If the type already has a parallel type (XVS type), then we're done. */
1955 if (has_parallel_type (record_type))
1956 return;
1958 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
1960 /* We need to make an XVE/XVU record if any field has variable size,
1961 whether or not the record does. For example, if we have a union,
1962 it may be that all fields, rounded up to the alignment, have the
1963 same size, in which case we'll use that size. But the debug
1964 output routines (except Dwarf2) won't be able to output the fields,
1965 so we need to make the special record. */
1966 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1967 /* If a field has a non-constant qualifier, the record will have
1968 variable size too. */
1969 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
1970 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1972 var_size = true;
1973 break;
1977 /* If this record type is of variable size, make a parallel record type that
1978 will tell the debugger how the former is laid out (see exp_dbug.ads). */
1979 if (var_size && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
1981 tree new_record_type
1982 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1983 ? UNION_TYPE : TREE_CODE (record_type));
1984 tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
1985 tree last_pos = bitsize_zero_node;
1986 tree old_field, prev_old_field = NULL_TREE;
1988 new_name
1989 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
1990 ? "XVU" : "XVE");
1991 TYPE_NAME (new_record_type) = new_name;
1992 SET_TYPE_ALIGN (new_record_type, BIGGEST_ALIGNMENT);
1993 TYPE_STUB_DECL (new_record_type)
1994 = create_type_stub_decl (new_name, new_record_type);
1995 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1996 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1997 gnat_pushdecl (TYPE_STUB_DECL (new_record_type), Empty);
1998 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1999 TYPE_SIZE_UNIT (new_record_type)
2000 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
2002 /* Now scan all the fields, replacing each field with a new field
2003 corresponding to the new encoding. */
2004 for (old_field = TYPE_FIELDS (record_type); old_field;
2005 old_field = DECL_CHAIN (old_field))
2007 tree field_type = TREE_TYPE (old_field);
2008 tree field_name = DECL_NAME (old_field);
2009 tree curpos = bit_position (old_field);
2010 tree pos, new_field;
2011 bool var = false;
2012 unsigned int align = 0;
2014 /* We're going to do some pattern matching below so remove as many
2015 conversions as possible. */
2016 curpos = remove_conversions (curpos, true);
2018 /* See how the position was modified from the last position.
2020 There are two basic cases we support: a value was added
2021 to the last position or the last position was rounded to
2022 a boundary and they something was added. Check for the
2023 first case first. If not, see if there is any evidence
2024 of rounding. If so, round the last position and retry.
2026 If this is a union, the position can be taken as zero. */
2027 if (TREE_CODE (new_record_type) == UNION_TYPE)
2028 pos = bitsize_zero_node;
2029 else
2030 pos = compute_related_constant (curpos, last_pos);
2032 if (!pos
2033 && TREE_CODE (curpos) == MULT_EXPR
2034 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
2036 tree offset = TREE_OPERAND (curpos, 0);
2037 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
2038 align = scale_by_factor_of (offset, align);
2039 last_pos = round_up (last_pos, align);
2040 pos = compute_related_constant (curpos, last_pos);
2042 else if (!pos
2043 && TREE_CODE (curpos) == PLUS_EXPR
2044 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
2045 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
2046 && tree_fits_uhwi_p
2047 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
2049 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
2050 unsigned HOST_WIDE_INT addend
2051 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
2052 align
2053 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
2054 align = scale_by_factor_of (offset, align);
2055 align = MIN (align, addend & -addend);
2056 last_pos = round_up (last_pos, align);
2057 pos = compute_related_constant (curpos, last_pos);
2059 else if (potential_alignment_gap (prev_old_field, old_field, pos))
2061 align = TYPE_ALIGN (field_type);
2062 last_pos = round_up (last_pos, align);
2063 pos = compute_related_constant (curpos, last_pos);
2066 /* If we can't compute a position, set it to zero.
2068 ??? We really should abort here, but it's too much work
2069 to get this correct for all cases. */
2070 if (!pos)
2071 pos = bitsize_zero_node;
2073 /* See if this type is variable-sized and make a pointer type
2074 and indicate the indirection if so. Beware that the debug
2075 back-end may adjust the position computed above according
2076 to the alignment of the field type, i.e. the pointer type
2077 in this case, if we don't preventively counter that. */
2078 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
2080 field_type = build_pointer_type (field_type);
2081 if (align != 0 && TYPE_ALIGN (field_type) > align)
2083 field_type = copy_type (field_type);
2084 SET_TYPE_ALIGN (field_type, align);
2086 var = true;
2089 /* Make a new field name, if necessary. */
2090 if (var || align != 0)
2092 char suffix[16];
2094 if (align != 0)
2095 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
2096 align / BITS_PER_UNIT);
2097 else
2098 strcpy (suffix, "XVL");
2100 field_name = concat_name (field_name, suffix);
2103 new_field
2104 = create_field_decl (field_name, field_type, new_record_type,
2105 DECL_SIZE (old_field), pos, 0, 0);
2106 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
2107 TYPE_FIELDS (new_record_type) = new_field;
2109 /* If old_field is a QUAL_UNION_TYPE, take its size as being
2110 zero. The only time it's not the last field of the record
2111 is when there are other components at fixed positions after
2112 it (meaning there was a rep clause for every field) and we
2113 want to be able to encode them. */
2114 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
2115 (TREE_CODE (TREE_TYPE (old_field))
2116 == QUAL_UNION_TYPE)
2117 ? bitsize_zero_node
2118 : DECL_SIZE (old_field));
2119 prev_old_field = old_field;
2122 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
2124 add_parallel_type (record_type, new_record_type);
2128 /* Utility function of above to merge LAST_SIZE, the previous size of a record
2129 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2130 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2131 replace a value of zero with the old size. If HAS_REP is true, we take the
2132 MAX of the end position of this field with LAST_SIZE. In all other cases,
2133 we use FIRST_BIT plus SIZE. Return an expression for the size. */
2135 static tree
2136 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
2137 bool has_rep)
2139 tree type = TREE_TYPE (last_size);
2140 tree new_size;
2142 if (!special || TREE_CODE (size) != COND_EXPR)
2144 new_size = size_binop (PLUS_EXPR, first_bit, size);
2145 if (has_rep)
2146 new_size = size_binop (MAX_EXPR, last_size, new_size);
2149 else
2150 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
2151 integer_zerop (TREE_OPERAND (size, 1))
2152 ? last_size : merge_sizes (last_size, first_bit,
2153 TREE_OPERAND (size, 1),
2154 1, has_rep),
2155 integer_zerop (TREE_OPERAND (size, 2))
2156 ? last_size : merge_sizes (last_size, first_bit,
2157 TREE_OPERAND (size, 2),
2158 1, has_rep));
2160 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
2161 when fed through substitute_in_expr) into thinking that a constant
2162 size is not constant. */
2163 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
2164 new_size = TREE_OPERAND (new_size, 0);
2166 return new_size;
2169 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2170 related by the addition of a constant. Return that constant if so. */
2172 static tree
2173 compute_related_constant (tree op0, tree op1)
2175 tree op0_var, op1_var;
2176 tree op0_con = split_plus (op0, &op0_var);
2177 tree op1_con = split_plus (op1, &op1_var);
2178 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
2180 if (operand_equal_p (op0_var, op1_var, 0))
2181 return result;
2182 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
2183 return result;
2184 else
2185 return 0;
2188 /* Utility function of above to split a tree OP which may be a sum, into a
2189 constant part, which is returned, and a variable part, which is stored
2190 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2191 bitsizetype. */
2193 static tree
2194 split_plus (tree in, tree *pvar)
2196 /* Strip conversions in order to ease the tree traversal and maximize the
2197 potential for constant or plus/minus discovery. We need to be careful
2198 to always return and set *pvar to bitsizetype trees, but it's worth
2199 the effort. */
2200 in = remove_conversions (in, false);
2202 *pvar = convert (bitsizetype, in);
2204 if (TREE_CODE (in) == INTEGER_CST)
2206 *pvar = bitsize_zero_node;
2207 return convert (bitsizetype, in);
2209 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2211 tree lhs_var, rhs_var;
2212 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2213 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2215 if (lhs_var == TREE_OPERAND (in, 0)
2216 && rhs_var == TREE_OPERAND (in, 1))
2217 return bitsize_zero_node;
2219 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2220 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2222 else
2223 return bitsize_zero_node;
2226 /* Return a copy of TYPE but safe to modify in any way. */
2228 tree
2229 copy_type (tree type)
2231 tree new_type = copy_node (type);
2233 /* Unshare the language-specific data. */
2234 if (TYPE_LANG_SPECIFIC (type))
2236 TYPE_LANG_SPECIFIC (new_type) = NULL;
2237 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2240 /* And the contents of the language-specific slot if needed. */
2241 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2242 && TYPE_RM_VALUES (type))
2244 TYPE_RM_VALUES (new_type) = NULL_TREE;
2245 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2246 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2247 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2250 /* copy_node clears this field instead of copying it, because it is
2251 aliased with TREE_CHAIN. */
2252 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
2254 TYPE_POINTER_TO (new_type) = NULL_TREE;
2255 TYPE_REFERENCE_TO (new_type) = NULL_TREE;
2256 TYPE_MAIN_VARIANT (new_type) = new_type;
2257 TYPE_NEXT_VARIANT (new_type) = NULL_TREE;
2258 TYPE_CANONICAL (new_type) = new_type;
2260 return new_type;
2263 /* Return a subtype of sizetype with range MIN to MAX and whose
2264 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2265 of the associated TYPE_DECL. */
2267 tree
2268 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2270 /* First build a type for the desired range. */
2271 tree type = build_nonshared_range_type (sizetype, min, max);
2273 /* Then set the index type. */
2274 SET_TYPE_INDEX_TYPE (type, index);
2275 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2277 return type;
2280 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2281 sizetype is used. */
2283 tree
2284 create_range_type (tree type, tree min, tree max)
2286 tree range_type;
2288 if (!type)
2289 type = sizetype;
2291 /* First build a type with the base range. */
2292 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2293 TYPE_MAX_VALUE (type));
2295 /* Then set the actual range. */
2296 SET_TYPE_RM_MIN_VALUE (range_type, min);
2297 SET_TYPE_RM_MAX_VALUE (range_type, max);
2299 return range_type;
2302 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
2303 NAME gives the name of the type to be used in the declaration. */
2305 tree
2306 create_type_stub_decl (tree name, tree type)
2308 tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
2309 DECL_ARTIFICIAL (type_decl) = 1;
2310 TYPE_ARTIFICIAL (type) = 1;
2311 return type_decl;
2314 /* Return a TYPE_DECL node for TYPE. NAME gives the name of the type to be
2315 used in the declaration. ARTIFICIAL_P is true if the declaration was
2316 generated by the compiler. DEBUG_INFO_P is true if we need to write
2317 debug information about this type. GNAT_NODE is used for the position
2318 of the decl. */
2320 tree
2321 create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
2322 Node_Id gnat_node)
2324 enum tree_code code = TREE_CODE (type);
2325 bool is_named
2326 = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2327 tree type_decl;
2329 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2330 gcc_assert (!TYPE_IS_DUMMY_P (type));
2332 /* If the type hasn't been named yet, we're naming it; preserve an existing
2333 TYPE_STUB_DECL that has been attached to it for some purpose. */
2334 if (!is_named && TYPE_STUB_DECL (type))
2336 type_decl = TYPE_STUB_DECL (type);
2337 DECL_NAME (type_decl) = name;
2339 else
2340 type_decl = build_decl (input_location, TYPE_DECL, name, type);
2342 DECL_ARTIFICIAL (type_decl) = artificial_p;
2343 TYPE_ARTIFICIAL (type) = artificial_p;
2345 /* Add this decl to the current binding level. */
2346 gnat_pushdecl (type_decl, gnat_node);
2348 /* If we're naming the type, equate the TYPE_STUB_DECL to the name. This
2349 causes the name to be also viewed as a "tag" by the debug back-end, with
2350 the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
2351 types in DWARF.
2353 Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
2354 from multiple contexts, and "type_decl" references a copy of it: in such a
2355 case, do not mess TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
2356 with the mechanism above. */
2357 if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
2358 TYPE_STUB_DECL (type) = type_decl;
2360 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2361 back-end doesn't support, and for others if we don't need to. */
2362 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2363 DECL_IGNORED_P (type_decl) = 1;
2365 return type_decl;
2368 /* Return a VAR_DECL or CONST_DECL node.
2370 NAME gives the name of the variable. ASM_NAME is its assembler name
2371 (if provided). TYPE is its data type (a GCC ..._TYPE node). INIT is
2372 the GCC tree for an optional initial expression; NULL_TREE if none.
2374 CONST_FLAG is true if this variable is constant, in which case we might
2375 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2377 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2378 definition to be made visible outside of the current compilation unit, for
2379 instance variable definitions in a package specification.
2381 EXTERN_FLAG is true when processing an external variable declaration (as
2382 opposed to a definition: no storage is to be allocated for the variable).
2384 STATIC_FLAG is only relevant when not at top level and indicates whether
2385 to always allocate storage to the variable.
2387 VOLATILE_FLAG is true if this variable is declared as volatile.
2389 ARTIFICIAL_P is true if the variable was generated by the compiler.
2391 DEBUG_INFO_P is true if we need to write debug information for it.
2393 ATTR_LIST is the list of attributes to be attached to the variable.
2395 GNAT_NODE is used for the position of the decl. */
2397 tree
2398 create_var_decl (tree name, tree asm_name, tree type, tree init,
2399 bool const_flag, bool public_flag, bool extern_flag,
2400 bool static_flag, bool volatile_flag, bool artificial_p,
2401 bool debug_info_p, struct attrib *attr_list,
2402 Node_Id gnat_node, bool const_decl_allowed_p)
2404 /* Whether the object has static storage duration, either explicitly or by
2405 virtue of being declared at the global level. */
2406 const bool static_storage = static_flag || global_bindings_p ();
2408 /* Whether the initializer is constant: for an external object or an object
2409 with static storage duration, we check that the initializer is a valid
2410 constant expression for initializing a static variable; otherwise, we
2411 only check that it is constant. */
2412 const bool init_const
2413 = (init
2414 && gnat_types_compatible_p (type, TREE_TYPE (init))
2415 && (extern_flag || static_storage
2416 ? initializer_constant_valid_p (init, TREE_TYPE (init))
2417 != NULL_TREE
2418 : TREE_CONSTANT (init)));
2420 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2421 case the initializer may be used in lieu of the DECL node (as done in
2422 Identifier_to_gnu). This is useful to prevent the need of elaboration
2423 code when an identifier for which such a DECL is made is in turn used
2424 as an initializer. We used to rely on CONST_DECL vs VAR_DECL for this,
2425 but extra constraints apply to this choice (see below) and they are not
2426 relevant to the distinction we wish to make. */
2427 const bool constant_p = const_flag && init_const;
2429 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2430 and may be used for scalars in general but not for aggregates. */
2431 tree var_decl
2432 = build_decl (input_location,
2433 (constant_p
2434 && const_decl_allowed_p
2435 && !AGGREGATE_TYPE_P (type) ? CONST_DECL : VAR_DECL),
2436 name, type);
2438 /* Detect constants created by the front-end to hold 'reference to function
2439 calls for stabilization purposes. This is needed for renaming. */
2440 if (const_flag && init && POINTER_TYPE_P (type))
2442 tree inner = init;
2443 if (TREE_CODE (inner) == COMPOUND_EXPR)
2444 inner = TREE_OPERAND (inner, 1);
2445 inner = remove_conversions (inner, true);
2446 if (TREE_CODE (inner) == ADDR_EXPR
2447 && ((TREE_CODE (TREE_OPERAND (inner, 0)) == CALL_EXPR
2448 && !call_is_atomic_load (TREE_OPERAND (inner, 0)))
2449 || (TREE_CODE (TREE_OPERAND (inner, 0)) == VAR_DECL
2450 && DECL_RETURN_VALUE_P (TREE_OPERAND (inner, 0)))))
2451 DECL_RETURN_VALUE_P (var_decl) = 1;
2454 /* If this is external, throw away any initializations (they will be done
2455 elsewhere) unless this is a constant for which we would like to remain
2456 able to get the initializer. If we are defining a global here, leave a
2457 constant initialization and save any variable elaborations for the
2458 elaboration routine. If we are just annotating types, throw away the
2459 initialization if it isn't a constant. */
2460 if ((extern_flag && init && !constant_p)
2461 || (type_annotate_only && init && !TREE_CONSTANT (init)))
2463 init = NULL_TREE;
2465 /* In LTO mode, also clear TREE_READONLY the same way add_decl_expr
2466 would do it if the initializer was not thrown away here, as the
2467 WPA phase requires a consistent view across compilation units. */
2468 if (const_flag && flag_generate_lto)
2470 const_flag = false;
2471 DECL_READONLY_ONCE_ELAB (var_decl) = 1;
2475 /* At the global level, a non-constant initializer generates elaboration
2476 statements. Check that such statements are allowed, that is to say,
2477 not violating a No_Elaboration_Code restriction. */
2478 if (init && !init_const && global_bindings_p ())
2479 Check_Elaboration_Code_Allowed (gnat_node);
2481 /* Attach the initializer, if any. */
2482 DECL_INITIAL (var_decl) = init;
2484 /* Directly set some flags. */
2485 DECL_ARTIFICIAL (var_decl) = artificial_p;
2486 DECL_EXTERNAL (var_decl) = extern_flag;
2488 TREE_CONSTANT (var_decl) = constant_p;
2489 TREE_READONLY (var_decl) = const_flag;
2491 /* The object is public if it is external or if it is declared public
2492 and has static storage duration. */
2493 TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);
2495 /* We need to allocate static storage for an object with static storage
2496 duration if it isn't external. */
2497 TREE_STATIC (var_decl) = !extern_flag && static_storage;
2499 TREE_SIDE_EFFECTS (var_decl)
2500 = TREE_THIS_VOLATILE (var_decl)
2501 = TYPE_VOLATILE (type) | volatile_flag;
2503 if (TREE_SIDE_EFFECTS (var_decl))
2504 TREE_ADDRESSABLE (var_decl) = 1;
2506 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2507 try to fiddle with DECL_COMMON. However, on platforms that don't
2508 support global BSS sections, uninitialized global variables would
2509 go in DATA instead, thus increasing the size of the executable. */
2510 if (!flag_no_common
2511 && TREE_CODE (var_decl) == VAR_DECL
2512 && TREE_PUBLIC (var_decl)
2513 && !have_global_bss_p ())
2514 DECL_COMMON (var_decl) = 1;
2516 /* Do not emit debug info for a CONST_DECL if optimization isn't enabled,
2517 since we will create an associated variable. Likewise for an external
2518 constant whose initializer is not absolute, because this would mean a
2519 global relocation in a read-only section which runs afoul of the PE-COFF
2520 run-time relocation mechanism. */
2521 if (!debug_info_p
2522 || (TREE_CODE (var_decl) == CONST_DECL && !optimize)
2523 || (extern_flag
2524 && constant_p
2525 && init
2526 && initializer_constant_valid_p (init, TREE_TYPE (init))
2527 != null_pointer_node))
2528 DECL_IGNORED_P (var_decl) = 1;
2530 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2531 if (TREE_CODE (var_decl) == VAR_DECL)
2532 process_attributes (&var_decl, &attr_list, true, gnat_node);
2534 /* Add this decl to the current binding level. */
2535 gnat_pushdecl (var_decl, gnat_node);
2537 if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
2539 /* Let the target mangle the name if this isn't a verbatim asm. */
2540 if (*IDENTIFIER_POINTER (asm_name) != '*')
2541 asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);
2543 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
2546 return var_decl;
2549 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2551 static bool
2552 aggregate_type_contains_array_p (tree type)
2554 switch (TREE_CODE (type))
2556 case RECORD_TYPE:
2557 case UNION_TYPE:
2558 case QUAL_UNION_TYPE:
2560 tree field;
2561 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2562 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2563 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2564 return true;
2565 return false;
2568 case ARRAY_TYPE:
2569 return true;
2571 default:
2572 gcc_unreachable ();
2576 /* Return a FIELD_DECL node. NAME is the field's name, TYPE is its type and
2577 RECORD_TYPE is the type of the enclosing record. If SIZE is nonzero, it
2578 is the specified size of the field. If POS is nonzero, it is the bit
2579 position. PACKED is 1 if the enclosing record is packed, -1 if it has
2580 Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2581 means we are allowed to take the address of the field; if it is negative,
2582 we should not make a bitfield, which is used by make_aligning_type. */
2584 tree
2585 create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
2586 int packed, int addressable)
2588 tree field_decl = build_decl (input_location, FIELD_DECL, name, type);
2590 DECL_CONTEXT (field_decl) = record_type;
2591 TREE_READONLY (field_decl) = TYPE_READONLY (type);
2593 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2594 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2595 Likewise for an aggregate without specified position that contains an
2596 array, because in this case slices of variable length of this array
2597 must be handled by GCC and variable-sized objects need to be aligned
2598 to at least a byte boundary. */
2599 if (packed && (TYPE_MODE (type) == BLKmode
2600 || (!pos
2601 && AGGREGATE_TYPE_P (type)
2602 && aggregate_type_contains_array_p (type))))
2603 SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);
2605 /* If a size is specified, use it. Otherwise, if the record type is packed
2606 compute a size to use, which may differ from the object's natural size.
2607 We always set a size in this case to trigger the checks for bitfield
2608 creation below, which is typically required when no position has been
2609 specified. */
2610 if (size)
2611 size = convert (bitsizetype, size);
2612 else if (packed == 1)
2614 size = rm_size (type);
2615 if (TYPE_MODE (type) == BLKmode)
2616 size = round_up (size, BITS_PER_UNIT);
2619 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2620 specified for two reasons: first if the size differs from the natural
2621 size. Second, if the alignment is insufficient. There are a number of
2622 ways the latter can be true.
2624 We never make a bitfield if the type of the field has a nonconstant size,
2625 because no such entity requiring bitfield operations should reach here.
2627 We do *preventively* make a bitfield when there might be the need for it
2628 but we don't have all the necessary information to decide, as is the case
2629 of a field with no specified position in a packed record.
2631 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2632 in layout_decl or finish_record_type to clear the bit_field indication if
2633 it is in fact not needed. */
2634 if (addressable >= 0
2635 && size
2636 && TREE_CODE (size) == INTEGER_CST
2637 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2638 && (!tree_int_cst_equal (size, TYPE_SIZE (type))
2639 || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
2640 || packed
2641 || (TYPE_ALIGN (record_type) != 0
2642 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
2644 DECL_BIT_FIELD (field_decl) = 1;
2645 DECL_SIZE (field_decl) = size;
2646 if (!packed && !pos)
2648 if (TYPE_ALIGN (record_type) != 0
2649 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
2650 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type));
2651 else
2652 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
2656 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2658 /* Bump the alignment if need be, either for bitfield/packing purposes or
2659 to satisfy the type requirements if no such consideration applies. When
2660 we get the alignment from the type, indicate if this is from an explicit
2661 user request, which prevents stor-layout from lowering it later on. */
2663 unsigned int bit_align
2664 = (DECL_BIT_FIELD (field_decl) ? 1
2665 : packed && TYPE_MODE (type) != BLKmode ? BITS_PER_UNIT : 0);
2667 if (bit_align > DECL_ALIGN (field_decl))
2668 SET_DECL_ALIGN (field_decl, bit_align);
2669 else if (!bit_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
2671 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
2672 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
2676 if (pos)
2678 /* We need to pass in the alignment the DECL is known to have.
2679 This is the lowest-order bit set in POS, but no more than
2680 the alignment of the record, if one is specified. Note
2681 that an alignment of 0 is taken as infinite. */
2682 unsigned int known_align;
2684 if (tree_fits_uhwi_p (pos))
2685 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
2686 else
2687 known_align = BITS_PER_UNIT;
2689 if (TYPE_ALIGN (record_type)
2690 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2691 known_align = TYPE_ALIGN (record_type);
2693 layout_decl (field_decl, known_align);
2694 SET_DECL_OFFSET_ALIGN (field_decl,
2695 tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
2696 : BITS_PER_UNIT);
2697 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2698 &DECL_FIELD_BIT_OFFSET (field_decl),
2699 DECL_OFFSET_ALIGN (field_decl), pos);
2702 /* In addition to what our caller says, claim the field is addressable if we
2703 know that its type is not suitable.
2705 The field may also be "technically" nonaddressable, meaning that even if
2706 we attempt to take the field's address we will actually get the address
2707 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2708 value we have at this point is not accurate enough, so we don't account
2709 for this here and let finish_record_type decide. */
2710 if (!addressable && !type_for_nonaliased_component_p (type))
2711 addressable = 1;
2713 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2715 return field_decl;
2718 /* Return a PARM_DECL node with NAME and TYPE. */
2720 tree
2721 create_param_decl (tree name, tree type)
2723 tree param_decl = build_decl (input_location, PARM_DECL, name, type);
2725 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2726 can lead to various ABI violations. */
2727 if (targetm.calls.promote_prototypes (NULL_TREE)
2728 && INTEGRAL_TYPE_P (type)
2729 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
2731 /* We have to be careful about biased types here. Make a subtype
2732 of integer_type_node with the proper biasing. */
2733 if (TREE_CODE (type) == INTEGER_TYPE
2734 && TYPE_BIASED_REPRESENTATION_P (type))
2736 tree subtype
2737 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2738 TREE_TYPE (subtype) = integer_type_node;
2739 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2740 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
2741 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
2742 type = subtype;
2744 else
2745 type = integer_type_node;
2748 DECL_ARG_TYPE (param_decl) = type;
2749 return param_decl;
2752 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2753 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2754 changed. GNAT_NODE is used for the position of error messages. */
2756 void
2757 process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2758 Node_Id gnat_node)
2760 struct attrib *attr;
2762 for (attr = *attr_list; attr; attr = attr->next)
2763 switch (attr->type)
2765 case ATTR_MACHINE_ATTRIBUTE:
2766 Sloc_to_locus (Sloc (gnat_node), &input_location);
2767 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2768 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
2769 break;
2771 case ATTR_LINK_ALIAS:
2772 if (!DECL_EXTERNAL (*node))
2774 TREE_STATIC (*node) = 1;
2775 assemble_alias (*node, attr->name);
2777 break;
2779 case ATTR_WEAK_EXTERNAL:
2780 if (SUPPORTS_WEAK)
2781 declare_weak (*node);
2782 else
2783 post_error ("?weak declarations not supported on this target",
2784 attr->error_point);
2785 break;
2787 case ATTR_LINK_SECTION:
2788 if (targetm_common.have_named_sections)
2790 set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
2791 DECL_COMMON (*node) = 0;
2793 else
2794 post_error ("?section attributes are not supported for this target",
2795 attr->error_point);
2796 break;
2798 case ATTR_LINK_CONSTRUCTOR:
2799 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2800 TREE_USED (*node) = 1;
2801 break;
2803 case ATTR_LINK_DESTRUCTOR:
2804 DECL_STATIC_DESTRUCTOR (*node) = 1;
2805 TREE_USED (*node) = 1;
2806 break;
2808 case ATTR_THREAD_LOCAL_STORAGE:
2809 set_decl_tls_model (*node, decl_default_tls_model (*node));
2810 DECL_COMMON (*node) = 0;
2811 break;
2814 *attr_list = NULL;
2817 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2818 a power of 2. */
2820 bool
2821 value_factor_p (tree value, HOST_WIDE_INT factor)
2823 if (tree_fits_uhwi_p (value))
2824 return tree_to_uhwi (value) % factor == 0;
2826 if (TREE_CODE (value) == MULT_EXPR)
2827 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2828 || value_factor_p (TREE_OPERAND (value, 1), factor));
2830 return false;
2833 /* Return whether GNAT_NODE is a defining identifier for a renaming that comes
2834 from the parameter association for the instantiation of a generic. We do
2835 not want to emit source location for them: the code generated for their
2836 initialization is likely to disturb debugging. */
2838 bool
2839 renaming_from_generic_instantiation_p (Node_Id gnat_node)
2841 if (Nkind (gnat_node) != N_Defining_Identifier
2842 || !IN (Ekind (gnat_node), Object_Kind)
2843 || Comes_From_Source (gnat_node)
2844 || !Present (Renamed_Object (gnat_node)))
2845 return false;
2847 /* Get the object declaration of the renamed object, if any and if the
2848 renamed object is a mere identifier. */
2849 gnat_node = Renamed_Object (gnat_node);
2850 if (Nkind (gnat_node) != N_Identifier)
2851 return false;
2853 gnat_node = Entity (gnat_node);
2854 if (!Present (Parent (gnat_node)))
2855 return false;
2857 gnat_node = Parent (gnat_node);
2858 return
2859 (Present (gnat_node)
2860 && Nkind (gnat_node) == N_Object_Declaration
2861 && Present (Corresponding_Generic_Association (gnat_node)));
2864 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
2865 feed it with the elaboration of GNAT_SCOPE. */
2867 static struct deferred_decl_context_node *
2868 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2870 struct deferred_decl_context_node *new_node;
2872 new_node
2873 = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
2874 new_node->decl = decl;
2875 new_node->gnat_scope = gnat_scope;
2876 new_node->force_global = force_global;
2877 new_node->types.create (1);
2878 new_node->next = deferred_decl_context_queue;
2879 deferred_decl_context_queue = new_node;
2880 return new_node;
2883 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
2884 feed it with the DECL_CONTEXT computed as part of N as soon as it is
2885 computed. */
2887 static void
2888 add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
2890 n->types.safe_push (type);
2893 /* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
2894 NULL_TREE if it is not available. */
2896 static tree
2897 compute_deferred_decl_context (Entity_Id gnat_scope)
2899 tree context;
2901 if (present_gnu_tree (gnat_scope))
2902 context = get_gnu_tree (gnat_scope);
2903 else
2904 return NULL_TREE;
2906 if (TREE_CODE (context) == TYPE_DECL)
2908 const tree context_type = TREE_TYPE (context);
2910 /* Skip dummy types: only the final ones can appear in the context
2911 chain. */
2912 if (TYPE_DUMMY_P (context_type))
2913 return NULL_TREE;
2915 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
2916 chain. */
2917 else
2918 context = context_type;
2921 return context;
2924 /* Try to process all deferred nodes in the queue. Keep in the queue the ones
2925 that cannot be processed yet, remove the other ones. If FORCE is true,
2926 force the processing for all nodes, use the global context when nodes don't
2927 have a GNU translation. */
2929 void
2930 process_deferred_decl_context (bool force)
2932 struct deferred_decl_context_node **it = &deferred_decl_context_queue;
2933 struct deferred_decl_context_node *node;
2935 while (*it != NULL)
2937 bool processed = false;
2938 tree context = NULL_TREE;
2939 Entity_Id gnat_scope;
2941 node = *it;
2943 /* If FORCE, get the innermost elaborated scope. Otherwise, just try to
2944 get the first scope. */
2945 gnat_scope = node->gnat_scope;
2946 while (Present (gnat_scope))
2948 context = compute_deferred_decl_context (gnat_scope);
2949 if (!force || context)
2950 break;
2951 gnat_scope = get_debug_scope (gnat_scope, NULL);
2954 /* Imported declarations must not be in a local context (i.e. not inside
2955 a function). */
2956 if (context && node->force_global > 0)
2958 tree ctx = context;
2960 while (ctx)
2962 gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
2963 ctx = DECL_P (ctx) ? DECL_CONTEXT (ctx) : TYPE_CONTEXT (ctx);
2967 /* If FORCE, we want to get rid of all nodes in the queue: in case there
2968 was no elaborated scope, use the global context. */
2969 if (force && !context)
2970 context = get_global_context ();
2972 if (context)
2974 tree t;
2975 int i;
2977 DECL_CONTEXT (node->decl) = context;
2979 /* Propagate it to the TYPE_CONTEXT attributes of the requested
2980 ..._TYPE nodes. */
2981 FOR_EACH_VEC_ELT (node->types, i, t)
2983 gnat_set_type_context (t, context);
2985 processed = true;
2988 /* If this node has been successfuly processed, remove it from the
2989 queue. Then move to the next node. */
2990 if (processed)
2992 *it = node->next;
2993 node->types.release ();
2994 free (node);
2996 else
2997 it = &node->next;
3002 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
3004 static unsigned int
3005 scale_by_factor_of (tree expr, unsigned int value)
3007 unsigned HOST_WIDE_INT addend = 0;
3008 unsigned HOST_WIDE_INT factor = 1;
3010 /* Peel conversions around EXPR and try to extract bodies from function
3011 calls: it is possible to get the scale factor from size functions. */
3012 expr = remove_conversions (expr, true);
3013 if (TREE_CODE (expr) == CALL_EXPR)
3014 expr = maybe_inline_call_in_expr (expr);
3016 /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
3017 multiple of the scale factor we are looking for. */
3018 if (TREE_CODE (expr) == PLUS_EXPR
3019 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3020 && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
3022 addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3023 expr = TREE_OPERAND (expr, 0);
3026 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
3027 corresponding to the number of trailing zeros of the mask. */
3028 if (TREE_CODE (expr) == BIT_AND_EXPR
3029 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
3031 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3032 unsigned int i = 0;
3034 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
3036 mask >>= 1;
3037 factor *= 2;
3038 i++;
3042 /* If the addend is not a multiple of the factor we found, give up. In
3043 theory we could find a smaller common factor but it's useless for our
3044 needs. This situation arises when dealing with a field F1 with no
3045 alignment requirement but that is following a field F2 with such
3046 requirements. As long as we have F2's offset, we don't need alignment
3047 information to compute F1's. */
3048 if (addend % factor != 0)
3049 factor = 1;
3051 return factor * value;
3054 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
3055 unless we can prove these 2 fields are laid out in such a way that no gap
3056 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
3057 is the distance in bits between the end of PREV_FIELD and the starting
3058 position of CURR_FIELD. It is ignored if null. */
3060 static bool
3061 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
3063 /* If this is the first field of the record, there cannot be any gap */
3064 if (!prev_field)
3065 return false;
3067 /* If the previous field is a union type, then return false: The only
3068 time when such a field is not the last field of the record is when
3069 there are other components at fixed positions after it (meaning there
3070 was a rep clause for every field), in which case we don't want the
3071 alignment constraint to override them. */
3072 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
3073 return false;
3075 /* If the distance between the end of prev_field and the beginning of
3076 curr_field is constant, then there is a gap if the value of this
3077 constant is not null. */
3078 if (offset && tree_fits_uhwi_p (offset))
3079 return !integer_zerop (offset);
3081 /* If the size and position of the previous field are constant,
3082 then check the sum of this size and position. There will be a gap
3083 iff it is not multiple of the current field alignment. */
3084 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
3085 && tree_fits_uhwi_p (bit_position (prev_field)))
3086 return ((tree_to_uhwi (bit_position (prev_field))
3087 + tree_to_uhwi (DECL_SIZE (prev_field)))
3088 % DECL_ALIGN (curr_field) != 0);
3090 /* If both the position and size of the previous field are multiples
3091 of the current field alignment, there cannot be any gap. */
3092 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
3093 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
3094 return false;
3096 /* Fallback, return that there may be a potential gap */
3097 return true;
3100 /* Return a LABEL_DECL with NAME. GNAT_NODE is used for the position of
3101 the decl. */
3103 tree
3104 create_label_decl (tree name, Node_Id gnat_node)
3106 tree label_decl
3107 = build_decl (input_location, LABEL_DECL, name, void_type_node);
3109 DECL_MODE (label_decl) = VOIDmode;
3111 /* Add this decl to the current binding level. */
3112 gnat_pushdecl (label_decl, gnat_node);
3114 return label_decl;
3117 /* Return a FUNCTION_DECL node. NAME is the name of the subprogram, ASM_NAME
3118 its assembler name, TYPE its type (a FUNCTION_TYPE node), PARAM_DECL_LIST
3119 the list of its parameters (a list of PARM_DECL nodes chained through the
3120 DECL_CHAIN field).
3122 INLINE_STATUS describes the inline flags to be set on the FUNCTION_DECL.
3124 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
3125 definition to be made visible outside of the current compilation unit.
3127 EXTERN_FLAG is true when processing an external subprogram declaration.
3129 ARTIFICIAL_P is true if the subprogram was generated by the compiler.
3131 DEBUG_INFO_P is true if we need to write debug information for it.
3133 ATTR_LIST is the list of attributes to be attached to the subprogram.
3135 GNAT_NODE is used for the position of the decl. */
3137 tree
3138 create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
3139 enum inline_status_t inline_status, bool public_flag,
3140 bool extern_flag, bool artificial_p, bool debug_info_p,
3141 struct attrib *attr_list, Node_Id gnat_node)
3143 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
3144 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
3146 DECL_ARTIFICIAL (subprog_decl) = artificial_p;
3147 DECL_EXTERNAL (subprog_decl) = extern_flag;
3148 TREE_PUBLIC (subprog_decl) = public_flag;
3150 if (!debug_info_p)
3151 DECL_IGNORED_P (subprog_decl) = 1;
3153 switch (inline_status)
3155 case is_suppressed:
3156 DECL_UNINLINABLE (subprog_decl) = 1;
3157 break;
3159 case is_disabled:
3160 break;
3162 case is_required:
3163 if (Back_End_Inlining)
3164 decl_attributes (&subprog_decl,
3165 tree_cons (get_identifier ("always_inline"),
3166 NULL_TREE, NULL_TREE),
3167 ATTR_FLAG_TYPE_IN_PLACE);
3169 /* fall through */
3171 case is_enabled:
3172 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
3173 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
3174 break;
3176 default:
3177 gcc_unreachable ();
3180 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
3182 /* Once everything is processed, finish the subprogram declaration. */
3183 finish_subprog_decl (subprog_decl, asm_name, type);
3185 /* Add this decl to the current binding level. */
3186 gnat_pushdecl (subprog_decl, gnat_node);
3188 /* Output the assembler code and/or RTL for the declaration. */
3189 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
3191 return subprog_decl;
3194 /* Given a subprogram declaration DECL, its assembler name and its type,
3195 finish constructing the subprogram declaration from ASM_NAME and TYPE. */
3197 void
3198 finish_subprog_decl (tree decl, tree asm_name, tree type)
3200 tree result_decl
3201 = build_decl (DECL_SOURCE_LOCATION (decl), RESULT_DECL, NULL_TREE,
3202 TREE_TYPE (type));
3204 DECL_ARTIFICIAL (result_decl) = 1;
3205 DECL_IGNORED_P (result_decl) = 1;
3206 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
3207 DECL_RESULT (decl) = result_decl;
3209 TREE_READONLY (decl) = TYPE_READONLY (type);
3210 TREE_SIDE_EFFECTS (decl) = TREE_THIS_VOLATILE (decl) = TYPE_VOLATILE (type);
3212 if (asm_name)
3214 /* Let the target mangle the name if this isn't a verbatim asm. */
3215 if (*IDENTIFIER_POINTER (asm_name) != '*')
3216 asm_name = targetm.mangle_decl_assembler_name (decl, asm_name);
3218 SET_DECL_ASSEMBLER_NAME (decl, asm_name);
3220 /* The expand_main_function circuitry expects "main_identifier_node" to
3221 designate the DECL_NAME of the 'main' entry point, in turn expected
3222 to be declared as the "main" function literally by default. Ada
3223 program entry points are typically declared with a different name
3224 within the binder generated file, exported as 'main' to satisfy the
3225 system expectations. Force main_identifier_node in this case. */
3226 if (asm_name == main_identifier_node)
3227 DECL_NAME (decl) = main_identifier_node;
3231 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3232 body. This routine needs to be invoked before processing the declarations
3233 appearing in the subprogram. */
3235 void
3236 begin_subprog_body (tree subprog_decl)
3238 tree param_decl;
3240 announce_function (subprog_decl);
3242 /* This function is being defined. */
3243 TREE_STATIC (subprog_decl) = 1;
3245 /* The failure of this assertion will likely come from a wrong context for
3246 the subprogram body, e.g. another procedure for a procedure declared at
3247 library level. */
3248 gcc_assert (current_function_decl == decl_function_context (subprog_decl));
3250 current_function_decl = subprog_decl;
3252 /* Enter a new binding level and show that all the parameters belong to
3253 this function. */
3254 gnat_pushlevel ();
3256 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
3257 param_decl = DECL_CHAIN (param_decl))
3258 DECL_CONTEXT (param_decl) = subprog_decl;
3260 make_decl_rtl (subprog_decl);
3263 /* Finish translating the current subprogram and set its BODY. */
3265 void
3266 end_subprog_body (tree body)
3268 tree fndecl = current_function_decl;
3270 /* Attach the BLOCK for this level to the function and pop the level. */
3271 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
3272 DECL_INITIAL (fndecl) = current_binding_level->block;
3273 gnat_poplevel ();
3275 /* Mark the RESULT_DECL as being in this subprogram. */
3276 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
3278 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
3279 if (TREE_CODE (body) == BIND_EXPR)
3281 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
3282 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
3285 DECL_SAVED_TREE (fndecl) = body;
3287 current_function_decl = decl_function_context (fndecl);
3290 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
3292 void
3293 rest_of_subprog_body_compilation (tree subprog_decl)
3295 /* We cannot track the location of errors past this point. */
3296 error_gnat_node = Empty;
3298 /* If we're only annotating types, don't actually compile this function. */
3299 if (type_annotate_only)
3300 return;
3302 /* Dump functions before gimplification. */
3303 dump_function (TDI_original, subprog_decl);
3305 if (!decl_function_context (subprog_decl))
3306 cgraph_node::finalize_function (subprog_decl, false);
3307 else
3308 /* Register this function with cgraph just far enough to get it
3309 added to our parent's nested function list. */
3310 (void) cgraph_node::get_create (subprog_decl);
3313 tree
3314 gnat_builtin_function (tree decl)
3316 gnat_pushdecl (decl, Empty);
3317 return decl;
3320 /* Return an integer type with the number of bits of precision given by
3321 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3322 it is a signed type. */
3324 tree
3325 gnat_type_for_size (unsigned precision, int unsignedp)
3327 tree t;
3328 char type_name[20];
3330 if (precision <= 2 * MAX_BITS_PER_WORD
3331 && signed_and_unsigned_types[precision][unsignedp])
3332 return signed_and_unsigned_types[precision][unsignedp];
3334 if (unsignedp)
3335 t = make_unsigned_type (precision);
3336 else
3337 t = make_signed_type (precision);
3339 if (precision <= 2 * MAX_BITS_PER_WORD)
3340 signed_and_unsigned_types[precision][unsignedp] = t;
3342 if (!TYPE_NAME (t))
3344 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
3345 TYPE_NAME (t) = get_identifier (type_name);
3348 return t;
3351 /* Likewise for floating-point types. */
3353 static tree
3354 float_type_for_precision (int precision, machine_mode mode)
3356 tree t;
3357 char type_name[20];
3359 if (float_types[(int) mode])
3360 return float_types[(int) mode];
3362 float_types[(int) mode] = t = make_node (REAL_TYPE);
3363 TYPE_PRECISION (t) = precision;
3364 layout_type (t);
3366 gcc_assert (TYPE_MODE (t) == mode);
3367 if (!TYPE_NAME (t))
3369 sprintf (type_name, "FLOAT_%d", precision);
3370 TYPE_NAME (t) = get_identifier (type_name);
3373 return t;
3376 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
3377 an unsigned type; otherwise a signed type is returned. */
3379 tree
3380 gnat_type_for_mode (machine_mode mode, int unsignedp)
3382 if (mode == BLKmode)
3383 return NULL_TREE;
3385 if (mode == VOIDmode)
3386 return void_type_node;
3388 if (COMPLEX_MODE_P (mode))
3389 return NULL_TREE;
3391 if (SCALAR_FLOAT_MODE_P (mode))
3392 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
3394 if (SCALAR_INT_MODE_P (mode))
3395 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
3397 if (VECTOR_MODE_P (mode))
3399 machine_mode inner_mode = GET_MODE_INNER (mode);
3400 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3401 if (inner_type)
3402 return build_vector_type_for_mode (inner_type, mode);
3405 return NULL_TREE;
3408 /* Return the signed or unsigned version of TYPE_NODE, a scalar type, the
3409 signedness being specified by UNSIGNEDP. */
3411 tree
3412 gnat_signed_or_unsigned_type_for (int unsignedp, tree type_node)
3414 if (type_node == char_type_node)
3415 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3417 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), unsignedp);
3419 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3421 type = copy_type (type);
3422 TREE_TYPE (type) = type_node;
3424 else if (TREE_TYPE (type_node)
3425 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3426 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3428 type = copy_type (type);
3429 TREE_TYPE (type) = TREE_TYPE (type_node);
3432 return type;
3435 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
3436 transparently converted to each other. */
3439 gnat_types_compatible_p (tree t1, tree t2)
3441 enum tree_code code;
3443 /* This is the default criterion. */
3444 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
3445 return 1;
3447 /* We only check structural equivalence here. */
3448 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
3449 return 0;
3451 /* Vector types are also compatible if they have the same number of subparts
3452 and the same form of (scalar) element type. */
3453 if (code == VECTOR_TYPE
3454 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
3455 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3456 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
3457 return 1;
3459 /* Array types are also compatible if they are constrained and have the same
3460 domain(s), the same component type and the same scalar storage order. */
3461 if (code == ARRAY_TYPE
3462 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
3463 || (TYPE_DOMAIN (t1)
3464 && TYPE_DOMAIN (t2)
3465 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
3466 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
3467 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
3468 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
3469 && (TREE_TYPE (t1) == TREE_TYPE (t2)
3470 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
3471 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
3472 && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
3473 return 1;
3475 return 0;
3478 /* Return true if EXPR is a useless type conversion. */
3480 bool
3481 gnat_useless_type_conversion (tree expr)
3483 if (CONVERT_EXPR_P (expr)
3484 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3485 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3486 return gnat_types_compatible_p (TREE_TYPE (expr),
3487 TREE_TYPE (TREE_OPERAND (expr, 0)));
3489 return false;
3492 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3494 bool
3495 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3496 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3498 return TYPE_CI_CO_LIST (t) == cico_list
3499 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3500 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3501 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3504 /* EXP is an expression for the size of an object. If this size contains
3505 discriminant references, replace them with the maximum (if MAX_P) or
3506 minimum (if !MAX_P) possible value of the discriminant. */
3508 tree
3509 max_size (tree exp, bool max_p)
3511 enum tree_code code = TREE_CODE (exp);
3512 tree type = TREE_TYPE (exp);
3514 switch (TREE_CODE_CLASS (code))
3516 case tcc_declaration:
3517 case tcc_constant:
3518 return exp;
3520 case tcc_vl_exp:
3521 if (code == CALL_EXPR)
3523 tree t, *argarray;
3524 int n, i;
3526 t = maybe_inline_call_in_expr (exp);
3527 if (t)
3528 return max_size (t, max_p);
3530 n = call_expr_nargs (exp);
3531 gcc_assert (n > 0);
3532 argarray = XALLOCAVEC (tree, n);
3533 for (i = 0; i < n; i++)
3534 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3535 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3537 break;
3539 case tcc_reference:
3540 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3541 modify. Otherwise, we treat it like a variable. */
3542 if (CONTAINS_PLACEHOLDER_P (exp))
3544 tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
3545 tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
3546 return max_size (convert (get_base_type (val_type), val), true);
3549 return exp;
3551 case tcc_comparison:
3552 return max_p ? size_one_node : size_zero_node;
3554 case tcc_unary:
3555 if (code == NON_LVALUE_EXPR)
3556 return max_size (TREE_OPERAND (exp, 0), max_p);
3558 return fold_build1 (code, type,
3559 max_size (TREE_OPERAND (exp, 0),
3560 code == NEGATE_EXPR ? !max_p : max_p));
3562 case tcc_binary:
3564 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
3565 tree rhs = max_size (TREE_OPERAND (exp, 1),
3566 code == MINUS_EXPR ? !max_p : max_p);
3568 /* Special-case wanting the maximum value of a MIN_EXPR.
3569 In that case, if one side overflows, return the other. */
3570 if (max_p && code == MIN_EXPR)
3572 if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
3573 return lhs;
3575 if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
3576 return rhs;
3579 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3580 overflowing and the RHS a variable. */
3581 if ((code == MINUS_EXPR || code == PLUS_EXPR)
3582 && TREE_CODE (lhs) == INTEGER_CST
3583 && TREE_OVERFLOW (lhs)
3584 && TREE_CODE (rhs) != INTEGER_CST)
3585 return lhs;
3587 /* If we are going to subtract a "negative" value in an unsigned type,
3588 do the operation as an addition of the negated value, in order to
3589 avoid creating a spurious overflow below. */
3590 if (code == MINUS_EXPR
3591 && TYPE_UNSIGNED (type)
3592 && TREE_CODE (rhs) == INTEGER_CST
3593 && !TREE_OVERFLOW (rhs)
3594 && tree_int_cst_sign_bit (rhs) != 0)
3596 rhs = fold_build1 (NEGATE_EXPR, type, rhs);
3597 code = PLUS_EXPR;
3600 /* We need to detect overflows so we call size_binop here. */
3601 return size_binop (code, lhs, rhs);
3604 case tcc_expression:
3605 switch (TREE_CODE_LENGTH (code))
3607 case 1:
3608 if (code == SAVE_EXPR)
3609 return exp;
3611 return fold_build1 (code, type,
3612 max_size (TREE_OPERAND (exp, 0), max_p));
3614 case 2:
3615 if (code == COMPOUND_EXPR)
3616 return max_size (TREE_OPERAND (exp, 1), max_p);
3618 return fold_build2 (code, type,
3619 max_size (TREE_OPERAND (exp, 0), max_p),
3620 max_size (TREE_OPERAND (exp, 1), max_p));
3622 case 3:
3623 if (code == COND_EXPR)
3624 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
3625 max_size (TREE_OPERAND (exp, 1), max_p),
3626 max_size (TREE_OPERAND (exp, 2), max_p));
3628 default:
3629 break;
3632 /* Other tree classes cannot happen. */
3633 default:
3634 break;
3637 gcc_unreachable ();
3640 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3641 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3642 Return a constructor for the template. */
3644 tree
3645 build_template (tree template_type, tree array_type, tree expr)
3647 vec<constructor_elt, va_gc> *template_elts = NULL;
3648 tree bound_list = NULL_TREE;
3649 tree field;
3651 while (TREE_CODE (array_type) == RECORD_TYPE
3652 && (TYPE_PADDING_P (array_type)
3653 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3654 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3656 if (TREE_CODE (array_type) == ARRAY_TYPE
3657 || (TREE_CODE (array_type) == INTEGER_TYPE
3658 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3659 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3661 /* First make the list for a CONSTRUCTOR for the template. Go down the
3662 field list of the template instead of the type chain because this
3663 array might be an Ada array of arrays and we can't tell where the
3664 nested arrays stop being the underlying object. */
3666 for (field = TYPE_FIELDS (template_type); field;
3667 (bound_list
3668 ? (bound_list = TREE_CHAIN (bound_list))
3669 : (array_type = TREE_TYPE (array_type))),
3670 field = DECL_CHAIN (DECL_CHAIN (field)))
3672 tree bounds, min, max;
3674 /* If we have a bound list, get the bounds from there. Likewise
3675 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3676 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3677 This will give us a maximum range. */
3678 if (bound_list)
3679 bounds = TREE_VALUE (bound_list);
3680 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3681 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3682 else if (expr && TREE_CODE (expr) == PARM_DECL
3683 && DECL_BY_COMPONENT_PTR_P (expr))
3684 bounds = TREE_TYPE (field);
3685 else
3686 gcc_unreachable ();
3688 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
3689 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
3691 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3692 substitute it from OBJECT. */
3693 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3694 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3696 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
3697 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
3700 return gnat_build_constructor (template_type, template_elts);
3703 /* Return true if TYPE is suitable for the element type of a vector. */
3705 static bool
3706 type_for_vector_element_p (tree type)
3708 machine_mode mode;
3710 if (!INTEGRAL_TYPE_P (type)
3711 && !SCALAR_FLOAT_TYPE_P (type)
3712 && !FIXED_POINT_TYPE_P (type))
3713 return false;
3715 mode = TYPE_MODE (type);
3716 if (GET_MODE_CLASS (mode) != MODE_INT
3717 && !SCALAR_FLOAT_MODE_P (mode)
3718 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3719 return false;
3721 return true;
3724 /* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3725 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3726 attribute declaration and want to issue error messages on failure. */
3728 static tree
3729 build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3731 unsigned HOST_WIDE_INT size_int, inner_size_int;
3732 int nunits;
3734 /* Silently punt on variable sizes. We can't make vector types for them,
3735 need to ignore them on front-end generated subtypes of unconstrained
3736 base types, and this attribute is for binding implementors, not end
3737 users, so we should never get there from legitimate explicit uses. */
3738 if (!tree_fits_uhwi_p (size))
3739 return NULL_TREE;
3740 size_int = tree_to_uhwi (size);
3742 if (!type_for_vector_element_p (inner_type))
3744 if (attribute)
3745 error ("invalid element type for attribute %qs",
3746 IDENTIFIER_POINTER (attribute));
3747 return NULL_TREE;
3749 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3751 if (size_int % inner_size_int)
3753 if (attribute)
3754 error ("vector size not an integral multiple of component size");
3755 return NULL_TREE;
3758 if (size_int == 0)
3760 if (attribute)
3761 error ("zero vector size");
3762 return NULL_TREE;
3765 nunits = size_int / inner_size_int;
3766 if (nunits & (nunits - 1))
3768 if (attribute)
3769 error ("number of components of vector not a power of two");
3770 return NULL_TREE;
3773 return build_vector_type (inner_type, nunits);
3776 /* Return a vector type whose representative array type is ARRAY_TYPE, or
3777 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3778 processing the attribute and want to issue error messages on failure. */
3780 static tree
3781 build_vector_type_for_array (tree array_type, tree attribute)
3783 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3784 TYPE_SIZE_UNIT (array_type),
3785 attribute);
3786 if (!vector_type)
3787 return NULL_TREE;
3789 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3790 return vector_type;
3793 /* Build a type to be used to represent an aliased object whose nominal type
3794 is an unconstrained array. This consists of a RECORD_TYPE containing a
3795 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3796 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3797 an arbitrary unconstrained object. Use NAME as the name of the record.
3798 DEBUG_INFO_P is true if we need to write debug information for the type. */
3800 tree
3801 build_unc_object_type (tree template_type, tree object_type, tree name,
3802 bool debug_info_p)
3804 tree decl;
3805 tree type = make_node (RECORD_TYPE);
3806 tree template_field
3807 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3808 NULL_TREE, NULL_TREE, 0, 1);
3809 tree array_field
3810 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3811 NULL_TREE, NULL_TREE, 0, 1);
3813 TYPE_NAME (type) = name;
3814 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3815 DECL_CHAIN (template_field) = array_field;
3816 finish_record_type (type, template_field, 0, true);
3818 /* Declare it now since it will never be declared otherwise. This is
3819 necessary to ensure that its subtrees are properly marked. */
3820 decl = create_type_decl (name, type, true, debug_info_p, Empty);
3822 /* template_type will not be used elsewhere than here, so to keep the debug
3823 info clean and in order to avoid scoping issues, make decl its
3824 context. */
3825 gnat_set_type_context (template_type, decl);
3827 return type;
3830 /* Same, taking a thin or fat pointer type instead of a template type. */
3832 tree
3833 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3834 tree name, bool debug_info_p)
3836 tree template_type;
3838 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3840 template_type
3841 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3842 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3843 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3845 return
3846 build_unc_object_type (template_type, object_type, name, debug_info_p);
3849 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3850 In the normal case this is just two adjustments, but we have more to
3851 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3853 void
3854 update_pointer_to (tree old_type, tree new_type)
3856 tree ptr = TYPE_POINTER_TO (old_type);
3857 tree ref = TYPE_REFERENCE_TO (old_type);
3858 tree t;
3860 /* If this is the main variant, process all the other variants first. */
3861 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3862 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
3863 update_pointer_to (t, new_type);
3865 /* If no pointers and no references, we are done. */
3866 if (!ptr && !ref)
3867 return;
3869 /* Merge the old type qualifiers in the new type.
3871 Each old variant has qualifiers for specific reasons, and the new
3872 designated type as well. Each set of qualifiers represents useful
3873 information grabbed at some point, and merging the two simply unifies
3874 these inputs into the final type description.
3876 Consider for instance a volatile type frozen after an access to constant
3877 type designating it; after the designated type's freeze, we get here with
3878 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3879 when the access type was processed. We will make a volatile and readonly
3880 designated type, because that's what it really is.
3882 We might also get here for a non-dummy OLD_TYPE variant with different
3883 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3884 to private record type elaboration (see the comments around the call to
3885 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3886 the qualifiers in those cases too, to avoid accidentally discarding the
3887 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3888 new_type
3889 = build_qualified_type (new_type,
3890 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3892 /* If old type and new type are identical, there is nothing to do. */
3893 if (old_type == new_type)
3894 return;
3896 /* Otherwise, first handle the simple case. */
3897 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3899 tree new_ptr, new_ref;
3901 /* If pointer or reference already points to new type, nothing to do.
3902 This can happen as update_pointer_to can be invoked multiple times
3903 on the same couple of types because of the type variants. */
3904 if ((ptr && TREE_TYPE (ptr) == new_type)
3905 || (ref && TREE_TYPE (ref) == new_type))
3906 return;
3908 /* Chain PTR and its variants at the end. */
3909 new_ptr = TYPE_POINTER_TO (new_type);
3910 if (new_ptr)
3912 while (TYPE_NEXT_PTR_TO (new_ptr))
3913 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
3914 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
3916 else
3917 TYPE_POINTER_TO (new_type) = ptr;
3919 /* Now adjust them. */
3920 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3921 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
3923 TREE_TYPE (t) = new_type;
3924 if (TYPE_NULL_BOUNDS (t))
3925 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
3928 /* Chain REF and its variants at the end. */
3929 new_ref = TYPE_REFERENCE_TO (new_type);
3930 if (new_ref)
3932 while (TYPE_NEXT_REF_TO (new_ref))
3933 new_ref = TYPE_NEXT_REF_TO (new_ref);
3934 TYPE_NEXT_REF_TO (new_ref) = ref;
3936 else
3937 TYPE_REFERENCE_TO (new_type) = ref;
3939 /* Now adjust them. */
3940 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3941 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
3942 TREE_TYPE (t) = new_type;
3944 TYPE_POINTER_TO (old_type) = NULL_TREE;
3945 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
3948 /* Now deal with the unconstrained array case. In this case the pointer
3949 is actually a record where both fields are pointers to dummy nodes.
3950 Turn them into pointers to the correct types using update_pointer_to.
3951 Likewise for the pointer to the object record (thin pointer). */
3952 else
3954 tree new_ptr = TYPE_POINTER_TO (new_type);
3956 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
3958 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
3959 since update_pointer_to can be invoked multiple times on the same
3960 couple of types because of the type variants. */
3961 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
3962 return;
3964 update_pointer_to
3965 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
3966 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
3968 update_pointer_to
3969 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
3970 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
3972 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
3973 TYPE_OBJECT_RECORD_TYPE (new_type));
3975 TYPE_POINTER_TO (old_type) = NULL_TREE;
3976 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
3980 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3981 unconstrained one. This involves making or finding a template. */
3983 static tree
3984 convert_to_fat_pointer (tree type, tree expr)
3986 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
3987 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3988 tree etype = TREE_TYPE (expr);
3989 tree template_addr;
3990 vec<constructor_elt, va_gc> *v;
3991 vec_alloc (v, 2);
3993 /* If EXPR is null, make a fat pointer that contains a null pointer to the
3994 array (compare_fat_pointers ensures that this is the full discriminant)
3995 and a valid pointer to the bounds. This latter property is necessary
3996 since the compiler can hoist the load of the bounds done through it. */
3997 if (integer_zerop (expr))
3999 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4000 tree null_bounds, t;
4002 if (TYPE_NULL_BOUNDS (ptr_template_type))
4003 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
4004 else
4006 /* The template type can still be dummy at this point so we build an
4007 empty constructor. The middle-end will fill it in with zeros. */
4008 t = build_constructor (template_type, NULL);
4009 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
4010 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
4011 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
4014 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4015 fold_convert (p_array_type, null_pointer_node));
4016 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
4017 t = build_constructor (type, v);
4018 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4019 TREE_CONSTANT (t) = 0;
4020 TREE_STATIC (t) = 1;
4022 return t;
4025 /* If EXPR is a thin pointer, make template and data from the record. */
4026 if (TYPE_IS_THIN_POINTER_P (etype))
4028 tree field = TYPE_FIELDS (TREE_TYPE (etype));
4030 expr = gnat_protect_expr (expr);
4032 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4033 the thin pointer value has been shifted so we shift it back to get
4034 the template address. */
4035 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
4037 template_addr
4038 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
4039 fold_build1 (NEGATE_EXPR, sizetype,
4040 byte_position
4041 (DECL_CHAIN (field))));
4042 template_addr
4043 = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
4044 template_addr);
4047 /* Otherwise we explicitly take the address of the fields. */
4048 else
4050 expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
4051 template_addr
4052 = build_unary_op (ADDR_EXPR, NULL_TREE,
4053 build_component_ref (expr, field, false));
4054 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
4055 build_component_ref (expr, DECL_CHAIN (field),
4056 false));
4060 /* Otherwise, build the constructor for the template. */
4061 else
4062 template_addr
4063 = build_unary_op (ADDR_EXPR, NULL_TREE,
4064 build_template (template_type, TREE_TYPE (etype),
4065 expr));
4067 /* The final result is a constructor for the fat pointer.
4069 If EXPR is an argument of a foreign convention subprogram, the type it
4070 points to is directly the component type. In this case, the expression
4071 type may not match the corresponding FIELD_DECL type at this point, so we
4072 call "convert" here to fix that up if necessary. This type consistency is
4073 required, for instance because it ensures that possible later folding of
4074 COMPONENT_REFs against this constructor always yields something of the
4075 same type as the initial reference.
4077 Note that the call to "build_template" above is still fine because it
4078 will only refer to the provided TEMPLATE_TYPE in this case. */
4079 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
4080 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
4081 return gnat_build_constructor (type, v);
4084 /* Create an expression whose value is that of EXPR,
4085 converted to type TYPE. The TREE_TYPE of the value
4086 is always TYPE. This function implements all reasonable
4087 conversions; callers should filter out those that are
4088 not permitted by the language being compiled. */
4090 tree
4091 convert (tree type, tree expr)
4093 tree etype = TREE_TYPE (expr);
4094 enum tree_code ecode = TREE_CODE (etype);
4095 enum tree_code code = TREE_CODE (type);
4097 /* If the expression is already of the right type, we are done. */
4098 if (etype == type)
4099 return expr;
4101 /* If both input and output have padding and are of variable size, do this
4102 as an unchecked conversion. Likewise if one is a mere variant of the
4103 other, so we avoid a pointless unpad/repad sequence. */
4104 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
4105 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4106 && (!TREE_CONSTANT (TYPE_SIZE (type))
4107 || !TREE_CONSTANT (TYPE_SIZE (etype))
4108 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4109 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4110 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4113 /* If the output type has padding, convert to the inner type and make a
4114 constructor to build the record, unless a variable size is involved. */
4115 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
4117 vec<constructor_elt, va_gc> *v;
4119 /* If we previously converted from another type and our type is
4120 of variable size, remove the conversion to avoid the need for
4121 variable-sized temporaries. Likewise for a conversion between
4122 original and packable version. */
4123 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4124 && (!TREE_CONSTANT (TYPE_SIZE (type))
4125 || (ecode == RECORD_TYPE
4126 && TYPE_NAME (etype)
4127 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4128 expr = TREE_OPERAND (expr, 0);
4130 /* If we are just removing the padding from expr, convert the original
4131 object if we have variable size in order to avoid the need for some
4132 variable-sized temporaries. Likewise if the padding is a variant
4133 of the other, so we avoid a pointless unpad/repad sequence. */
4134 if (TREE_CODE (expr) == COMPONENT_REF
4135 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4136 && (!TREE_CONSTANT (TYPE_SIZE (type))
4137 || TYPE_MAIN_VARIANT (type)
4138 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
4139 || (ecode == RECORD_TYPE
4140 && TYPE_NAME (etype)
4141 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4142 return convert (type, TREE_OPERAND (expr, 0));
4144 /* If the inner type is of self-referential size and the expression type
4145 is a record, do this as an unchecked conversion. But first pad the
4146 expression if possible to have the same size on both sides. */
4147 if (ecode == RECORD_TYPE
4148 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
4150 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
4151 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4152 false, false, false, true),
4153 expr);
4154 return unchecked_convert (type, expr, false);
4157 /* If we are converting between array types with variable size, do the
4158 final conversion as an unchecked conversion, again to avoid the need
4159 for some variable-sized temporaries. If valid, this conversion is
4160 very likely purely technical and without real effects. */
4161 if (ecode == ARRAY_TYPE
4162 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4163 && !TREE_CONSTANT (TYPE_SIZE (etype))
4164 && !TREE_CONSTANT (TYPE_SIZE (type)))
4165 return unchecked_convert (type,
4166 convert (TREE_TYPE (TYPE_FIELDS (type)),
4167 expr),
4168 false);
4170 vec_alloc (v, 1);
4171 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4172 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4173 return gnat_build_constructor (type, v);
4176 /* If the input type has padding, remove it and convert to the output type.
4177 The conditions ordering is arranged to ensure that the output type is not
4178 a padding type here, as it is not clear whether the conversion would
4179 always be correct if this was to happen. */
4180 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4182 tree unpadded;
4184 /* If we have just converted to this padded type, just get the
4185 inner expression. */
4186 if (TREE_CODE (expr) == CONSTRUCTOR)
4187 unpadded = CONSTRUCTOR_ELT (expr, 0)->value;
4189 /* Otherwise, build an explicit component reference. */
4190 else
4191 unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);
4193 return convert (type, unpadded);
4196 /* If the input is a biased type, adjust first. */
4197 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4198 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4199 fold_convert (TREE_TYPE (etype), expr),
4200 fold_convert (TREE_TYPE (etype),
4201 TYPE_MIN_VALUE (etype))));
4203 /* If the input is a justified modular type, we need to extract the actual
4204 object before converting it to any other type with the exceptions of an
4205 unconstrained array or of a mere type variant. It is useful to avoid the
4206 extraction and conversion in the type variant case because it could end
4207 up replacing a VAR_DECL expr by a constructor and we might be about the
4208 take the address of the result. */
4209 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4210 && code != UNCONSTRAINED_ARRAY_TYPE
4211 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4212 return
4213 convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));
4215 /* If converting to a type that contains a template, convert to the data
4216 type and then build the template. */
4217 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4219 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4220 vec<constructor_elt, va_gc> *v;
4221 vec_alloc (v, 2);
4223 /* If the source already has a template, get a reference to the
4224 associated array only, as we are going to rebuild a template
4225 for the target type anyway. */
4226 expr = maybe_unconstrained_array (expr);
4228 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4229 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4230 obj_type, NULL_TREE));
4231 if (expr)
4232 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4233 convert (obj_type, expr));
4234 return gnat_build_constructor (type, v);
4237 /* There are some cases of expressions that we process specially. */
4238 switch (TREE_CODE (expr))
4240 case ERROR_MARK:
4241 return expr;
4243 case NULL_EXPR:
4244 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4245 conversion in gnat_expand_expr. NULL_EXPR does not represent
4246 and actual value, so no conversion is needed. */
4247 expr = copy_node (expr);
4248 TREE_TYPE (expr) = type;
4249 return expr;
4251 case STRING_CST:
4252 /* If we are converting a STRING_CST to another constrained array type,
4253 just make a new one in the proper type. */
4254 if (code == ecode && AGGREGATE_TYPE_P (etype)
4255 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4256 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4258 expr = copy_node (expr);
4259 TREE_TYPE (expr) = type;
4260 return expr;
4262 break;
4264 case VECTOR_CST:
4265 /* If we are converting a VECTOR_CST to a mere type variant, just make
4266 a new one in the proper type. */
4267 if (code == ecode && gnat_types_compatible_p (type, etype))
4269 expr = copy_node (expr);
4270 TREE_TYPE (expr) = type;
4271 return expr;
4274 /* fall through */
4276 case CONSTRUCTOR:
4277 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4278 another padding type around the same type, just make a new one in
4279 the proper type. */
4280 if (code == ecode
4281 && (gnat_types_compatible_p (type, etype)
4282 || (code == RECORD_TYPE
4283 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4284 && TREE_TYPE (TYPE_FIELDS (type))
4285 == TREE_TYPE (TYPE_FIELDS (etype)))))
4287 expr = copy_node (expr);
4288 TREE_TYPE (expr) = type;
4289 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4290 return expr;
4293 /* Likewise for a conversion between original and packable version, or
4294 conversion between types of the same size and with the same list of
4295 fields, but we have to work harder to preserve type consistency. */
4296 if (code == ecode
4297 && code == RECORD_TYPE
4298 && (TYPE_NAME (type) == TYPE_NAME (etype)
4299 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4302 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4303 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4304 vec<constructor_elt, va_gc> *v;
4305 vec_alloc (v, len);
4306 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4307 unsigned HOST_WIDE_INT idx;
4308 tree index, value;
4310 /* Whether we need to clear TREE_CONSTANT et al. on the output
4311 constructor when we convert in place. */
4312 bool clear_constant = false;
4314 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4316 /* Skip the missing fields in the CONSTRUCTOR. */
4317 while (efield && field && !SAME_FIELD_P (efield, index))
4319 efield = DECL_CHAIN (efield);
4320 field = DECL_CHAIN (field);
4322 /* The field must be the same. */
4323 if (!(efield && field && SAME_FIELD_P (efield, field)))
4324 break;
4325 constructor_elt elt
4326 = {field, convert (TREE_TYPE (field), value)};
4327 v->quick_push (elt);
4329 /* If packing has made this field a bitfield and the input
4330 value couldn't be emitted statically any more, we need to
4331 clear TREE_CONSTANT on our output. */
4332 if (!clear_constant
4333 && TREE_CONSTANT (expr)
4334 && !CONSTRUCTOR_BITFIELD_P (efield)
4335 && CONSTRUCTOR_BITFIELD_P (field)
4336 && !initializer_constant_valid_for_bitfield_p (value))
4337 clear_constant = true;
4339 efield = DECL_CHAIN (efield);
4340 field = DECL_CHAIN (field);
4343 /* If we have been able to match and convert all the input fields
4344 to their output type, convert in place now. We'll fallback to a
4345 view conversion downstream otherwise. */
4346 if (idx == len)
4348 expr = copy_node (expr);
4349 TREE_TYPE (expr) = type;
4350 CONSTRUCTOR_ELTS (expr) = v;
4351 if (clear_constant)
4352 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4353 return expr;
4357 /* Likewise for a conversion between array type and vector type with a
4358 compatible representative array. */
4359 else if (code == VECTOR_TYPE
4360 && ecode == ARRAY_TYPE
4361 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4362 etype))
4364 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4365 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4366 vec<constructor_elt, va_gc> *v;
4367 unsigned HOST_WIDE_INT ix;
4368 tree value;
4370 /* Build a VECTOR_CST from a *constant* array constructor. */
4371 if (TREE_CONSTANT (expr))
4373 bool constant_p = true;
4375 /* Iterate through elements and check if all constructor
4376 elements are *_CSTs. */
4377 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4378 if (!CONSTANT_CLASS_P (value))
4380 constant_p = false;
4381 break;
4384 if (constant_p)
4385 return build_vector_from_ctor (type,
4386 CONSTRUCTOR_ELTS (expr));
4389 /* Otherwise, build a regular vector constructor. */
4390 vec_alloc (v, len);
4391 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4393 constructor_elt elt = {NULL_TREE, value};
4394 v->quick_push (elt);
4396 expr = copy_node (expr);
4397 TREE_TYPE (expr) = type;
4398 CONSTRUCTOR_ELTS (expr) = v;
4399 return expr;
4401 break;
4403 case UNCONSTRAINED_ARRAY_REF:
4404 /* First retrieve the underlying array. */
4405 expr = maybe_unconstrained_array (expr);
4406 etype = TREE_TYPE (expr);
4407 ecode = TREE_CODE (etype);
4408 break;
4410 case VIEW_CONVERT_EXPR:
4412 /* GCC 4.x is very sensitive to type consistency overall, and view
4413 conversions thus are very frequent. Even though just "convert"ing
4414 the inner operand to the output type is fine in most cases, it
4415 might expose unexpected input/output type mismatches in special
4416 circumstances so we avoid such recursive calls when we can. */
4417 tree op0 = TREE_OPERAND (expr, 0);
4419 /* If we are converting back to the original type, we can just
4420 lift the input conversion. This is a common occurrence with
4421 switches back-and-forth amongst type variants. */
4422 if (type == TREE_TYPE (op0))
4423 return op0;
4425 /* Otherwise, if we're converting between two aggregate or vector
4426 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4427 target type in place or to just convert the inner expression. */
4428 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4429 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4431 /* If we are converting between mere variants, we can just
4432 substitute the VIEW_CONVERT_EXPR in place. */
4433 if (gnat_types_compatible_p (type, etype))
4434 return build1 (VIEW_CONVERT_EXPR, type, op0);
4436 /* Otherwise, we may just bypass the input view conversion unless
4437 one of the types is a fat pointer, which is handled by
4438 specialized code below which relies on exact type matching. */
4439 else if (!TYPE_IS_FAT_POINTER_P (type)
4440 && !TYPE_IS_FAT_POINTER_P (etype))
4441 return convert (type, op0);
4444 break;
4447 default:
4448 break;
4451 /* Check for converting to a pointer to an unconstrained array. */
4452 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4453 return convert_to_fat_pointer (type, expr);
4455 /* If we are converting between two aggregate or vector types that are mere
4456 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4457 to a vector type from its representative array type. */
4458 else if ((code == ecode
4459 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4460 && gnat_types_compatible_p (type, etype))
4461 || (code == VECTOR_TYPE
4462 && ecode == ARRAY_TYPE
4463 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4464 etype)))
4465 return build1 (VIEW_CONVERT_EXPR, type, expr);
4467 /* If we are converting between tagged types, try to upcast properly. */
4468 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4469 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4471 tree child_etype = etype;
4472 do {
4473 tree field = TYPE_FIELDS (child_etype);
4474 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4475 return build_component_ref (expr, field, false);
4476 child_etype = TREE_TYPE (field);
4477 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4480 /* If we are converting from a smaller form of record type back to it, just
4481 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4482 size on both sides. */
4483 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4484 && smaller_form_type_p (etype, type))
4486 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4487 false, false, false, true),
4488 expr);
4489 return build1 (VIEW_CONVERT_EXPR, type, expr);
4492 /* In all other cases of related types, make a NOP_EXPR. */
4493 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4494 return fold_convert (type, expr);
4496 switch (code)
4498 case VOID_TYPE:
4499 return fold_build1 (CONVERT_EXPR, type, expr);
4501 case INTEGER_TYPE:
4502 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4503 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4504 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4505 return unchecked_convert (type, expr, false);
4506 else if (TYPE_BIASED_REPRESENTATION_P (type))
4507 return fold_convert (type,
4508 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4509 convert (TREE_TYPE (type), expr),
4510 convert (TREE_TYPE (type),
4511 TYPE_MIN_VALUE (type))));
4513 /* fall through */
4515 case ENUMERAL_TYPE:
4516 case BOOLEAN_TYPE:
4517 /* If we are converting an additive expression to an integer type
4518 with lower precision, be wary of the optimization that can be
4519 applied by convert_to_integer. There are 2 problematic cases:
4520 - if the first operand was originally of a biased type,
4521 because we could be recursively called to convert it
4522 to an intermediate type and thus rematerialize the
4523 additive operator endlessly,
4524 - if the expression contains a placeholder, because an
4525 intermediate conversion that changes the sign could
4526 be inserted and thus introduce an artificial overflow
4527 at compile time when the placeholder is substituted. */
4528 if (code == INTEGER_TYPE
4529 && ecode == INTEGER_TYPE
4530 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4531 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4533 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4535 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4536 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4537 || CONTAINS_PLACEHOLDER_P (expr))
4538 return build1 (NOP_EXPR, type, expr);
4541 return fold (convert_to_integer (type, expr));
4543 case POINTER_TYPE:
4544 case REFERENCE_TYPE:
4545 /* If converting between two thin pointers, adjust if needed to account
4546 for differing offsets from the base pointer, depending on whether
4547 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4548 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4550 tree etype_pos
4551 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype))
4552 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4553 : size_zero_node;
4554 tree type_pos
4555 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))
4556 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4557 : size_zero_node;
4558 tree byte_diff = size_diffop (type_pos, etype_pos);
4560 expr = build1 (NOP_EXPR, type, expr);
4561 if (integer_zerop (byte_diff))
4562 return expr;
4564 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4565 fold_convert (sizetype, byte_diff));
4568 /* If converting fat pointer to normal or thin pointer, get the pointer
4569 to the array and then convert it. */
4570 if (TYPE_IS_FAT_POINTER_P (etype))
4571 expr = build_component_ref (expr, TYPE_FIELDS (etype), false);
4573 return fold (convert_to_pointer (type, expr));
4575 case REAL_TYPE:
4576 return fold (convert_to_real (type, expr));
4578 case RECORD_TYPE:
4579 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4581 vec<constructor_elt, va_gc> *v;
4582 vec_alloc (v, 1);
4584 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4585 convert (TREE_TYPE (TYPE_FIELDS (type)),
4586 expr));
4587 return gnat_build_constructor (type, v);
4590 /* fall through */
4592 case ARRAY_TYPE:
4593 /* In these cases, assume the front-end has validated the conversion.
4594 If the conversion is valid, it will be a bit-wise conversion, so
4595 it can be viewed as an unchecked conversion. */
4596 return unchecked_convert (type, expr, false);
4598 case UNION_TYPE:
4599 /* This is a either a conversion between a tagged type and some
4600 subtype, which we have to mark as a UNION_TYPE because of
4601 overlapping fields or a conversion of an Unchecked_Union. */
4602 return unchecked_convert (type, expr, false);
4604 case UNCONSTRAINED_ARRAY_TYPE:
4605 /* If the input is a VECTOR_TYPE, convert to the representative
4606 array type first. */
4607 if (ecode == VECTOR_TYPE)
4609 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4610 etype = TREE_TYPE (expr);
4611 ecode = TREE_CODE (etype);
4614 /* If EXPR is a constrained array, take its address, convert it to a
4615 fat pointer, and then dereference it. Likewise if EXPR is a
4616 record containing both a template and a constrained array.
4617 Note that a record representing a justified modular type
4618 always represents a packed constrained array. */
4619 if (ecode == ARRAY_TYPE
4620 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4621 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4622 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4623 return
4624 build_unary_op
4625 (INDIRECT_REF, NULL_TREE,
4626 convert_to_fat_pointer (TREE_TYPE (type),
4627 build_unary_op (ADDR_EXPR,
4628 NULL_TREE, expr)));
4630 /* Do something very similar for converting one unconstrained
4631 array to another. */
4632 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4633 return
4634 build_unary_op (INDIRECT_REF, NULL_TREE,
4635 convert (TREE_TYPE (type),
4636 build_unary_op (ADDR_EXPR,
4637 NULL_TREE, expr)));
4638 else
4639 gcc_unreachable ();
4641 case COMPLEX_TYPE:
4642 return fold (convert_to_complex (type, expr));
4644 default:
4645 gcc_unreachable ();
4649 /* Create an expression whose value is that of EXPR converted to the common
4650 index type, which is sizetype. EXPR is supposed to be in the base type
4651 of the GNAT index type. Calling it is equivalent to doing
4653 convert (sizetype, expr)
4655 but we try to distribute the type conversion with the knowledge that EXPR
4656 cannot overflow in its type. This is a best-effort approach and we fall
4657 back to the above expression as soon as difficulties are encountered.
4659 This is necessary to overcome issues that arise when the GNAT base index
4660 type and the GCC common index type (sizetype) don't have the same size,
4661 which is quite frequent on 64-bit architectures. In this case, and if
4662 the GNAT base index type is signed but the iteration type of the loop has
4663 been forced to unsigned, the loop scalar evolution engine cannot compute
4664 a simple evolution for the general induction variables associated with the
4665 array indices, because it will preserve the wrap-around semantics in the
4666 unsigned type of their "inner" part. As a result, many loop optimizations
4667 are blocked.
4669 The solution is to use a special (basic) induction variable that is at
4670 least as large as sizetype, and to express the aforementioned general
4671 induction variables in terms of this induction variable, eliminating
4672 the problematic intermediate truncation to the GNAT base index type.
4673 This is possible as long as the original expression doesn't overflow
4674 and if the middle-end hasn't introduced artificial overflows in the
4675 course of the various simplification it can make to the expression. */
4677 tree
4678 convert_to_index_type (tree expr)
4680 enum tree_code code = TREE_CODE (expr);
4681 tree type = TREE_TYPE (expr);
4683 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4684 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4685 if (TYPE_UNSIGNED (type) || !optimize)
4686 return convert (sizetype, expr);
4688 switch (code)
4690 case VAR_DECL:
4691 /* The main effect of the function: replace a loop parameter with its
4692 associated special induction variable. */
4693 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
4694 expr = DECL_INDUCTION_VAR (expr);
4695 break;
4697 CASE_CONVERT:
4699 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
4700 /* Bail out as soon as we suspect some sort of type frobbing. */
4701 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
4702 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
4703 break;
4706 /* fall through */
4708 case NON_LVALUE_EXPR:
4709 return fold_build1 (code, sizetype,
4710 convert_to_index_type (TREE_OPERAND (expr, 0)));
4712 case PLUS_EXPR:
4713 case MINUS_EXPR:
4714 case MULT_EXPR:
4715 return fold_build2 (code, sizetype,
4716 convert_to_index_type (TREE_OPERAND (expr, 0)),
4717 convert_to_index_type (TREE_OPERAND (expr, 1)));
4719 case COMPOUND_EXPR:
4720 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
4721 convert_to_index_type (TREE_OPERAND (expr, 1)));
4723 case COND_EXPR:
4724 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
4725 convert_to_index_type (TREE_OPERAND (expr, 1)),
4726 convert_to_index_type (TREE_OPERAND (expr, 2)));
4728 default:
4729 break;
4732 return convert (sizetype, expr);
4735 /* Remove all conversions that are done in EXP. This includes converting
4736 from a padded type or to a justified modular type. If TRUE_ADDRESS
4737 is true, always return the address of the containing object even if
4738 the address is not bit-aligned. */
4740 tree
4741 remove_conversions (tree exp, bool true_address)
4743 switch (TREE_CODE (exp))
4745 case CONSTRUCTOR:
4746 if (true_address
4747 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4748 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4749 return
4750 remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
4751 break;
4753 case COMPONENT_REF:
4754 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4755 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4756 break;
4758 CASE_CONVERT:
4759 case VIEW_CONVERT_EXPR:
4760 case NON_LVALUE_EXPR:
4761 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4763 default:
4764 break;
4767 return exp;
4770 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4771 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4772 likewise return an expression pointing to the underlying array. */
4774 tree
4775 maybe_unconstrained_array (tree exp)
4777 enum tree_code code = TREE_CODE (exp);
4778 tree type = TREE_TYPE (exp);
4780 switch (TREE_CODE (type))
4782 case UNCONSTRAINED_ARRAY_TYPE:
4783 if (code == UNCONSTRAINED_ARRAY_REF)
4785 const bool read_only = TREE_READONLY (exp);
4786 const bool no_trap = TREE_THIS_NOTRAP (exp);
4788 exp = TREE_OPERAND (exp, 0);
4789 type = TREE_TYPE (exp);
4791 if (TREE_CODE (exp) == COND_EXPR)
4793 tree op1
4794 = build_unary_op (INDIRECT_REF, NULL_TREE,
4795 build_component_ref (TREE_OPERAND (exp, 1),
4796 TYPE_FIELDS (type),
4797 false));
4798 tree op2
4799 = build_unary_op (INDIRECT_REF, NULL_TREE,
4800 build_component_ref (TREE_OPERAND (exp, 2),
4801 TYPE_FIELDS (type),
4802 false));
4804 exp = build3 (COND_EXPR,
4805 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
4806 TREE_OPERAND (exp, 0), op1, op2);
4808 else
4810 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
4811 build_component_ref (exp,
4812 TYPE_FIELDS (type),
4813 false));
4814 TREE_READONLY (exp) = read_only;
4815 TREE_THIS_NOTRAP (exp) = no_trap;
4819 else if (code == NULL_EXPR)
4820 exp = build1 (NULL_EXPR,
4821 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
4822 TREE_OPERAND (exp, 0));
4823 break;
4825 case RECORD_TYPE:
4826 /* If this is a padded type and it contains a template, convert to the
4827 unpadded type first. */
4828 if (TYPE_PADDING_P (type)
4829 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
4830 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
4832 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4833 code = TREE_CODE (exp);
4834 type = TREE_TYPE (exp);
4837 if (TYPE_CONTAINS_TEMPLATE_P (type))
4839 /* If the array initializer is a box, return NULL_TREE. */
4840 if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
4841 return NULL_TREE;
4843 exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
4844 false);
4845 type = TREE_TYPE (exp);
4847 /* If the array type is padded, convert to the unpadded type. */
4848 if (TYPE_IS_PADDING_P (type))
4849 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4851 break;
4853 default:
4854 break;
4857 return exp;
4860 /* Return true if EXPR is an expression that can be folded as an operand
4861 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4863 static bool
4864 can_fold_for_view_convert_p (tree expr)
4866 tree t1, t2;
4868 /* The folder will fold NOP_EXPRs between integral types with the same
4869 precision (in the middle-end's sense). We cannot allow it if the
4870 types don't have the same precision in the Ada sense as well. */
4871 if (TREE_CODE (expr) != NOP_EXPR)
4872 return true;
4874 t1 = TREE_TYPE (expr);
4875 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4877 /* Defer to the folder for non-integral conversions. */
4878 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4879 return true;
4881 /* Only fold conversions that preserve both precisions. */
4882 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4883 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4884 return true;
4886 return false;
4889 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4890 If NOTRUNC_P is true, truncation operations should be suppressed.
4892 Special care is required with (source or target) integral types whose
4893 precision is not equal to their size, to make sure we fetch or assign
4894 the value bits whose location might depend on the endianness, e.g.
4896 Rmsize : constant := 8;
4897 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4899 type Bit_Array is array (1 .. Rmsize) of Boolean;
4900 pragma Pack (Bit_Array);
4902 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4904 Value : Int := 2#1000_0001#;
4905 Vbits : Bit_Array := To_Bit_Array (Value);
4907 we expect the 8 bits at Vbits'Address to always contain Value, while
4908 their original location depends on the endianness, at Value'Address
4909 on a little-endian architecture but not on a big-endian one. */
4911 tree
4912 unchecked_convert (tree type, tree expr, bool notrunc_p)
4914 tree etype = TREE_TYPE (expr);
4915 enum tree_code ecode = TREE_CODE (etype);
4916 enum tree_code code = TREE_CODE (type);
4917 tree tem;
4918 int c;
4920 /* If the expression is already of the right type, we are done. */
4921 if (etype == type)
4922 return expr;
4924 /* If both types are integral just do a normal conversion.
4925 Likewise for a conversion to an unconstrained array. */
4926 if (((INTEGRAL_TYPE_P (type)
4927 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
4928 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
4929 && (INTEGRAL_TYPE_P (etype)
4930 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
4931 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
4932 || code == UNCONSTRAINED_ARRAY_TYPE)
4934 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4936 tree ntype = copy_type (etype);
4937 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4938 TYPE_MAIN_VARIANT (ntype) = ntype;
4939 expr = build1 (NOP_EXPR, ntype, expr);
4942 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4944 tree rtype = copy_type (type);
4945 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4946 TYPE_MAIN_VARIANT (rtype) = rtype;
4947 expr = convert (rtype, expr);
4948 expr = build1 (NOP_EXPR, type, expr);
4950 else
4951 expr = convert (type, expr);
4954 /* If we are converting to an integral type whose precision is not equal
4955 to its size, first unchecked convert to a record type that contains a
4956 field of the given precision. Then extract the result from the field.
4958 There is a subtlety if the source type is an aggregate type with reverse
4959 storage order because its representation is not contiguous in the native
4960 storage order, i.e. a direct unchecked conversion to an integral type
4961 with N bits of precision cannot read the first N bits of the aggregate
4962 type. To overcome it, we do an unchecked conversion to an integral type
4963 with reverse storage order and return the resulting value. This also
4964 ensures that the result of the unchecked conversion doesn't depend on
4965 the endianness of the target machine, but only on the storage order of
4966 the aggregate type.
4968 Finally, for the sake of consistency, we do the unchecked conversion
4969 to an integral type with reverse storage order as soon as the source
4970 type is an aggregate type with reverse storage order, even if there
4971 are no considerations of precision or size involved. */
4972 else if (INTEGRAL_TYPE_P (type)
4973 && TYPE_RM_SIZE (type)
4974 && (tree_int_cst_compare (TYPE_RM_SIZE (type),
4975 TYPE_SIZE (type)) < 0
4976 || (AGGREGATE_TYPE_P (etype)
4977 && TYPE_REVERSE_STORAGE_ORDER (etype))))
4979 tree rec_type = make_node (RECORD_TYPE);
4980 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
4981 tree field_type, field;
4983 if (AGGREGATE_TYPE_P (etype))
4984 TYPE_REVERSE_STORAGE_ORDER (rec_type)
4985 = TYPE_REVERSE_STORAGE_ORDER (etype);
4987 if (TYPE_UNSIGNED (type))
4988 field_type = make_unsigned_type (prec);
4989 else
4990 field_type = make_signed_type (prec);
4991 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
4993 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
4994 NULL_TREE, bitsize_zero_node, 1, 0);
4996 finish_record_type (rec_type, field, 1, false);
4998 expr = unchecked_convert (rec_type, expr, notrunc_p);
4999 expr = build_component_ref (expr, field, false);
5000 expr = fold_build1 (NOP_EXPR, type, expr);
5003 /* Similarly if we are converting from an integral type whose precision is
5004 not equal to its size, first copy into a field of the given precision
5005 and unchecked convert the record type.
5007 The same considerations as above apply if the target type is an aggregate
5008 type with reverse storage order and we also proceed similarly. */
5009 else if (INTEGRAL_TYPE_P (etype)
5010 && TYPE_RM_SIZE (etype)
5011 && (tree_int_cst_compare (TYPE_RM_SIZE (etype),
5012 TYPE_SIZE (etype)) < 0
5013 || (AGGREGATE_TYPE_P (type)
5014 && TYPE_REVERSE_STORAGE_ORDER (type))))
5016 tree rec_type = make_node (RECORD_TYPE);
5017 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
5018 vec<constructor_elt, va_gc> *v;
5019 vec_alloc (v, 1);
5020 tree field_type, field;
5022 if (AGGREGATE_TYPE_P (type))
5023 TYPE_REVERSE_STORAGE_ORDER (rec_type)
5024 = TYPE_REVERSE_STORAGE_ORDER (type);
5026 if (TYPE_UNSIGNED (etype))
5027 field_type = make_unsigned_type (prec);
5028 else
5029 field_type = make_signed_type (prec);
5030 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
5032 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5033 NULL_TREE, bitsize_zero_node, 1, 0);
5035 finish_record_type (rec_type, field, 1, false);
5037 expr = fold_build1 (NOP_EXPR, field_type, expr);
5038 CONSTRUCTOR_APPEND_ELT (v, field, expr);
5039 expr = gnat_build_constructor (rec_type, v);
5040 expr = unchecked_convert (type, expr, notrunc_p);
5043 /* If we are converting from a scalar type to a type with a different size,
5044 we need to pad to have the same size on both sides.
5046 ??? We cannot do it unconditionally because unchecked conversions are
5047 used liberally by the front-end to implement polymorphism, e.g. in:
5049 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5050 return p___size__4 (p__object!(S191s.all));
5052 so we skip all expressions that are references. */
5053 else if (!REFERENCE_CLASS_P (expr)
5054 && !AGGREGATE_TYPE_P (etype)
5055 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5056 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
5058 if (c < 0)
5060 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5061 false, false, false, true),
5062 expr);
5063 expr = unchecked_convert (type, expr, notrunc_p);
5065 else
5067 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5068 false, false, false, true);
5069 expr = unchecked_convert (rec_type, expr, notrunc_p);
5070 expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
5074 /* We have a special case when we are converting between two unconstrained
5075 array types. In that case, take the address, convert the fat pointer
5076 types, and dereference. */
5077 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
5078 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5079 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5080 build_unary_op (ADDR_EXPR, NULL_TREE,
5081 expr)));
5083 /* Another special case is when we are converting to a vector type from its
5084 representative array type; this a regular conversion. */
5085 else if (code == VECTOR_TYPE
5086 && ecode == ARRAY_TYPE
5087 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5088 etype))
5089 expr = convert (type, expr);
5091 /* And, if the array type is not the representative, we try to build an
5092 intermediate vector type of which the array type is the representative
5093 and to do the unchecked conversion between the vector types, in order
5094 to enable further simplifications in the middle-end. */
5095 else if (code == VECTOR_TYPE
5096 && ecode == ARRAY_TYPE
5097 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
5099 expr = convert (tem, expr);
5100 return unchecked_convert (type, expr, notrunc_p);
5103 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
5104 the alignment of the CONSTRUCTOR to speed up the copy operation. */
5105 else if (TREE_CODE (expr) == CONSTRUCTOR
5106 && code == RECORD_TYPE
5107 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
5109 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
5110 Empty, false, false, false, true),
5111 expr);
5112 return unchecked_convert (type, expr, notrunc_p);
5115 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
5116 else
5118 expr = maybe_unconstrained_array (expr);
5119 etype = TREE_TYPE (expr);
5120 ecode = TREE_CODE (etype);
5121 if (can_fold_for_view_convert_p (expr))
5122 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5123 else
5124 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
5127 /* If the result is an integral type whose precision is not equal to its
5128 size, sign- or zero-extend the result. We need not do this if the input
5129 is an integral type of the same precision and signedness or if the output
5130 is a biased type or if both the input and output are unsigned, or if the
5131 lower bound is constant and non-negative, see E_Signed_Integer_Subtype
5132 case of gnat_to_gnu_entity. */
5133 if (!notrunc_p
5134 && INTEGRAL_TYPE_P (type)
5135 && TYPE_RM_SIZE (type)
5136 && tree_int_cst_compare (TYPE_RM_SIZE (type), TYPE_SIZE (type)) < 0
5137 && !(INTEGRAL_TYPE_P (etype)
5138 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
5139 && tree_int_cst_compare (TYPE_RM_SIZE (type),
5140 TYPE_RM_SIZE (etype)
5141 ? TYPE_RM_SIZE (etype)
5142 : TYPE_SIZE (etype)) == 0)
5143 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
5144 && !((TYPE_UNSIGNED (type)
5145 || (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
5146 && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0))
5147 && TYPE_UNSIGNED (etype)))
5149 tree base_type
5150 = gnat_type_for_size (TREE_INT_CST_LOW (TYPE_SIZE (type)),
5151 TYPE_UNSIGNED (type));
5152 tree shift_expr
5153 = convert (base_type,
5154 size_binop (MINUS_EXPR,
5155 TYPE_SIZE (type), TYPE_RM_SIZE (type)));
5156 expr
5157 = convert (type,
5158 build_binary_op (RSHIFT_EXPR, base_type,
5159 build_binary_op (LSHIFT_EXPR, base_type,
5160 convert (base_type, expr),
5161 shift_expr),
5162 shift_expr));
5165 /* An unchecked conversion should never raise Constraint_Error. The code
5166 below assumes that GCC's conversion routines overflow the same way that
5167 the underlying hardware does. This is probably true. In the rare case
5168 when it is false, we can rely on the fact that such conversions are
5169 erroneous anyway. */
5170 if (TREE_CODE (expr) == INTEGER_CST)
5171 TREE_OVERFLOW (expr) = 0;
5173 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
5174 show no longer constant. */
5175 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5176 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5177 OEP_ONLY_CONST))
5178 TREE_CONSTANT (expr) = 0;
5180 return expr;
5183 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5184 the latter being a record type as predicated by Is_Record_Type. */
5186 enum tree_code
5187 tree_code_for_record_type (Entity_Id gnat_type)
5189 Node_Id component_list, component;
5191 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5192 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5193 if (!Is_Unchecked_Union (gnat_type))
5194 return RECORD_TYPE;
5196 gnat_type = Implementation_Base_Type (gnat_type);
5197 component_list
5198 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5200 for (component = First_Non_Pragma (Component_Items (component_list));
5201 Present (component);
5202 component = Next_Non_Pragma (component))
5203 if (Ekind (Defining_Entity (component)) == E_Component)
5204 return RECORD_TYPE;
5206 return UNION_TYPE;
5209 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5210 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5211 according to the presence of an alignment clause on the type or, if it
5212 is an array, on the component type. */
5214 bool
5215 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5217 gnat_type = Underlying_Type (gnat_type);
5219 *align_clause = Present (Alignment_Clause (gnat_type));
5221 if (Is_Array_Type (gnat_type))
5223 gnat_type = Underlying_Type (Component_Type (gnat_type));
5224 if (Present (Alignment_Clause (gnat_type)))
5225 *align_clause = true;
5228 if (!Is_Floating_Point_Type (gnat_type))
5229 return false;
5231 if (UI_To_Int (Esize (gnat_type)) != 64)
5232 return false;
5234 return true;
5237 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5238 size is greater or equal to 64 bits, or an array of such a type. Set
5239 ALIGN_CLAUSE according to the presence of an alignment clause on the
5240 type or, if it is an array, on the component type. */
5242 bool
5243 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5245 gnat_type = Underlying_Type (gnat_type);
5247 *align_clause = Present (Alignment_Clause (gnat_type));
5249 if (Is_Array_Type (gnat_type))
5251 gnat_type = Underlying_Type (Component_Type (gnat_type));
5252 if (Present (Alignment_Clause (gnat_type)))
5253 *align_clause = true;
5256 if (!Is_Scalar_Type (gnat_type))
5257 return false;
5259 if (UI_To_Int (Esize (gnat_type)) < 64)
5260 return false;
5262 return true;
5265 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5266 component of an aggregate type. */
5268 bool
5269 type_for_nonaliased_component_p (tree gnu_type)
5271 /* If the type is passed by reference, we may have pointers to the
5272 component so it cannot be made non-aliased. */
5273 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5274 return false;
5276 /* We used to say that any component of aggregate type is aliased
5277 because the front-end may take 'Reference of it. The front-end
5278 has been enhanced in the meantime so as to use a renaming instead
5279 in most cases, but the back-end can probably take the address of
5280 such a component too so we go for the conservative stance.
5282 For instance, we might need the address of any array type, even
5283 if normally passed by copy, to construct a fat pointer if the
5284 component is used as an actual for an unconstrained formal.
5286 Likewise for record types: even if a specific record subtype is
5287 passed by copy, the parent type might be passed by ref (e.g. if
5288 it's of variable size) and we might take the address of a child
5289 component to pass to a parent formal. We have no way to check
5290 for such conditions here. */
5291 if (AGGREGATE_TYPE_P (gnu_type))
5292 return false;
5294 return true;
5297 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5299 bool
5300 smaller_form_type_p (tree type, tree orig_type)
5302 tree size, osize;
5304 /* We're not interested in variants here. */
5305 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5306 return false;
5308 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5309 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5310 return false;
5312 size = TYPE_SIZE (type);
5313 osize = TYPE_SIZE (orig_type);
5315 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5316 return false;
5318 return tree_int_cst_lt (size, osize) != 0;
5321 /* Perform final processing on global declarations. */
5323 static GTY (()) tree dummy_global;
5325 void
5326 gnat_write_global_declarations (void)
5328 unsigned int i;
5329 tree iter;
5331 /* If we have declared types as used at the global level, insert them in
5332 the global hash table. We use a dummy variable for this purpose, but
5333 we need to build it unconditionally to avoid -fcompare-debug issues. */
5334 if (first_global_object_name)
5336 struct varpool_node *node;
5337 char *label;
5339 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
5340 dummy_global
5341 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5342 void_type_node);
5343 DECL_HARD_REGISTER (dummy_global) = 1;
5344 TREE_STATIC (dummy_global) = 1;
5345 node = varpool_node::get_create (dummy_global);
5346 node->definition = 1;
5347 node->force_output = 1;
5349 if (types_used_by_cur_var_decl)
5350 while (!types_used_by_cur_var_decl->is_empty ())
5352 tree t = types_used_by_cur_var_decl->pop ();
5353 types_used_by_var_decl_insert (t, dummy_global);
5357 /* Output debug information for all global type declarations first. This
5358 ensures that global types whose compilation hasn't been finalized yet,
5359 for example pointers to Taft amendment types, have their compilation
5360 finalized in the right context. */
5361 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5362 if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
5363 debug_hooks->type_decl (iter, false);
5365 /* Then output the global variables. We need to do that after the debug
5366 information for global types is emitted so that they are finalized. */
5367 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5368 if (TREE_CODE (iter) == VAR_DECL)
5369 rest_of_decl_compilation (iter, true, 0);
5371 /* Output the imported modules/declarations. In GNAT, these are only
5372 materializing subprogram. */
5373 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5374 if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
5375 debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
5376 DECL_CONTEXT (iter), 0);
5379 /* ************************************************************************
5380 * * GCC builtins support *
5381 * ************************************************************************ */
5383 /* The general scheme is fairly simple:
5385 For each builtin function/type to be declared, gnat_install_builtins calls
5386 internal facilities which eventually get to gnat_pushdecl, which in turn
5387 tracks the so declared builtin function decls in the 'builtin_decls' global
5388 datastructure. When an Intrinsic subprogram declaration is processed, we
5389 search this global datastructure to retrieve the associated BUILT_IN DECL
5390 node. */
5392 /* Search the chain of currently available builtin declarations for a node
5393 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5394 found, if any, or NULL_TREE otherwise. */
5395 tree
5396 builtin_decl_for (tree name)
5398 unsigned i;
5399 tree decl;
5401 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5402 if (DECL_NAME (decl) == name)
5403 return decl;
5405 return NULL_TREE;
5408 /* The code below eventually exposes gnat_install_builtins, which declares
5409 the builtin types and functions we might need, either internally or as
5410 user accessible facilities.
5412 ??? This is a first implementation shot, still in rough shape. It is
5413 heavily inspired from the "C" family implementation, with chunks copied
5414 verbatim from there.
5416 Two obvious improvement candidates are:
5417 o Use a more efficient name/decl mapping scheme
5418 o Devise a middle-end infrastructure to avoid having to copy
5419 pieces between front-ends. */
5421 /* ----------------------------------------------------------------------- *
5422 * BUILTIN ELEMENTARY TYPES *
5423 * ----------------------------------------------------------------------- */
5425 /* Standard data types to be used in builtin argument declarations. */
5427 enum c_tree_index
5429 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5430 CTI_STRING_TYPE,
5431 CTI_CONST_STRING_TYPE,
5433 CTI_MAX
5436 static tree c_global_trees[CTI_MAX];
5438 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5439 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5440 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5442 /* ??? In addition some attribute handlers, we currently don't support a
5443 (small) number of builtin-types, which in turns inhibits support for a
5444 number of builtin functions. */
5445 #define wint_type_node void_type_node
5446 #define intmax_type_node void_type_node
5447 #define uintmax_type_node void_type_node
5449 /* Used to help initialize the builtin-types.def table. When a type of
5450 the correct size doesn't exist, use error_mark_node instead of NULL.
5451 The later results in segfaults even when a decl using the type doesn't
5452 get invoked. */
5454 static tree
5455 builtin_type_for_size (int size, bool unsignedp)
5457 tree type = gnat_type_for_size (size, unsignedp);
5458 return type ? type : error_mark_node;
5461 /* Build/push the elementary type decls that builtin functions/types
5462 will need. */
5464 static void
5465 install_builtin_elementary_types (void)
5467 signed_size_type_node = gnat_signed_type_for (size_type_node);
5468 pid_type_node = integer_type_node;
5470 string_type_node = build_pointer_type (char_type_node);
5471 const_string_type_node
5472 = build_pointer_type (build_qualified_type
5473 (char_type_node, TYPE_QUAL_CONST));
5476 /* ----------------------------------------------------------------------- *
5477 * BUILTIN FUNCTION TYPES *
5478 * ----------------------------------------------------------------------- */
5480 /* Now, builtin function types per se. */
5482 enum c_builtin_type
5484 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5485 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5486 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5487 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5488 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5489 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5490 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5491 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5492 ARG6) NAME,
5493 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5494 ARG6, ARG7) NAME,
5495 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5496 ARG6, ARG7, ARG8) NAME,
5497 #define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5498 ARG6, ARG7, ARG8, ARG9) NAME,
5499 #define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5500 ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
5501 #define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5502 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
5503 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5504 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5505 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5506 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5507 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5508 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5509 NAME,
5510 #define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5511 ARG6) NAME,
5512 #define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5513 ARG6, ARG7) NAME,
5514 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5515 #include "builtin-types.def"
5516 #undef DEF_PRIMITIVE_TYPE
5517 #undef DEF_FUNCTION_TYPE_0
5518 #undef DEF_FUNCTION_TYPE_1
5519 #undef DEF_FUNCTION_TYPE_2
5520 #undef DEF_FUNCTION_TYPE_3
5521 #undef DEF_FUNCTION_TYPE_4
5522 #undef DEF_FUNCTION_TYPE_5
5523 #undef DEF_FUNCTION_TYPE_6
5524 #undef DEF_FUNCTION_TYPE_7
5525 #undef DEF_FUNCTION_TYPE_8
5526 #undef DEF_FUNCTION_TYPE_9
5527 #undef DEF_FUNCTION_TYPE_10
5528 #undef DEF_FUNCTION_TYPE_11
5529 #undef DEF_FUNCTION_TYPE_VAR_0
5530 #undef DEF_FUNCTION_TYPE_VAR_1
5531 #undef DEF_FUNCTION_TYPE_VAR_2
5532 #undef DEF_FUNCTION_TYPE_VAR_3
5533 #undef DEF_FUNCTION_TYPE_VAR_4
5534 #undef DEF_FUNCTION_TYPE_VAR_5
5535 #undef DEF_FUNCTION_TYPE_VAR_6
5536 #undef DEF_FUNCTION_TYPE_VAR_7
5537 #undef DEF_POINTER_TYPE
5538 BT_LAST
5541 typedef enum c_builtin_type builtin_type;
5543 /* A temporary array used in communication with def_fn_type. */
5544 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5546 /* A helper function for install_builtin_types. Build function type
5547 for DEF with return type RET and N arguments. If VAR is true, then the
5548 function should be variadic after those N arguments.
5550 Takes special care not to ICE if any of the types involved are
5551 error_mark_node, which indicates that said type is not in fact available
5552 (see builtin_type_for_size). In which case the function type as a whole
5553 should be error_mark_node. */
5555 static void
5556 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5558 tree t;
5559 tree *args = XALLOCAVEC (tree, n);
5560 va_list list;
5561 int i;
5563 va_start (list, n);
5564 for (i = 0; i < n; ++i)
5566 builtin_type a = (builtin_type) va_arg (list, int);
5567 t = builtin_types[a];
5568 if (t == error_mark_node)
5569 goto egress;
5570 args[i] = t;
5573 t = builtin_types[ret];
5574 if (t == error_mark_node)
5575 goto egress;
5576 if (var)
5577 t = build_varargs_function_type_array (t, n, args);
5578 else
5579 t = build_function_type_array (t, n, args);
5581 egress:
5582 builtin_types[def] = t;
5583 va_end (list);
5586 /* Build the builtin function types and install them in the builtin_types
5587 array for later use in builtin function decls. */
5589 static void
5590 install_builtin_function_types (void)
5592 tree va_list_ref_type_node;
5593 tree va_list_arg_type_node;
5595 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5597 va_list_arg_type_node = va_list_ref_type_node =
5598 build_pointer_type (TREE_TYPE (va_list_type_node));
5600 else
5602 va_list_arg_type_node = va_list_type_node;
5603 va_list_ref_type_node = build_reference_type (va_list_type_node);
5606 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5607 builtin_types[ENUM] = VALUE;
5608 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5609 def_fn_type (ENUM, RETURN, 0, 0);
5610 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5611 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5612 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5613 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5614 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5615 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5616 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5617 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5618 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5619 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5620 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5621 ARG6) \
5622 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5623 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5624 ARG6, ARG7) \
5625 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5626 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5627 ARG6, ARG7, ARG8) \
5628 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5629 ARG7, ARG8);
5630 #define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5631 ARG6, ARG7, ARG8, ARG9) \
5632 def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5633 ARG7, ARG8, ARG9);
5634 #define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5635 ARG6, ARG7, ARG8, ARG9, ARG10) \
5636 def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5637 ARG7, ARG8, ARG9, ARG10);
5638 #define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5639 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
5640 def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5641 ARG7, ARG8, ARG9, ARG10, ARG11);
5642 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5643 def_fn_type (ENUM, RETURN, 1, 0);
5644 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5645 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5646 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5647 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5648 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5649 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5650 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5651 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5652 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5653 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5654 #define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5655 ARG6) \
5656 def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5657 #define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5658 ARG6, ARG7) \
5659 def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5660 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5661 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5663 #include "builtin-types.def"
5665 #undef DEF_PRIMITIVE_TYPE
5666 #undef DEF_FUNCTION_TYPE_0
5667 #undef DEF_FUNCTION_TYPE_1
5668 #undef DEF_FUNCTION_TYPE_2
5669 #undef DEF_FUNCTION_TYPE_3
5670 #undef DEF_FUNCTION_TYPE_4
5671 #undef DEF_FUNCTION_TYPE_5
5672 #undef DEF_FUNCTION_TYPE_6
5673 #undef DEF_FUNCTION_TYPE_7
5674 #undef DEF_FUNCTION_TYPE_8
5675 #undef DEF_FUNCTION_TYPE_9
5676 #undef DEF_FUNCTION_TYPE_10
5677 #undef DEF_FUNCTION_TYPE_11
5678 #undef DEF_FUNCTION_TYPE_VAR_0
5679 #undef DEF_FUNCTION_TYPE_VAR_1
5680 #undef DEF_FUNCTION_TYPE_VAR_2
5681 #undef DEF_FUNCTION_TYPE_VAR_3
5682 #undef DEF_FUNCTION_TYPE_VAR_4
5683 #undef DEF_FUNCTION_TYPE_VAR_5
5684 #undef DEF_FUNCTION_TYPE_VAR_6
5685 #undef DEF_FUNCTION_TYPE_VAR_7
5686 #undef DEF_POINTER_TYPE
5687 builtin_types[(int) BT_LAST] = NULL_TREE;
5690 /* ----------------------------------------------------------------------- *
5691 * BUILTIN ATTRIBUTES *
5692 * ----------------------------------------------------------------------- */
5694 enum built_in_attribute
5696 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5697 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5698 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
5699 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5700 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5701 #include "builtin-attrs.def"
5702 #undef DEF_ATTR_NULL_TREE
5703 #undef DEF_ATTR_INT
5704 #undef DEF_ATTR_STRING
5705 #undef DEF_ATTR_IDENT
5706 #undef DEF_ATTR_TREE_LIST
5707 ATTR_LAST
5710 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5712 static void
5713 install_builtin_attributes (void)
5715 /* Fill in the built_in_attributes array. */
5716 #define DEF_ATTR_NULL_TREE(ENUM) \
5717 built_in_attributes[(int) ENUM] = NULL_TREE;
5718 #define DEF_ATTR_INT(ENUM, VALUE) \
5719 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5720 #define DEF_ATTR_STRING(ENUM, VALUE) \
5721 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
5722 #define DEF_ATTR_IDENT(ENUM, STRING) \
5723 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5724 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5725 built_in_attributes[(int) ENUM] \
5726 = tree_cons (built_in_attributes[(int) PURPOSE], \
5727 built_in_attributes[(int) VALUE], \
5728 built_in_attributes[(int) CHAIN]);
5729 #include "builtin-attrs.def"
5730 #undef DEF_ATTR_NULL_TREE
5731 #undef DEF_ATTR_INT
5732 #undef DEF_ATTR_STRING
5733 #undef DEF_ATTR_IDENT
5734 #undef DEF_ATTR_TREE_LIST
5737 /* Handle a "const" attribute; arguments as in
5738 struct attribute_spec.handler. */
5740 static tree
5741 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5742 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5743 bool *no_add_attrs)
5745 if (TREE_CODE (*node) == FUNCTION_DECL)
5746 TREE_READONLY (*node) = 1;
5747 else
5748 *no_add_attrs = true;
5750 return NULL_TREE;
5753 /* Handle a "nothrow" attribute; arguments as in
5754 struct attribute_spec.handler. */
5756 static tree
5757 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5758 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5759 bool *no_add_attrs)
5761 if (TREE_CODE (*node) == FUNCTION_DECL)
5762 TREE_NOTHROW (*node) = 1;
5763 else
5764 *no_add_attrs = true;
5766 return NULL_TREE;
5769 /* Handle a "pure" attribute; arguments as in
5770 struct attribute_spec.handler. */
5772 static tree
5773 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5774 int ARG_UNUSED (flags), bool *no_add_attrs)
5776 if (TREE_CODE (*node) == FUNCTION_DECL)
5777 DECL_PURE_P (*node) = 1;
5778 /* TODO: support types. */
5779 else
5781 warning (OPT_Wattributes, "%qs attribute ignored",
5782 IDENTIFIER_POINTER (name));
5783 *no_add_attrs = true;
5786 return NULL_TREE;
5789 /* Handle a "no vops" attribute; arguments as in
5790 struct attribute_spec.handler. */
5792 static tree
5793 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5794 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5795 bool *ARG_UNUSED (no_add_attrs))
5797 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5798 DECL_IS_NOVOPS (*node) = 1;
5799 return NULL_TREE;
5802 /* Helper for nonnull attribute handling; fetch the operand number
5803 from the attribute argument list. */
5805 static bool
5806 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5808 /* Verify the arg number is a constant. */
5809 if (!tree_fits_uhwi_p (arg_num_expr))
5810 return false;
5812 *valp = TREE_INT_CST_LOW (arg_num_expr);
5813 return true;
5816 /* Handle the "nonnull" attribute. */
5817 static tree
5818 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5819 tree args, int ARG_UNUSED (flags),
5820 bool *no_add_attrs)
5822 tree type = *node;
5823 unsigned HOST_WIDE_INT attr_arg_num;
5825 /* If no arguments are specified, all pointer arguments should be
5826 non-null. Verify a full prototype is given so that the arguments
5827 will have the correct types when we actually check them later.
5828 Avoid diagnosing type-generic built-ins since those have no
5829 prototype. */
5830 if (!args)
5832 if (!prototype_p (type)
5833 && (!TYPE_ATTRIBUTES (type)
5834 || !lookup_attribute ("type generic", TYPE_ATTRIBUTES (type))))
5836 error ("nonnull attribute without arguments on a non-prototype");
5837 *no_add_attrs = true;
5839 return NULL_TREE;
5842 /* Argument list specified. Verify that each argument number references
5843 a pointer argument. */
5844 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5846 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5848 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5850 error ("nonnull argument has invalid operand number (argument %lu)",
5851 (unsigned long) attr_arg_num);
5852 *no_add_attrs = true;
5853 return NULL_TREE;
5856 if (prototype_p (type))
5858 function_args_iterator iter;
5859 tree argument;
5861 function_args_iter_init (&iter, type);
5862 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
5864 argument = function_args_iter_cond (&iter);
5865 if (!argument || ck_num == arg_num)
5866 break;
5869 if (!argument
5870 || TREE_CODE (argument) == VOID_TYPE)
5872 error ("nonnull argument with out-of-range operand number "
5873 "(argument %lu, operand %lu)",
5874 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5875 *no_add_attrs = true;
5876 return NULL_TREE;
5879 if (TREE_CODE (argument) != POINTER_TYPE)
5881 error ("nonnull argument references non-pointer operand "
5882 "(argument %lu, operand %lu)",
5883 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5884 *no_add_attrs = true;
5885 return NULL_TREE;
5890 return NULL_TREE;
5893 /* Handle a "sentinel" attribute. */
5895 static tree
5896 handle_sentinel_attribute (tree *node, tree name, tree args,
5897 int ARG_UNUSED (flags), bool *no_add_attrs)
5899 if (!prototype_p (*node))
5901 warning (OPT_Wattributes,
5902 "%qs attribute requires prototypes with named arguments",
5903 IDENTIFIER_POINTER (name));
5904 *no_add_attrs = true;
5906 else
5908 if (!stdarg_p (*node))
5910 warning (OPT_Wattributes,
5911 "%qs attribute only applies to variadic functions",
5912 IDENTIFIER_POINTER (name));
5913 *no_add_attrs = true;
5917 if (args)
5919 tree position = TREE_VALUE (args);
5921 if (TREE_CODE (position) != INTEGER_CST)
5923 warning (0, "requested position is not an integer constant");
5924 *no_add_attrs = true;
5926 else
5928 if (tree_int_cst_lt (position, integer_zero_node))
5930 warning (0, "requested position is less than zero");
5931 *no_add_attrs = true;
5936 return NULL_TREE;
5939 /* Handle a "noreturn" attribute; arguments as in
5940 struct attribute_spec.handler. */
5942 static tree
5943 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5944 int ARG_UNUSED (flags), bool *no_add_attrs)
5946 tree type = TREE_TYPE (*node);
5948 /* See FIXME comment in c_common_attribute_table. */
5949 if (TREE_CODE (*node) == FUNCTION_DECL)
5950 TREE_THIS_VOLATILE (*node) = 1;
5951 else if (TREE_CODE (type) == POINTER_TYPE
5952 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5953 TREE_TYPE (*node)
5954 = build_pointer_type
5955 (build_type_variant (TREE_TYPE (type),
5956 TYPE_READONLY (TREE_TYPE (type)), 1));
5957 else
5959 warning (OPT_Wattributes, "%qs attribute ignored",
5960 IDENTIFIER_POINTER (name));
5961 *no_add_attrs = true;
5964 return NULL_TREE;
5967 /* Handle a "noinline" attribute; arguments as in
5968 struct attribute_spec.handler. */
5970 static tree
5971 handle_noinline_attribute (tree *node, tree name,
5972 tree ARG_UNUSED (args),
5973 int ARG_UNUSED (flags), bool *no_add_attrs)
5975 if (TREE_CODE (*node) == FUNCTION_DECL)
5977 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
5979 warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
5980 "with attribute %qs", name, "always_inline");
5981 *no_add_attrs = true;
5983 else
5984 DECL_UNINLINABLE (*node) = 1;
5986 else
5988 warning (OPT_Wattributes, "%qE attribute ignored", name);
5989 *no_add_attrs = true;
5992 return NULL_TREE;
5995 /* Handle a "noclone" attribute; arguments as in
5996 struct attribute_spec.handler. */
5998 static tree
5999 handle_noclone_attribute (tree *node, tree name,
6000 tree ARG_UNUSED (args),
6001 int ARG_UNUSED (flags), bool *no_add_attrs)
6003 if (TREE_CODE (*node) != FUNCTION_DECL)
6005 warning (OPT_Wattributes, "%qE attribute ignored", name);
6006 *no_add_attrs = true;
6009 return NULL_TREE;
6012 /* Handle a "leaf" attribute; arguments as in
6013 struct attribute_spec.handler. */
6015 static tree
6016 handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6017 int ARG_UNUSED (flags), bool *no_add_attrs)
6019 if (TREE_CODE (*node) != FUNCTION_DECL)
6021 warning (OPT_Wattributes, "%qE attribute ignored", name);
6022 *no_add_attrs = true;
6024 if (!TREE_PUBLIC (*node))
6026 warning (OPT_Wattributes, "%qE attribute has no effect", name);
6027 *no_add_attrs = true;
6030 return NULL_TREE;
6033 /* Handle a "always_inline" attribute; arguments as in
6034 struct attribute_spec.handler. */
6036 static tree
6037 handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6038 int ARG_UNUSED (flags), bool *no_add_attrs)
6040 if (TREE_CODE (*node) == FUNCTION_DECL)
6042 /* Set the attribute and mark it for disregarding inline limits. */
6043 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
6045 else
6047 warning (OPT_Wattributes, "%qE attribute ignored", name);
6048 *no_add_attrs = true;
6051 return NULL_TREE;
6054 /* Handle a "malloc" attribute; arguments as in
6055 struct attribute_spec.handler. */
6057 static tree
6058 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6059 int ARG_UNUSED (flags), bool *no_add_attrs)
6061 if (TREE_CODE (*node) == FUNCTION_DECL
6062 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6063 DECL_IS_MALLOC (*node) = 1;
6064 else
6066 warning (OPT_Wattributes, "%qs attribute ignored",
6067 IDENTIFIER_POINTER (name));
6068 *no_add_attrs = true;
6071 return NULL_TREE;
6074 /* Fake handler for attributes we don't properly support. */
6076 tree
6077 fake_attribute_handler (tree * ARG_UNUSED (node),
6078 tree ARG_UNUSED (name),
6079 tree ARG_UNUSED (args),
6080 int ARG_UNUSED (flags),
6081 bool * ARG_UNUSED (no_add_attrs))
6083 return NULL_TREE;
6086 /* Handle a "type_generic" attribute. */
6088 static tree
6089 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6090 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6091 bool * ARG_UNUSED (no_add_attrs))
6093 /* Ensure we have a function type. */
6094 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
6096 /* Ensure we have a variadic function. */
6097 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
6099 return NULL_TREE;
6102 /* Handle a "vector_size" attribute; arguments as in
6103 struct attribute_spec.handler. */
6105 static tree
6106 handle_vector_size_attribute (tree *node, tree name, tree args,
6107 int ARG_UNUSED (flags), bool *no_add_attrs)
6109 tree type = *node;
6110 tree vector_type;
6112 *no_add_attrs = true;
6114 /* We need to provide for vector pointers, vector arrays, and
6115 functions returning vectors. For example:
6117 __attribute__((vector_size(16))) short *foo;
6119 In this case, the mode is SI, but the type being modified is
6120 HI, so we need to look further. */
6121 while (POINTER_TYPE_P (type)
6122 || TREE_CODE (type) == FUNCTION_TYPE
6123 || TREE_CODE (type) == ARRAY_TYPE)
6124 type = TREE_TYPE (type);
6126 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
6127 if (!vector_type)
6128 return NULL_TREE;
6130 /* Build back pointers if needed. */
6131 *node = reconstruct_complex_type (*node, vector_type);
6133 return NULL_TREE;
6136 /* Handle a "vector_type" attribute; arguments as in
6137 struct attribute_spec.handler. */
6139 static tree
6140 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6141 int ARG_UNUSED (flags), bool *no_add_attrs)
6143 tree type = *node;
6144 tree vector_type;
6146 *no_add_attrs = true;
6148 if (TREE_CODE (type) != ARRAY_TYPE)
6150 error ("attribute %qs applies to array types only",
6151 IDENTIFIER_POINTER (name));
6152 return NULL_TREE;
6155 vector_type = build_vector_type_for_array (type, name);
6156 if (!vector_type)
6157 return NULL_TREE;
6159 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
6160 *node = vector_type;
6162 return NULL_TREE;
6165 /* ----------------------------------------------------------------------- *
6166 * BUILTIN FUNCTIONS *
6167 * ----------------------------------------------------------------------- */
6169 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6170 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
6171 if nonansi_p and flag_no_nonansi_builtin. */
6173 static void
6174 def_builtin_1 (enum built_in_function fncode,
6175 const char *name,
6176 enum built_in_class fnclass,
6177 tree fntype, tree libtype,
6178 bool both_p, bool fallback_p,
6179 bool nonansi_p ATTRIBUTE_UNUSED,
6180 tree fnattrs, bool implicit_p)
6182 tree decl;
6183 const char *libname;
6185 /* Preserve an already installed decl. It most likely was setup in advance
6186 (e.g. as part of the internal builtins) for specific reasons. */
6187 if (builtin_decl_explicit (fncode))
6188 return;
6190 gcc_assert ((!both_p && !fallback_p)
6191 || !strncmp (name, "__builtin_",
6192 strlen ("__builtin_")));
6194 libname = name + strlen ("__builtin_");
6195 decl = add_builtin_function (name, fntype, fncode, fnclass,
6196 (fallback_p ? libname : NULL),
6197 fnattrs);
6198 if (both_p)
6199 /* ??? This is normally further controlled by command-line options
6200 like -fno-builtin, but we don't have them for Ada. */
6201 add_builtin_function (libname, libtype, fncode, fnclass,
6202 NULL, fnattrs);
6204 set_builtin_decl (fncode, decl, implicit_p);
6207 static int flag_isoc94 = 0;
6208 static int flag_isoc99 = 0;
6209 static int flag_isoc11 = 0;
6211 /* Install what the common builtins.def offers. */
6213 static void
6214 install_builtin_functions (void)
6216 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6217 NONANSI_P, ATTRS, IMPLICIT, COND) \
6218 if (NAME && COND) \
6219 def_builtin_1 (ENUM, NAME, CLASS, \
6220 builtin_types[(int) TYPE], \
6221 builtin_types[(int) LIBTYPE], \
6222 BOTH_P, FALLBACK_P, NONANSI_P, \
6223 built_in_attributes[(int) ATTRS], IMPLICIT);
6224 #include "builtins.def"
6227 /* ----------------------------------------------------------------------- *
6228 * BUILTIN FUNCTIONS *
6229 * ----------------------------------------------------------------------- */
6231 /* Install the builtin functions we might need. */
6233 void
6234 gnat_install_builtins (void)
6236 install_builtin_elementary_types ();
6237 install_builtin_function_types ();
6238 install_builtin_attributes ();
6240 /* Install builtins used by generic middle-end pieces first. Some of these
6241 know about internal specificities and control attributes accordingly, for
6242 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
6243 the generic definition from builtins.def. */
6244 build_common_builtin_nodes ();
6246 /* Now, install the target specific builtins, such as the AltiVec family on
6247 ppc, and the common set as exposed by builtins.def. */
6248 targetm.init_builtins ();
6249 install_builtin_functions ();
6252 #include "gt-ada-utils.h"
6253 #include "gtype-ada.h"