/* Source: official-gcc.git, gcc/ada/gcc-interface/utils.c
   (blob 1f1e4d3b81481d0536a280cb824810c08f4d2522).
   Page commit subject: "PR c/71392 - SEGV calling integer overflow
   built-ins with a null pointer".  */
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2016, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "target.h"
30 #include "function.h"
31 #include "tree.h"
32 #include "stringpool.h"
33 #include "cgraph.h"
34 #include "diagnostic.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "toplev.h"
41 #include "output.h"
42 #include "debug.h"
43 #include "convert.h"
44 #include "common/common-target.h"
45 #include "langhooks.h"
46 #include "tree-dump.h"
47 #include "tree-inline.h"
49 #include "ada.h"
50 #include "types.h"
51 #include "atree.h"
52 #include "nlists.h"
53 #include "uintp.h"
54 #include "fe.h"
55 #include "sinfo.h"
56 #include "einfo.h"
57 #include "ada-tree.h"
58 #include "gigi.h"
60 /* If nonzero, pretend we are allocating at global level. */
61 int force_global;
63 /* The default alignment of "double" floating-point types, i.e. floating
64 point types whose size is equal to 64 bits, or 0 if this alignment is
65 not specifically capped. */
66 int double_float_alignment;
68 /* The default alignment of "double" or larger scalar types, i.e. scalar
69 types whose size is greater or equal to 64 bits, or 0 if this alignment
70 is not specifically capped. */
71 int double_scalar_alignment;
73 /* True if floating-point arithmetics may use wider intermediate results. */
74 bool fp_arith_may_widen = true;
76 /* Tree nodes for the various types and decls we create. */
77 tree gnat_std_decls[(int) ADT_LAST];
79 /* Functions to call for each of the possible raise reasons. */
80 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
82 /* Likewise, but with extra info for each of the possible raise reasons. */
83 tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
85 /* Forward declarations for handlers of attributes. */
86 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_noinline_attribute (tree *, tree, tree, int, bool *);
94 static tree handle_noclone_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
97 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
98 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
99 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
100 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
102 /* Fake handler for attributes we don't properly support, typically because
103 they'd require dragging a lot of the common-c front-end circuitry. */
104 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
106 /* Table of machine-independent internal attributes for Ada. We support
107 this minimal set of attributes to accommodate the needs of builtins. */
108 const struct attribute_spec gnat_internal_attribute_table[] =
110 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
111 affects_type_identity } */
112 { "const", 0, 0, true, false, false, handle_const_attribute,
113 false },
114 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute,
115 false },
116 { "pure", 0, 0, true, false, false, handle_pure_attribute,
117 false },
118 { "no vops", 0, 0, true, false, false, handle_novops_attribute,
119 false },
120 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute,
121 false },
122 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute,
123 false },
124 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute,
125 false },
126 { "noinline", 0, 0, true, false, false, handle_noinline_attribute,
127 false },
128 { "noclone", 0, 0, true, false, false, handle_noclone_attribute,
129 false },
130 { "leaf", 0, 0, true, false, false, handle_leaf_attribute,
131 false },
132 { "always_inline",0, 0, true, false, false, handle_always_inline_attribute,
133 false },
134 { "malloc", 0, 0, true, false, false, handle_malloc_attribute,
135 false },
136 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute,
137 false },
139 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute,
140 false },
141 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute,
142 false },
143 { "may_alias", 0, 0, false, true, false, NULL, false },
145 /* ??? format and format_arg are heavy and not supported, which actually
146 prevents support for stdio builtins, which we however declare as part
147 of the common builtins.def contents. */
148 { "format", 3, 3, false, true, true, fake_attribute_handler, false },
149 { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },
151 { NULL, 0, 0, false, false, false, NULL, false }
154 /* Associates a GNAT tree node to a GCC tree node. It is used in
155 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
156 of `save_gnu_tree' for more info. */
157 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
159 #define GET_GNU_TREE(GNAT_ENTITY) \
160 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
162 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
163 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
165 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
166 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
168 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
169 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
171 #define GET_DUMMY_NODE(GNAT_ENTITY) \
172 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
174 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
175 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
177 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
178 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
180 /* This variable keeps a table for types for each precision so that we only
181 allocate each of them once. Signed and unsigned types are kept separate.
183 Note that these types are only used when fold-const requests something
184 special. Perhaps we should NOT share these types; we'll see how it
185 goes later. */
186 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
188 /* Likewise for float types, but record these by mode. */
189 static GTY(()) tree float_types[NUM_MACHINE_MODES];
191 /* For each binding contour we allocate a binding_level structure to indicate
192 the binding depth. */
194 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
195 /* The binding level containing this one (the enclosing binding level). */
196 struct gnat_binding_level *chain;
197 /* The BLOCK node for this level. */
198 tree block;
199 /* If nonzero, the setjmp buffer that needs to be updated for any
200 variable-sized definition within this context. */
201 tree jmpbuf_decl;
204 /* The binding level currently in effect. */
205 static GTY(()) struct gnat_binding_level *current_binding_level;
207 /* A chain of gnat_binding_level structures awaiting reuse. */
208 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
210 /* The context to be used for global declarations. */
211 static GTY(()) tree global_context;
213 /* An array of global declarations. */
214 static GTY(()) vec<tree, va_gc> *global_decls;
216 /* An array of builtin function declarations. */
217 static GTY(()) vec<tree, va_gc> *builtin_decls;
219 /* A chain of unused BLOCK nodes. */
220 static GTY((deletable)) tree free_block_chain;
222 /* A hash table of padded types. It is modelled on the generic type
223 hash table in tree.c, which must thus be used as a reference. */
225 struct GTY((for_user)) pad_type_hash {
226 unsigned long hash;
227 tree type;
230 struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
232 static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
233 static bool equal (pad_type_hash *a, pad_type_hash *b);
234 static int keep_cache_entry (pad_type_hash *&);
237 static GTY ((cache))
238 hash_table<pad_type_hasher> *pad_type_hash_table;
240 static tree merge_sizes (tree, tree, tree, bool, bool);
241 static tree compute_related_constant (tree, tree);
242 static tree split_plus (tree, tree *);
243 static tree float_type_for_precision (int, machine_mode);
244 static tree convert_to_fat_pointer (tree, tree);
245 static unsigned int scale_by_factor_of (tree, unsigned int);
246 static bool potential_alignment_gap (tree, tree, tree);
248 /* Linked list used as a queue to defer the initialization of the DECL_CONTEXT
249 of ..._DECL nodes and of the TYPE_CONTEXT of ..._TYPE nodes. */
250 struct deferred_decl_context_node
252 /* The ..._DECL node to work on. */
253 tree decl;
255 /* The corresponding entity's Scope. */
256 Entity_Id gnat_scope;
258 /* The value of force_global when DECL was pushed. */
259 int force_global;
261 /* The list of ..._TYPE nodes to propagate the context to. */
262 vec<tree> types;
264 /* The next queue item. */
265 struct deferred_decl_context_node *next;
268 static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;
270 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
271 feed it with the elaboration of GNAT_SCOPE. */
272 static struct deferred_decl_context_node *
273 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);
275 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
276 feed it with the DECL_CONTEXT computed as part of N as soon as it is
277 computed. */
278 static void add_deferred_type_context (struct deferred_decl_context_node *n,
279 tree type);
281 /* Initialize data structures of the utils.c module. */
283 void
284 init_gnat_utils (void)
286 /* Initialize the association of GNAT nodes to GCC trees. */
287 associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
289 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
290 dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
292 /* Initialize the hash table of padded types. */
293 pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
296 /* Destroy data structures of the utils.c module. */
298 void
299 destroy_gnat_utils (void)
301 /* Destroy the association of GNAT nodes to GCC trees. */
302 ggc_free (associate_gnat_to_gnu);
303 associate_gnat_to_gnu = NULL;
305 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
306 ggc_free (dummy_node_table);
307 dummy_node_table = NULL;
309 /* Destroy the hash table of padded types. */
310 pad_type_hash_table->empty ();
311 pad_type_hash_table = NULL;
314 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
315 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
316 If NO_CHECK is true, the latter check is suppressed.
318 If GNU_DECL is zero, reset a previous association. */
320 void
321 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
323 /* Check that GNAT_ENTITY is not already defined and that it is being set
324 to something which is a decl. If that is not the case, this usually
325 means GNAT_ENTITY is defined twice, but occasionally is due to some
326 Gigi problem. */
327 gcc_assert (!(gnu_decl
328 && (PRESENT_GNU_TREE (gnat_entity)
329 || (!no_check && !DECL_P (gnu_decl)))));
331 SET_GNU_TREE (gnat_entity, gnu_decl);
334 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
335 that was associated with it. If there is no such tree node, abort.
337 In some cases, such as delayed elaboration or expressions that need to
338 be elaborated only once, GNAT_ENTITY is really not an entity. */
340 tree
341 get_gnu_tree (Entity_Id gnat_entity)
343 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
344 return GET_GNU_TREE (gnat_entity);
347 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
349 bool
350 present_gnu_tree (Entity_Id gnat_entity)
352 return PRESENT_GNU_TREE (gnat_entity);
355 /* Make a dummy type corresponding to GNAT_TYPE. */
357 tree
358 make_dummy_type (Entity_Id gnat_type)
360 Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
361 tree gnu_type;
363 /* If there was no equivalent type (can only happen when just annotating
364 types) or underlying type, go back to the original type. */
365 if (No (gnat_equiv))
366 gnat_equiv = gnat_type;
368 /* If it there already a dummy type, use that one. Else make one. */
369 if (PRESENT_DUMMY_NODE (gnat_equiv))
370 return GET_DUMMY_NODE (gnat_equiv);
372 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
373 an ENUMERAL_TYPE. */
374 gnu_type = make_node (Is_Record_Type (gnat_equiv)
375 ? tree_code_for_record_type (gnat_equiv)
376 : ENUMERAL_TYPE);
377 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
378 TYPE_DUMMY_P (gnu_type) = 1;
379 TYPE_STUB_DECL (gnu_type)
380 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
381 if (Is_By_Reference_Type (gnat_equiv))
382 TYPE_BY_REFERENCE_P (gnu_type) = 1;
384 SET_DUMMY_NODE (gnat_equiv, gnu_type);
386 return gnu_type;
389 /* Return the dummy type that was made for GNAT_TYPE, if any. */
391 tree
392 get_dummy_type (Entity_Id gnat_type)
394 return GET_DUMMY_NODE (gnat_type);
397 /* Build dummy fat and thin pointer types whose designated type is specified
398 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
400 void
401 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
403 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
404 tree gnu_fat_type, fields, gnu_object_type;
406 gnu_template_type = make_node (RECORD_TYPE);
407 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
408 TYPE_DUMMY_P (gnu_template_type) = 1;
409 gnu_ptr_template = build_pointer_type (gnu_template_type);
411 gnu_array_type = make_node (ENUMERAL_TYPE);
412 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
413 TYPE_DUMMY_P (gnu_array_type) = 1;
414 gnu_ptr_array = build_pointer_type (gnu_array_type);
416 gnu_fat_type = make_node (RECORD_TYPE);
417 /* Build a stub DECL to trigger the special processing for fat pointer types
418 in gnat_pushdecl. */
419 TYPE_NAME (gnu_fat_type)
420 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
421 gnu_fat_type);
422 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
423 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
424 DECL_CHAIN (fields)
425 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
426 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
427 finish_fat_pointer_type (gnu_fat_type, fields);
428 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
429 /* Suppress debug info until after the type is completed. */
430 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
432 gnu_object_type = make_node (RECORD_TYPE);
433 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
434 TYPE_DUMMY_P (gnu_object_type) = 1;
436 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
437 TYPE_REFERENCE_TO (gnu_desig_type) = gnu_fat_type;
438 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
441 /* Return true if we are in the global binding level. */
443 bool
444 global_bindings_p (void)
446 return force_global || !current_function_decl;
449 /* Enter a new binding level. */
451 void
452 gnat_pushlevel (void)
454 struct gnat_binding_level *newlevel = NULL;
456 /* Reuse a struct for this binding level, if there is one. */
457 if (free_binding_level)
459 newlevel = free_binding_level;
460 free_binding_level = free_binding_level->chain;
462 else
463 newlevel = ggc_alloc<gnat_binding_level> ();
465 /* Use a free BLOCK, if any; otherwise, allocate one. */
466 if (free_block_chain)
468 newlevel->block = free_block_chain;
469 free_block_chain = BLOCK_CHAIN (free_block_chain);
470 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
472 else
473 newlevel->block = make_node (BLOCK);
475 /* Point the BLOCK we just made to its parent. */
476 if (current_binding_level)
477 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
479 BLOCK_VARS (newlevel->block) = NULL_TREE;
480 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
481 TREE_USED (newlevel->block) = 1;
483 /* Add this level to the front of the chain (stack) of active levels. */
484 newlevel->chain = current_binding_level;
485 newlevel->jmpbuf_decl = NULL_TREE;
486 current_binding_level = newlevel;
489 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
490 and point FNDECL to this BLOCK. */
492 void
493 set_current_block_context (tree fndecl)
495 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
496 DECL_INITIAL (fndecl) = current_binding_level->block;
497 set_block_for_group (current_binding_level->block);
500 /* Set the jmpbuf_decl for the current binding level to DECL. */
502 void
503 set_block_jmpbuf_decl (tree decl)
505 current_binding_level->jmpbuf_decl = decl;
508 /* Get the jmpbuf_decl, if any, for the current binding level. */
510 tree
511 get_block_jmpbuf_decl (void)
513 return current_binding_level->jmpbuf_decl;
516 /* Exit a binding level. Set any BLOCK into the current code group. */
518 void
519 gnat_poplevel (void)
521 struct gnat_binding_level *level = current_binding_level;
522 tree block = level->block;
524 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
525 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
527 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
528 are no variables free the block and merge its subblocks into those of its
529 parent block. Otherwise, add it to the list of its parent. */
530 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
532 else if (!BLOCK_VARS (block))
534 BLOCK_SUBBLOCKS (level->chain->block)
535 = block_chainon (BLOCK_SUBBLOCKS (block),
536 BLOCK_SUBBLOCKS (level->chain->block));
537 BLOCK_CHAIN (block) = free_block_chain;
538 free_block_chain = block;
540 else
542 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
543 BLOCK_SUBBLOCKS (level->chain->block) = block;
544 TREE_USED (block) = 1;
545 set_block_for_group (block);
548 /* Free this binding structure. */
549 current_binding_level = level->chain;
550 level->chain = free_binding_level;
551 free_binding_level = level;
554 /* Exit a binding level and discard the associated BLOCK. */
556 void
557 gnat_zaplevel (void)
559 struct gnat_binding_level *level = current_binding_level;
560 tree block = level->block;
562 BLOCK_CHAIN (block) = free_block_chain;
563 free_block_chain = block;
565 /* Free this binding structure. */
566 current_binding_level = level->chain;
567 level->chain = free_binding_level;
568 free_binding_level = level;
571 /* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
573 static void
574 gnat_set_type_context (tree type, tree context)
576 tree decl = TYPE_STUB_DECL (type);
578 TYPE_CONTEXT (type) = context;
580 while (decl && DECL_PARALLEL_TYPE (decl))
582 tree parallel_type = DECL_PARALLEL_TYPE (decl);
584 /* Give a context to the parallel types and their stub decl, if any.
585 Some parallel types seems to be present in multiple parallel type
586 chains, so don't mess with their context if they already have one. */
587 if (!TYPE_CONTEXT (parallel_type))
589 if (TYPE_STUB_DECL (parallel_type))
590 DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
591 TYPE_CONTEXT (parallel_type) = context;
594 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
598 /* Return the innermost scope, starting at GNAT_NODE, we are be interested in
599 the debug info, or Empty if there is no such scope. If not NULL, set
600 IS_SUBPROGRAM to whether the returned entity is a subprogram. */
602 Entity_Id
603 get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
605 Entity_Id gnat_entity;
607 if (is_subprogram)
608 *is_subprogram = false;
610 if (Nkind (gnat_node) == N_Defining_Identifier
611 || Nkind (gnat_node) == N_Defining_Operator_Symbol)
612 gnat_entity = Scope (gnat_node);
613 else
614 return Empty;
616 while (Present (gnat_entity))
618 switch (Ekind (gnat_entity))
620 case E_Function:
621 case E_Procedure:
622 if (Present (Protected_Body_Subprogram (gnat_entity)))
623 gnat_entity = Protected_Body_Subprogram (gnat_entity);
625 /* If the scope is a subprogram, then just rely on
626 current_function_decl, so that we don't have to defer
627 anything. This is needed because other places rely on the
628 validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes. */
629 if (is_subprogram)
630 *is_subprogram = true;
631 return gnat_entity;
633 case E_Record_Type:
634 case E_Record_Subtype:
635 return gnat_entity;
637 default:
638 /* By default, we are not interested in this particular scope: go to
639 the outer one. */
640 break;
643 gnat_entity = Scope (gnat_entity);
646 return Empty;
649 /* If N is NULL, set TYPE's context to CONTEXT. Defer this to the processing
650 of N otherwise. */
652 static void
653 defer_or_set_type_context (tree type, tree context,
654 struct deferred_decl_context_node *n)
656 if (n)
657 add_deferred_type_context (n, type);
658 else
659 gnat_set_type_context (type, context);
662 /* Return global_context, but create it first if need be. */
664 static tree
665 get_global_context (void)
667 if (!global_context)
669 global_context = build_translation_unit_decl (NULL_TREE);
670 debug_hooks->register_main_translation_unit (global_context);
673 return global_context;
676 /* Record DECL as belonging to the current lexical scope and use GNAT_NODE
677 for location information and flag propagation. */
679 void
680 gnat_pushdecl (tree decl, Node_Id gnat_node)
682 tree context = NULL_TREE;
683 struct deferred_decl_context_node *deferred_decl_context = NULL;
685 /* If explicitely asked to make DECL global or if it's an imported nested
686 object, short-circuit the regular Scope-based context computation. */
687 if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
689 /* Rely on the GNAT scope, or fallback to the current_function_decl if
690 the GNAT scope reached the global scope, if it reached a subprogram
691 or the declaration is a subprogram or a variable (for them we skip
692 intermediate context types because the subprogram body elaboration
693 machinery and the inliner both expect a subprogram context).
695 Falling back to current_function_decl is necessary for implicit
696 subprograms created by gigi, such as the elaboration subprograms. */
697 bool context_is_subprogram = false;
698 const Entity_Id gnat_scope
699 = get_debug_scope (gnat_node, &context_is_subprogram);
701 if (Present (gnat_scope)
702 && !context_is_subprogram
703 && TREE_CODE (decl) != FUNCTION_DECL
704 && TREE_CODE (decl) != VAR_DECL)
705 /* Always assume the scope has not been elaborated, thus defer the
706 context propagation to the time its elaboration will be
707 available. */
708 deferred_decl_context
709 = add_deferred_decl_context (decl, gnat_scope, force_global);
711 /* External declarations (when force_global > 0) may not be in a
712 local context. */
713 else if (current_function_decl && force_global == 0)
714 context = current_function_decl;
717 /* If either we are forced to be in global mode or if both the GNAT scope and
718 the current_function_decl did not help in determining the context, use the
719 global scope. */
720 if (!deferred_decl_context && !context)
721 context = get_global_context ();
723 /* Functions imported in another function are not really nested.
724 For really nested functions mark them initially as needing
725 a static chain for uses of that flag before unnesting;
726 lower_nested_functions will then recompute it. */
727 if (TREE_CODE (decl) == FUNCTION_DECL
728 && !TREE_PUBLIC (decl)
729 && context
730 && (TREE_CODE (context) == FUNCTION_DECL
731 || decl_function_context (context)))
732 DECL_STATIC_CHAIN (decl) = 1;
734 if (!deferred_decl_context)
735 DECL_CONTEXT (decl) = context;
737 TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));
739 /* Set the location of DECL and emit a declaration for it. */
740 if (Present (gnat_node) && !renaming_from_generic_instantiation_p (gnat_node))
741 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
743 add_decl_expr (decl, gnat_node);
745 /* Put the declaration on the list. The list of declarations is in reverse
746 order. The list will be reversed later. Put global declarations in the
747 globals list and local ones in the current block. But skip TYPE_DECLs
748 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
749 with the debugger and aren't needed anyway. */
750 if (!(TREE_CODE (decl) == TYPE_DECL
751 && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
753 if (DECL_EXTERNAL (decl))
755 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
756 vec_safe_push (builtin_decls, decl);
758 else if (global_bindings_p ())
759 vec_safe_push (global_decls, decl);
760 else
762 DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
763 BLOCK_VARS (current_binding_level->block) = decl;
767 /* For the declaration of a type, set its name either if it isn't already
768 set or if the previous type name was not derived from a source name.
769 We'd rather have the type named with a real name and all the pointer
770 types to the same object have the same node, except when the names are
771 both derived from source names. */
772 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
774 tree t = TREE_TYPE (decl);
776 /* Array and pointer types aren't tagged types in the C sense so we need
777 to generate a typedef in DWARF for them and make sure it is preserved,
778 unless the type is artificial. */
779 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
780 && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
781 || DECL_ARTIFICIAL (decl)))
783 /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
784 generate the typedef in DWARF. Also do that for fat pointer types
785 because, even though they are tagged types in the C sense, they are
786 still XUP types attached to the base array type at this point. */
787 else if (!DECL_ARTIFICIAL (decl)
788 && (TREE_CODE (t) == ARRAY_TYPE
789 || TREE_CODE (t) == POINTER_TYPE
790 || TYPE_IS_FAT_POINTER_P (t)))
792 tree tt;
793 /* ??? Copy and original type are not supposed to be variant but we
794 really need a variant for the placeholder machinery to work. */
795 if (TYPE_IS_FAT_POINTER_P (t))
796 tt = build_variant_type_copy (t);
797 else
799 /* TYPE_NEXT_PTR_TO is a chain of main variants. */
800 tt = build_distinct_type_copy (TYPE_MAIN_VARIANT (t));
801 if (TREE_CODE (t) == POINTER_TYPE)
802 TYPE_NEXT_PTR_TO (TYPE_MAIN_VARIANT (t)) = tt;
803 tt = build_qualified_type (tt, TYPE_QUALS (t));
805 TYPE_NAME (tt) = decl;
806 defer_or_set_type_context (tt,
807 DECL_CONTEXT (decl),
808 deferred_decl_context);
809 TREE_USED (tt) = TREE_USED (t);
810 TREE_TYPE (decl) = tt;
811 if (TYPE_NAME (t)
812 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
813 && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
814 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
815 else
816 DECL_ORIGINAL_TYPE (decl) = t;
817 /* Array types need to have a name so that they can be related to
818 their GNAT encodings. */
819 if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
820 TYPE_NAME (t) = DECL_NAME (decl);
821 t = NULL_TREE;
823 else if (TYPE_NAME (t)
824 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
825 && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
827 else
828 t = NULL_TREE;
830 /* Propagate the name to all the variants, this is needed for the type
831 qualifiers machinery to work properly (see check_qualified_type).
832 Also propagate the context to them. Note that it will be propagated
833 to all parallel types too thanks to gnat_set_type_context. */
834 if (t)
835 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
836 /* ??? Because of the previous kludge, we can have variants of fat
837 pointer types with different names. */
838 if (!(TYPE_IS_FAT_POINTER_P (t)
839 && TYPE_NAME (t)
840 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
842 TYPE_NAME (t) = decl;
843 defer_or_set_type_context (t,
844 DECL_CONTEXT (decl),
845 deferred_decl_context);
/* Create a record type that contains a SIZE bytes long field of TYPE with a
   starting bit position so that it is aligned to ALIGN bits, and leaving at
   least ROOM bytes free before the field.  BASE_ALIGN is the alignment the
   record is guaranteed to get.  GNAT_NODE is used for the position of the
   associated TYPE_DECL.  */

tree
make_aligning_type (tree type, unsigned int align, tree size,
		    unsigned int base_align, int room, Node_Id gnat_node)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes.  We use a
     record placeholder to express record'address.  */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  /* The record's own address, as a sizetype byte offset computed at run
     time through the placeholder.  */
  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

                    <--------- pos ---------->
                {  +------------+-------------+-----------------+
      record  =>{  |############|     ...     | field (type)    |
                {  +------------+-------------+-----------------+
		   |<-- room -->|<- voffset ->|<---- size ----->|
		   o            o
		   record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record.  */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_IDENTIFIER (type);

  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS.  The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address.  */
  voffset_st = size_binop (BIT_AND_EXPR,
			   fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
			   size_int ((align / BITS_PER_UNIT) - 1));

  /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype.  */
  pos = size_binop (MULT_EXPR,
		    convert (bitsizetype,
			     size_binop (PLUS_EXPR, room_st, voffset_st)),
		    bitsize_unit_node);

  /* Craft the GCC record representation.  We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield.  We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression.  */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
			     pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  SET_TYPE_ALIGN (record_type, base_align);
  TYPE_USER_ALIGN (record_type) = 1;

  /* Sizes account for the worst-case voffset, i.e. ALIGN bits plus the
     ROOM bytes requested up front.  */
  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
		  size_binop (MULT_EXPR, convert (bitsizetype, size),
			      bitsize_unit_node),
		  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
		  size_int (room + align / BITS_PER_UNIT));

  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, record_type, true, false, gnat_node);

  return record_type;
}
/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
   as the field type of a packed record if IN_RECORD is true, or as the
   component type of a packed array if IN_RECORD is false.  See if we can
   rewrite it either as a type that has non-BLKmode, which we can pack
   tighter in the packed record case, or as a smaller type with at most
   MAX_ALIGN alignment if the value is non-zero.  If so, return the new
   type; if not, return the original type.  */

tree
make_packable_type (tree type, bool in_record, unsigned int max_align)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
  unsigned HOST_WIDE_INT new_size;
  unsigned int align = TYPE_ALIGN (type);
  unsigned int new_align;

  /* No point in doing anything if the size is zero.  */
  if (size == 0)
    return type;

  tree new_type = make_node (TREE_CODE (type));

  /* Copy the name and flags from the old type to that of the new.
     Note that we rely on the pointer equality created here for
     TYPE_NAME to look through conversions in various places.  */
  TYPE_NAME (new_type) = TYPE_NAME (type);
  TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
  TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
  TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
  if (TREE_CODE (type) == RECORD_TYPE)
    TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);

  /* If we are in a record and have a small size, set the alignment to
     try for an integral mode.  Otherwise set it to try for a smaller
     type with BLKmode.  */
  if (in_record && size <= MAX_FIXED_MODE_SIZE)
    {
      new_size = ceil_pow2 (size);
      new_align = MIN (new_size, BIGGEST_ALIGNMENT);
      SET_TYPE_ALIGN (new_type, new_align);
    }
  else
    {
      /* Do not try to shrink the size if the RM size is not constant.  */
      if (TYPE_CONTAINS_TEMPLATE_P (type)
	  || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
	return type;

      /* Round the RM size up to a unit boundary to get the minimal size
	 for a BLKmode record.  Give up if it's already the size and we
	 don't need to lower the alignment.  */
      new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
      new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
      if (new_size == size && (max_align == 0 || align <= max_align))
	return type;

      /* NEW_SIZE & -NEW_SIZE isolates its lowest set bit, i.e. the
	 largest power-of-two alignment compatible with the new size.  */
      new_align = MIN (new_size & -new_size, BIGGEST_ALIGNMENT);
      if (max_align > 0 && new_align > max_align)
	new_align = max_align;
      SET_TYPE_ALIGN (new_type, MIN (align, new_align));
    }

  TYPE_USER_ALIGN (new_type) = 1;

  /* Now copy the fields, keeping the position and size as we don't want
     to change the layout by propagating the packedness downwards.  */
  tree new_field_list = NULL_TREE;
  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    {
      tree new_field_type = TREE_TYPE (field);
      tree new_field, new_size;

      /* Recursively make aggregate field types packable as well.  */
      if (RECORD_OR_UNION_TYPE_P (new_field_type)
	  && !TYPE_FAT_POINTER_P (new_field_type)
	  && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
	new_field_type = make_packable_type (new_field_type, true, max_align);

      /* However, for the last field in a not already packed record type
	 that is of an aggregate type, we need to use the RM size in the
	 packable version of the record type, see finish_record_type.  */
      if (!DECL_CHAIN (field)
	  && !TYPE_PACKED (type)
	  && RECORD_OR_UNION_TYPE_P (new_field_type)
	  && !TYPE_FAT_POINTER_P (new_field_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
	  && TYPE_ADA_SIZE (new_field_type))
	new_size = TYPE_ADA_SIZE (new_field_type);
      else
	new_size = DECL_SIZE (field);

      new_field
	= create_field_decl (DECL_NAME (field), new_field_type, new_type,
			     new_size, bit_position (field),
			     TYPE_PACKED (type),
			     !DECL_NONADDRESSABLE_P (field));

      DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (field);
      SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, field);
      if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
	DECL_QUALIFIER (new_field) = DECL_QUALIFIER (field);

      /* Prepend, then reverse the whole list below, to preserve order.  */
      DECL_CHAIN (new_field) = new_field_list;
      new_field_list = new_field;
    }

  finish_record_type (new_type, nreverse (new_field_list), 2, false);
  relate_alias_sets (new_type, type, ALIAS_SET_COPY);
  if (TYPE_STUB_DECL (type))
    SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
			    DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));

  /* If this is a padding record, we never want to make the size smaller
     than what was specified.  For QUAL_UNION_TYPE, also copy the size.  */
  if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      TYPE_SIZE (new_type) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
      new_size = size;
    }
  else
    {
      TYPE_SIZE (new_type) = bitsize_int (new_size);
      TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);
    }

  if (!TYPE_CONTAINS_TEMPLATE_P (type))
    SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));

  compute_record_mode (new_type);

  /* Try harder to get a packable type if necessary, for example
     in case the record itself contains a BLKmode field.  */
  if (in_record && TYPE_MODE (new_type) == BLKmode)
    SET_TYPE_MODE (new_type,
		   mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));

  /* If neither mode nor size nor alignment shrunk, return the old type.  */
  if (TYPE_MODE (new_type) == BLKmode && new_size >= size && max_align == 0)
    return type;

  return new_type;
}
/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
   If TYPE is the best type, return it.  Otherwise, make a new type.  We
   only support new integral and pointer types.  FOR_BIASED is true if
   we are making a biased type.  */

tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error.  Likewise if it's too large to represent.  */
  if (!size_tree || !tree_fits_uhwi_p (size_tree))
    return type;

  size = tree_to_uhwi (size_tree);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
		  && TYPE_BIASED_REPRESENTATION_P (type));

      /* Integer types with precision 0 are forbidden.  */
      if (size == 0)
	size = 1;

      /* Only do something if the type isn't a packed array type and doesn't
	 already have the proper size and the size isn't too large.  */
      if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
	  || (TYPE_PRECISION (type) == size && biased_p == for_biased)
	  || size > LONG_LONG_TYPE_SIZE)
	break;

      biased_p |= for_biased;

      /* The type should be an unsigned type if the original type is unsigned
	 or if the lower bound is constant and non-negative or if the type is
	 biased, see E_Signed_Integer_Subtype case of gnat_to_gnu_entity.  */
      if (TYPE_UNSIGNED (type)
	  || (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
	      && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
	  || biased_p)
	new_type = make_unsigned_type (size);
      else
	new_type = make_signed_type (size);
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
      /* Copy the name to show that it's essentially the same type and
	 not a subrange type.  */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Do something if this is a fat pointer, in which case we
	 may need to return the thin pointer.  */
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
	{
	  machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
	  /* Fall back to the default pointer mode if the target cannot
	     use a pointer of the requested size.  */
	  if (!targetm.valid_pointer_mode (p_mode))
	    p_mode = ptr_mode;
	  return
	    build_pointer_type_for_mode
	      (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
	       p_mode, 0);
	}
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
	 may need to return the fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
	return
	  build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  return type;
}
/* See if the data pointed to by the hash table slot is marked.  Used by
   the garbage collector to decide whether a cached pad type entry may be
   kept across a collection.  */

bool
pad_type_hasher::keep_cache_entry (pad_type_hash *&t)
{
  return ggc_marked_p (t->type);
}
1184 /* Return true iff the padded types are equivalent. */
1186 bool
1187 pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
1189 tree type1, type2;
1191 if (t1->hash != t2->hash)
1192 return 0;
1194 type1 = t1->type;
1195 type2 = t2->type;
1197 /* We consider that the padded types are equivalent if they pad the same type
1198 and have the same size, alignment, RM size and storage order. Taking the
1199 mode into account is redundant since it is determined by the others. */
1200 return
1201 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1202 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1203 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1204 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
1205 && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
/* Look up the padded TYPE in the hash table and return its canonical version
   if it exists; otherwise, insert it into the hash table.  */

static tree
lookup_and_insert_pad_type (tree type)
{
  hashval_t hashcode;
  struct pad_type_hash in, *h;

  /* Hash on the inner type, size, alignment and RM size, mirroring the
     equivalence test in pad_type_hasher::equal.  */
  hashcode
    = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
  hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
  hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
  hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);

  in.hash = hashcode;
  in.type = type;
  h = pad_type_hash_table->find_with_hash (&in, hashcode);
  if (h)
    return h->type;

  /* Not found: make TYPE the canonical version and signal this to the
     caller by returning NULL_TREE.  */
  h = ggc_alloc<pad_type_hash> ();
  h->hash = hashcode;
  h->type = type;
  *pad_type_hash_table->find_slot_with_hash (h, hashcode, INSERT) = h;
  return NULL_TREE;
}
/* Ensure that TYPE has SIZE and ALIGN.  Make and return a new padded type
   if needed.  We have already verified that SIZE and ALIGN are large enough.
   GNAT_ENTITY is used to name the resulting record and to issue a warning.
   IS_COMPONENT_TYPE is true if this is being done for the component type of
   an array.  IS_USER_TYPE is true if the original type needs to be completed.
   DEFINITION is true if this type is being defined.  SET_RM_SIZE is true if
   the RM size of the resulting type is to be set to SIZE too; in this case,
   the padded type is canonicalized before being returned.  */

tree
maybe_pad_type (tree type, tree size, unsigned int align,
		Entity_Id gnat_entity, bool is_component_type,
		bool is_user_type, bool definition, bool set_rm_size)
{
  tree orig_size = TYPE_SIZE (type);
  unsigned int orig_align = TYPE_ALIGN (type);
  tree record, field;

  /* If TYPE is a padded type, see if it agrees with any size and alignment
     we were given.  If so, return the original type.  Otherwise, strip
     off the padding, since we will either be returning the inner type
     or repadding it.  If no size or alignment is specified, use that of
     the original padded type.  */
  if (TYPE_IS_PADDING_P (type))
    {
      if ((!size
	   || operand_equal_p (round_up (size, orig_align), orig_size, 0))
	  && (align == 0 || align == orig_align))
	return type;

      if (!size)
	size = orig_size;
      if (align == 0)
	align = orig_align;

      type = TREE_TYPE (TYPE_FIELDS (type));
      orig_size = TYPE_SIZE (type);
      orig_align = TYPE_ALIGN (type);
    }

  /* If the size is either not being changed or is being made smaller (which
     is not done here and is only valid for bitfields anyway), show the size
     isn't changing.  Likewise, clear the alignment if it isn't being
     changed.  Then return if we aren't doing anything.  */
  if (size
      && (operand_equal_p (size, orig_size, 0)
	  || (TREE_CODE (orig_size) == INTEGER_CST
	      && tree_int_cst_lt (size, orig_size))))
    size = NULL_TREE;

  if (align == orig_align)
    align = 0;

  if (align == 0 && !size)
    return type;

  /* If requested, complete the original type and give it a name.  */
  if (is_user_type)
    create_type_decl (get_entity_name (gnat_entity), type,
		      !Comes_From_Source (gnat_entity),
		      !(TYPE_NAME (type)
			&& TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
			&& DECL_IGNORED_P (TYPE_NAME (type))),
		      gnat_entity);

  /* We used to modify the record in place in some cases, but that could
     generate incorrect debugging information.  So make a new record
     type and name.  */
  record = make_node (RECORD_TYPE);
  TYPE_PADDING_P (record) = 1;

  /* ??? Padding types around packed array implementation types will be
     considered as root types in the array descriptor language hook (see
     gnat_get_array_descr_info).  Give them the original packed array type
     name so that the one coming from sources appears in the debugging
     information.  */
  if (TYPE_IMPL_PACKED_ARRAY_P (type)
      && TYPE_ORIGINAL_PACKED_ARRAY (type)
      && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    TYPE_NAME (record) = TYPE_NAME (TYPE_ORIGINAL_PACKED_ARRAY (type));
  else if (Present (gnat_entity))
    TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");

  SET_TYPE_ALIGN (record, align ? align : orig_align);
  TYPE_SIZE (record) = size ? size : orig_size;
  TYPE_SIZE_UNIT (record)
    = convert (sizetype,
	       size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
			   bitsize_unit_node));

  /* If we are changing the alignment and the input type is a record with
     BLKmode and a small constant size, try to make a form that has an
     integral mode.  This might allow the padding record to also have an
     integral mode, which will be much more efficient.  There is no point
     in doing so if a size is specified unless it is also a small constant
     size and it is incorrect to do so if we cannot guarantee that the mode
     will be naturally aligned since the field must always be addressable.

     ??? This might not always be a win when done for a stand-alone object:
     since the nominal and the effective type of the object will now have
     different modes, a VIEW_CONVERT_EXPR will be required for converting
     between them and it might be hard to overcome afterwards, including
     at the RTL level when the stand-alone object is accessed as a whole.  */
  if (align != 0
      && RECORD_OR_UNION_TYPE_P (type)
      && TYPE_MODE (type) == BLKmode
      && !TYPE_BY_REFERENCE_P (type)
      && TREE_CODE (orig_size) == INTEGER_CST
      && !TREE_OVERFLOW (orig_size)
      && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
      && (!size
	  || (TREE_CODE (size) == INTEGER_CST
	      && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
    {
      tree packable_type = make_packable_type (type, true);
      if (TYPE_MODE (packable_type) != BLKmode
	  && align >= TYPE_ALIGN (packable_type))
	type = packable_type;
    }

  /* Now create the field with the original size.  */
  field = create_field_decl (get_identifier ("F"), type, record, orig_size,
			     bitsize_zero_node, 0, 1);
  DECL_INTERNAL_P (field) = 1;

  /* We will output additional debug info manually below.  */
  finish_record_type (record, field, 1, false);

  if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    SET_TYPE_DEBUG_TYPE (record, type);

  /* Set the RM size if requested.  */
  if (set_rm_size)
    {
      tree canonical_pad_type;

      SET_TYPE_ADA_SIZE (record, size ? size : orig_size);

      /* If the padded type is complete and has constant size, we canonicalize
	 it by means of the hash table.  This is consistent with the language
	 semantics and ensures that gigi and the middle-end have a common view
	 of these padded types.  */
      if (TREE_CONSTANT (TYPE_SIZE (record))
	  && (canonical_pad_type = lookup_and_insert_pad_type (record)))
	{
	  record = canonical_pad_type;
	  goto built;
	}
    }

  /* Unless debugging information isn't being written for the input type,
     write a record that shows what we are a subtype of and also make a
     variable that indicates our size, if still variable.  */
  if (TREE_CODE (orig_size) != INTEGER_CST
      && TYPE_NAME (record)
      && TYPE_NAME (type)
      && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	   && DECL_IGNORED_P (TYPE_NAME (type))))
    {
      tree name = TYPE_IDENTIFIER (record);
      tree size_unit = TYPE_SIZE_UNIT (record);

      /* A variable that holds the size is required even with no encoding since
	 it will be referenced by debugging information attributes.  At global
	 level, we need a single variable across all translation units.  */
      if (size
	  && TREE_CODE (size) != INTEGER_CST
	  && (definition || global_bindings_p ()))
	{
	  /* Whether or not gnat_entity comes from source, this XVZ variable
	     is a compilation artifact.  */
	  size_unit
	    = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
			       size_unit, true, global_bindings_p (),
			       !definition && global_bindings_p (), false,
			       false, true, true, NULL, gnat_entity);
	  TYPE_SIZE_UNIT (record) = size_unit;
	}

      /* There is no need to show what we are a subtype of when outputting as
	 few encodings as possible: regular debugging information makes this
	 redundant.  */
      if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
	{
	  tree marker = make_node (RECORD_TYPE);
	  tree orig_name = TYPE_IDENTIFIER (type);

	  TYPE_NAME (marker) = concat_name (name, "XVS");
	  finish_record_type (marker,
			      create_field_decl (orig_name,
						 build_reference_type (type),
						 marker, NULL_TREE, NULL_TREE,
						 0, 0),
			      0, true);
	  TYPE_SIZE_UNIT (marker) = size_unit;

	  add_parallel_type (record, marker);
	}
    }

 built:
  /* If a simple size was explicitly given, maybe issue a warning.  */
  if (!size
      || TREE_CODE (size) == COND_EXPR
      || TREE_CODE (size) == MAX_EXPR
      || No (gnat_entity))
    return record;

  /* But don't do it if we are just annotating types and the type is tagged or
     concurrent, since these types aren't fully laid out in this mode.  */
  if (type_annotate_only)
    {
      Entity_Id gnat_type
	= is_component_type
	  ? Component_Type (gnat_entity) : Etype (gnat_entity);

      if (Is_Tagged_Type (gnat_type) || Is_Concurrent_Type (gnat_type))
	return record;
    }

  /* Take the original size as the maximum size of the input if there was an
     unconstrained record involved and round it up to the specified alignment,
     if one was specified, but only for aggregate types.  */
  if (CONTAINS_PLACEHOLDER_P (orig_size))
    orig_size = max_size (orig_size, true);

  if (align && AGGREGATE_TYPE_P (type))
    orig_size = round_up (orig_size, align);

  if (!operand_equal_p (size, orig_size, 0)
      && !(TREE_CODE (size) == INTEGER_CST
	   && TREE_CODE (orig_size) == INTEGER_CST
	   && (TREE_OVERFLOW (size)
	       || TREE_OVERFLOW (orig_size)
	       || tree_int_cst_lt (size, orig_size))))
    {
      Node_Id gnat_error_node = Empty;

      /* For a packed array, post the message on the original array type.  */
      if (Is_Packed_Array_Impl_Type (gnat_entity))
	gnat_entity = Original_Array_Type (gnat_entity);

      if ((Ekind (gnat_entity) == E_Component
	   || Ekind (gnat_entity) == E_Discriminant)
	  && Present (Component_Clause (gnat_entity)))
	gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
      else if (Present (Size_Clause (gnat_entity)))
	gnat_error_node = Expression (Size_Clause (gnat_entity));

      /* Generate message only for entities that come from source, since
	 if we have an entity created by expansion, the message will be
	 generated for some other corresponding source entity.  */
      if (Comes_From_Source (gnat_entity))
	{
	  if (Present (gnat_error_node))
	    post_error_ne_tree ("{^ }bits of & unused?",
				gnat_error_node, gnat_entity,
				size_diffop (size, orig_size));
	  else if (is_component_type)
	    post_error_ne_tree ("component of& padded{ by ^ bits}?",
				gnat_entity, gnat_entity,
				size_diffop (size, orig_size));
	}
    }

  return record;
}
/* Return a copy of the padded TYPE but with reverse storage order.  */

tree
set_reverse_storage_order_on_pad_type (tree type)
{
  tree field, canonical_pad_type;

  if (flag_checking)
    {
      /* If the inner type is not scalar then the function does nothing.  */
      tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
      gcc_assert (!AGGREGATE_TYPE_P (inner_type)
		  && !VECTOR_TYPE_P (inner_type));
    }

  /* This is required for the canonicalization.  */
  gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));

  /* Copy both the field and the type so the original padded type is left
     untouched, then flip the storage order on the copy.  */
  field = copy_node (TYPE_FIELDS (type));
  type = copy_type (type);
  DECL_CONTEXT (field) = type;
  TYPE_FIELDS (type) = field;
  TYPE_REVERSE_STORAGE_ORDER (type) = 1;

  /* Reuse an existing equivalent padded type if one is already known.  */
  canonical_pad_type = lookup_and_insert_pad_type (type);
  return canonical_pad_type ? canonical_pad_type : type;
}
/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
   If this is a multi-dimensional array type, do this recursively.

   OP may be
   - ALIAS_SET_COPY:     the new set is made a copy of the old one.
   - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
   - ALIAS_SET_SUBSET:   the new set is made a subset of the old one.  */

void
relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
{
  /* Remove any padding from GNU_OLD_TYPE.  It doesn't matter in the case
     of a one-dimensional array, since the padding has the same alias set
     as the field type, but if it's a multi-dimensional array, we need to
     see the inner types.  */
  while (TREE_CODE (gnu_old_type) == RECORD_TYPE
	 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
	     || TYPE_PADDING_P (gnu_old_type)))
    gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));

  /* Unconstrained array types are deemed incomplete and would thus be given
     alias set 0.  Retrieve the underlying array type.  */
  if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_old_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
  if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_new_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));

  /* For multi-dimensional arrays, relate the inner dimensions as well.  */
  if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
      && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
    relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);

  switch (op)
    {
    case ALIAS_SET_COPY:
      /* The alias set shouldn't be copied between array types with different
	 aliasing settings because this can break the aliasing relationship
	 between the array type and its element type.  */
      if (flag_checking || flag_strict_aliasing)
	gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
		      && TREE_CODE (gnu_old_type) == ARRAY_TYPE
		      && TYPE_NONALIASED_COMPONENT (gnu_new_type)
			 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));

      TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
      break;

    case ALIAS_SET_SUBSET:
    case ALIAS_SET_SUPERSET:
      {
	alias_set_type old_set = get_alias_set (gnu_old_type);
	alias_set_type new_set = get_alias_set (gnu_new_type);

	/* Do nothing if the alias sets conflict.  This ensures that we
	   never call record_alias_subset several times for the same pair
	   or at all for alias set 0.  */
	if (!alias_sets_conflict_p (old_set, new_set))
	  {
	    if (op == ALIAS_SET_SUBSET)
	      record_alias_subset (old_set, new_set);
	    else
	      record_alias_subset (new_set, old_set);
	  }
      }
      break;

    default:
      gcc_unreachable ();
    }

  record_component_aliases (gnu_new_type);
}
1606 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1607 ARTIFICIAL_P is true if the type was generated by the compiler. */
1609 void
1610 record_builtin_type (const char *name, tree type, bool artificial_p)
1612 tree type_decl = build_decl (input_location,
1613 TYPE_DECL, get_identifier (name), type);
1614 DECL_ARTIFICIAL (type_decl) = artificial_p;
1615 TYPE_ARTIFICIAL (type) = artificial_p;
1616 gnat_pushdecl (type_decl, Empty);
1618 if (debug_hooks->type_decl)
1619 debug_hooks->type_decl (type_decl, false);
/* Finish constructing the character type CHAR_TYPE.

   In Ada character types are enumeration types and, as a consequence, are
   represented in the front-end by integral types holding the positions of
   the enumeration values as defined by the language, which means that the
   integral types are unsigned.

   Unfortunately the signedness of 'char' in C is implementation-defined
   and GCC even has the option -fsigned-char to toggle it at run time.
   Since GNAT's philosophy is to be compatible with C by default, to wit
   Interfaces.C.char is defined as a mere copy of Character, we may need
   to declare character types as signed types in GENERIC and generate the
   necessary adjustments to make them behave as unsigned types.

   The overall strategy is as follows: if 'char' is unsigned, do nothing;
   if 'char' is signed, translate character types of CHAR_TYPE_SIZE and
   character subtypes with RM_Size = Esize = CHAR_TYPE_SIZE into signed
   types.  The idea is to ensure that the bit pattern contained in the
   Esize'd objects is not changed, even though the numerical value will
   be interpreted differently depending on the signedness.

   For character types, the bounds are implicit and, therefore, need to
   be adjusted.  Moreover, the debug info needs the unsigned version.  */

void
finish_character_type (tree char_type)
{
  /* Nothing to do if the type is already unsigned.  */
  if (TYPE_UNSIGNED (char_type))
    return;

  /* Make a copy of a generic unsigned version since we'll modify it.  */
  tree unsigned_char_type
    = (char_type == char_type_node
       ? unsigned_char_type_node
       : copy_type (gnat_unsigned_type_for (char_type)));

  /* Make the unsigned copy carry the same identity as the signed type.  */
  TYPE_NAME (unsigned_char_type) = TYPE_NAME (char_type);
  TYPE_STRING_FLAG (unsigned_char_type) = TYPE_STRING_FLAG (char_type);
  TYPE_ARTIFICIAL (unsigned_char_type) = TYPE_ARTIFICIAL (char_type);

  /* Debug info uses the unsigned version; the RM bounds are taken from it
     so the language-level bounds stay those of the enumeration.  */
  SET_TYPE_DEBUG_TYPE (char_type, unsigned_char_type);
  SET_TYPE_RM_MIN_VALUE (char_type, TYPE_MIN_VALUE (unsigned_char_type));
  SET_TYPE_RM_MAX_VALUE (char_type, TYPE_MAX_VALUE (unsigned_char_type));
}
/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record type as a fat pointer type.  */

void
finish_fat_pointer_type (tree record_type, tree field_list)
{
  /* Make sure we can put it into a register.  */
  if (STRICT_ALIGNMENT)
    SET_TYPE_ALIGN (record_type, MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE));

  /* Show what it really is.  */
  TYPE_FAT_POINTER_P (record_type) = 1;

  /* Do not emit debug info for it since the types of its fields may still be
     incomplete at this point.  */
  finish_record_type (record_type, field_list, 0, false);

  /* Force type_contains_placeholder_p to return true on it.  Although the
     PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
     type but the representation of the unconstrained array.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
}
1690 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1691 finish constructing the record or union type. If REP_LEVEL is zero, this
1692 record has no representation clause and so will be entirely laid out here.
1693 If REP_LEVEL is one, this record has a representation clause and has been
1694 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1695 this record is derived from a parent record and thus inherits its layout;
1696 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1697 additional debug info needs to be output for this type. */
void
finish_record_type (tree record_type, tree field_list, int rep_level,
		    bool debug_info_p)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_IDENTIFIER (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  /* Remember which layout properties were already set on entry: a rep'ed
     record may come in with its size and alignment pre-computed.  */
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  bool had_align = TYPE_ALIGN (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      SET_TYPE_ALIGN (record_type, MAX (BITS_PER_UNIT,
					TYPE_ALIGN (record_type)));

      if (!had_size_unit)
	TYPE_SIZE_UNIT (record_type) = size_zero_node;

      if (!had_size)
	TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
	 out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
	code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
	 case where there is a rep clause but all fields have errors and
	 no longer have a position.  */
      TYPE_SIZE (record_type) = 0;

      /* Ensure we use the traditional GCC layout for bitfields when we need
	 to pack the record type or have a representation clause.  The other
	 possible layout (Microsoft C compiler), if available, would prevent
	 efficient packing in almost all cases.  */
#ifdef TARGET_MS_BITFIELD_LAYOUT
      if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
	decl_attributes (&record_type,
			 tree_cons (get_identifier ("gcc_struct"),
				    NULL_TREE, NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
#endif

      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      if (RECORD_OR_UNION_TYPE_P (type)
	  && !TYPE_FAT_POINTER_P (type)
	  && !TYPE_CONTAINS_TEMPLATE_P (type)
	  && TYPE_ADA_SIZE (type))
	this_ada_size = TYPE_ADA_SIZE (type);
      else
	this_ada_size = this_size;

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
	{
	  unsigned int align = TYPE_ALIGN (type);

	  /* In the general case, type alignment is required.  */
	  if (value_factor_p (pos, align))
	    {
	      /* The enclosing record type must be sufficiently aligned.
		 Otherwise, if no alignment was specified for it and it
		 has been laid out already, bump its alignment to the
		 desired one if this is compatible with its size and
		 maximum alignment, if any.  */
	      if (TYPE_ALIGN (record_type) >= align)
		{
		  SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
		  DECL_BIT_FIELD (field) = 0;
		}
	      else if (!had_align
		       && rep_level == 0
		       && value_factor_p (TYPE_SIZE (record_type), align)
		       && (!TYPE_MAX_ALIGN (record_type)
			   || TYPE_MAX_ALIGN (record_type) >= align))
		{
		  SET_TYPE_ALIGN (record_type, align);
		  SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
		  DECL_BIT_FIELD (field) = 0;
		}
	    }

	  /* In the non-strict alignment case, only byte alignment is
	     required.  */
	  if (!STRICT_ALIGNMENT
	      && DECL_BIT_FIELD (field)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    DECL_BIT_FIELD (field) = 0;
	}

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
	 field is technically not addressable.  Except that it can actually
	 be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
	  && !(DECL_MODE (field) == BLKmode
	       && value_factor_p (pos, BITS_PER_UNIT)))
	DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
	 a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
	SET_TYPE_ALIGN (record_type,
			MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field)));

      switch (code)
	{
	case UNION_TYPE:
	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
	  size = size_binop (MAX_EXPR, size, this_size);
	  break;

	case QUAL_UNION_TYPE:
	  ada_size
	    = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			   this_ada_size, ada_size);
	  size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			      this_size, size);
	  break;

	case RECORD_TYPE:
	  /* Since we know here that all fields are sorted in order of
	     increasing bit position, the size of the record is one
	     higher than the ending bit of the last field processed
	     unless we have a rep clause, since in that case we might
	     have a field outside a QUAL_UNION_TYPE that has a higher ending
	     position.  So use a MAX in that case.  Also, if this field is a
	     QUAL_UNION_TYPE, we need to take into account the previous size in
	     the case of empty variants.  */
	  ada_size
	    = merge_sizes (ada_size, pos, this_ada_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  size
	    = merge_sizes (size, pos, this_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  if (code == QUAL_UNION_TYPE)
    nreverse (field_list);

  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
	 than what was specified in it, if any.  */
      if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
	size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_FAT_POINTER_P (record_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (record_type))
	SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
	{
	  tree size_unit = had_size_unit
			   ? TYPE_SIZE_UNIT (record_type)
			   : convert (sizetype,
				      size_binop (CEIL_DIV_EXPR, size,
						  bitsize_unit_node));
	  unsigned int align = TYPE_ALIGN (record_type);

	  TYPE_SIZE (record_type) = variable_size (round_up (size, align));
	  TYPE_SIZE_UNIT (record_type)
	    = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

	  compute_record_mode (record_type);
	}
    }

  /* Reset the TYPE_MAX_ALIGN field since it's private to gigi.  */
  TYPE_MAX_ALIGN (record_type) = 0;

  if (debug_info_p)
    rest_of_record_type_compilation (record_type);
}
1913 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
1914 PARRALEL_TYPE has no context and its computation is not deferred yet, also
1915 propagate TYPE's context to PARALLEL_TYPE's or defer its propagation to the
1916 moment TYPE will get a context. */
1918 void
1919 add_parallel_type (tree type, tree parallel_type)
1921 tree decl = TYPE_STUB_DECL (type);
1923 while (DECL_PARALLEL_TYPE (decl))
1924 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1926 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1928 /* If PARALLEL_TYPE already has a context, we are done. */
1929 if (TYPE_CONTEXT (parallel_type))
1930 return;
1932 /* Otherwise, try to get one from TYPE's context. If so, simply propagate
1933 it to PARALLEL_TYPE. */
1934 if (TYPE_CONTEXT (type))
1935 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
1937 /* Otherwise TYPE has not context yet. We know it will have one thanks to
1938 gnat_pushdecl and then its context will be propagated to PARALLEL_TYPE,
1939 so we have nothing to do in this case. */
1942 /* Return true if TYPE has a parallel type. */
1944 static bool
1945 has_parallel_type (tree type)
1947 tree decl = TYPE_STUB_DECL (type);
1949 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
/* Wrap up compilation of RECORD_TYPE, i.e. output additional debug info
   associated with it.  It need not be invoked directly in most cases as
   finish_record_type takes care of doing so.  */

void
rest_of_record_type_compilation (tree record_type)
{
  bool var_size = false;
  tree field;

  /* If this is a padded type, the bulk of the debug info has already been
     generated for the field's type.  */
  if (TYPE_IS_PADDING_P (record_type))
    return;

  /* If the type already has a parallel type (XVS type), then we're done.  */
  if (has_parallel_type (record_type))
    return;

  for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
	 whether or not the record does.  For example, if we have a union,
	 it may be that all fields, rounded up to the alignment, have the
	 same size, in which case we'll use that size.  But the debug
	 output routines (except Dwarf2) won't be able to output the fields,
	 so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
	  /* If a field has a non-constant qualifier, the record will have
	     variable size too.  */
	  || (TREE_CODE (record_type) == QUAL_UNION_TYPE
	      && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
	{
	  var_size = true;
	  break;
	}
    }

  /* If this record type is of variable size, make a parallel record type that
     will tell the debugger how the former is laid out (see exp_dbug.ads).  */
  if (var_size && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
    {
      tree new_record_type
	= make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
		     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
      tree last_pos = bitsize_zero_node;
      tree old_field, prev_old_field = NULL_TREE;

      /* The parallel type is named after the original with an XVU (variable
	 union) or XVE (variable everything else) suffix; see exp_dbug.ads.  */
      new_name
	= concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
		       ? "XVU" : "XVE");
      TYPE_NAME (new_record_type) = new_name;
      SET_TYPE_ALIGN (new_record_type, BIGGEST_ALIGNMENT);
      TYPE_STUB_DECL (new_record_type)
	= create_type_stub_decl (new_name, new_record_type);
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
	= DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      gnat_pushdecl (TYPE_STUB_DECL (new_record_type), Empty);
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
	= size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      /* Now scan all the fields, replacing each field with a new field
	 corresponding to the new encoding.  */
      for (old_field = TYPE_FIELDS (record_type); old_field;
	   old_field = DECL_CHAIN (old_field))
	{
	  tree field_type = TREE_TYPE (old_field);
	  tree field_name = DECL_NAME (old_field);
	  tree curpos = bit_position (old_field);
	  tree pos, new_field;
	  bool var = false;
	  unsigned int align = 0;

	  /* We're going to do some pattern matching below so remove as many
	     conversions as possible.  */
	  curpos = remove_conversions (curpos, true);

	  /* See how the position was modified from the last position.

	     There are two basic cases we support: a value was added
	     to the last position or the last position was rounded to
	     a boundary and they something was added.  Check for the
	     first case first.  If not, see if there is any evidence
	     of rounding.  If so, round the last position and retry.

	     If this is a union, the position can be taken as zero.  */
	  if (TREE_CODE (new_record_type) == UNION_TYPE)
	    pos = bitsize_zero_node;
	  else
	    pos = compute_related_constant (curpos, last_pos);

	  if (!pos
	      && TREE_CODE (curpos) == MULT_EXPR
	      && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
	    {
	      tree offset = TREE_OPERAND (curpos, 0);
	      align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align = scale_by_factor_of (offset, align);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  else if (!pos
		   && TREE_CODE (curpos) == PLUS_EXPR
		   && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
		   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
		   && tree_fits_uhwi_p
		      (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
	    {
	      tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
	      unsigned HOST_WIDE_INT addend
		= tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align
		= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
	      align = scale_by_factor_of (offset, align);
	      /* The alignment cannot exceed the lowest set bit of the
		 constant addend.  */
	      align = MIN (align, addend & -addend);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  else if (potential_alignment_gap (prev_old_field, old_field, pos))
	    {
	      align = TYPE_ALIGN (field_type);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }

	  /* If we can't compute a position, set it to zero.

	     ??? We really should abort here, but it's too much work
	     to get this correct for all cases.  */
	  if (!pos)
	    pos = bitsize_zero_node;

	  /* See if this type is variable-sized and make a pointer type
	     and indicate the indirection if so.  Beware that the debug
	     back-end may adjust the position computed above according
	     to the alignment of the field type, i.e. the pointer type
	     in this case, if we don't preventively counter that.  */
	  if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
	    {
	      field_type = build_pointer_type (field_type);
	      if (align != 0 && TYPE_ALIGN (field_type) > align)
		{
		  field_type = copy_type (field_type);
		  SET_TYPE_ALIGN (field_type, align);
		}
	      var = true;
	    }

	  /* Make a new field name, if necessary.  */
	  if (var || align != 0)
	    {
	      char suffix[16];

	      if (align != 0)
		sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
			 align / BITS_PER_UNIT);
	      else
		strcpy (suffix, "XVL");

	      field_name = concat_name (field_name, suffix);
	    }

	  new_field
	    = create_field_decl (field_name, field_type, new_record_type,
				 DECL_SIZE (old_field), pos, 0, 0);
	  /* Fields are chained in reverse order here and the whole list is
	     reversed once after the loop.  */
	  DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
	  TYPE_FIELDS (new_record_type) = new_field;

	  /* If old_field is a QUAL_UNION_TYPE, take its size as being
	     zero.  The only time it's not the last field of the record
	     is when there are other components at fixed positions after
	     it (meaning there was a rep clause for every field) and we
	     want to be able to encode them.  */
	  last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
				 (TREE_CODE (TREE_TYPE (old_field))
				  == QUAL_UNION_TYPE)
				 ? bitsize_zero_node
				 : DECL_SIZE (old_field));
	  prev_old_field = old_field;
	}

      TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));

      add_parallel_type (record_type, new_record_type);
    }
}
2141 /* Utility function of above to merge LAST_SIZE, the previous size of a record
2142 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2143 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2144 replace a value of zero with the old size. If HAS_REP is true, we take the
2145 MAX of the end position of this field with LAST_SIZE. In all other cases,
2146 we use FIRST_BIT plus SIZE. Return an expression for the size. */
2148 static tree
2149 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
2150 bool has_rep)
2152 tree type = TREE_TYPE (last_size);
2153 tree new_size;
2155 if (!special || TREE_CODE (size) != COND_EXPR)
2157 new_size = size_binop (PLUS_EXPR, first_bit, size);
2158 if (has_rep)
2159 new_size = size_binop (MAX_EXPR, last_size, new_size);
2162 else
2163 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
2164 integer_zerop (TREE_OPERAND (size, 1))
2165 ? last_size : merge_sizes (last_size, first_bit,
2166 TREE_OPERAND (size, 1),
2167 1, has_rep),
2168 integer_zerop (TREE_OPERAND (size, 2))
2169 ? last_size : merge_sizes (last_size, first_bit,
2170 TREE_OPERAND (size, 2),
2171 1, has_rep));
2173 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
2174 when fed through substitute_in_expr) into thinking that a constant
2175 size is not constant. */
2176 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
2177 new_size = TREE_OPERAND (new_size, 0);
2179 return new_size;
2182 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2183 related by the addition of a constant. Return that constant if so. */
2185 static tree
2186 compute_related_constant (tree op0, tree op1)
2188 tree op0_var, op1_var;
2189 tree op0_con = split_plus (op0, &op0_var);
2190 tree op1_con = split_plus (op1, &op1_var);
2191 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
2193 if (operand_equal_p (op0_var, op1_var, 0))
2194 return result;
2195 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
2196 return result;
2197 else
2198 return 0;
2201 /* Utility function of above to split a tree OP which may be a sum, into a
2202 constant part, which is returned, and a variable part, which is stored
2203 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2204 bitsizetype. */
2206 static tree
2207 split_plus (tree in, tree *pvar)
2209 /* Strip conversions in order to ease the tree traversal and maximize the
2210 potential for constant or plus/minus discovery. We need to be careful
2211 to always return and set *pvar to bitsizetype trees, but it's worth
2212 the effort. */
2213 in = remove_conversions (in, false);
2215 *pvar = convert (bitsizetype, in);
2217 if (TREE_CODE (in) == INTEGER_CST)
2219 *pvar = bitsize_zero_node;
2220 return convert (bitsizetype, in);
2222 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2224 tree lhs_var, rhs_var;
2225 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2226 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2228 if (lhs_var == TREE_OPERAND (in, 0)
2229 && rhs_var == TREE_OPERAND (in, 1))
2230 return bitsize_zero_node;
2232 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2233 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2235 else
2236 return bitsize_zero_node;
2239 /* Return a copy of TYPE but safe to modify in any way. */
2241 tree
2242 copy_type (tree type)
2244 tree new_type = copy_node (type);
2246 /* Unshare the language-specific data. */
2247 if (TYPE_LANG_SPECIFIC (type))
2249 TYPE_LANG_SPECIFIC (new_type) = NULL;
2250 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2253 /* And the contents of the language-specific slot if needed. */
2254 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2255 && TYPE_RM_VALUES (type))
2257 TYPE_RM_VALUES (new_type) = NULL_TREE;
2258 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2259 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2260 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2263 /* copy_node clears this field instead of copying it, because it is
2264 aliased with TREE_CHAIN. */
2265 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
2267 TYPE_POINTER_TO (new_type) = NULL_TREE;
2268 TYPE_REFERENCE_TO (new_type) = NULL_TREE;
2269 TYPE_MAIN_VARIANT (new_type) = new_type;
2270 TYPE_NEXT_VARIANT (new_type) = NULL_TREE;
2271 TYPE_CANONICAL (new_type) = new_type;
2273 return new_type;
2276 /* Return a subtype of sizetype with range MIN to MAX and whose
2277 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2278 of the associated TYPE_DECL. */
2280 tree
2281 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2283 /* First build a type for the desired range. */
2284 tree type = build_nonshared_range_type (sizetype, min, max);
2286 /* Then set the index type. */
2287 SET_TYPE_INDEX_TYPE (type, index);
2288 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2290 return type;
2293 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2294 sizetype is used. */
2296 tree
2297 create_range_type (tree type, tree min, tree max)
2299 tree range_type;
2301 if (!type)
2302 type = sizetype;
2304 /* First build a type with the base range. */
2305 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2306 TYPE_MAX_VALUE (type));
2308 /* Then set the actual range. */
2309 SET_TYPE_RM_MIN_VALUE (range_type, min);
2310 SET_TYPE_RM_MAX_VALUE (range_type, max);
2312 return range_type;
2315 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
2316 NAME gives the name of the type to be used in the declaration. */
2318 tree
2319 create_type_stub_decl (tree name, tree type)
2321 tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
2322 DECL_ARTIFICIAL (type_decl) = 1;
2323 TYPE_ARTIFICIAL (type) = 1;
2324 return type_decl;
2327 /* Return a TYPE_DECL node for TYPE. NAME gives the name of the type to be
2328 used in the declaration. ARTIFICIAL_P is true if the declaration was
2329 generated by the compiler. DEBUG_INFO_P is true if we need to write
2330 debug information about this type. GNAT_NODE is used for the position
2331 of the decl. */
2333 tree
2334 create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
2335 Node_Id gnat_node)
2337 enum tree_code code = TREE_CODE (type);
2338 bool is_named
2339 = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2340 tree type_decl;
2342 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2343 gcc_assert (!TYPE_IS_DUMMY_P (type));
2345 /* If the type hasn't been named yet, we're naming it; preserve an existing
2346 TYPE_STUB_DECL that has been attached to it for some purpose. */
2347 if (!is_named && TYPE_STUB_DECL (type))
2349 type_decl = TYPE_STUB_DECL (type);
2350 DECL_NAME (type_decl) = name;
2352 else
2353 type_decl = build_decl (input_location, TYPE_DECL, name, type);
2355 DECL_ARTIFICIAL (type_decl) = artificial_p;
2356 TYPE_ARTIFICIAL (type) = artificial_p;
2358 /* Add this decl to the current binding level. */
2359 gnat_pushdecl (type_decl, gnat_node);
2361 /* If we're naming the type, equate the TYPE_STUB_DECL to the name. This
2362 causes the name to be also viewed as a "tag" by the debug back-end, with
2363 the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
2364 types in DWARF.
2366 Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
2367 from multiple contexts, and "type_decl" references a copy of it: in such a
2368 case, do not mess TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
2369 with the mechanism above. */
2370 if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
2371 TYPE_STUB_DECL (type) = type_decl;
2373 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2374 back-end doesn't support, and for others if we don't need to. */
2375 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2376 DECL_IGNORED_P (type_decl) = 1;
2378 return type_decl;
/* Return a VAR_DECL or CONST_DECL node.

   NAME gives the name of the variable.  ASM_NAME is its assembler name
   (if provided).  TYPE is its data type (a GCC ..._TYPE node).  INIT is
   the GCC tree for an optional initial expression; NULL_TREE if none.

   CONST_FLAG is true if this variable is constant, in which case we might
   return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.

   PUBLIC_FLAG is true if this is for a reference to a public entity or for a
   definition to be made visible outside of the current compilation unit, for
   instance variable definitions in a package specification.

   EXTERN_FLAG is true when processing an external variable declaration (as
   opposed to a definition: no storage is to be allocated for the variable).

   STATIC_FLAG is only relevant when not at top level and indicates whether
   to always allocate storage to the variable.

   VOLATILE_FLAG is true if this variable is declared as volatile.

   ARTIFICIAL_P is true if the variable was generated by the compiler.

   DEBUG_INFO_P is true if we need to write debug information for it.

   ATTR_LIST is the list of attributes to be attached to the variable.

   GNAT_NODE is used for the position of the decl.  */

tree
create_var_decl (tree name, tree asm_name, tree type, tree init,
		 bool const_flag, bool public_flag, bool extern_flag,
		 bool static_flag, bool volatile_flag, bool artificial_p,
		 bool debug_info_p, struct attrib *attr_list,
		 Node_Id gnat_node, bool const_decl_allowed_p)
{
  /* Whether the object has static storage duration, either explicitly or by
     virtue of being declared at the global level.  */
  const bool static_storage = static_flag || global_bindings_p ();

  /* Whether the initializer is constant: for an external object or an object
     with static storage duration, we check that the initializer is a valid
     constant expression for initializing a static variable; otherwise, we
     only check that it is constant.  */
  const bool init_const
    = (init
       && gnat_types_compatible_p (type, TREE_TYPE (init))
       && (extern_flag || static_storage
	   ? initializer_constant_valid_p (init, TREE_TYPE (init))
	     != NULL_TREE
	   : TREE_CONSTANT (init)));

  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
     case the initializer may be used in lieu of the DECL node (as done in
     Identifier_to_gnu).  This is useful to prevent the need of elaboration
     code when an identifier for which such a DECL is made is in turn used
     as an initializer.  We used to rely on CONST_DECL vs VAR_DECL for this,
     but extra constraints apply to this choice (see below) and they are not
     relevant to the distinction we wish to make.  */
  const bool constant_p = const_flag && init_const;

  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
     and may be used for scalars in general but not for aggregates.  */
  tree var_decl
    = build_decl (input_location,
		  (constant_p && const_decl_allowed_p
		   && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
		  name, type);

  /* Detect constants created by the front-end to hold 'reference to function
     calls for stabilization purposes.  This is needed for renaming.  */
  if (const_flag && init && POINTER_TYPE_P (type))
    {
      tree inner = init;
      if (TREE_CODE (inner) == COMPOUND_EXPR)
	inner = TREE_OPERAND (inner, 1);
      inner = remove_conversions (inner, true);
      if (TREE_CODE (inner) == ADDR_EXPR
	  && ((TREE_CODE (TREE_OPERAND (inner, 0)) == CALL_EXPR
	       && !call_is_atomic_load (TREE_OPERAND (inner, 0)))
	      || (TREE_CODE (TREE_OPERAND (inner, 0)) == VAR_DECL
		  && DECL_RETURN_VALUE_P (TREE_OPERAND (inner, 0)))))
	DECL_RETURN_VALUE_P (var_decl) = 1;
    }

  /* If this is external, throw away any initializations (they will be done
     elsewhere) unless this is a constant for which we would like to remain
     able to get the initializer.  If we are defining a global here, leave a
     constant initialization and save any variable elaborations for the
     elaboration routine.  If we are just annotating types, throw away the
     initialization if it isn't a constant.  */
  if ((extern_flag && !constant_p)
      || (type_annotate_only && init && !TREE_CONSTANT (init)))
    init = NULL_TREE;

  /* At the global level, a non-constant initializer generates elaboration
     statements.  Check that such statements are allowed, that is to say,
     not violating a No_Elaboration_Code restriction.  */
  if (init && !init_const && global_bindings_p ())
    Check_Elaboration_Code_Allowed (gnat_node);

  /* Attach the initializer, if any.  */
  DECL_INITIAL (var_decl) = init;

  /* Directly set some flags.  */
  DECL_ARTIFICIAL (var_decl) = artificial_p;
  DECL_EXTERNAL (var_decl) = extern_flag;

  TREE_CONSTANT (var_decl) = constant_p;
  TREE_READONLY (var_decl) = const_flag;

  /* The object is public if it is external or if it is declared public
     and has static storage duration.  */
  TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);

  /* We need to allocate static storage for an object with static storage
     duration if it isn't external.  */
  TREE_STATIC (var_decl) = !extern_flag && static_storage;

  TREE_SIDE_EFFECTS (var_decl)
    = TREE_THIS_VOLATILE (var_decl)
    = TYPE_VOLATILE (type) | volatile_flag;

  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && TREE_PUBLIC (var_decl)
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;

  /* Do not emit debug info for a CONST_DECL if optimization isn't enabled,
     since we will create an associated variable.  Likewise for an external
     constant whose initializer is not absolute, because this would mean a
     global relocation in a read-only section which runs afoul of the PE-COFF
     run-time relocation mechanism.  */
  if (!debug_info_p
      || (TREE_CODE (var_decl) == CONST_DECL && !optimize)
      || (extern_flag
	  && constant_p
	  && init
	  && initializer_constant_valid_p (init, TREE_TYPE (init))
	     != null_pointer_node))
    DECL_IGNORED_P (var_decl) = 1;

  /* ??? Some attributes cannot be applied to CONST_DECLs.  */
  if (TREE_CODE (var_decl) == VAR_DECL)
    process_attributes (&var_decl, &attr_list, true, gnat_node);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
    {
      /* Let the target mangle the name if this isn't a verbatim asm.  */
      if (*IDENTIFIER_POINTER (asm_name) != '*')
	asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);

      SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
    }

  return var_decl;
}
2550 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2552 static bool
2553 aggregate_type_contains_array_p (tree type)
2555 switch (TREE_CODE (type))
2557 case RECORD_TYPE:
2558 case UNION_TYPE:
2559 case QUAL_UNION_TYPE:
2561 tree field;
2562 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2563 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2564 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2565 return true;
2566 return false;
2569 case ARRAY_TYPE:
2570 return true;
2572 default:
2573 gcc_unreachable ();
2577 /* Return a FIELD_DECL node. NAME is the field's name, TYPE is its type and
2578 RECORD_TYPE is the type of the enclosing record. If SIZE is nonzero, it
2579 is the specified size of the field. If POS is nonzero, it is the bit
2580 position. PACKED is 1 if the enclosing record is packed, -1 if it has
2581 Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2582 means we are allowed to take the address of the field; if it is negative,
2583 we should not make a bitfield, which is used by make_aligning_type. */
tree
create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
		   int packed, int addressable)
{
  tree field_decl = build_decl (input_location, FIELD_DECL, name, type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
     Likewise for an aggregate without specified position that contains an
     array, because in this case slices of variable length of this array
     must be handled by GCC and variable-sized objects need to be aligned
     to at least a byte boundary.  */
  if (packed && (TYPE_MODE (type) == BLKmode
		 || (!pos
		     && AGGREGATE_TYPE_P (type)
		     && aggregate_type_contains_array_p (type))))
    SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (type);
      if (TYPE_MODE (type) == BLKmode)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* If we may, according to ADDRESSABLE, make a bitfield if a size is
     specified for two reasons: first if the size differs from the natural
     size.  Second, if the alignment is insufficient.  There are a number of
     ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     because no such entity requiring bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (addressable >= 0
      && size
      && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
      && (!tree_int_cst_equal (size, TYPE_SIZE (type))
	  || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (!packed && !pos)
	{
	  /* Cap the field alignment by the record alignment, if any.  */
	  if (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
	    SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type));
	  else
	    SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
	}
    }

  /* A field positioned by a rep clause is packed only if it really is a
     bitfield; otherwise inherit the record's packed status.  */
  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;

  /* Bump the alignment if need be, either for bitfield/packing purposes or
     to satisfy the type requirements if no such consideration applies.  When
     we get the alignment from the type, indicate if this is from an explicit
     user request, which prevents stor-layout from lowering it later on.  */
  unsigned int bit_align
    = (DECL_BIT_FIELD (field_decl) ? 1
       : packed && TYPE_MODE (type) != BLKmode ? BITS_PER_UNIT : 0);

  if (bit_align > DECL_ALIGN (field_decl))
    SET_DECL_ALIGN (field_decl, bit_align);
  else if (!bit_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
    {
      SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
      DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
    }

  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      /* POS & -POS isolates the lowest set bit, i.e. the largest
	 power of 2 dividing the position.  */
      if (tree_fits_uhwi_p (pos))
	known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);
    }

  /* In addition to what our caller says, claim the field is addressable if we
     know that its type is not suitable.

     The field may also be "technically" nonaddressable, meaning that even if
     we attempt to take the field's address we will actually get the address
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
     value we have at this point is not accurate enough, so we don't account
     for this here and let finish_record_type decide.  */
  if (!addressable && !type_for_nonaliased_component_p (type))
    addressable = 1;

  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}
2719 /* Return a PARM_DECL node with NAME and TYPE. */
2721 tree
2722 create_param_decl (tree name, tree type)
2724 tree param_decl = build_decl (input_location, PARM_DECL, name, type);
2726 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2727 can lead to various ABI violations. */
2728 if (targetm.calls.promote_prototypes (NULL_TREE)
2729 && INTEGRAL_TYPE_P (type)
2730 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
2732 /* We have to be careful about biased types here. Make a subtype
2733 of integer_type_node with the proper biasing. */
2734 if (TREE_CODE (type) == INTEGER_TYPE
2735 && TYPE_BIASED_REPRESENTATION_P (type))
2737 tree subtype
2738 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2739 TREE_TYPE (subtype) = integer_type_node;
2740 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2741 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
2742 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
2743 type = subtype;
2745 else
2746 type = integer_type_node;
2749 DECL_ARG_TYPE (param_decl) = type;
2750 return param_decl;
/* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
   a TYPE.  If IN_PLACE is true, the tree pointed to by NODE should not be
   changed.  GNAT_NODE is used for the position of error messages.  */

void
process_attributes (tree *node, struct attrib **attr_list, bool in_place,
		    Node_Id gnat_node)
{
  struct attrib *attr;

  for (attr = *attr_list; attr; attr = attr->next)
    switch (attr->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
	/* Hand the attribute over to the common machinery, pointing
	   diagnostics at the Ada source location.  */
	Sloc_to_locus (Sloc (gnat_node), &input_location);
	decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
			 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
	break;

      case ATTR_LINK_ALIAS:
	/* An alias needs a definition, so only emit it for non-external
	   entities and force them to be output.  */
	if (!DECL_EXTERNAL (*node))
	  {
	    TREE_STATIC (*node) = 1;
	    assemble_alias (*node, attr->name);
	  }
	break;

      case ATTR_WEAK_EXTERNAL:
	if (SUPPORTS_WEAK)
	  declare_weak (*node);
	else
	  post_error ("?weak declarations not supported on this target",
		      attr->error_point);
	break;

      case ATTR_LINK_SECTION:
	if (targetm_common.have_named_sections)
	  {
	    set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
	    /* A decl placed in a specific section cannot be common.  */
	    DECL_COMMON (*node) = 0;
	  }
	else
	  post_error ("?section attributes are not supported for this target",
		      attr->error_point);
	break;

      case ATTR_LINK_CONSTRUCTOR:
	DECL_STATIC_CONSTRUCTOR (*node) = 1;
	TREE_USED (*node) = 1;
	break;

      case ATTR_LINK_DESTRUCTOR:
	DECL_STATIC_DESTRUCTOR (*node) = 1;
	TREE_USED (*node) = 1;
	break;

      case ATTR_THREAD_LOCAL_STORAGE:
	set_decl_tls_model (*node, decl_default_tls_model (*node));
	/* TLS variables cannot be common.  */
	DECL_COMMON (*node) = 0;
	break;
      }

  /* The list has been consumed; clear it so it is not processed twice.  */
  *attr_list = NULL;
}
2818 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2819 a power of 2. */
2821 bool
2822 value_factor_p (tree value, HOST_WIDE_INT factor)
2824 if (tree_fits_uhwi_p (value))
2825 return tree_to_uhwi (value) % factor == 0;
2827 if (TREE_CODE (value) == MULT_EXPR)
2828 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2829 || value_factor_p (TREE_OPERAND (value, 1), factor));
2831 return false;
/* Return whether GNAT_NODE is a defining identifier for a renaming that comes
   from the parameter association for the instantiation of a generic.  We do
   not want to emit source location for them: the code generated for their
   initialization is likely to disturb debugging.  */

bool
renaming_from_generic_instantiation_p (Node_Id gnat_node)
{
  /* Only compiler-generated object renamings are candidates.  */
  if (Nkind (gnat_node) != N_Defining_Identifier
      || !IN (Ekind (gnat_node), Object_Kind)
      || Comes_From_Source (gnat_node)
      || !Present (Renamed_Object (gnat_node)))
    return false;

  /* Get the object declaration of the renamed object, if any and if the
     renamed object is a mere identifier.  */
  gnat_node = Renamed_Object (gnat_node);
  if (Nkind (gnat_node) != N_Identifier)
    return false;

  gnat_node = Entity (gnat_node);
  if (!Present (Parent (gnat_node)))
    return false;

  /* The renaming comes from a generic instantiation iff the declaration of
     the renamed entity carries a generic association.  */
  gnat_node = Parent (gnat_node);
  return
   (Present (gnat_node)
    && Nkind (gnat_node) == N_Object_Declaration
    && Present (Corresponding_Generic_Association (gnat_node)));
}
2865 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
2866 feed it with the elaboration of GNAT_SCOPE. */
2868 static struct deferred_decl_context_node *
2869 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2871 struct deferred_decl_context_node *new_node;
2873 new_node
2874 = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
2875 new_node->decl = decl;
2876 new_node->gnat_scope = gnat_scope;
2877 new_node->force_global = force_global;
2878 new_node->types.create (1);
2879 new_node->next = deferred_decl_context_queue;
2880 deferred_decl_context_queue = new_node;
2881 return new_node;
/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed.  */

static void
add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
{
  /* TYPE will receive its context in process_deferred_decl_context.  */
  n->types.safe_push (type);
}
2894 /* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
2895 NULL_TREE if it is not available. */
2897 static tree
2898 compute_deferred_decl_context (Entity_Id gnat_scope)
2900 tree context;
2902 if (present_gnu_tree (gnat_scope))
2903 context = get_gnu_tree (gnat_scope);
2904 else
2905 return NULL_TREE;
2907 if (TREE_CODE (context) == TYPE_DECL)
2909 const tree context_type = TREE_TYPE (context);
2911 /* Skip dummy types: only the final ones can appear in the context
2912 chain. */
2913 if (TYPE_DUMMY_P (context_type))
2914 return NULL_TREE;
2916 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
2917 chain. */
2918 else
2919 context = context_type;
2922 return context;
/* Try to process all deferred nodes in the queue.  Keep in the queue the ones
   that cannot be processed yet, remove the other ones.  If FORCE is true,
   force the processing for all nodes, use the global context when nodes don't
   have a GNU translation.  */

void
process_deferred_decl_context (bool force)
{
  /* IT is a pointer-to-link so unlinking a node needs no "previous"
     bookkeeping.  */
  struct deferred_decl_context_node **it = &deferred_decl_context_queue;
  struct deferred_decl_context_node *node;

  while (*it != NULL)
    {
      bool processed = false;
      tree context = NULL_TREE;
      Entity_Id gnat_scope;

      node = *it;

      /* If FORCE, get the innermost elaborated scope.  Otherwise, just try to
	 get the first scope.  */
      gnat_scope = node->gnat_scope;
      while (Present (gnat_scope))
	{
	  context = compute_deferred_decl_context (gnat_scope);
	  if (!force || context)
	    break;
	  gnat_scope = get_debug_scope (gnat_scope, NULL);
	}

      /* Imported declarations must not be in a local context (i.e. not inside
	 a function).  */
      if (context && node->force_global > 0)
	{
	  tree ctx = context;

	  /* Walk up the context chain to assert no enclosing function.  */
	  while (ctx)
	    {
	      gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
	      ctx = DECL_P (ctx) ? DECL_CONTEXT (ctx) : TYPE_CONTEXT (ctx);
	    }
	}

      /* If FORCE, we want to get rid of all nodes in the queue: in case there
	 was no elaborated scope, use the global context.  */
      if (force && !context)
	context = get_global_context ();

      if (context)
	{
	  tree t;
	  int i;

	  DECL_CONTEXT (node->decl) = context;

	  /* Propagate it to the TYPE_CONTEXT attributes of the requested
	     ..._TYPE nodes.  */
	  FOR_EACH_VEC_ELT (node->types, i, t)
	    {
	      gnat_set_type_context (t, context);
	    }
	  processed = true;
	}

      /* If this node has been successfuly processed, remove it from the
	 queue.  Then move to the next node.  */
      if (processed)
	{
	  *it = node->next;
	  node->types.release ();
	  free (node);
	}
      else
	it = &node->next;
    }
}
/* Return VALUE scaled by the biggest power-of-2 factor of EXPR.  */

static unsigned int
scale_by_factor_of (tree expr, unsigned int value)
{
  unsigned HOST_WIDE_INT addend = 0;
  unsigned HOST_WIDE_INT factor = 1;

  /* Peel conversions around EXPR and try to extract bodies from function
     calls: it is possible to get the scale factor from size functions.  */
  expr = remove_conversions (expr, true);
  if (TREE_CODE (expr) == CALL_EXPR)
    expr = maybe_inline_call_in_expr (expr);

  /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
     multiple of the scale factor we are looking for.  */
  if (TREE_CODE (expr) == PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
      && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
    {
      addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
      expr = TREE_OPERAND (expr, 0);
    }

  /* An expression which is a bitwise AND with a mask has a power-of-2 factor
     corresponding to the number of trailing zeros of the mask.  */
  if (TREE_CODE (expr) == BIT_AND_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
      unsigned int i = 0;

      /* Count trailing zero bits of the mask; the loop counter guards
	 against an all-zero mask, which would otherwise never terminate.  */
      while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
	{
	  mask >>= 1;
	  factor *= 2;
	  i++;
	}
    }

  /* If the addend is not a multiple of the factor we found, give up.  In
     theory we could find a smaller common factor but it's useless for our
     needs.  This situation arises when dealing with a field F1 with no
     alignment requirement but that is following a field F2 with such
     requirements.  As long as we have F2's offset, we don't need alignment
     information to compute F1's.  */
  if (addend % factor != 0)
    factor = 1;

  return factor * value;
}
/* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these 2 fields are laid out in such a way that no gap
   exist between the end of PREV_FIELD and the beginning of CURR_FIELD.  OFFSET
   is the distance in bits between the end of PREV_FIELD and the starting
   position of CURR_FIELD.  It is ignored if null.  */

static bool
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap */
  if (!prev_field)
    return false;

  /* If the previous field is a union type, then return false: The only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return false;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not null. */
  if (offset && tree_fits_uhwi_p (offset))
    return !integer_zerop (offset);

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not multiple of the current field alignment. */
  if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
      && tree_fits_uhwi_p (bit_position (prev_field)))
    return ((tree_to_uhwi (bit_position (prev_field))
	     + tree_to_uhwi (DECL_SIZE (prev_field)))
	    % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap. */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return false;

  /* Fallback, return that there may be a potential gap */
  return true;
}
3101 /* Return a LABEL_DECL with NAME. GNAT_NODE is used for the position of
3102 the decl. */
3104 tree
3105 create_label_decl (tree name, Node_Id gnat_node)
3107 tree label_decl
3108 = build_decl (input_location, LABEL_DECL, name, void_type_node);
3110 DECL_MODE (label_decl) = VOIDmode;
3112 /* Add this decl to the current binding level. */
3113 gnat_pushdecl (label_decl, gnat_node);
3115 return label_decl;
/* Return a FUNCTION_DECL node.  NAME is the name of the subprogram, ASM_NAME
   its assembler name, TYPE its type (a FUNCTION_TYPE node), PARAM_DECL_LIST
   the list of its parameters (a list of PARM_DECL nodes chained through the
   DECL_CHAIN field).

   INLINE_STATUS describes the inline flags to be set on the FUNCTION_DECL.

   PUBLIC_FLAG is true if this is for a reference to a public entity or for a
   definition to be made visible outside of the current compilation unit.

   EXTERN_FLAG is true when processing an external subprogram declaration.

   ARTIFICIAL_P is true if the subprogram was generated by the compiler.

   DEBUG_INFO_P is true if we need to write debug information for it.

   ATTR_LIST is the list of attributes to be attached to the subprogram.

   GNAT_NODE is used for the position of the decl.  */

tree
create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
		     enum inline_status_t inline_status, bool public_flag,
		     bool extern_flag, bool artificial_p, bool debug_info_p,
		     struct attrib *attr_list, Node_Id gnat_node)
{
  tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;

  DECL_ARTIFICIAL (subprog_decl) = artificial_p;
  DECL_EXTERNAL (subprog_decl) = extern_flag;
  TREE_PUBLIC (subprog_decl) = public_flag;

  /* DECL_IGNORED_P suppresses debug information for the subprogram.  */
  if (!debug_info_p)
    DECL_IGNORED_P (subprog_decl) = 1;

  switch (inline_status)
    {
    case is_suppressed:
      DECL_UNINLINABLE (subprog_decl) = 1;
      break;

    case is_disabled:
      break;

    case is_required:
      if (Back_End_Inlining)
	decl_attributes (&subprog_decl,
			 tree_cons (get_identifier ("always_inline"),
				    NULL_TREE, NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);

      /* ... fall through ... */

    case is_enabled:
      DECL_DECLARED_INLINE_P (subprog_decl) = 1;
      /* Do not warn about failure to inline compiler-generated code.  */
      DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
      break;

    default:
      gcc_unreachable ();
    }

  process_attributes (&subprog_decl, &attr_list, true, gnat_node);

  /* Once everything is processed, finish the subprogram declaration.  */
  finish_subprog_decl (subprog_decl, asm_name, type);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
}
/* Given a subprogram declaration DECL, its assembler name and its type,
   finish constructing the subprogram declaration from ASM_NAME and TYPE.  */

void
finish_subprog_decl (tree decl, tree asm_name, tree type)
{
  /* Build the RESULT_DECL holding the function's return value.  */
  tree result_decl
    = build_decl (DECL_SOURCE_LOCATION (decl), RESULT_DECL, NULL_TREE,
		  TREE_TYPE (type));

  DECL_ARTIFICIAL (result_decl) = 1;
  DECL_IGNORED_P (result_decl) = 1;
  /* TREE_ADDRESSABLE on a FUNCTION_TYPE means return by invisible
     reference in the GNAT type system.  */
  DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
  DECL_RESULT (decl) = result_decl;

  TREE_READONLY (decl) = TYPE_READONLY (type);
  TREE_SIDE_EFFECTS (decl) = TREE_THIS_VOLATILE (decl) = TYPE_VOLATILE (type);

  if (asm_name)
    {
      /* Let the target mangle the name if this isn't a verbatim asm.  */
      if (*IDENTIFIER_POINTER (asm_name) != '*')
	asm_name = targetm.mangle_decl_assembler_name (decl, asm_name);

      SET_DECL_ASSEMBLER_NAME (decl, asm_name);

      /* The expand_main_function circuitry expects "main_identifier_node" to
	 designate the DECL_NAME of the 'main' entry point, in turn expected
	 to be declared as the "main" function literally by default.  Ada
	 program entry points are typically declared with a different name
	 within the binder generated file, exported as 'main' to satisfy the
	 system expectations.  Force main_identifier_node in this case.  */
      if (asm_name == main_identifier_node)
	DECL_NAME (decl) = main_identifier_node;
    }
}
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

void
begin_subprog_body (tree subprog_decl)
{
  tree param_decl;

  announce_function (subprog_decl);

  /* This function is being defined.  */
  TREE_STATIC (subprog_decl) = 1;

  /* The failure of this assertion will likely come from a wrong context for
     the subprogram body, e.g. another procedure for a procedure declared at
     library level.  */
  gcc_assert (current_function_decl == decl_function_context (subprog_decl));

  current_function_decl = subprog_decl;

  /* Enter a new binding level and show that all the parameters belong to
     this function.  */
  gnat_pushlevel ();

  for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
       param_decl = DECL_CHAIN (param_decl))
    DECL_CONTEXT (param_decl) = subprog_decl;

  make_decl_rtl (subprog_decl);
}
/* Finish translating the current subprogram and set its BODY.  */

void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Attach the BLOCK for this level to the function and pop the level.  */
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* Mark the RESULT_DECL as being in this subprogram.  */
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  /* The body should be a BIND_EXPR whose BLOCK is the top-level one.  */
  if (TREE_CODE (body) == BIND_EXPR)
    {
      BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
      DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
    }

  DECL_SAVED_TREE (fndecl) = body;

  /* Restore the enclosing function (or NULL at library level) as the
     current function.  */
  current_function_decl = decl_function_context (fndecl);
}
/* Wrap up compilation of SUBPROG_DECL, a subprogram body.  */

void
rest_of_subprog_body_compilation (tree subprog_decl)
{
  /* We cannot track the location of errors past this point.  */
  error_gnat_node = Empty;

  /* If we're only annotating types, don't actually compile this function.  */
  if (type_annotate_only)
    return;

  /* Dump functions before gimplification.  */
  dump_function (TDI_original, subprog_decl);

  if (!decl_function_context (subprog_decl))
    cgraph_node::finalize_function (subprog_decl, false);
  else
    /* Register this function with cgraph just far enough to get it
       added to our parent's nested function list.  */
    (void) cgraph_node::get_create (subprog_decl);
}
/* Record DECL, a built-in function, in the current binding level and return
   it; langhook used by the middle-end when it creates builtins.  */

tree
gnat_builtin_function (tree decl)
{
  gnat_pushdecl (decl, Empty);
  return decl;
}
3321 /* Return an integer type with the number of bits of precision given by
3322 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3323 it is a signed type. */
3325 tree
3326 gnat_type_for_size (unsigned precision, int unsignedp)
3328 tree t;
3329 char type_name[20];
3331 if (precision <= 2 * MAX_BITS_PER_WORD
3332 && signed_and_unsigned_types[precision][unsignedp])
3333 return signed_and_unsigned_types[precision][unsignedp];
3335 if (unsignedp)
3336 t = make_unsigned_type (precision);
3337 else
3338 t = make_signed_type (precision);
3340 if (precision <= 2 * MAX_BITS_PER_WORD)
3341 signed_and_unsigned_types[precision][unsignedp] = t;
3343 if (!TYPE_NAME (t))
3345 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
3346 TYPE_NAME (t) = get_identifier (type_name);
3349 return t;
3352 /* Likewise for floating-point types. */
3354 static tree
3355 float_type_for_precision (int precision, machine_mode mode)
3357 tree t;
3358 char type_name[20];
3360 if (float_types[(int) mode])
3361 return float_types[(int) mode];
3363 float_types[(int) mode] = t = make_node (REAL_TYPE);
3364 TYPE_PRECISION (t) = precision;
3365 layout_type (t);
3367 gcc_assert (TYPE_MODE (t) == mode);
3368 if (!TYPE_NAME (t))
3370 sprintf (type_name, "FLOAT_%d", precision);
3371 TYPE_NAME (t) = get_identifier (type_name);
3374 return t;
3377 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
3378 an unsigned type; otherwise a signed type is returned. */
3380 tree
3381 gnat_type_for_mode (machine_mode mode, int unsignedp)
3383 if (mode == BLKmode)
3384 return NULL_TREE;
3386 if (mode == VOIDmode)
3387 return void_type_node;
3389 if (COMPLEX_MODE_P (mode))
3390 return NULL_TREE;
3392 if (SCALAR_FLOAT_MODE_P (mode))
3393 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
3395 if (SCALAR_INT_MODE_P (mode))
3396 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
3398 if (VECTOR_MODE_P (mode))
3400 machine_mode inner_mode = GET_MODE_INNER (mode);
3401 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3402 if (inner_type)
3403 return build_vector_type_for_mode (inner_type, mode);
3406 return NULL_TREE;
3409 /* Return the signed or unsigned version of TYPE_NODE, a scalar type, the
3410 signedness being specified by UNSIGNEDP. */
3412 tree
3413 gnat_signed_or_unsigned_type_for (int unsignedp, tree type_node)
3415 if (type_node == char_type_node)
3416 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3418 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), unsignedp);
3420 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3422 type = copy_type (type);
3423 TREE_TYPE (type) = type_node;
3425 else if (TREE_TYPE (type_node)
3426 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3427 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3429 type = copy_type (type);
3430 TREE_TYPE (type) = TREE_TYPE (type_node);
3433 return type;
/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
   transparently converted to each other.  */

int
gnat_types_compatible_p (tree t1, tree t2)
{
  enum tree_code code;

  /* This is the default criterion.  */
  if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
    return 1;

  /* We only check structural equivalence here.  */
  if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
    return 0;

  /* Vector types are also compatible if they have the same number of subparts
     and the same form of (scalar) element type.  */
  if (code == VECTOR_TYPE
      && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
      && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
      && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
    return 1;

  /* Array types are also compatible if they are constrained and have the same
     domain(s), the same component type and the same scalar storage order.  */
  if (code == ARRAY_TYPE
      && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
	  || (TYPE_DOMAIN (t1)
	      && TYPE_DOMAIN (t2)
	      && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
	      && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
      && (TREE_TYPE (t1) == TREE_TYPE (t2)
	  || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
	      && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
      && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
    return 1;

  return 0;
}
3479 /* Return true if EXPR is a useless type conversion. */
3481 bool
3482 gnat_useless_type_conversion (tree expr)
3484 if (CONVERT_EXPR_P (expr)
3485 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3486 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3487 return gnat_types_compatible_p (TREE_TYPE (expr),
3488 TREE_TYPE (TREE_OPERAND (expr, 0)));
3490 return false;
3493 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3495 bool
3496 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3497 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3499 return TYPE_CI_CO_LIST (t) == cico_list
3500 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3501 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3502 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
	{
	  tree t, *argarray;
	  int n, i;

	  /* Try to inline the call body so the discriminant references
	     become visible and substitutable.  */
	  t = maybe_inline_call_in_expr (exp);
	  if (t)
	    return max_size (t, max_p);

	  /* Otherwise rebuild the call with maximized arguments.  */
	  n = call_expr_nargs (exp);
	  gcc_assert (n > 0);
	  argarray = XALLOCAVEC (tree, n);
	  for (i = 0; i < n; i++)
	    argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
	  return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
	}
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (CONTAINS_PLACEHOLDER_P (exp))
	{
	  tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
	  tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
	  return max_size (convert (get_base_type (val_type), val), true);
	}

      return exp;

    case tcc_comparison:
      return max_p ? size_one_node : size_zero_node;

    case tcc_unary:
      if (code == NON_LVALUE_EXPR)
	return max_size (TREE_OPERAND (exp, 0), max_p);

      /* Negation flips which extreme of the operand is wanted.  */
      return fold_build1 (code, type,
			  max_size (TREE_OPERAND (exp, 0),
				    code == NEGATE_EXPR ? !max_p : max_p));

    case tcc_binary:
      {
	tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	tree rhs = max_size (TREE_OPERAND (exp, 1),
			     code == MINUS_EXPR ? !max_p : max_p);

	/* Special-case wanting the maximum value of a MIN_EXPR.
	   In that case, if one side overflows, return the other.  */
	if (max_p && code == MIN_EXPR)
	  {
	    if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
	      return lhs;

	    if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
	      return rhs;
	  }

	/* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	   overflowing and the RHS a variable.  */
	if ((code == MINUS_EXPR || code == PLUS_EXPR)
	    && TREE_CODE (lhs) == INTEGER_CST
	    && TREE_OVERFLOW (lhs)
	    && TREE_CODE (rhs) != INTEGER_CST)
	  return lhs;

	/* If we are going to subtract a "negative" value in an unsigned type,
	   do the operation as an addition of the negated value, in order to
	   avoid creating a spurious overflow below.  */
	if (code == MINUS_EXPR
	    && TYPE_UNSIGNED (type)
	    && TREE_CODE (rhs) == INTEGER_CST
	    && !TREE_OVERFLOW (rhs)
	    && tree_int_cst_sign_bit (rhs) != 0)
	  {
	    rhs = fold_build1 (NEGATE_EXPR, type, rhs);
	    code = PLUS_EXPR;
	  }

	/* We need to detect overflows so we call size_binop here.  */
	return size_binop (code, lhs, rhs);
      }

    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == SAVE_EXPR)
	    return exp;

	  return fold_build1 (code, type,
			      max_size (TREE_OPERAND (exp, 0), max_p));

	case 2:
	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  return fold_build2 (code, type,
			      max_size (TREE_OPERAND (exp, 0), max_p),
			      max_size (TREE_OPERAND (exp, 1), max_p));

	case 3:
	  if (code == COND_EXPR)
	    return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p));
	  /* fallthrough */

	default:
	  break;
	}

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  vec<constructor_elt, va_gc> *template_elts = NULL;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip padding and justified-modular wrappers to reach the array.  */
  while (TREE_CODE (array_type) == RECORD_TYPE
	 && (TYPE_PADDING_P (array_type)
	     || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.  Template fields come
     in (low bound, high bound) pairs, hence the double DECL_CHAIN step.  */

  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = DECL_CHAIN (DECL_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gcc_unreachable ();

      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
      CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
    }

  return gnat_build_constructor (template_type, template_elts);
}
3704 /* Return true if TYPE is suitable for the element type of a vector. */
3706 static bool
3707 type_for_vector_element_p (tree type)
3709 machine_mode mode;
3711 if (!INTEGRAL_TYPE_P (type)
3712 && !SCALAR_FLOAT_TYPE_P (type)
3713 && !FIXED_POINT_TYPE_P (type))
3714 return false;
3716 mode = TYPE_MODE (type);
3717 if (GET_MODE_CLASS (mode) != MODE_INT
3718 && !SCALAR_FLOAT_MODE_P (mode)
3719 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3720 return false;
3722 return true;
3725 /* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3726 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3727 attribute declaration and want to issue error messages on failure. */
3729 static tree
3730 build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3732 unsigned HOST_WIDE_INT size_int, inner_size_int;
3733 int nunits;
3735 /* Silently punt on variable sizes. We can't make vector types for them,
3736 need to ignore them on front-end generated subtypes of unconstrained
3737 base types, and this attribute is for binding implementors, not end
3738 users, so we should never get there from legitimate explicit uses. */
3739 if (!tree_fits_uhwi_p (size))
3740 return NULL_TREE;
3741 size_int = tree_to_uhwi (size);
3743 if (!type_for_vector_element_p (inner_type))
3745 if (attribute)
3746 error ("invalid element type for attribute %qs",
3747 IDENTIFIER_POINTER (attribute));
3748 return NULL_TREE;
3750 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3752 if (size_int % inner_size_int)
3754 if (attribute)
3755 error ("vector size not an integral multiple of component size");
3756 return NULL_TREE;
3759 if (size_int == 0)
3761 if (attribute)
3762 error ("zero vector size");
3763 return NULL_TREE;
3766 nunits = size_int / inner_size_int;
3767 if (nunits & (nunits - 1))
3769 if (attribute)
3770 error ("number of components of vector not a power of two");
3771 return NULL_TREE;
3774 return build_vector_type (inner_type, nunits);
3777 /* Return a vector type whose representative array type is ARRAY_TYPE, or
3778 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3779 processing the attribute and want to issue error messages on failure. */
3781 static tree
3782 build_vector_type_for_array (tree array_type, tree attribute)
3784 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3785 TYPE_SIZE_UNIT (array_type),
3786 attribute);
3787 if (!vector_type)
3788 return NULL_TREE;
3790 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3791 return vector_type;
3794 /* Build a type to be used to represent an aliased object whose nominal type
3795 is an unconstrained array. This consists of a RECORD_TYPE containing a
3796 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3797 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3798 an arbitrary unconstrained object. Use NAME as the name of the record.
3799 DEBUG_INFO_P is true if we need to write debug information for the type. */
3801 tree
3802 build_unc_object_type (tree template_type, tree object_type, tree name,
3803 bool debug_info_p)
3805 tree decl;
3806 tree type = make_node (RECORD_TYPE);
3807 tree template_field
3808 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3809 NULL_TREE, NULL_TREE, 0, 1);
3810 tree array_field
3811 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3812 NULL_TREE, NULL_TREE, 0, 1);
3814 TYPE_NAME (type) = name;
3815 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3816 DECL_CHAIN (template_field) = array_field;
3817 finish_record_type (type, template_field, 0, true);
3819 /* Declare it now since it will never be declared otherwise. This is
3820 necessary to ensure that its subtrees are properly marked. */
3821 decl = create_type_decl (name, type, true, debug_info_p, Empty);
3823 /* template_type will not be used elsewhere than here, so to keep the debug
3824 info clean and in order to avoid scoping issues, make decl its
3825 context. */
3826 gnat_set_type_context (template_type, decl);
3828 return type;
3831 /* Same, taking a thin or fat pointer type instead of a template type. */
3833 tree
3834 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3835 tree name, bool debug_info_p)
3837 tree template_type;
3839 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3841 template_type
3842 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3843 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3844 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3846 return
3847 build_unc_object_type (template_type, object_type, name, debug_info_p);
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
   In the normal case this is just two adjustments, but we have more to
   do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree t;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
      update_pointer_to (t, new_type);

  /* If no pointers and no references, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it; after the designated type's freeze, we get here with
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
     when the access type was processed.  We will make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy OLD_TYPE variant with different
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
     the qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
  new_type
    = build_qualified_type (new_type,
			    TYPE_QUALS (old_type) | TYPE_QUALS (new_type));

  /* If old type and new type are identical, there is nothing to do.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      tree new_ptr, new_ref;

      /* If pointer or reference already points to new type, nothing to do.
	 This can happen as update_pointer_to can be invoked multiple times
	 on the same couple of types because of the type variants.  */
      if ((ptr && TREE_TYPE (ptr) == new_type)
	  || (ref && TREE_TYPE (ref) == new_type))
	return;

      /* Chain PTR and its variants at the end.  */
      new_ptr = TYPE_POINTER_TO (new_type);
      if (new_ptr)
	{
	  while (TYPE_NEXT_PTR_TO (new_ptr))
	    new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
	  TYPE_NEXT_PTR_TO (new_ptr) = ptr;
	}
      else
	TYPE_POINTER_TO (new_type) = ptr;

      /* Now adjust them.  Note that a null-bounds constant attached to a
	 pointer variant also designates the old type through its operand,
	 so it must be redirected as well.  */
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
	  {
	    TREE_TYPE (t) = new_type;
	    if (TYPE_NULL_BOUNDS (t))
	      TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
	  }

      /* Chain REF and its variants at the end.  */
      new_ref = TYPE_REFERENCE_TO (new_type);
      if (new_ref)
	{
	  while (TYPE_NEXT_REF_TO (new_ref))
	    new_ref = TYPE_NEXT_REF_TO (new_ref);
	  TYPE_NEXT_REF_TO (new_ref) = ref;
	}
      else
	TYPE_REFERENCE_TO (new_type) = ref;

      /* Now adjust them.  */
      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
	for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
	  TREE_TYPE (t) = new_type;

      /* OLD_TYPE's pointer and reference chains have been spliced into
	 NEW_TYPE's, so detach them from OLD_TYPE.  */
      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }

  /* Now deal with the unconstrained array case.  In this case the pointer
     is actually a record where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.
     Likewise for the pointer to the object record (thin pointer).  */
  else
    {
      tree new_ptr = TYPE_POINTER_TO (new_type);

      gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));

      /* If PTR already points to NEW_TYPE, nothing to do.  This can happen
	 since update_pointer_to can be invoked multiple times on the same
	 couple of types because of the type variants.  */
      if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
	return;

      /* First field of the fat pointer: pointer to the array data.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));

      /* Second field of the fat pointer: pointer to the bounds template.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
	 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
			 TYPE_OBJECT_RECORD_TYPE (new_type));

      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }
}
/* Convert EXPR, a pointer to a constrained array, into a pointer to an
   unconstrained one.  This involves making or finding a template.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  /* TYPE is a fat pointer record: first field points to the array, second
     field points to the bounds template (see the field accesses below).  */
  tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
  tree etype = TREE_TYPE (expr);
  tree template_addr;
  vec<constructor_elt, va_gc> *v;
  /* The result constructor always has exactly two elements.  */
  vec_alloc (v, 2);

  /* If EXPR is null, make a fat pointer that contains a null pointer to the
     array (compare_fat_pointers ensures that this is the full discriminant)
     and a valid pointer to the bounds.  This latter property is necessary
     since the compiler can hoist the load of the bounds done through it.  */
  if (integer_zerop (expr))
    {
      tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      tree null_bounds, t;

      /* Reuse the cached null-bounds object if one was already built for
	 this pointer-to-template type.  */
      if (TYPE_NULL_BOUNDS (ptr_template_type))
	null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
      else
	{
	  /* The template type can still be dummy at this point so we build an
	     empty constructor.  The middle-end will fill it in with zeros.  */
	  t = build_constructor (template_type, NULL);
	  TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
	  null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
	  SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
	}

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      fold_convert (p_array_type, null_pointer_node));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
      t = build_constructor (type, v);
      /* Do not set TREE_CONSTANT so as to force T to static memory.  */
      TREE_CONSTANT (t) = 0;
      TREE_STATIC (t) = 1;

      return t;
    }

  /* If EXPR is a thin pointer, make template and data from the record.  */
  if (TYPE_IS_THIN_POINTER_P (etype))
    {
      tree field = TYPE_FIELDS (TREE_TYPE (etype));

      /* EXPR is used twice below, so protect it against reevaluation.  */
      expr = gnat_protect_expr (expr);

      /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
	 the thin pointer value has been shifted so we shift it back to get
	 the template address.  */
      if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
	{
	  template_addr
	    = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
			       fold_build1 (NEGATE_EXPR, sizetype,
					    byte_position
					    (DECL_CHAIN (field))));
	  template_addr
	    = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
			    template_addr);
	}

      /* Otherwise we explicitly take the address of the fields.  */
      else
	{
	  expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
	  template_addr
	    = build_unary_op (ADDR_EXPR, NULL_TREE,
			      build_component_ref (expr, field, false));
	  expr = build_unary_op (ADDR_EXPR, NULL_TREE,
				 build_component_ref (expr, DECL_CHAIN (field),
						      false));
	}
    }

  /* Otherwise, build the constructor for the template.  */
  else
    template_addr
      = build_unary_op (ADDR_EXPR, NULL_TREE,
			build_template (template_type, TREE_TYPE (etype),
					expr));

  /* The final result is a constructor for the fat pointer.

     If EXPR is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     COMPONENT_REFs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine because it
     will only refer to the provided TEMPLATE_TYPE in this case.  */
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
  return gnat_build_constructor (type, v);
}
4085 /* Create an expression whose value is that of EXPR,
4086 converted to type TYPE. The TREE_TYPE of the value
4087 is always TYPE. This function implements all reasonable
4088 conversions; callers should filter out those that are
4089 not permitted by the language being compiled. */
4091 tree
4092 convert (tree type, tree expr)
4094 tree etype = TREE_TYPE (expr);
4095 enum tree_code ecode = TREE_CODE (etype);
4096 enum tree_code code = TREE_CODE (type);
4098 /* If the expression is already of the right type, we are done. */
4099 if (etype == type)
4100 return expr;
4102 /* If both input and output have padding and are of variable size, do this
4103 as an unchecked conversion. Likewise if one is a mere variant of the
4104 other, so we avoid a pointless unpad/repad sequence. */
4105 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
4106 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4107 && (!TREE_CONSTANT (TYPE_SIZE (type))
4108 || !TREE_CONSTANT (TYPE_SIZE (etype))
4109 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4110 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4111 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4114 /* If the output type has padding, convert to the inner type and make a
4115 constructor to build the record, unless a variable size is involved. */
4116 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
4118 vec<constructor_elt, va_gc> *v;
4120 /* If we previously converted from another type and our type is
4121 of variable size, remove the conversion to avoid the need for
4122 variable-sized temporaries. Likewise for a conversion between
4123 original and packable version. */
4124 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4125 && (!TREE_CONSTANT (TYPE_SIZE (type))
4126 || (ecode == RECORD_TYPE
4127 && TYPE_NAME (etype)
4128 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4129 expr = TREE_OPERAND (expr, 0);
4131 /* If we are just removing the padding from expr, convert the original
4132 object if we have variable size in order to avoid the need for some
4133 variable-sized temporaries. Likewise if the padding is a variant
4134 of the other, so we avoid a pointless unpad/repad sequence. */
4135 if (TREE_CODE (expr) == COMPONENT_REF
4136 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4137 && (!TREE_CONSTANT (TYPE_SIZE (type))
4138 || TYPE_MAIN_VARIANT (type)
4139 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
4140 || (ecode == RECORD_TYPE
4141 && TYPE_NAME (etype)
4142 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4143 return convert (type, TREE_OPERAND (expr, 0));
4145 /* If the inner type is of self-referential size and the expression type
4146 is a record, do this as an unchecked conversion. But first pad the
4147 expression if possible to have the same size on both sides. */
4148 if (ecode == RECORD_TYPE
4149 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
4151 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
4152 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4153 false, false, false, true),
4154 expr);
4155 return unchecked_convert (type, expr, false);
4158 /* If we are converting between array types with variable size, do the
4159 final conversion as an unchecked conversion, again to avoid the need
4160 for some variable-sized temporaries. If valid, this conversion is
4161 very likely purely technical and without real effects. */
4162 if (ecode == ARRAY_TYPE
4163 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4164 && !TREE_CONSTANT (TYPE_SIZE (etype))
4165 && !TREE_CONSTANT (TYPE_SIZE (type)))
4166 return unchecked_convert (type,
4167 convert (TREE_TYPE (TYPE_FIELDS (type)),
4168 expr),
4169 false);
4171 vec_alloc (v, 1);
4172 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4173 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4174 return gnat_build_constructor (type, v);
4177 /* If the input type has padding, remove it and convert to the output type.
4178 The conditions ordering is arranged to ensure that the output type is not
4179 a padding type here, as it is not clear whether the conversion would
4180 always be correct if this was to happen. */
4181 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4183 tree unpadded;
4185 /* If we have just converted to this padded type, just get the
4186 inner expression. */
4187 if (TREE_CODE (expr) == CONSTRUCTOR)
4188 unpadded = CONSTRUCTOR_ELT (expr, 0)->value;
4190 /* Otherwise, build an explicit component reference. */
4191 else
4192 unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);
4194 return convert (type, unpadded);
4197 /* If the input is a biased type, adjust first. */
4198 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4199 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4200 fold_convert (TREE_TYPE (etype), expr),
4201 fold_convert (TREE_TYPE (etype),
4202 TYPE_MIN_VALUE (etype))));
4204 /* If the input is a justified modular type, we need to extract the actual
4205 object before converting it to any other type with the exceptions of an
4206 unconstrained array or of a mere type variant. It is useful to avoid the
4207 extraction and conversion in the type variant case because it could end
4208 up replacing a VAR_DECL expr by a constructor and we might be about the
4209 take the address of the result. */
4210 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4211 && code != UNCONSTRAINED_ARRAY_TYPE
4212 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4213 return
4214 convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));
4216 /* If converting to a type that contains a template, convert to the data
4217 type and then build the template. */
4218 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4220 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4221 vec<constructor_elt, va_gc> *v;
4222 vec_alloc (v, 2);
4224 /* If the source already has a template, get a reference to the
4225 associated array only, as we are going to rebuild a template
4226 for the target type anyway. */
4227 expr = maybe_unconstrained_array (expr);
4229 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4230 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4231 obj_type, NULL_TREE));
4232 if (expr)
4233 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4234 convert (obj_type, expr));
4235 return gnat_build_constructor (type, v);
4238 /* There are some cases of expressions that we process specially. */
4239 switch (TREE_CODE (expr))
4241 case ERROR_MARK:
4242 return expr;
4244 case NULL_EXPR:
4245 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4246 conversion in gnat_expand_expr. NULL_EXPR does not represent
4247 and actual value, so no conversion is needed. */
4248 expr = copy_node (expr);
4249 TREE_TYPE (expr) = type;
4250 return expr;
4252 case STRING_CST:
4253 /* If we are converting a STRING_CST to another constrained array type,
4254 just make a new one in the proper type. */
4255 if (code == ecode && AGGREGATE_TYPE_P (etype)
4256 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4257 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4259 expr = copy_node (expr);
4260 TREE_TYPE (expr) = type;
4261 return expr;
4263 break;
4265 case VECTOR_CST:
4266 /* If we are converting a VECTOR_CST to a mere type variant, just make
4267 a new one in the proper type. */
4268 if (code == ecode && gnat_types_compatible_p (type, etype))
4270 expr = copy_node (expr);
4271 TREE_TYPE (expr) = type;
4272 return expr;
4275 case CONSTRUCTOR:
4276 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4277 another padding type around the same type, just make a new one in
4278 the proper type. */
4279 if (code == ecode
4280 && (gnat_types_compatible_p (type, etype)
4281 || (code == RECORD_TYPE
4282 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4283 && TREE_TYPE (TYPE_FIELDS (type))
4284 == TREE_TYPE (TYPE_FIELDS (etype)))))
4286 expr = copy_node (expr);
4287 TREE_TYPE (expr) = type;
4288 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4289 return expr;
4292 /* Likewise for a conversion between original and packable version, or
4293 conversion between types of the same size and with the same list of
4294 fields, but we have to work harder to preserve type consistency. */
4295 if (code == ecode
4296 && code == RECORD_TYPE
4297 && (TYPE_NAME (type) == TYPE_NAME (etype)
4298 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4301 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4302 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4303 vec<constructor_elt, va_gc> *v;
4304 vec_alloc (v, len);
4305 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4306 unsigned HOST_WIDE_INT idx;
4307 tree index, value;
4309 /* Whether we need to clear TREE_CONSTANT et al. on the output
4310 constructor when we convert in place. */
4311 bool clear_constant = false;
4313 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4315 /* Skip the missing fields in the CONSTRUCTOR. */
4316 while (efield && field && !SAME_FIELD_P (efield, index))
4318 efield = DECL_CHAIN (efield);
4319 field = DECL_CHAIN (field);
4321 /* The field must be the same. */
4322 if (!(efield && field && SAME_FIELD_P (efield, field)))
4323 break;
4324 constructor_elt elt
4325 = {field, convert (TREE_TYPE (field), value)};
4326 v->quick_push (elt);
4328 /* If packing has made this field a bitfield and the input
4329 value couldn't be emitted statically any more, we need to
4330 clear TREE_CONSTANT on our output. */
4331 if (!clear_constant
4332 && TREE_CONSTANT (expr)
4333 && !CONSTRUCTOR_BITFIELD_P (efield)
4334 && CONSTRUCTOR_BITFIELD_P (field)
4335 && !initializer_constant_valid_for_bitfield_p (value))
4336 clear_constant = true;
4338 efield = DECL_CHAIN (efield);
4339 field = DECL_CHAIN (field);
4342 /* If we have been able to match and convert all the input fields
4343 to their output type, convert in place now. We'll fallback to a
4344 view conversion downstream otherwise. */
4345 if (idx == len)
4347 expr = copy_node (expr);
4348 TREE_TYPE (expr) = type;
4349 CONSTRUCTOR_ELTS (expr) = v;
4350 if (clear_constant)
4351 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4352 return expr;
4356 /* Likewise for a conversion between array type and vector type with a
4357 compatible representative array. */
4358 else if (code == VECTOR_TYPE
4359 && ecode == ARRAY_TYPE
4360 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4361 etype))
4363 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4364 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4365 vec<constructor_elt, va_gc> *v;
4366 unsigned HOST_WIDE_INT ix;
4367 tree value;
4369 /* Build a VECTOR_CST from a *constant* array constructor. */
4370 if (TREE_CONSTANT (expr))
4372 bool constant_p = true;
4374 /* Iterate through elements and check if all constructor
4375 elements are *_CSTs. */
4376 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4377 if (!CONSTANT_CLASS_P (value))
4379 constant_p = false;
4380 break;
4383 if (constant_p)
4384 return build_vector_from_ctor (type,
4385 CONSTRUCTOR_ELTS (expr));
4388 /* Otherwise, build a regular vector constructor. */
4389 vec_alloc (v, len);
4390 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4392 constructor_elt elt = {NULL_TREE, value};
4393 v->quick_push (elt);
4395 expr = copy_node (expr);
4396 TREE_TYPE (expr) = type;
4397 CONSTRUCTOR_ELTS (expr) = v;
4398 return expr;
4400 break;
4402 case UNCONSTRAINED_ARRAY_REF:
4403 /* First retrieve the underlying array. */
4404 expr = maybe_unconstrained_array (expr);
4405 etype = TREE_TYPE (expr);
4406 ecode = TREE_CODE (etype);
4407 break;
4409 case VIEW_CONVERT_EXPR:
4411 /* GCC 4.x is very sensitive to type consistency overall, and view
4412 conversions thus are very frequent. Even though just "convert"ing
4413 the inner operand to the output type is fine in most cases, it
4414 might expose unexpected input/output type mismatches in special
4415 circumstances so we avoid such recursive calls when we can. */
4416 tree op0 = TREE_OPERAND (expr, 0);
4418 /* If we are converting back to the original type, we can just
4419 lift the input conversion. This is a common occurrence with
4420 switches back-and-forth amongst type variants. */
4421 if (type == TREE_TYPE (op0))
4422 return op0;
4424 /* Otherwise, if we're converting between two aggregate or vector
4425 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4426 target type in place or to just convert the inner expression. */
4427 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4428 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4430 /* If we are converting between mere variants, we can just
4431 substitute the VIEW_CONVERT_EXPR in place. */
4432 if (gnat_types_compatible_p (type, etype))
4433 return build1 (VIEW_CONVERT_EXPR, type, op0);
4435 /* Otherwise, we may just bypass the input view conversion unless
4436 one of the types is a fat pointer, which is handled by
4437 specialized code below which relies on exact type matching. */
4438 else if (!TYPE_IS_FAT_POINTER_P (type)
4439 && !TYPE_IS_FAT_POINTER_P (etype))
4440 return convert (type, op0);
4443 break;
4446 default:
4447 break;
4450 /* Check for converting to a pointer to an unconstrained array. */
4451 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4452 return convert_to_fat_pointer (type, expr);
4454 /* If we are converting between two aggregate or vector types that are mere
4455 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4456 to a vector type from its representative array type. */
4457 else if ((code == ecode
4458 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4459 && gnat_types_compatible_p (type, etype))
4460 || (code == VECTOR_TYPE
4461 && ecode == ARRAY_TYPE
4462 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4463 etype)))
4464 return build1 (VIEW_CONVERT_EXPR, type, expr);
4466 /* If we are converting between tagged types, try to upcast properly. */
4467 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4468 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4470 tree child_etype = etype;
4471 do {
4472 tree field = TYPE_FIELDS (child_etype);
4473 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4474 return build_component_ref (expr, field, false);
4475 child_etype = TREE_TYPE (field);
4476 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4479 /* If we are converting from a smaller form of record type back to it, just
4480 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4481 size on both sides. */
4482 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4483 && smaller_form_type_p (etype, type))
4485 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4486 false, false, false, true),
4487 expr);
4488 return build1 (VIEW_CONVERT_EXPR, type, expr);
4491 /* In all other cases of related types, make a NOP_EXPR. */
4492 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4493 return fold_convert (type, expr);
4495 switch (code)
4497 case VOID_TYPE:
4498 return fold_build1 (CONVERT_EXPR, type, expr);
4500 case INTEGER_TYPE:
4501 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4502 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4503 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4504 return unchecked_convert (type, expr, false);
4505 else if (TYPE_BIASED_REPRESENTATION_P (type))
4506 return fold_convert (type,
4507 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4508 convert (TREE_TYPE (type), expr),
4509 convert (TREE_TYPE (type),
4510 TYPE_MIN_VALUE (type))));
4512 /* ... fall through ... */
4514 case ENUMERAL_TYPE:
4515 case BOOLEAN_TYPE:
4516 /* If we are converting an additive expression to an integer type
4517 with lower precision, be wary of the optimization that can be
4518 applied by convert_to_integer. There are 2 problematic cases:
4519 - if the first operand was originally of a biased type,
4520 because we could be recursively called to convert it
4521 to an intermediate type and thus rematerialize the
4522 additive operator endlessly,
4523 - if the expression contains a placeholder, because an
4524 intermediate conversion that changes the sign could
4525 be inserted and thus introduce an artificial overflow
4526 at compile time when the placeholder is substituted. */
4527 if (code == INTEGER_TYPE
4528 && ecode == INTEGER_TYPE
4529 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4530 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4532 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4534 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4535 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4536 || CONTAINS_PLACEHOLDER_P (expr))
4537 return build1 (NOP_EXPR, type, expr);
4540 return fold (convert_to_integer (type, expr));
4542 case POINTER_TYPE:
4543 case REFERENCE_TYPE:
4544 /* If converting between two thin pointers, adjust if needed to account
4545 for differing offsets from the base pointer, depending on whether
4546 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4547 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4549 tree etype_pos
4550 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype))
4551 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4552 : size_zero_node;
4553 tree type_pos
4554 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))
4555 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4556 : size_zero_node;
4557 tree byte_diff = size_diffop (type_pos, etype_pos);
4559 expr = build1 (NOP_EXPR, type, expr);
4560 if (integer_zerop (byte_diff))
4561 return expr;
4563 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4564 fold_convert (sizetype, byte_diff));
4567 /* If converting fat pointer to normal or thin pointer, get the pointer
4568 to the array and then convert it. */
4569 if (TYPE_IS_FAT_POINTER_P (etype))
4570 expr = build_component_ref (expr, TYPE_FIELDS (etype), false);
4572 return fold (convert_to_pointer (type, expr));
4574 case REAL_TYPE:
4575 return fold (convert_to_real (type, expr));
4577 case RECORD_TYPE:
4578 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4580 vec<constructor_elt, va_gc> *v;
4581 vec_alloc (v, 1);
4583 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4584 convert (TREE_TYPE (TYPE_FIELDS (type)),
4585 expr));
4586 return gnat_build_constructor (type, v);
4589 /* ... fall through ... */
4591 case ARRAY_TYPE:
4592 /* In these cases, assume the front-end has validated the conversion.
4593 If the conversion is valid, it will be a bit-wise conversion, so
4594 it can be viewed as an unchecked conversion. */
4595 return unchecked_convert (type, expr, false);
4597 case UNION_TYPE:
4598 /* This is a either a conversion between a tagged type and some
4599 subtype, which we have to mark as a UNION_TYPE because of
4600 overlapping fields or a conversion of an Unchecked_Union. */
4601 return unchecked_convert (type, expr, false);
4603 case UNCONSTRAINED_ARRAY_TYPE:
4604 /* If the input is a VECTOR_TYPE, convert to the representative
4605 array type first. */
4606 if (ecode == VECTOR_TYPE)
4608 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4609 etype = TREE_TYPE (expr);
4610 ecode = TREE_CODE (etype);
4613 /* If EXPR is a constrained array, take its address, convert it to a
4614 fat pointer, and then dereference it. Likewise if EXPR is a
4615 record containing both a template and a constrained array.
4616 Note that a record representing a justified modular type
4617 always represents a packed constrained array. */
4618 if (ecode == ARRAY_TYPE
4619 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4620 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4621 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4622 return
4623 build_unary_op
4624 (INDIRECT_REF, NULL_TREE,
4625 convert_to_fat_pointer (TREE_TYPE (type),
4626 build_unary_op (ADDR_EXPR,
4627 NULL_TREE, expr)));
4629 /* Do something very similar for converting one unconstrained
4630 array to another. */
4631 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4632 return
4633 build_unary_op (INDIRECT_REF, NULL_TREE,
4634 convert (TREE_TYPE (type),
4635 build_unary_op (ADDR_EXPR,
4636 NULL_TREE, expr)));
4637 else
4638 gcc_unreachable ();
4640 case COMPLEX_TYPE:
4641 return fold (convert_to_complex (type, expr));
4643 default:
4644 gcc_unreachable ();
4648 /* Create an expression whose value is that of EXPR converted to the common
4649 index type, which is sizetype. EXPR is supposed to be in the base type
4650 of the GNAT index type. Calling it is equivalent to doing
4652 convert (sizetype, expr)
4654 but we try to distribute the type conversion with the knowledge that EXPR
4655 cannot overflow in its type. This is a best-effort approach and we fall
4656 back to the above expression as soon as difficulties are encountered.
4658 This is necessary to overcome issues that arise when the GNAT base index
4659 type and the GCC common index type (sizetype) don't have the same size,
4660 which is quite frequent on 64-bit architectures. In this case, and if
4661 the GNAT base index type is signed but the iteration type of the loop has
4662 been forced to unsigned, the loop scalar evolution engine cannot compute
4663 a simple evolution for the general induction variables associated with the
4664 array indices, because it will preserve the wrap-around semantics in the
4665 unsigned type of their "inner" part. As a result, many loop optimizations
4666 are blocked.
4668 The solution is to use a special (basic) induction variable that is at
4669 least as large as sizetype, and to express the aforementioned general
4670 induction variables in terms of this induction variable, eliminating
4671 the problematic intermediate truncation to the GNAT base index type.
4672 This is possible as long as the original expression doesn't overflow
4673 and if the middle-end hasn't introduced artificial overflows in the
4674 course of the various simplification it can make to the expression. */
tree
convert_to_index_type (tree expr)
  enum tree_code code = TREE_CODE (expr);
  tree type = TREE_TYPE (expr);

  /* If the type is unsigned, overflow is allowed so we cannot be sure that
     EXPR doesn't overflow.  Keep it simple if optimization is disabled.  */
  if (TYPE_UNSIGNED (type) || !optimize)
    return convert (sizetype, expr);

  /* Otherwise distribute the sizetype conversion into EXPR, stopping as soon
     as we see a node we cannot handle safely.  */
  switch (code)
    case VAR_DECL:
      /* The main effect of the function: replace a loop parameter with its
	 associated special induction variable.  */
      if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
	expr = DECL_INDUCTION_VAR (expr);
      break;

    CASE_CONVERT:
	tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
	/* Bail out as soon as we suspect some sort of type frobbing.  */
	if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
	    || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
	  break;

      /* ... fall through ... */

    case NON_LVALUE_EXPR:
      /* Recurse on the sole operand and rebuild the node in sizetype.  */
      return fold_build1 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Additive/multiplicative nodes: distribute into both operands.  */
      return fold_build2 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COMPOUND_EXPR:
      /* Only the value operand (operand 1) contributes to the result, so
	 the side-effects operand is kept untouched.  */
      return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COND_EXPR:
      /* The condition keeps its own type; only the arms are converted.  */
      return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)),
			  convert_to_index_type (TREE_OPERAND (expr, 2)));

    default:
      break;

  /* Fallback: plain conversion of the (possibly replaced) expression.  */
  return convert (sizetype, expr);
4734 /* Remove all conversions that are done in EXP. This includes converting
4735 from a padded type or to a justified modular type. If TRUE_ADDRESS
4736 is true, always return the address of the containing object even if
4737 the address is not bit-aligned. */
4739 tree
4740 remove_conversions (tree exp, bool true_address)
4742 switch (TREE_CODE (exp))
4744 case CONSTRUCTOR:
4745 if (true_address
4746 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4747 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4748 return
4749 remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
4750 break;
4752 case COMPONENT_REF:
4753 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4754 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4755 break;
4757 CASE_CONVERT:
4758 case VIEW_CONVERT_EXPR:
4759 case NON_LVALUE_EXPR:
4760 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4762 default:
4763 break;
4766 return exp;
4769 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4770 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4771 likewise return an expression pointing to the underlying array. */
tree
maybe_unconstrained_array (tree exp)
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE (type))
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	  /* Remember these flags so they can be restored on the rebuilt
	     dereference below.  */
	  const bool read_only = TREE_READONLY (exp);
	  const bool no_trap = TREE_THIS_NOTRAP (exp);

	  /* Operand 0 is the fat pointer underlying the reference.  */
	  exp = TREE_OPERAND (exp, 0);
	  type = TREE_TYPE (exp);

	  if (TREE_CODE (exp) == COND_EXPR)
	      /* Push the dereference into both arms of the conditional so
		 the result is a COND_EXPR of array values.  */
	      tree op1
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 1),
						       TYPE_FIELDS (type),
						       false));
	      tree op2
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 2),
						       TYPE_FIELDS (type),
						       false));

	      exp = build3 (COND_EXPR,
			    TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
			    TREE_OPERAND (exp, 0), op1, op2);
	  else
	      /* Fetch the array pointer field of the fat pointer and
		 dereference it, then restore the saved flags.  */
	      exp = build_unary_op (INDIRECT_REF, NULL_TREE,
				    build_component_ref (exp,
							 TYPE_FIELDS (type),
							 false));
	      TREE_READONLY (exp) = read_only;
	      TREE_THIS_NOTRAP (exp) = no_trap;
      else if (code == NULL_EXPR)
	/* Propagate the NULL_EXPR with the type of the underlying array.  */
	exp = build1 (NULL_EXPR,
		      TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
		      TREE_OPERAND (exp, 0));
      break;

    case RECORD_TYPE:
      /* If this is a padded type and it contains a template, convert to the
	 unpadded type first.  */
      if (TYPE_PADDING_P (type)
	  && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
	  && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
	  exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	  code = TREE_CODE (exp);
	  type = TREE_TYPE (exp);

      if (TYPE_CONTAINS_TEMPLATE_P (type))
	  /* If the array initializer is a box, return NULL_TREE.  */
	  if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
	    return NULL_TREE;

	  /* The array proper is the second field, after the template.  */
	  exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
				     false);
	  type = TREE_TYPE (exp);

	  /* If the array type is padded, convert to the unpadded type.  */
	  if (TYPE_IS_PADDING_P (type))
	    exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
      break;

    default:
      break;

  return exp;
4859 /* Return true if EXPR is an expression that can be folded as an operand
4860 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4862 static bool
4863 can_fold_for_view_convert_p (tree expr)
4865 tree t1, t2;
4867 /* The folder will fold NOP_EXPRs between integral types with the same
4868 precision (in the middle-end's sense). We cannot allow it if the
4869 types don't have the same precision in the Ada sense as well. */
4870 if (TREE_CODE (expr) != NOP_EXPR)
4871 return true;
4873 t1 = TREE_TYPE (expr);
4874 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4876 /* Defer to the folder for non-integral conversions. */
4877 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4878 return true;
4880 /* Only fold conversions that preserve both precisions. */
4881 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4882 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4883 return true;
4885 return false;
4888 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4889 If NOTRUNC_P is true, truncation operations should be suppressed.
4891 Special care is required with (source or target) integral types whose
4892 precision is not equal to their size, to make sure we fetch or assign
4893 the value bits whose location might depend on the endianness, e.g.
4895 Rmsize : constant := 8;
4896 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4898 type Bit_Array is array (1 .. Rmsize) of Boolean;
4899 pragma Pack (Bit_Array);
4901 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4903 Value : Int := 2#1000_0001#;
4904 Vbits : Bit_Array := To_Bit_Array (Value);
4906 we expect the 8 bits at Vbits'Address to always contain Value, while
4907 their original location depends on the endianness, at Value'Address
4908 on a little-endian architecture but not on a big-endian one. */
tree
unchecked_convert (tree type, tree expr, bool notrunc_p)
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);
  tree tem;
  int c;

  /* If the expression is already of the right type, we are done. */
  if (etype == type)
    return expr;

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if (((INTEGRAL_TYPE_P (type)
	|| (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
	|| (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
       && (INTEGRAL_TYPE_P (etype)
	   || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
	   || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
      || code == UNCONSTRAINED_ARRAY_TYPE)
      /* First strip a biased representation from the source: make an
	 unbiased copy of the type and reinterpret the bits in it.  */
      if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
	  tree ntype = copy_type (etype);
	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (NOP_EXPR, ntype, expr);

      /* Likewise go through an unbiased copy of a biased target type,
	 then reinterpret the converted bits in the biased type.  */
      if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
	  tree rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	  expr = convert (rtype, expr);
	  expr = build1 (NOP_EXPR, type, expr);
      else
	expr = convert (type, expr);

  /* If we are converting to an integral type whose precision is not equal
     to its size, first unchecked convert to a record type that contains a
     field of the given precision.  Then extract the result from the field.

     There is a subtlety if the source type is an aggregate type with reverse
     storage order because its representation is not contiguous in the native
     storage order, i.e. a direct unchecked conversion to an integral type
     with N bits of precision cannot read the first N bits of the aggregate
     type.  To overcome it, we do an unchecked conversion to an integral type
     with reverse storage order and return the resulting value.  This also
     ensures that the result of the unchecked conversion doesn't depend on
     the endianness of the target machine, but only on the storage order of
     the aggregate type.

     Finally, for the sake of consistency, we do the unchecked conversion
     to an integral type with reverse storage order as soon as the source
     type is an aggregate type with reverse storage order, even if there
     are no considerations of precision or size involved.  */
  else if (INTEGRAL_TYPE_P (type)
	   && TYPE_RM_SIZE (type)
	   && (tree_int_cst_compare (TYPE_RM_SIZE (type),
				     TYPE_SIZE (type)) < 0
	       || (AGGREGATE_TYPE_P (etype)
		   && TYPE_REVERSE_STORAGE_ORDER (etype))))
      tree rec_type = make_node (RECORD_TYPE);
      unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
      tree field_type, field;

      /* Propagate the storage order of the source aggregate so the field
	 is fetched with the same order.  */
      if (AGGREGATE_TYPE_P (etype))
	TYPE_REVERSE_STORAGE_ORDER (rec_type)
	  = TYPE_REVERSE_STORAGE_ORDER (etype);

      if (TYPE_UNSIGNED (type))
	field_type = make_unsigned_type (prec);
      else
	field_type = make_signed_type (prec);
      SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
				 NULL_TREE, bitsize_zero_node, 1, 0);

      finish_record_type (rec_type, field, 1, false);

      /* Convert to the record, fetch the field, then reinterpret in the
	 requested type (same precision, so a NOP suffices).  */
      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, field, false);
      expr = fold_build1 (NOP_EXPR, type, expr);

  /* Similarly if we are converting from an integral type whose precision is
     not equal to its size, first copy into a field of the given precision
     and unchecked convert the record type.

     The same considerations as above apply if the target type is an aggregate
     type with reverse storage order and we also proceed similarly.  */
  else if (INTEGRAL_TYPE_P (etype)
	   && TYPE_RM_SIZE (etype)
	   && (tree_int_cst_compare (TYPE_RM_SIZE (etype),
				     TYPE_SIZE (etype)) < 0
	       || (AGGREGATE_TYPE_P (type)
		   && TYPE_REVERSE_STORAGE_ORDER (type))))
      tree rec_type = make_node (RECORD_TYPE);
      unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, 1);
      tree field_type, field;

      /* Propagate the storage order of the target aggregate so the field
	 is stored with the same order.  */
      if (AGGREGATE_TYPE_P (type))
	TYPE_REVERSE_STORAGE_ORDER (rec_type)
	  = TYPE_REVERSE_STORAGE_ORDER (type);

      if (TYPE_UNSIGNED (etype))
	field_type = make_unsigned_type (prec);
      else
	field_type = make_signed_type (prec);
      SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
				 NULL_TREE, bitsize_zero_node, 1, 0);

      finish_record_type (rec_type, field, 1, false);

      /* Wrap the value in a one-field record, then convert the record.  */
      expr = fold_build1 (NOP_EXPR, field_type, expr);
      CONSTRUCTOR_APPEND_ELT (v, field, expr);
      expr = gnat_build_constructor (rec_type, v);
      expr = unchecked_convert (type, expr, notrunc_p);

  /* If we are converting from a scalar type to a type with a different size,
     we need to pad to have the same size on both sides.

     ??? We cannot do it unconditionally because unchecked conversions are
     used liberally by the front-end to implement polymorphism, e.g. in:

       S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
       return p___size__4 (p__object!(S191s.all));

     so we skip all expressions that are references.  */
  else if (!REFERENCE_CLASS_P (expr)
	   && !AGGREGATE_TYPE_P (etype)
	   && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	   && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
      /* C < 0 means the source is smaller: pad the source up to the target
	 size; otherwise pad the target up to the source size and extract.  */
      if (c < 0)
	  expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
					  false, false, false, true),
			  expr);
	  expr = unchecked_convert (type, expr, notrunc_p);
      else
	  tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
					  false, false, false, true);
	  expr = unchecked_convert (rec_type, expr, notrunc_p);
	  expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);

  /* We have a special case when we are converting between two unconstrained
     array types.  In that case, take the address, convert the fat pointer
     types, and dereference.  */
  else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));

  /* Another special case is when we are converting to a vector type from its
     representative array type; this a regular conversion.  */
  else if (code == VECTOR_TYPE
	   && ecode == ARRAY_TYPE
	   && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
				       etype))
    expr = convert (type, expr);

  /* And, if the array type is not the representative, we try to build an
     intermediate vector type of which the array type is the representative
     and to do the unchecked conversion between the vector types, in order
     to enable further simplifications in the middle-end.  */
  else if (code == VECTOR_TYPE
	   && ecode == ARRAY_TYPE
	   && (tem = build_vector_type_for_array (etype, NULL_TREE)))
      expr = convert (tem, expr);
      return unchecked_convert (type, expr, notrunc_p);

  /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
     the alignment of the CONSTRUCTOR to speed up the copy operation.  */
  else if (TREE_CODE (expr) == CONSTRUCTOR
	   && code == RECORD_TYPE
	   && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
      expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
				      Empty, false, false, false, true),
		      expr);
      return unchecked_convert (type, expr, notrunc_p);

  /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression.  */
  else
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      if (can_fold_for_view_convert_p (expr))
	expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
      else
	expr = build1 (VIEW_CONVERT_EXPR, type, expr);

  /* If the result is an integral type whose precision is not equal to its
     size, sign- or zero-extend the result.  We need not do this if the input
     is an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned, or if the
     lower bound is constant and non-negative, see E_Signed_Integer_Subtype
     case of gnat_to_gnu_entity.  NOTRUNC_P suppresses this truncation.  */
  if (!notrunc_p
      && INTEGRAL_TYPE_P (type)
      && TYPE_RM_SIZE (type)
      && tree_int_cst_compare (TYPE_RM_SIZE (type), TYPE_SIZE (type)) < 0
      && !(INTEGRAL_TYPE_P (etype)
	   && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
	   && tree_int_cst_compare (TYPE_RM_SIZE (type),
				    TYPE_RM_SIZE (etype)
				    ? TYPE_RM_SIZE (etype)
				    : TYPE_SIZE (etype)) == 0)
      && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
      && !((TYPE_UNSIGNED (type)
	    || (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
		&& tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0))
	   && TYPE_UNSIGNED (etype)))
      /* Shift left then right by (size - precision) bits in a type of full
	 size; the right shift sign- or zero-extends per the signedness.  */
      tree base_type
	= gnat_type_for_size (TREE_INT_CST_LOW (TYPE_SIZE (type)),
			      TYPE_UNSIGNED (type));
      tree shift_expr
	= convert (base_type,
		   size_binop (MINUS_EXPR,
			       TYPE_SIZE (type), TYPE_RM_SIZE (type)));
      expr
	= convert (type,
		   build_binary_op (RSHIFT_EXPR, base_type,
				    build_binary_op (LSHIFT_EXPR, base_type,
						     convert (base_type, expr),
						     shift_expr),
				    shift_expr));

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
			   OEP_ONLY_CONST))
    TREE_CONSTANT (expr) = 0;

  return expr;
5182 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5183 the latter being a record type as predicated by Is_Record_Type. */
5185 enum tree_code
5186 tree_code_for_record_type (Entity_Id gnat_type)
5188 Node_Id component_list, component;
5190 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5191 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5192 if (!Is_Unchecked_Union (gnat_type))
5193 return RECORD_TYPE;
5195 gnat_type = Implementation_Base_Type (gnat_type);
5196 component_list
5197 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5199 for (component = First_Non_Pragma (Component_Items (component_list));
5200 Present (component);
5201 component = Next_Non_Pragma (component))
5202 if (Ekind (Defining_Entity (component)) == E_Component)
5203 return RECORD_TYPE;
5205 return UNION_TYPE;
5208 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5209 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5210 according to the presence of an alignment clause on the type or, if it
5211 is an array, on the component type. */
5213 bool
5214 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5216 gnat_type = Underlying_Type (gnat_type);
5218 *align_clause = Present (Alignment_Clause (gnat_type));
5220 if (Is_Array_Type (gnat_type))
5222 gnat_type = Underlying_Type (Component_Type (gnat_type));
5223 if (Present (Alignment_Clause (gnat_type)))
5224 *align_clause = true;
5227 if (!Is_Floating_Point_Type (gnat_type))
5228 return false;
5230 if (UI_To_Int (Esize (gnat_type)) != 64)
5231 return false;
5233 return true;
5236 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5237 size is greater or equal to 64 bits, or an array of such a type. Set
5238 ALIGN_CLAUSE according to the presence of an alignment clause on the
5239 type or, if it is an array, on the component type. */
5241 bool
5242 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5244 gnat_type = Underlying_Type (gnat_type);
5246 *align_clause = Present (Alignment_Clause (gnat_type));
5248 if (Is_Array_Type (gnat_type))
5250 gnat_type = Underlying_Type (Component_Type (gnat_type));
5251 if (Present (Alignment_Clause (gnat_type)))
5252 *align_clause = true;
5255 if (!Is_Scalar_Type (gnat_type))
5256 return false;
5258 if (UI_To_Int (Esize (gnat_type)) < 64)
5259 return false;
5261 return true;
5264 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5265 component of an aggregate type. */
5267 bool
5268 type_for_nonaliased_component_p (tree gnu_type)
5270 /* If the type is passed by reference, we may have pointers to the
5271 component so it cannot be made non-aliased. */
5272 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5273 return false;
5275 /* We used to say that any component of aggregate type is aliased
5276 because the front-end may take 'Reference of it. The front-end
5277 has been enhanced in the meantime so as to use a renaming instead
5278 in most cases, but the back-end can probably take the address of
5279 such a component too so we go for the conservative stance.
5281 For instance, we might need the address of any array type, even
5282 if normally passed by copy, to construct a fat pointer if the
5283 component is used as an actual for an unconstrained formal.
5285 Likewise for record types: even if a specific record subtype is
5286 passed by copy, the parent type might be passed by ref (e.g. if
5287 it's of variable size) and we might take the address of a child
5288 component to pass to a parent formal. We have no way to check
5289 for such conditions here. */
5290 if (AGGREGATE_TYPE_P (gnu_type))
5291 return false;
5293 return true;
5296 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5298 bool
5299 smaller_form_type_p (tree type, tree orig_type)
5301 tree size, osize;
5303 /* We're not interested in variants here. */
5304 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5305 return false;
5307 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5308 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5309 return false;
5311 size = TYPE_SIZE (type);
5312 osize = TYPE_SIZE (orig_type);
5314 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5315 return false;
5317 return tree_int_cst_lt (size, osize) != 0;
5320 /* Perform final processing on global declarations. */
5322 static GTY (()) tree dummy_global;
/* Perform final processing on global declarations: emit debug info for
   global types, compile global variables and emit imported declarations.
   Langhook called once at the end of compilation.  */

void
gnat_write_global_declarations (void)
  unsigned int i;
  tree iter;

  /* If we have declared types as used at the global level, insert them in
     the global hash table.  We use a dummy variable for this purpose, but
     we need to build it unconditionally to avoid -fcompare-debug issues.  */
  if (first_global_object_name)
      struct varpool_node *node;
      char *label;

      ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
      dummy_global
	= build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
		      void_type_node);
      /* DECL_HARD_REGISTER keeps the dummy out of memory; force_output
	 makes sure the varpool node survives until the debug info is
	 generated.  */
      DECL_HARD_REGISTER (dummy_global) = 1;
      TREE_STATIC (dummy_global) = 1;
      node = varpool_node::get_create (dummy_global);
      node->definition = 1;
      node->force_output = 1;

      /* Drain the types-used-by queue, attributing every type to the
	 dummy variable.  */
      if (types_used_by_cur_var_decl)
	while (!types_used_by_cur_var_decl->is_empty ())
	    tree t = types_used_by_cur_var_decl->pop ();
	    types_used_by_var_decl_insert (t, dummy_global);

  /* Output debug information for all global type declarations first.  This
     ensures that global types whose compilation hasn't been finalized yet,
     for example pointers to Taft amendment types, have their compilation
     finalized in the right context.  */
  FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
    if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
      debug_hooks->type_decl (iter, false);

  /* Then output the global variables.  We need to do that after the debug
     information for global types is emitted so that they are finalized.  */
  FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
    if (TREE_CODE (iter) == VAR_DECL)
      rest_of_decl_compilation (iter, true, 0);

  /* Output the imported modules/declarations.  In GNAT, these are only
     materializing subprogram.  */
  FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
    if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
      debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
					    DECL_CONTEXT (iter), 0);
5378 /* ************************************************************************
5379 * * GCC builtins support *
5380 * ************************************************************************ */
5382 /* The general scheme is fairly simple:
5384 For each builtin function/type to be declared, gnat_install_builtins calls
5385 internal facilities which eventually get to gnat_pushdecl, which in turn
5386 tracks the so declared builtin function decls in the 'builtin_decls' global
5387 datastructure. When an Intrinsic subprogram declaration is processed, we
5388 search this global datastructure to retrieve the associated BUILT_IN DECL
5389 node. */
5391 /* Search the chain of currently available builtin declarations for a node
5392 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5393 found, if any, or NULL_TREE otherwise. */
5394 tree
5395 builtin_decl_for (tree name)
5397 unsigned i;
5398 tree decl;
5400 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5401 if (DECL_NAME (decl) == name)
5402 return decl;
5404 return NULL_TREE;
5407 /* The code below eventually exposes gnat_install_builtins, which declares
5408 the builtin types and functions we might need, either internally or as
5409 user accessible facilities.
5411 ??? This is a first implementation shot, still in rough shape. It is
5412 heavily inspired from the "C" family implementation, with chunks copied
5413 verbatim from there.
5415 Two obvious improvement candidates are:
5416 o Use a more efficient name/decl mapping scheme
5417 o Devise a middle-end infrastructure to avoid having to copy
5418 pieces between front-ends. */
5420 /* ----------------------------------------------------------------------- *
5421 * BUILTIN ELEMENTARY TYPES *
5422 * ----------------------------------------------------------------------- */
5424 /* Standard data types to be used in builtin argument declarations. */
5426 enum c_tree_index
5428 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5429 CTI_STRING_TYPE,
5430 CTI_CONST_STRING_TYPE,
5432 CTI_MAX
5435 static tree c_global_trees[CTI_MAX];
5437 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5438 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5439 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5441 /* ??? In addition some attribute handlers, we currently don't support a
5442 (small) number of builtin-types, which in turns inhibits support for a
5443 number of builtin functions. */
5444 #define wint_type_node void_type_node
5445 #define intmax_type_node void_type_node
5446 #define uintmax_type_node void_type_node
5448 /* Build the void_list_node (void_type_node having been created). */
5450 static tree
5451 build_void_list_node (void)
5453 tree t = build_tree_list (NULL_TREE, void_type_node);
5454 return t;
5457 /* Used to help initialize the builtin-types.def table. When a type of
5458 the correct size doesn't exist, use error_mark_node instead of NULL.
5459 The later results in segfaults even when a decl using the type doesn't
5460 get invoked. */
5462 static tree
5463 builtin_type_for_size (int size, bool unsignedp)
5465 tree type = gnat_type_for_size (size, unsignedp);
5466 return type ? type : error_mark_node;
5469 /* Build/push the elementary type decls that builtin functions/types
5470 will need. */
5472 static void
5473 install_builtin_elementary_types (void)
5475 signed_size_type_node = gnat_signed_type_for (size_type_node);
5476 pid_type_node = integer_type_node;
5477 void_list_node = build_void_list_node ();
5479 string_type_node = build_pointer_type (char_type_node);
5480 const_string_type_node
5481 = build_pointer_type (build_qualified_type
5482 (char_type_node, TYPE_QUAL_CONST));
5485 /* ----------------------------------------------------------------------- *
5486 * BUILTIN FUNCTION TYPES *
5487 * ----------------------------------------------------------------------- */
5489 /* Now, builtin function types per se. */
5491 enum c_builtin_type
5493 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5494 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5495 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5496 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5497 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5498 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5499 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5500 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5501 ARG6) NAME,
5502 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5503 ARG6, ARG7) NAME,
5504 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5505 ARG6, ARG7, ARG8) NAME,
5506 #define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5507 ARG6, ARG7, ARG8, ARG9) NAME,
5508 #define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5509 ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
5510 #define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5511 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
5512 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5513 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5514 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5515 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5516 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5517 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5518 NAME,
5519 #define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5520 ARG6) NAME,
5521 #define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5522 ARG6, ARG7) NAME,
5523 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5524 #include "builtin-types.def"
5525 #undef DEF_PRIMITIVE_TYPE
5526 #undef DEF_FUNCTION_TYPE_0
5527 #undef DEF_FUNCTION_TYPE_1
5528 #undef DEF_FUNCTION_TYPE_2
5529 #undef DEF_FUNCTION_TYPE_3
5530 #undef DEF_FUNCTION_TYPE_4
5531 #undef DEF_FUNCTION_TYPE_5
5532 #undef DEF_FUNCTION_TYPE_6
5533 #undef DEF_FUNCTION_TYPE_7
5534 #undef DEF_FUNCTION_TYPE_8
5535 #undef DEF_FUNCTION_TYPE_9
5536 #undef DEF_FUNCTION_TYPE_10
5537 #undef DEF_FUNCTION_TYPE_11
5538 #undef DEF_FUNCTION_TYPE_VAR_0
5539 #undef DEF_FUNCTION_TYPE_VAR_1
5540 #undef DEF_FUNCTION_TYPE_VAR_2
5541 #undef DEF_FUNCTION_TYPE_VAR_3
5542 #undef DEF_FUNCTION_TYPE_VAR_4
5543 #undef DEF_FUNCTION_TYPE_VAR_5
5544 #undef DEF_FUNCTION_TYPE_VAR_6
5545 #undef DEF_FUNCTION_TYPE_VAR_7
5546 #undef DEF_POINTER_TYPE
5547 BT_LAST
5550 typedef enum c_builtin_type builtin_type;
5552 /* A temporary array used in communication with def_fn_type. */
5553 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5555 /* A helper function for install_builtin_types. Build function type
5556 for DEF with return type RET and N arguments. If VAR is true, then the
5557 function should be variadic after those N arguments.
5559 Takes special care not to ICE if any of the types involved are
5560 error_mark_node, which indicates that said type is not in fact available
5561 (see builtin_type_for_size). In which case the function type as a whole
5562 should be error_mark_node. */
5564 static void
5565 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5567 tree t;
5568 tree *args = XALLOCAVEC (tree, n);
5569 va_list list;
5570 int i;
5572 va_start (list, n);
5573 for (i = 0; i < n; ++i)
5575 builtin_type a = (builtin_type) va_arg (list, int);
5576 t = builtin_types[a];
5577 if (t == error_mark_node)
5578 goto egress;
5579 args[i] = t;
5582 t = builtin_types[ret];
5583 if (t == error_mark_node)
5584 goto egress;
5585 if (var)
5586 t = build_varargs_function_type_array (t, n, args);
5587 else
5588 t = build_function_type_array (t, n, args);
5590 egress:
5591 builtin_types[def] = t;
5592 va_end (list);
5595 /* Build the builtin function types and install them in the builtin_types
5596 array for later use in builtin function decls. */
5598 static void
5599 install_builtin_function_types (void)
5601 tree va_list_ref_type_node;
5602 tree va_list_arg_type_node;
5604 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5606 va_list_arg_type_node = va_list_ref_type_node =
5607 build_pointer_type (TREE_TYPE (va_list_type_node));
5609 else
5611 va_list_arg_type_node = va_list_type_node;
5612 va_list_ref_type_node = build_reference_type (va_list_type_node);
5615 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5616 builtin_types[ENUM] = VALUE;
5617 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5618 def_fn_type (ENUM, RETURN, 0, 0);
5619 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5620 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5621 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5622 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5623 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5624 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5625 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5626 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5627 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5628 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5629 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5630 ARG6) \
5631 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5632 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5633 ARG6, ARG7) \
5634 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5635 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5636 ARG6, ARG7, ARG8) \
5637 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5638 ARG7, ARG8);
5639 #define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5640 ARG6, ARG7, ARG8, ARG9) \
5641 def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5642 ARG7, ARG8, ARG9);
5643 #define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5644 ARG6, ARG7, ARG8, ARG9, ARG10) \
5645 def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5646 ARG7, ARG8, ARG9, ARG10);
5647 #define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5648 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
5649 def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5650 ARG7, ARG8, ARG9, ARG10, ARG11);
5651 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5652 def_fn_type (ENUM, RETURN, 1, 0);
5653 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5654 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5655 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5656 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5657 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5658 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5659 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5660 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5661 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5662 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5663 #define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5664 ARG6) \
5665 def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5666 #define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5667 ARG6, ARG7) \
5668 def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5669 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5670 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5672 #include "builtin-types.def"
5674 #undef DEF_PRIMITIVE_TYPE
5675 #undef DEF_FUNCTION_TYPE_0
5676 #undef DEF_FUNCTION_TYPE_1
5677 #undef DEF_FUNCTION_TYPE_2
5678 #undef DEF_FUNCTION_TYPE_3
5679 #undef DEF_FUNCTION_TYPE_4
5680 #undef DEF_FUNCTION_TYPE_5
5681 #undef DEF_FUNCTION_TYPE_6
5682 #undef DEF_FUNCTION_TYPE_7
5683 #undef DEF_FUNCTION_TYPE_8
5684 #undef DEF_FUNCTION_TYPE_9
5685 #undef DEF_FUNCTION_TYPE_10
5686 #undef DEF_FUNCTION_TYPE_11
5687 #undef DEF_FUNCTION_TYPE_VAR_0
5688 #undef DEF_FUNCTION_TYPE_VAR_1
5689 #undef DEF_FUNCTION_TYPE_VAR_2
5690 #undef DEF_FUNCTION_TYPE_VAR_3
5691 #undef DEF_FUNCTION_TYPE_VAR_4
5692 #undef DEF_FUNCTION_TYPE_VAR_5
5693 #undef DEF_FUNCTION_TYPE_VAR_6
5694 #undef DEF_FUNCTION_TYPE_VAR_7
5695 #undef DEF_POINTER_TYPE
5696 builtin_types[(int) BT_LAST] = NULL_TREE;
5699 /* ----------------------------------------------------------------------- *
5700 * BUILTIN ATTRIBUTES *
5701 * ----------------------------------------------------------------------- */
5703 enum built_in_attribute
5705 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5706 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5707 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
5708 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5709 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5710 #include "builtin-attrs.def"
5711 #undef DEF_ATTR_NULL_TREE
5712 #undef DEF_ATTR_INT
5713 #undef DEF_ATTR_STRING
5714 #undef DEF_ATTR_IDENT
5715 #undef DEF_ATTR_TREE_LIST
5716 ATTR_LAST
5719 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5721 static void
5722 install_builtin_attributes (void)
5724 /* Fill in the built_in_attributes array. */
5725 #define DEF_ATTR_NULL_TREE(ENUM) \
5726 built_in_attributes[(int) ENUM] = NULL_TREE;
5727 #define DEF_ATTR_INT(ENUM, VALUE) \
5728 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5729 #define DEF_ATTR_STRING(ENUM, VALUE) \
5730 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
5731 #define DEF_ATTR_IDENT(ENUM, STRING) \
5732 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5733 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5734 built_in_attributes[(int) ENUM] \
5735 = tree_cons (built_in_attributes[(int) PURPOSE], \
5736 built_in_attributes[(int) VALUE], \
5737 built_in_attributes[(int) CHAIN]);
5738 #include "builtin-attrs.def"
5739 #undef DEF_ATTR_NULL_TREE
5740 #undef DEF_ATTR_INT
5741 #undef DEF_ATTR_STRING
5742 #undef DEF_ATTR_IDENT
5743 #undef DEF_ATTR_TREE_LIST
5746 /* Handle a "const" attribute; arguments as in
5747 struct attribute_spec.handler. */
5749 static tree
5750 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5751 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5752 bool *no_add_attrs)
5754 if (TREE_CODE (*node) == FUNCTION_DECL)
5755 TREE_READONLY (*node) = 1;
5756 else
5757 *no_add_attrs = true;
5759 return NULL_TREE;
5762 /* Handle a "nothrow" attribute; arguments as in
5763 struct attribute_spec.handler. */
5765 static tree
5766 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5767 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5768 bool *no_add_attrs)
5770 if (TREE_CODE (*node) == FUNCTION_DECL)
5771 TREE_NOTHROW (*node) = 1;
5772 else
5773 *no_add_attrs = true;
5775 return NULL_TREE;
5778 /* Handle a "pure" attribute; arguments as in
5779 struct attribute_spec.handler. */
5781 static tree
5782 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5783 int ARG_UNUSED (flags), bool *no_add_attrs)
5785 if (TREE_CODE (*node) == FUNCTION_DECL)
5786 DECL_PURE_P (*node) = 1;
5787 /* TODO: support types. */
5788 else
5790 warning (OPT_Wattributes, "%qs attribute ignored",
5791 IDENTIFIER_POINTER (name));
5792 *no_add_attrs = true;
5795 return NULL_TREE;
5798 /* Handle a "no vops" attribute; arguments as in
5799 struct attribute_spec.handler. */
5801 static tree
5802 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5803 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5804 bool *ARG_UNUSED (no_add_attrs))
5806 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5807 DECL_IS_NOVOPS (*node) = 1;
5808 return NULL_TREE;
5811 /* Helper for nonnull attribute handling; fetch the operand number
5812 from the attribute argument list. */
5814 static bool
5815 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5817 /* Verify the arg number is a constant. */
5818 if (!tree_fits_uhwi_p (arg_num_expr))
5819 return false;
5821 *valp = TREE_INT_CST_LOW (arg_num_expr);
5822 return true;
5825 /* Handle the "nonnull" attribute. */
5826 static tree
5827 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5828 tree args, int ARG_UNUSED (flags),
5829 bool *no_add_attrs)
5831 tree type = *node;
5832 unsigned HOST_WIDE_INT attr_arg_num;
5834 /* If no arguments are specified, all pointer arguments should be
5835 non-null. Verify a full prototype is given so that the arguments
5836 will have the correct types when we actually check them later.
5837 Avoid diagnosing type-generic built-ins since those have no
5838 prototype. */
5839 if (!args)
5841 if (!prototype_p (type)
5842 && (!TYPE_ATTRIBUTES (type)
5843 || !lookup_attribute ("type generic", TYPE_ATTRIBUTES (type))))
5845 error ("nonnull attribute without arguments on a non-prototype");
5846 *no_add_attrs = true;
5848 return NULL_TREE;
5851 /* Argument list specified. Verify that each argument number references
5852 a pointer argument. */
5853 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5855 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5857 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5859 error ("nonnull argument has invalid operand number (argument %lu)",
5860 (unsigned long) attr_arg_num);
5861 *no_add_attrs = true;
5862 return NULL_TREE;
5865 if (prototype_p (type))
5867 function_args_iterator iter;
5868 tree argument;
5870 function_args_iter_init (&iter, type);
5871 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
5873 argument = function_args_iter_cond (&iter);
5874 if (!argument || ck_num == arg_num)
5875 break;
5878 if (!argument
5879 || TREE_CODE (argument) == VOID_TYPE)
5881 error ("nonnull argument with out-of-range operand number "
5882 "(argument %lu, operand %lu)",
5883 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5884 *no_add_attrs = true;
5885 return NULL_TREE;
5888 if (TREE_CODE (argument) != POINTER_TYPE)
5890 error ("nonnull argument references non-pointer operand "
5891 "(argument %lu, operand %lu)",
5892 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5893 *no_add_attrs = true;
5894 return NULL_TREE;
5899 return NULL_TREE;
5902 /* Handle a "sentinel" attribute. */
5904 static tree
5905 handle_sentinel_attribute (tree *node, tree name, tree args,
5906 int ARG_UNUSED (flags), bool *no_add_attrs)
5908 if (!prototype_p (*node))
5910 warning (OPT_Wattributes,
5911 "%qs attribute requires prototypes with named arguments",
5912 IDENTIFIER_POINTER (name));
5913 *no_add_attrs = true;
5915 else
5917 if (!stdarg_p (*node))
5919 warning (OPT_Wattributes,
5920 "%qs attribute only applies to variadic functions",
5921 IDENTIFIER_POINTER (name));
5922 *no_add_attrs = true;
5926 if (args)
5928 tree position = TREE_VALUE (args);
5930 if (TREE_CODE (position) != INTEGER_CST)
5932 warning (0, "requested position is not an integer constant");
5933 *no_add_attrs = true;
5935 else
5937 if (tree_int_cst_lt (position, integer_zero_node))
5939 warning (0, "requested position is less than zero");
5940 *no_add_attrs = true;
5945 return NULL_TREE;
5948 /* Handle a "noreturn" attribute; arguments as in
5949 struct attribute_spec.handler. */
5951 static tree
5952 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5953 int ARG_UNUSED (flags), bool *no_add_attrs)
5955 tree type = TREE_TYPE (*node);
5957 /* See FIXME comment in c_common_attribute_table. */
5958 if (TREE_CODE (*node) == FUNCTION_DECL)
5959 TREE_THIS_VOLATILE (*node) = 1;
5960 else if (TREE_CODE (type) == POINTER_TYPE
5961 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5962 TREE_TYPE (*node)
5963 = build_pointer_type
5964 (build_type_variant (TREE_TYPE (type),
5965 TYPE_READONLY (TREE_TYPE (type)), 1));
5966 else
5968 warning (OPT_Wattributes, "%qs attribute ignored",
5969 IDENTIFIER_POINTER (name));
5970 *no_add_attrs = true;
5973 return NULL_TREE;
5976 /* Handle a "noinline" attribute; arguments as in
5977 struct attribute_spec.handler. */
5979 static tree
5980 handle_noinline_attribute (tree *node, tree name,
5981 tree ARG_UNUSED (args),
5982 int ARG_UNUSED (flags), bool *no_add_attrs)
5984 if (TREE_CODE (*node) == FUNCTION_DECL)
5986 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
5988 warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
5989 "with attribute %qs", name, "always_inline");
5990 *no_add_attrs = true;
5992 else
5993 DECL_UNINLINABLE (*node) = 1;
5995 else
5997 warning (OPT_Wattributes, "%qE attribute ignored", name);
5998 *no_add_attrs = true;
6001 return NULL_TREE;
6004 /* Handle a "noclone" attribute; arguments as in
6005 struct attribute_spec.handler. */
6007 static tree
6008 handle_noclone_attribute (tree *node, tree name,
6009 tree ARG_UNUSED (args),
6010 int ARG_UNUSED (flags), bool *no_add_attrs)
6012 if (TREE_CODE (*node) != FUNCTION_DECL)
6014 warning (OPT_Wattributes, "%qE attribute ignored", name);
6015 *no_add_attrs = true;
6018 return NULL_TREE;
6021 /* Handle a "leaf" attribute; arguments as in
6022 struct attribute_spec.handler. */
6024 static tree
6025 handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6026 int ARG_UNUSED (flags), bool *no_add_attrs)
6028 if (TREE_CODE (*node) != FUNCTION_DECL)
6030 warning (OPT_Wattributes, "%qE attribute ignored", name);
6031 *no_add_attrs = true;
6033 if (!TREE_PUBLIC (*node))
6035 warning (OPT_Wattributes, "%qE attribute has no effect", name);
6036 *no_add_attrs = true;
6039 return NULL_TREE;
6042 /* Handle a "always_inline" attribute; arguments as in
6043 struct attribute_spec.handler. */
6045 static tree
6046 handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6047 int ARG_UNUSED (flags), bool *no_add_attrs)
6049 if (TREE_CODE (*node) == FUNCTION_DECL)
6051 /* Set the attribute and mark it for disregarding inline limits. */
6052 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
6054 else
6056 warning (OPT_Wattributes, "%qE attribute ignored", name);
6057 *no_add_attrs = true;
6060 return NULL_TREE;
6063 /* Handle a "malloc" attribute; arguments as in
6064 struct attribute_spec.handler. */
6066 static tree
6067 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6068 int ARG_UNUSED (flags), bool *no_add_attrs)
6070 if (TREE_CODE (*node) == FUNCTION_DECL
6071 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6072 DECL_IS_MALLOC (*node) = 1;
6073 else
6075 warning (OPT_Wattributes, "%qs attribute ignored",
6076 IDENTIFIER_POINTER (name));
6077 *no_add_attrs = true;
6080 return NULL_TREE;
6083 /* Fake handler for attributes we don't properly support. */
6085 tree
6086 fake_attribute_handler (tree * ARG_UNUSED (node),
6087 tree ARG_UNUSED (name),
6088 tree ARG_UNUSED (args),
6089 int ARG_UNUSED (flags),
6090 bool * ARG_UNUSED (no_add_attrs))
6092 return NULL_TREE;
6095 /* Handle a "type_generic" attribute. */
6097 static tree
6098 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6099 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6100 bool * ARG_UNUSED (no_add_attrs))
6102 /* Ensure we have a function type. */
6103 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
6105 /* Ensure we have a variadic function. */
6106 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
6108 return NULL_TREE;
6111 /* Handle a "vector_size" attribute; arguments as in
6112 struct attribute_spec.handler. */
6114 static tree
6115 handle_vector_size_attribute (tree *node, tree name, tree args,
6116 int ARG_UNUSED (flags), bool *no_add_attrs)
6118 tree type = *node;
6119 tree vector_type;
6121 *no_add_attrs = true;
6123 /* We need to provide for vector pointers, vector arrays, and
6124 functions returning vectors. For example:
6126 __attribute__((vector_size(16))) short *foo;
6128 In this case, the mode is SI, but the type being modified is
6129 HI, so we need to look further. */
6130 while (POINTER_TYPE_P (type)
6131 || TREE_CODE (type) == FUNCTION_TYPE
6132 || TREE_CODE (type) == ARRAY_TYPE)
6133 type = TREE_TYPE (type);
6135 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
6136 if (!vector_type)
6137 return NULL_TREE;
6139 /* Build back pointers if needed. */
6140 *node = reconstruct_complex_type (*node, vector_type);
6142 return NULL_TREE;
6145 /* Handle a "vector_type" attribute; arguments as in
6146 struct attribute_spec.handler. */
6148 static tree
6149 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6150 int ARG_UNUSED (flags), bool *no_add_attrs)
6152 tree type = *node;
6153 tree vector_type;
6155 *no_add_attrs = true;
6157 if (TREE_CODE (type) != ARRAY_TYPE)
6159 error ("attribute %qs applies to array types only",
6160 IDENTIFIER_POINTER (name));
6161 return NULL_TREE;
6164 vector_type = build_vector_type_for_array (type, name);
6165 if (!vector_type)
6166 return NULL_TREE;
6168 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
6169 *node = vector_type;
6171 return NULL_TREE;
6174 /* ----------------------------------------------------------------------- *
6175 * BUILTIN FUNCTIONS *
6176 * ----------------------------------------------------------------------- */
6178 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6179 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
6180 if nonansi_p and flag_no_nonansi_builtin. */
6182 static void
6183 def_builtin_1 (enum built_in_function fncode,
6184 const char *name,
6185 enum built_in_class fnclass,
6186 tree fntype, tree libtype,
6187 bool both_p, bool fallback_p,
6188 bool nonansi_p ATTRIBUTE_UNUSED,
6189 tree fnattrs, bool implicit_p)
6191 tree decl;
6192 const char *libname;
6194 /* Preserve an already installed decl. It most likely was setup in advance
6195 (e.g. as part of the internal builtins) for specific reasons. */
6196 if (builtin_decl_explicit (fncode))
6197 return;
6199 gcc_assert ((!both_p && !fallback_p)
6200 || !strncmp (name, "__builtin_",
6201 strlen ("__builtin_")));
6203 libname = name + strlen ("__builtin_");
6204 decl = add_builtin_function (name, fntype, fncode, fnclass,
6205 (fallback_p ? libname : NULL),
6206 fnattrs);
6207 if (both_p)
6208 /* ??? This is normally further controlled by command-line options
6209 like -fno-builtin, but we don't have them for Ada. */
6210 add_builtin_function (libname, libtype, fncode, fnclass,
6211 NULL, fnattrs);
6213 set_builtin_decl (fncode, decl, implicit_p);
6216 static int flag_isoc94 = 0;
6217 static int flag_isoc99 = 0;
6218 static int flag_isoc11 = 0;
6220 /* Install what the common builtins.def offers. */
6222 static void
6223 install_builtin_functions (void)
6225 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6226 NONANSI_P, ATTRS, IMPLICIT, COND) \
6227 if (NAME && COND) \
6228 def_builtin_1 (ENUM, NAME, CLASS, \
6229 builtin_types[(int) TYPE], \
6230 builtin_types[(int) LIBTYPE], \
6231 BOTH_P, FALLBACK_P, NONANSI_P, \
6232 built_in_attributes[(int) ATTRS], IMPLICIT);
6233 #include "builtins.def"
6236 /* ----------------------------------------------------------------------- *
6237 * BUILTIN FUNCTIONS *
6238 * ----------------------------------------------------------------------- */
6240 /* Install the builtin functions we might need. */
6242 void
6243 gnat_install_builtins (void)
6245 install_builtin_elementary_types ();
6246 install_builtin_function_types ();
6247 install_builtin_attributes ();
6249 /* Install builtins used by generic middle-end pieces first. Some of these
6250 know about internal specificities and control attributes accordingly, for
6251 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
6252 the generic definition from builtins.def. */
6253 build_common_builtin_nodes ();
6255 /* Now, install the target specific builtins, such as the AltiVec family on
6256 ppc, and the common set as exposed by builtins.def. */
6257 targetm.init_builtins ();
6258 install_builtin_functions ();
6261 #include "gt-ada-utils.h"
6262 #include "gtype-ada.h"