/* Provenance of this copy (gitweb extraction residue, kept as a comment):
   ChangeLog: gcc-interface/utils.c (pad_type_hash): Use hashval_t for hash value.
   [official-gcc.git] / gcc / ada / gcc-interface / utils.c
   blob bfd3388c56a09938876560f894f3ebef74b47dab  */
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2017, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "target.h"
30 #include "function.h"
31 #include "tree.h"
32 #include "stringpool.h"
33 #include "cgraph.h"
34 #include "diagnostic.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "toplev.h"
41 #include "output.h"
42 #include "debug.h"
43 #include "convert.h"
44 #include "common/common-target.h"
45 #include "langhooks.h"
46 #include "tree-dump.h"
47 #include "tree-inline.h"
49 #include "ada.h"
50 #include "types.h"
51 #include "atree.h"
52 #include "nlists.h"
53 #include "uintp.h"
54 #include "fe.h"
55 #include "sinfo.h"
56 #include "einfo.h"
57 #include "ada-tree.h"
58 #include "gigi.h"
60 /* If nonzero, pretend we are allocating at global level. */
61 int force_global;
63 /* The default alignment of "double" floating-point types, i.e. floating
64 point types whose size is equal to 64 bits, or 0 if this alignment is
65 not specifically capped. */
66 int double_float_alignment;
68 /* The default alignment of "double" or larger scalar types, i.e. scalar
69 types whose size is greater or equal to 64 bits, or 0 if this alignment
70 is not specifically capped. */
71 int double_scalar_alignment;
73 /* True if floating-point arithmetics may use wider intermediate results. */
74 bool fp_arith_may_widen = true;
76 /* Tree nodes for the various types and decls we create. */
77 tree gnat_std_decls[(int) ADT_LAST];
79 /* Functions to call for each of the possible raise reasons. */
80 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
82 /* Likewise, but with extra info for each of the possible raise reasons. */
83 tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
85 /* Forward declarations for handlers of attributes. */
86 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_noinline_attribute (tree *, tree, tree, int, bool *);
94 static tree handle_noclone_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
97 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
98 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
99 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
100 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
102 /* Fake handler for attributes we don't properly support, typically because
103 they'd require dragging a lot of the common-c front-end circuitry. */
104 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
106 /* Table of machine-independent internal attributes for Ada. We support
107 this minimal set of attributes to accommodate the needs of builtins. */
108 const struct attribute_spec gnat_internal_attribute_table[] =
110 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
111 affects_type_identity } */
112 { "const", 0, 0, true, false, false, handle_const_attribute,
113 false },
114 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute,
115 false },
116 { "pure", 0, 0, true, false, false, handle_pure_attribute,
117 false },
118 { "no vops", 0, 0, true, false, false, handle_novops_attribute,
119 false },
120 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute,
121 false },
122 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute,
123 false },
124 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute,
125 false },
126 { "noinline", 0, 0, true, false, false, handle_noinline_attribute,
127 false },
128 { "noclone", 0, 0, true, false, false, handle_noclone_attribute,
129 false },
130 { "leaf", 0, 0, true, false, false, handle_leaf_attribute,
131 false },
132 { "always_inline",0, 0, true, false, false, handle_always_inline_attribute,
133 false },
134 { "malloc", 0, 0, true, false, false, handle_malloc_attribute,
135 false },
136 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute,
137 false },
139 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute,
140 false },
141 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute,
142 false },
143 { "may_alias", 0, 0, false, true, false, NULL, false },
145 /* ??? format and format_arg are heavy and not supported, which actually
146 prevents support for stdio builtins, which we however declare as part
147 of the common builtins.def contents. */
148 { "format", 3, 3, false, true, true, fake_attribute_handler, false },
149 { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },
151 { NULL, 0, 0, false, false, false, NULL, false }
154 /* Associates a GNAT tree node to a GCC tree node. It is used in
155 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
156 of `save_gnu_tree' for more info. */
157 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
159 #define GET_GNU_TREE(GNAT_ENTITY) \
160 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
162 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
163 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
165 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
166 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
168 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
169 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
171 #define GET_DUMMY_NODE(GNAT_ENTITY) \
172 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
174 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
175 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
177 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
178 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
180 /* This variable keeps a table for types for each precision so that we only
181 allocate each of them once. Signed and unsigned types are kept separate.
183 Note that these types are only used when fold-const requests something
184 special. Perhaps we should NOT share these types; we'll see how it
185 goes later. */
186 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
188 /* Likewise for float types, but record these by mode. */
189 static GTY(()) tree float_types[NUM_MACHINE_MODES];
191 /* For each binding contour we allocate a binding_level structure to indicate
192 the binding depth. */
194 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
195 /* The binding level containing this one (the enclosing binding level). */
196 struct gnat_binding_level *chain;
197 /* The BLOCK node for this level. */
198 tree block;
199 /* If nonzero, the setjmp buffer that needs to be updated for any
200 variable-sized definition within this context. */
201 tree jmpbuf_decl;
204 /* The binding level currently in effect. */
205 static GTY(()) struct gnat_binding_level *current_binding_level;
207 /* A chain of gnat_binding_level structures awaiting reuse. */
208 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
210 /* The context to be used for global declarations. */
211 static GTY(()) tree global_context;
213 /* An array of global declarations. */
214 static GTY(()) vec<tree, va_gc> *global_decls;
216 /* An array of builtin function declarations. */
217 static GTY(()) vec<tree, va_gc> *builtin_decls;
219 /* A chain of unused BLOCK nodes. */
220 static GTY((deletable)) tree free_block_chain;
222 /* A hash table of padded types. It is modelled on the generic type
223 hash table in tree.c, which must thus be used as a reference. */
225 struct GTY((for_user)) pad_type_hash
227 hashval_t hash;
228 tree type;
231 struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
233 static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
234 static bool equal (pad_type_hash *a, pad_type_hash *b);
236 static int
237 keep_cache_entry (pad_type_hash *&t)
239 return ggc_marked_p (t->type);
243 static GTY ((cache)) hash_table<pad_type_hasher> *pad_type_hash_table;
245 static tree merge_sizes (tree, tree, tree, bool, bool);
246 static tree fold_bit_position (const_tree);
247 static tree compute_related_constant (tree, tree);
248 static tree split_plus (tree, tree *);
249 static tree float_type_for_precision (int, machine_mode);
250 static tree convert_to_fat_pointer (tree, tree);
251 static unsigned int scale_by_factor_of (tree, unsigned int);
252 static bool potential_alignment_gap (tree, tree, tree);
254 /* Linked list used as a queue to defer the initialization of the DECL_CONTEXT
255 of ..._DECL nodes and of the TYPE_CONTEXT of ..._TYPE nodes. */
256 struct deferred_decl_context_node
258 /* The ..._DECL node to work on. */
259 tree decl;
261 /* The corresponding entity's Scope. */
262 Entity_Id gnat_scope;
264 /* The value of force_global when DECL was pushed. */
265 int force_global;
267 /* The list of ..._TYPE nodes to propagate the context to. */
268 vec<tree> types;
270 /* The next queue item. */
271 struct deferred_decl_context_node *next;
274 static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;
276 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
277 feed it with the elaboration of GNAT_SCOPE. */
278 static struct deferred_decl_context_node *
279 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);
281 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
282 feed it with the DECL_CONTEXT computed as part of N as soon as it is
283 computed. */
284 static void add_deferred_type_context (struct deferred_decl_context_node *n,
285 tree type);
287 /* Initialize data structures of the utils.c module. */
289 void
290 init_gnat_utils (void)
292 /* Initialize the association of GNAT nodes to GCC trees. */
293 associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
295 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
296 dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
298 /* Initialize the hash table of padded types. */
299 pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
302 /* Destroy data structures of the utils.c module. */
304 void
305 destroy_gnat_utils (void)
307 /* Destroy the association of GNAT nodes to GCC trees. */
308 ggc_free (associate_gnat_to_gnu);
309 associate_gnat_to_gnu = NULL;
311 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
312 ggc_free (dummy_node_table);
313 dummy_node_table = NULL;
315 /* Destroy the hash table of padded types. */
316 pad_type_hash_table->empty ();
317 pad_type_hash_table = NULL;
320 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
321 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
322 If NO_CHECK is true, the latter check is suppressed.
324 If GNU_DECL is zero, reset a previous association. */
326 void
327 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
329 /* Check that GNAT_ENTITY is not already defined and that it is being set
330 to something which is a decl. If that is not the case, this usually
331 means GNAT_ENTITY is defined twice, but occasionally is due to some
332 Gigi problem. */
333 gcc_assert (!(gnu_decl
334 && (PRESENT_GNU_TREE (gnat_entity)
335 || (!no_check && !DECL_P (gnu_decl)))));
337 SET_GNU_TREE (gnat_entity, gnu_decl);
340 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
341 that was associated with it. If there is no such tree node, abort.
343 In some cases, such as delayed elaboration or expressions that need to
344 be elaborated only once, GNAT_ENTITY is really not an entity. */
346 tree
347 get_gnu_tree (Entity_Id gnat_entity)
349 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
350 return GET_GNU_TREE (gnat_entity);
353 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
355 bool
356 present_gnu_tree (Entity_Id gnat_entity)
358 return PRESENT_GNU_TREE (gnat_entity);
361 /* Make a dummy type corresponding to GNAT_TYPE. */
363 tree
364 make_dummy_type (Entity_Id gnat_type)
366 Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
367 tree gnu_type, debug_type;
369 /* If there was no equivalent type (can only happen when just annotating
370 types) or underlying type, go back to the original type. */
371 if (No (gnat_equiv))
372 gnat_equiv = gnat_type;
374 /* If it there already a dummy type, use that one. Else make one. */
375 if (PRESENT_DUMMY_NODE (gnat_equiv))
376 return GET_DUMMY_NODE (gnat_equiv);
378 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
379 an ENUMERAL_TYPE. */
380 gnu_type = make_node (Is_Record_Type (gnat_equiv)
381 ? tree_code_for_record_type (gnat_equiv)
382 : ENUMERAL_TYPE);
383 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
384 TYPE_DUMMY_P (gnu_type) = 1;
385 TYPE_STUB_DECL (gnu_type)
386 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
387 if (Is_By_Reference_Type (gnat_equiv))
388 TYPE_BY_REFERENCE_P (gnu_type) = 1;
390 SET_DUMMY_NODE (gnat_equiv, gnu_type);
392 /* Create a debug type so that debug info consumers only see an unspecified
393 type. */
394 if (Needs_Debug_Info (gnat_type))
396 debug_type = make_node (LANG_TYPE);
397 SET_TYPE_DEBUG_TYPE (gnu_type, debug_type);
399 TYPE_NAME (debug_type) = TYPE_NAME (gnu_type);
400 TYPE_ARTIFICIAL (debug_type) = TYPE_ARTIFICIAL (gnu_type);
403 return gnu_type;
406 /* Return the dummy type that was made for GNAT_TYPE, if any. */
408 tree
409 get_dummy_type (Entity_Id gnat_type)
411 return GET_DUMMY_NODE (gnat_type);
414 /* Build dummy fat and thin pointer types whose designated type is specified
415 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
417 void
418 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
420 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
421 tree gnu_fat_type, fields, gnu_object_type;
423 gnu_template_type = make_node (RECORD_TYPE);
424 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
425 TYPE_DUMMY_P (gnu_template_type) = 1;
426 gnu_ptr_template = build_pointer_type (gnu_template_type);
428 gnu_array_type = make_node (ENUMERAL_TYPE);
429 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
430 TYPE_DUMMY_P (gnu_array_type) = 1;
431 gnu_ptr_array = build_pointer_type (gnu_array_type);
433 gnu_fat_type = make_node (RECORD_TYPE);
434 /* Build a stub DECL to trigger the special processing for fat pointer types
435 in gnat_pushdecl. */
436 TYPE_NAME (gnu_fat_type)
437 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
438 gnu_fat_type);
439 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
440 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
441 DECL_CHAIN (fields)
442 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
443 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
444 finish_fat_pointer_type (gnu_fat_type, fields);
445 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
446 /* Suppress debug info until after the type is completed. */
447 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
449 gnu_object_type = make_node (RECORD_TYPE);
450 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
451 TYPE_DUMMY_P (gnu_object_type) = 1;
453 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
454 TYPE_REFERENCE_TO (gnu_desig_type) = gnu_fat_type;
455 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
458 /* Return true if we are in the global binding level. */
460 bool
461 global_bindings_p (void)
463 return force_global || !current_function_decl;
466 /* Enter a new binding level. */
468 void
469 gnat_pushlevel (void)
471 struct gnat_binding_level *newlevel = NULL;
473 /* Reuse a struct for this binding level, if there is one. */
474 if (free_binding_level)
476 newlevel = free_binding_level;
477 free_binding_level = free_binding_level->chain;
479 else
480 newlevel = ggc_alloc<gnat_binding_level> ();
482 /* Use a free BLOCK, if any; otherwise, allocate one. */
483 if (free_block_chain)
485 newlevel->block = free_block_chain;
486 free_block_chain = BLOCK_CHAIN (free_block_chain);
487 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
489 else
490 newlevel->block = make_node (BLOCK);
492 /* Point the BLOCK we just made to its parent. */
493 if (current_binding_level)
494 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
496 BLOCK_VARS (newlevel->block) = NULL_TREE;
497 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
498 TREE_USED (newlevel->block) = 1;
500 /* Add this level to the front of the chain (stack) of active levels. */
501 newlevel->chain = current_binding_level;
502 newlevel->jmpbuf_decl = NULL_TREE;
503 current_binding_level = newlevel;
506 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
507 and point FNDECL to this BLOCK. */
509 void
510 set_current_block_context (tree fndecl)
512 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
513 DECL_INITIAL (fndecl) = current_binding_level->block;
514 set_block_for_group (current_binding_level->block);
517 /* Set the jmpbuf_decl for the current binding level to DECL. */
519 void
520 set_block_jmpbuf_decl (tree decl)
522 current_binding_level->jmpbuf_decl = decl;
525 /* Get the jmpbuf_decl, if any, for the current binding level. */
527 tree
528 get_block_jmpbuf_decl (void)
530 return current_binding_level->jmpbuf_decl;
533 /* Exit a binding level. Set any BLOCK into the current code group. */
535 void
536 gnat_poplevel (void)
538 struct gnat_binding_level *level = current_binding_level;
539 tree block = level->block;
541 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
542 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
544 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
545 are no variables free the block and merge its subblocks into those of its
546 parent block. Otherwise, add it to the list of its parent. */
547 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
549 else if (!BLOCK_VARS (block))
551 BLOCK_SUBBLOCKS (level->chain->block)
552 = block_chainon (BLOCK_SUBBLOCKS (block),
553 BLOCK_SUBBLOCKS (level->chain->block));
554 BLOCK_CHAIN (block) = free_block_chain;
555 free_block_chain = block;
557 else
559 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
560 BLOCK_SUBBLOCKS (level->chain->block) = block;
561 TREE_USED (block) = 1;
562 set_block_for_group (block);
565 /* Free this binding structure. */
566 current_binding_level = level->chain;
567 level->chain = free_binding_level;
568 free_binding_level = level;
571 /* Exit a binding level and discard the associated BLOCK. */
573 void
574 gnat_zaplevel (void)
576 struct gnat_binding_level *level = current_binding_level;
577 tree block = level->block;
579 BLOCK_CHAIN (block) = free_block_chain;
580 free_block_chain = block;
582 /* Free this binding structure. */
583 current_binding_level = level->chain;
584 level->chain = free_binding_level;
585 free_binding_level = level;
588 /* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
590 static void
591 gnat_set_type_context (tree type, tree context)
593 tree decl = TYPE_STUB_DECL (type);
595 TYPE_CONTEXT (type) = context;
597 while (decl && DECL_PARALLEL_TYPE (decl))
599 tree parallel_type = DECL_PARALLEL_TYPE (decl);
601 /* Give a context to the parallel types and their stub decl, if any.
602 Some parallel types seems to be present in multiple parallel type
603 chains, so don't mess with their context if they already have one. */
604 if (!TYPE_CONTEXT (parallel_type))
606 if (TYPE_STUB_DECL (parallel_type))
607 DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
608 TYPE_CONTEXT (parallel_type) = context;
611 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
615 /* Return the innermost scope, starting at GNAT_NODE, we are be interested in
616 the debug info, or Empty if there is no such scope. If not NULL, set
617 IS_SUBPROGRAM to whether the returned entity is a subprogram. */
619 Entity_Id
620 get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
622 Entity_Id gnat_entity;
624 if (is_subprogram)
625 *is_subprogram = false;
627 if (Nkind (gnat_node) == N_Defining_Identifier
628 || Nkind (gnat_node) == N_Defining_Operator_Symbol)
629 gnat_entity = Scope (gnat_node);
630 else
631 return Empty;
633 while (Present (gnat_entity))
635 switch (Ekind (gnat_entity))
637 case E_Function:
638 case E_Procedure:
639 if (Present (Protected_Body_Subprogram (gnat_entity)))
640 gnat_entity = Protected_Body_Subprogram (gnat_entity);
642 /* If the scope is a subprogram, then just rely on
643 current_function_decl, so that we don't have to defer
644 anything. This is needed because other places rely on the
645 validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes. */
646 if (is_subprogram)
647 *is_subprogram = true;
648 return gnat_entity;
650 case E_Record_Type:
651 case E_Record_Subtype:
652 return gnat_entity;
654 default:
655 /* By default, we are not interested in this particular scope: go to
656 the outer one. */
657 break;
660 gnat_entity = Scope (gnat_entity);
663 return Empty;
666 /* If N is NULL, set TYPE's context to CONTEXT. Defer this to the processing
667 of N otherwise. */
669 static void
670 defer_or_set_type_context (tree type, tree context,
671 struct deferred_decl_context_node *n)
673 if (n)
674 add_deferred_type_context (n, type);
675 else
676 gnat_set_type_context (type, context);
679 /* Return global_context, but create it first if need be. */
681 static tree
682 get_global_context (void)
684 if (!global_context)
686 global_context
687 = build_translation_unit_decl (get_identifier (main_input_filename));
688 debug_hooks->register_main_translation_unit (global_context);
691 return global_context;
694 /* Record DECL as belonging to the current lexical scope and use GNAT_NODE
695 for location information and flag propagation. */
697 void
698 gnat_pushdecl (tree decl, Node_Id gnat_node)
700 tree context = NULL_TREE;
701 struct deferred_decl_context_node *deferred_decl_context = NULL;
703 /* If explicitely asked to make DECL global or if it's an imported nested
704 object, short-circuit the regular Scope-based context computation. */
705 if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
707 /* Rely on the GNAT scope, or fallback to the current_function_decl if
708 the GNAT scope reached the global scope, if it reached a subprogram
709 or the declaration is a subprogram or a variable (for them we skip
710 intermediate context types because the subprogram body elaboration
711 machinery and the inliner both expect a subprogram context).
713 Falling back to current_function_decl is necessary for implicit
714 subprograms created by gigi, such as the elaboration subprograms. */
715 bool context_is_subprogram = false;
716 const Entity_Id gnat_scope
717 = get_debug_scope (gnat_node, &context_is_subprogram);
719 if (Present (gnat_scope)
720 && !context_is_subprogram
721 && TREE_CODE (decl) != FUNCTION_DECL
722 && TREE_CODE (decl) != VAR_DECL)
723 /* Always assume the scope has not been elaborated, thus defer the
724 context propagation to the time its elaboration will be
725 available. */
726 deferred_decl_context
727 = add_deferred_decl_context (decl, gnat_scope, force_global);
729 /* External declarations (when force_global > 0) may not be in a
730 local context. */
731 else if (current_function_decl && force_global == 0)
732 context = current_function_decl;
735 /* If either we are forced to be in global mode or if both the GNAT scope and
736 the current_function_decl did not help in determining the context, use the
737 global scope. */
738 if (!deferred_decl_context && !context)
739 context = get_global_context ();
741 /* Functions imported in another function are not really nested.
742 For really nested functions mark them initially as needing
743 a static chain for uses of that flag before unnesting;
744 lower_nested_functions will then recompute it. */
745 if (TREE_CODE (decl) == FUNCTION_DECL
746 && !TREE_PUBLIC (decl)
747 && context
748 && (TREE_CODE (context) == FUNCTION_DECL
749 || decl_function_context (context)))
750 DECL_STATIC_CHAIN (decl) = 1;
752 if (!deferred_decl_context)
753 DECL_CONTEXT (decl) = context;
755 TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));
757 /* Set the location of DECL and emit a declaration for it. */
758 if (Present (gnat_node) && !renaming_from_instantiation_p (gnat_node))
759 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
761 add_decl_expr (decl, gnat_node);
763 /* Put the declaration on the list. The list of declarations is in reverse
764 order. The list will be reversed later. Put global declarations in the
765 globals list and local ones in the current block. But skip TYPE_DECLs
766 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
767 with the debugger and aren't needed anyway. */
768 if (!(TREE_CODE (decl) == TYPE_DECL
769 && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
771 /* External declarations must go to the binding level they belong to.
772 This will make corresponding imported entities are available in the
773 debugger at the proper time. */
774 if (DECL_EXTERNAL (decl)
775 && TREE_CODE (decl) == FUNCTION_DECL
776 && DECL_BUILT_IN (decl))
777 vec_safe_push (builtin_decls, decl);
778 else if (global_bindings_p ())
779 vec_safe_push (global_decls, decl);
780 else
782 DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
783 BLOCK_VARS (current_binding_level->block) = decl;
787 /* For the declaration of a type, set its name either if it isn't already
788 set or if the previous type name was not derived from a source name.
789 We'd rather have the type named with a real name and all the pointer
790 types to the same object have the same node, except when the names are
791 both derived from source names. */
792 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
794 tree t = TREE_TYPE (decl);
796 /* Array and pointer types aren't tagged types in the C sense so we need
797 to generate a typedef in DWARF for them and make sure it is preserved,
798 unless the type is artificial. */
799 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
800 && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
801 || DECL_ARTIFICIAL (decl)))
803 /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
804 generate the typedef in DWARF. Also do that for fat pointer types
805 because, even though they are tagged types in the C sense, they are
806 still XUP types attached to the base array type at this point. */
807 else if (!DECL_ARTIFICIAL (decl)
808 && (TREE_CODE (t) == ARRAY_TYPE
809 || TREE_CODE (t) == POINTER_TYPE
810 || TYPE_IS_FAT_POINTER_P (t)))
812 tree tt = build_variant_type_copy (t);
813 TYPE_NAME (tt) = decl;
814 defer_or_set_type_context (tt,
815 DECL_CONTEXT (decl),
816 deferred_decl_context);
817 TREE_TYPE (decl) = tt;
818 if (TYPE_NAME (t)
819 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
820 && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
821 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
822 else
823 DECL_ORIGINAL_TYPE (decl) = t;
824 /* Array types need to have a name so that they can be related to
825 their GNAT encodings. */
826 if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
827 TYPE_NAME (t) = DECL_NAME (decl);
828 t = NULL_TREE;
830 else if (TYPE_NAME (t)
831 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
832 && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
834 else
835 t = NULL_TREE;
837 /* Propagate the name to all the variants, this is needed for the type
838 qualifiers machinery to work properly (see check_qualified_type).
839 Also propagate the context to them. Note that it will be propagated
840 to all parallel types too thanks to gnat_set_type_context. */
841 if (t)
842 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
843 /* ??? Because of the previous kludge, we can have variants of fat
844 pointer types with different names. */
845 if (!(TYPE_IS_FAT_POINTER_P (t)
846 && TYPE_NAME (t)
847 && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
849 TYPE_NAME (t) = decl;
850 defer_or_set_type_context (t,
851 DECL_CONTEXT (decl),
852 deferred_decl_context);
857 /* Create a record type that contains a SIZE bytes long field of TYPE with a
858 starting bit position so that it is aligned to ALIGN bits, and leaving at
859 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
860 record is guaranteed to get. GNAT_NODE is used for the position of the
861 associated TYPE_DECL. */
863 tree
864 make_aligning_type (tree type, unsigned int align, tree size,
865 unsigned int base_align, int room, Node_Id gnat_node)
867 /* We will be crafting a record type with one field at a position set to be
868 the next multiple of ALIGN past record'address + room bytes. We use a
869 record placeholder to express record'address. */
870 tree record_type = make_node (RECORD_TYPE);
871 tree record = build0 (PLACEHOLDER_EXPR, record_type);
873 tree record_addr_st
874 = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));
876 /* The diagram below summarizes the shape of what we manipulate:
878 <--------- pos ---------->
879 { +------------+-------------+-----------------+
880 record =>{ |############| ... | field (type) |
881 { +------------+-------------+-----------------+
882 |<-- room -->|<- voffset ->|<---- size ----->|
885 record_addr vblock_addr
887 Every length is in sizetype bytes there, except "pos" which has to be
888 set as a bit position in the GCC tree for the record. */
889 tree room_st = size_int (room);
890 tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
891 tree voffset_st, pos, field;
893 tree name = TYPE_IDENTIFIER (type);
895 name = concat_name (name, "ALIGN");
896 TYPE_NAME (record_type) = name;
898 /* Compute VOFFSET and then POS. The next byte position multiple of some
899 alignment after some address is obtained by "and"ing the alignment minus
900 1 with the two's complement of the address. */
901 voffset_st = size_binop (BIT_AND_EXPR,
902 fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
903 size_int ((align / BITS_PER_UNIT) - 1));
905 /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype. */
906 pos = size_binop (MULT_EXPR,
907 convert (bitsizetype,
908 size_binop (PLUS_EXPR, room_st, voffset_st)),
909 bitsize_unit_node);
911 /* Craft the GCC record representation. We exceptionally do everything
912 manually here because 1) our generic circuitry is not quite ready to
913 handle the complex position/size expressions we are setting up, 2) we
914 have a strong simplifying factor at hand: we know the maximum possible
915 value of voffset, and 3) we have to set/reset at least the sizes in
916 accordance with this maximum value anyway, as we need them to convey
917 what should be "alloc"ated for this type.
919 Use -1 as the 'addressable' indication for the field to prevent the
920 creation of a bitfield. We don't need one, it would have damaging
921 consequences on the alignment computation, and create_field_decl would
922 make one without this special argument, for instance because of the
923 complex position expression. */
924 field = create_field_decl (get_identifier ("F"), type, record_type, size,
925 pos, 1, -1);
926 TYPE_FIELDS (record_type) = field;
928 SET_TYPE_ALIGN (record_type, base_align);
929 TYPE_USER_ALIGN (record_type) = 1;
931 TYPE_SIZE (record_type)
932 = size_binop (PLUS_EXPR,
933 size_binop (MULT_EXPR, convert (bitsizetype, size),
934 bitsize_unit_node),
935 bitsize_int (align + room * BITS_PER_UNIT));
936 TYPE_SIZE_UNIT (record_type)
937 = size_binop (PLUS_EXPR, size,
938 size_int (room + align / BITS_PER_UNIT));
940 SET_TYPE_MODE (record_type, BLKmode);
941 relate_alias_sets (record_type, type, ALIAS_SET_COPY);
943 /* Declare it now since it will never be declared otherwise. This is
944 necessary to ensure that its subtrees are properly marked. */
945 create_type_decl (name, record_type, true, false, gnat_node);
947 return record_type;
950 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
951 as the field type of a packed record if IN_RECORD is true, or as the
952 component type of a packed array if IN_RECORD is false. See if we can
953 rewrite it either as a type that has non-BLKmode, which we can pack
954 tighter in the packed record case, or as a smaller type with at most
955 MAX_ALIGN alignment if the value is non-zero. If so, return the new
956 type; if not, return the original type. */
958 tree
959 make_packable_type (tree type, bool in_record, unsigned int max_align)
961 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
962 unsigned HOST_WIDE_INT new_size;
963 unsigned int align = TYPE_ALIGN (type);
964 unsigned int new_align;
966 /* No point in doing anything if the size is zero. */
967 if (size == 0)
968 return type;
970 tree new_type = make_node (TREE_CODE (type));
972 /* Copy the name and flags from the old type to that of the new.
973 Note that we rely on the pointer equality created here for
974 TYPE_NAME to look through conversions in various places. */
975 TYPE_NAME (new_type) = TYPE_NAME (type);
976 TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
977 TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
978 TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
979 if (TREE_CODE (type) == RECORD_TYPE)
980 TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
982 /* If we are in a record and have a small size, set the alignment to
983 try for an integral mode. Otherwise set it to try for a smaller
984 type with BLKmode. */
985 if (in_record && size <= MAX_FIXED_MODE_SIZE)
987 new_size = ceil_pow2 (size);
988 new_align = MIN (new_size, BIGGEST_ALIGNMENT);
989 SET_TYPE_ALIGN (new_type, new_align);
991 else
993 /* Do not try to shrink the size if the RM size is not constant. */
994 if (TYPE_CONTAINS_TEMPLATE_P (type)
995 || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
996 return type;
998 /* Round the RM size up to a unit boundary to get the minimal size
999 for a BLKmode record. Give up if it's already the size and we
1000 don't need to lower the alignment. */
1001 new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
1002 new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
1003 if (new_size == size && (max_align == 0 || align <= max_align))
1004 return type;
/* NEW_SIZE & -NEW_SIZE isolates the lowest set bit of NEW_SIZE, i.e. the
   largest power of 2 that divides it, hence the best alignment compatible
   with the new size.  */
1006 new_align = MIN (new_size & -new_size, BIGGEST_ALIGNMENT);
1007 if (max_align > 0 && new_align > max_align)
1008 new_align = max_align;
1009 SET_TYPE_ALIGN (new_type, MIN (align, new_align));
1012 TYPE_USER_ALIGN (new_type) = 1;
1014 /* Now copy the fields, keeping the position and size as we don't want
1015 to change the layout by propagating the packedness downwards. */
1016 tree new_field_list = NULL_TREE;
1017 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1019 tree new_field_type = TREE_TYPE (field);
/* Note: this NEW_SIZE shadows the function-level variable; here it is the
   size of the individual field being copied.  */
1020 tree new_field, new_size;
1022 if (RECORD_OR_UNION_TYPE_P (new_field_type)
1023 && !TYPE_FAT_POINTER_P (new_field_type)
1024 && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
1025 new_field_type = make_packable_type (new_field_type, true, max_align);
1027 /* However, for the last field in a not already packed record type
1028 that is of an aggregate type, we need to use the RM size in the
1029 packable version of the record type, see finish_record_type. */
1030 if (!DECL_CHAIN (field)
1031 && !TYPE_PACKED (type)
1032 && RECORD_OR_UNION_TYPE_P (new_field_type)
1033 && !TYPE_FAT_POINTER_P (new_field_type)
1034 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
1035 && TYPE_ADA_SIZE (new_field_type))
1036 new_size = TYPE_ADA_SIZE (new_field_type);
1037 else
1038 new_size = DECL_SIZE (field);
1040 new_field
1041 = create_field_decl (DECL_NAME (field), new_field_type, new_type,
1042 new_size, bit_position (field),
1043 TYPE_PACKED (type),
1044 !DECL_NONADDRESSABLE_P (field));
1046 DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (field);
1047 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, field);
1048 if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
1049 DECL_QUALIFIER (new_field) = DECL_QUALIFIER (field);
/* Fields are accumulated in reverse order and flipped back below.  */
1051 DECL_CHAIN (new_field) = new_field_list;
1052 new_field_list = new_field;
1055 finish_record_type (new_type, nreverse (new_field_list), 2, false);
1056 relate_alias_sets (new_type, type, ALIAS_SET_COPY);
1057 if (TYPE_STUB_DECL (type))
1058 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
1059 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
1061 /* If this is a padding record, we never want to make the size smaller
1062 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
1063 if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
1065 TYPE_SIZE (new_type) = TYPE_SIZE (type)
1066 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
1067 new_size = size;
1069 else
1071 TYPE_SIZE (new_type) = bitsize_int (new_size);
1072 TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);
1075 if (!TYPE_CONTAINS_TEMPLATE_P (type))
1076 SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
1078 compute_record_mode (new_type);
1080 /* Try harder to get a packable type if necessary, for example
1081 in case the record itself contains a BLKmode field. */
1082 if (in_record && TYPE_MODE (new_type) == BLKmode)
1083 SET_TYPE_MODE (new_type,
1084 mode_for_size_tree (TYPE_SIZE (new_type),
1085 MODE_INT, 1).else_blk ());
1087 /* If neither mode nor size nor alignment shrunk, return the old type. */
1088 if (TYPE_MODE (new_type) == BLKmode && new_size >= size && max_align == 0)
1089 return type;
1091 return new_type;
1094 /* Return true if TYPE has an unsigned representation. This needs to be used
1095 when the representation of types whose precision is not equal to their size
1096 is manipulated based on the RM size. */
1098 static inline bool
1099 type_unsigned_for_rm (tree type)
1101 /* This is the common case. */
1102 if (TYPE_UNSIGNED (type))
1103 return true;
1105 /* See the E_Signed_Integer_Subtype case of gnat_to_gnu_entity. */
1106 if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
1107 && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
1108 return true;
1110 return false;
1113 /* Given a type TYPE, return a new type whose size is appropriate for SIZE.
1114 If TYPE is the best type, return it. Otherwise, make a new type. We
1115 only support new integral and pointer types. FOR_BIASED is true if
1116 we are making a biased type. */
1118 tree
1119 make_type_from_size (tree type, tree size_tree, bool for_biased)
1121 unsigned HOST_WIDE_INT size;
1122 bool biased_p;
1123 tree new_type;
1125 /* If size indicates an error, just return TYPE to avoid propagating
1126 the error. Likewise if it's too large to represent. */
1127 if (!size_tree || !tree_fits_uhwi_p (size_tree))
1128 return type;
1130 size = tree_to_uhwi (size_tree);
1132 switch (TREE_CODE (type))
1134 case INTEGER_TYPE:
1135 case ENUMERAL_TYPE:
1136 case BOOLEAN_TYPE:
1137 biased_p = (TREE_CODE (type) == INTEGER_TYPE
1138 && TYPE_BIASED_REPRESENTATION_P (type));
1140 /* Integer types with precision 0 are forbidden. */
1141 if (size == 0)
1142 size = 1;
1144 /* Only do something if the type isn't a packed array type and doesn't
1145 already have the proper size and the size isn't too large. */
1146 if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
1147 || (TYPE_PRECISION (type) == size && biased_p == for_biased)
1148 || size > LONG_LONG_TYPE_SIZE)
1149 break;
/* The new type is biased if the original type was biased or if the
   caller requested a biased type.  */
1151 biased_p |= for_biased;
1153 /* The type should be an unsigned type if the original type is unsigned
1154 or if the lower bound is constant and non-negative or if the type is
1155 biased, see E_Signed_Integer_Subtype case of gnat_to_gnu_entity. */
1156 if (type_unsigned_for_rm (type) || biased_p)
1157 new_type = make_unsigned_type (size);
1158 else
1159 new_type = make_signed_type (size);
1160 TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
1161 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
1162 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
1163 /* Copy the name to show that it's essentially the same type and
1164 not a subrange type. */
1165 TYPE_NAME (new_type) = TYPE_NAME (type);
1166 TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
1167 SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
1168 return new_type;
1170 case RECORD_TYPE:
1171 /* Do something if this is a fat pointer, in which case we
1172 may need to return the thin pointer. */
1173 if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
/* Use an integer mode of the requested size as the pointer mode if the
   target supports it; otherwise fall back to the default pointer mode.  */
1175 scalar_int_mode p_mode;
1176 if (!int_mode_for_size (size, 0).exists (&p_mode)
1177 || !targetm.valid_pointer_mode (p_mode))
1178 p_mode = ptr_mode;
1179 return
1180 build_pointer_type_for_mode
1181 (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
1182 p_mode, 0);
1184 break;
1186 case POINTER_TYPE:
1187 /* Only do something if this is a thin pointer, in which case we
1188 may need to return the fat pointer. */
1189 if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
1190 return
1191 build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
1192 break;
1194 default:
1195 break;
1198 return type;
1201 /* Return true iff the padded types are equivalent. */
1203 bool
1204 pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
1206 tree type1, type2;
1208 if (t1->hash != t2->hash)
1209 return 0;
1211 type1 = t1->type;
1212 type2 = t2->type;
1214 /* We consider that the padded types are equivalent if they pad the same type
1215 and have the same size, alignment, RM size and storage order. Taking the
1216 mode into account is redundant since it is determined by the others. */
1217 return
1218 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1219 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1220 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1221 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
1222 && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
1225 /* Look up the padded TYPE in the hash table and return its canonical version
1226 if it exists; otherwise, insert it into the hash table. */
1228 static tree
1229 lookup_and_insert_pad_type (tree type)
1231 hashval_t hashcode;
1232 struct pad_type_hash in, *h;
1234 hashcode
1235 = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
1236 hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
1237 hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
1238 hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);
1240 in.hash = hashcode;
1241 in.type = type;
1242 h = pad_type_hash_table->find_with_hash (&in, hashcode);
1243 if (h)
1244 return h->type;
1246 h = ggc_alloc<pad_type_hash> ();
1247 h->hash = hashcode;
1248 h->type = type;
1249 *pad_type_hash_table->find_slot_with_hash (h, hashcode, INSERT) = h;
1250 return NULL_TREE;
1253 /* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
1254 if needed. We have already verified that SIZE and ALIGN are large enough.
1255 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1256 IS_COMPONENT_TYPE is true if this is being done for the component type of
1257 an array. IS_USER_TYPE is true if the original type needs to be completed.
1258 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
1259 the RM size of the resulting type is to be set to SIZE too; in this case,
1260 the padded type is canonicalized before being returned. */
1262 tree
1263 maybe_pad_type (tree type, tree size, unsigned int align,
1264 Entity_Id gnat_entity, bool is_component_type,
1265 bool is_user_type, bool definition, bool set_rm_size)
1267 tree orig_size = TYPE_SIZE (type);
1268 unsigned int orig_align = TYPE_ALIGN (type);
1269 tree record, field;
1271 /* If TYPE is a padded type, see if it agrees with any size and alignment
1272 we were given. If so, return the original type. Otherwise, strip
1273 off the padding, since we will either be returning the inner type
1274 or repadding it. If no size or alignment is specified, use that of
1275 the original padded type. */
1276 if (TYPE_IS_PADDING_P (type))
1278 if ((!size
1279 || operand_equal_p (round_up (size, orig_align), orig_size, 0))
1280 && (align == 0 || align == orig_align))
1281 return type;
1283 if (!size)
1284 size = orig_size;
1285 if (align == 0)
1286 align = orig_align;
1288 type = TREE_TYPE (TYPE_FIELDS (type));
1289 orig_size = TYPE_SIZE (type);
1290 orig_align = TYPE_ALIGN (type);
1293 /* If the size is either not being changed or is being made smaller (which
1294 is not done here and is only valid for bitfields anyway), show the size
1295 isn't changing. Likewise, clear the alignment if it isn't being
1296 changed. Then return if we aren't doing anything. */
1297 if (size
1298 && (operand_equal_p (size, orig_size, 0)
1299 || (TREE_CODE (orig_size) == INTEGER_CST
1300 && tree_int_cst_lt (size, orig_size))))
1301 size = NULL_TREE;
1303 if (align == orig_align)
1304 align = 0;
1306 if (align == 0 && !size)
1307 return type;
1309 /* If requested, complete the original type and give it a name. */
1310 if (is_user_type)
1311 create_type_decl (get_entity_name (gnat_entity), type,
1312 !Comes_From_Source (gnat_entity),
1313 !(TYPE_NAME (type)
1314 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1315 && DECL_IGNORED_P (TYPE_NAME (type))),
1316 gnat_entity);
1318 /* We used to modify the record in place in some cases, but that could
1319 generate incorrect debugging information. So make a new record
1320 type and name. */
1321 record = make_node (RECORD_TYPE);
1322 TYPE_PADDING_P (record) = 1;
1324 /* ??? Padding types around packed array implementation types will be
1325 considered as root types in the array descriptor language hook (see
1326 gnat_get_array_descr_info). Give them the original packed array type
1327 name so that the one coming from sources appears in the debugging
1328 information. */
1329 if (TYPE_IMPL_PACKED_ARRAY_P (type)
1330 && TYPE_ORIGINAL_PACKED_ARRAY (type)
1331 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
1332 TYPE_NAME (record) = TYPE_NAME (TYPE_ORIGINAL_PACKED_ARRAY (type));
1333 else if (Present (gnat_entity))
1334 TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");
1336 SET_TYPE_ALIGN (record, align ? align : orig_align);
1337 TYPE_SIZE (record) = size ? size : orig_size;
1338 TYPE_SIZE_UNIT (record)
1339 = convert (sizetype,
1340 size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
1341 bitsize_unit_node));
1343 /* If we are changing the alignment and the input type is a record with
1344 BLKmode and a small constant size, try to make a form that has an
1345 integral mode. This might allow the padding record to also have an
1346 integral mode, which will be much more efficient. There is no point
1347 in doing so if a size is specified unless it is also a small constant
1348 size and it is incorrect to do so if we cannot guarantee that the mode
1349 will be naturally aligned since the field must always be addressable.
1351 ??? This might not always be a win when done for a stand-alone object:
1352 since the nominal and the effective type of the object will now have
1353 different modes, a VIEW_CONVERT_EXPR will be required for converting
1354 between them and it might be hard to overcome afterwards, including
1355 at the RTL level when the stand-alone object is accessed as a whole. */
1356 if (align != 0
1357 && RECORD_OR_UNION_TYPE_P (type)
1358 && TYPE_MODE (type) == BLKmode
1359 && !TYPE_BY_REFERENCE_P (type)
1360 && TREE_CODE (orig_size) == INTEGER_CST
1361 && !TREE_OVERFLOW (orig_size)
1362 && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
1363 && (!size
1364 || (TREE_CODE (size) == INTEGER_CST
1365 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
1367 tree packable_type = make_packable_type (type, true);
1368 if (TYPE_MODE (packable_type) != BLKmode
1369 && align >= TYPE_ALIGN (packable_type))
1370 type = packable_type;
1373 /* Now create the field with the original size. */
1374 field = create_field_decl (get_identifier ("F"), type, record, orig_size,
1375 bitsize_zero_node, 0, 1);
1376 DECL_INTERNAL_P (field) = 1;
1378 /* We will output additional debug info manually below. */
1379 finish_record_type (record, field, 1, false);
1381 if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
1382 SET_TYPE_DEBUG_TYPE (record, type);
1384 /* Set the RM size if requested. */
1385 if (set_rm_size)
1387 tree canonical_pad_type;
1389 SET_TYPE_ADA_SIZE (record, size ? size : orig_size);
1391 /* If the padded type is complete and has constant size, we canonicalize
1392 it by means of the hash table. This is consistent with the language
1393 semantics and ensures that gigi and the middle-end have a common view
1394 of these padded types. */
1395 if (TREE_CONSTANT (TYPE_SIZE (record))
1396 && (canonical_pad_type = lookup_and_insert_pad_type (record)))
1398 record = canonical_pad_type;
1399 goto built;
1403 /* Unless debugging information isn't being written for the input type,
1404 write a record that shows what we are a subtype of and also make a
1405 variable that indicates our size, if still variable. */
1406 if (TREE_CODE (orig_size) != INTEGER_CST
1407 && TYPE_NAME (record)
1408 && TYPE_NAME (type)
1409 && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1410 && DECL_IGNORED_P (TYPE_NAME (type))))
1412 tree name = TYPE_IDENTIFIER (record);
1413 tree size_unit = TYPE_SIZE_UNIT (record);
1415 /* A variable that holds the size is required even with no encoding since
1416 it will be referenced by debugging information attributes. At global
1417 level, we need a single variable across all translation units. */
1418 if (size
1419 && TREE_CODE (size) != INTEGER_CST
1420 && (definition || global_bindings_p ()))
1422 /* Whether or not gnat_entity comes from source, this XVZ variable
1423 is a compilation artifact. */
1424 size_unit
1425 = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
1426 size_unit, true, global_bindings_p (),
1427 !definition && global_bindings_p (), false,
1428 false, true, true, NULL, gnat_entity);
1429 TYPE_SIZE_UNIT (record) = size_unit;
1432 /* There is no need to show what we are a subtype of when outputting as
1433 few encodings as possible: regular debugging information makes this
1434 redundant. */
1435 if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
1437 tree marker = make_node (RECORD_TYPE);
1438 tree orig_name = TYPE_IDENTIFIER (type);
1440 TYPE_NAME (marker) = concat_name (name, "XVS");
1441 finish_record_type (marker,
1442 create_field_decl (orig_name,
1443 build_reference_type (type),
1444 marker, NULL_TREE, NULL_TREE,
1445 0, 0),
1446 0, true);
1447 TYPE_SIZE_UNIT (marker) = size_unit;
1449 add_parallel_type (record, marker);
/* The padded type is now built; the code below only decides whether to
   warn about unused bits introduced by the padding.  */
1453 built:
1454 /* If a simple size was explicitly given, maybe issue a warning. */
1455 if (!size
1456 || TREE_CODE (size) == COND_EXPR
1457 || TREE_CODE (size) == MAX_EXPR
1458 || No (gnat_entity))
1459 return record;
1461 /* But don't do it if we are just annotating types and the type is tagged or
1462 concurrent, since these types aren't fully laid out in this mode. */
1463 if (type_annotate_only)
1465 Entity_Id gnat_type
1466 = is_component_type
1467 ? Component_Type (gnat_entity) : Etype (gnat_entity);
1469 if (Is_Tagged_Type (gnat_type) || Is_Concurrent_Type (gnat_type))
1470 return record;
1473 /* Take the original size as the maximum size of the input if there was an
1474 unconstrained record involved and round it up to the specified alignment,
1475 if one was specified, but only for aggregate types. */
1476 if (CONTAINS_PLACEHOLDER_P (orig_size))
1477 orig_size = max_size (orig_size, true);
1479 if (align && AGGREGATE_TYPE_P (type))
1480 orig_size = round_up (orig_size, align);
1482 if (!operand_equal_p (size, orig_size, 0)
1483 && !(TREE_CODE (size) == INTEGER_CST
1484 && TREE_CODE (orig_size) == INTEGER_CST
1485 && (TREE_OVERFLOW (size)
1486 || TREE_OVERFLOW (orig_size)
1487 || tree_int_cst_lt (size, orig_size))))
1489 Node_Id gnat_error_node = Empty;
1491 /* For a packed array, post the message on the original array type. */
1492 if (Is_Packed_Array_Impl_Type (gnat_entity))
1493 gnat_entity = Original_Array_Type (gnat_entity);
1495 if ((Ekind (gnat_entity) == E_Component
1496 || Ekind (gnat_entity) == E_Discriminant)
1497 && Present (Component_Clause (gnat_entity)))
1498 gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
1499 else if (Present (Size_Clause (gnat_entity)))
1500 gnat_error_node = Expression (Size_Clause (gnat_entity));
1502 /* Generate message only for entities that come from source, since
1503 if we have an entity created by expansion, the message will be
1504 generated for some other corresponding source entity. */
1505 if (Comes_From_Source (gnat_entity))
1507 if (Present (gnat_error_node))
1508 post_error_ne_tree ("{^ }bits of & unused?",
1509 gnat_error_node, gnat_entity,
1510 size_diffop (size, orig_size))
1511 else if (is_component_type)
1512 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1513 gnat_entity, gnat_entity,
1514 size_diffop (size, orig_size));
1518 return record;
1521 /* Return a copy of the padded TYPE but with reverse storage order. */
1523 tree
1524 set_reverse_storage_order_on_pad_type (tree type)
1526 tree field, canonical_pad_type;
1528 if (flag_checking)
1530 /* If the inner type is not scalar then the function does nothing. */
1531 tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
1532 gcc_assert (!AGGREGATE_TYPE_P (inner_type)
1533 && !VECTOR_TYPE_P (inner_type));
1536 /* This is required for the canonicalization. */
1537 gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));
1539 field = copy_node (TYPE_FIELDS (type));
1540 type = copy_type (type);
1541 DECL_CONTEXT (field) = type;
1542 TYPE_FIELDS (type) = field;
1543 TYPE_REVERSE_STORAGE_ORDER (type) = 1;
1544 canonical_pad_type = lookup_and_insert_pad_type (type);
1545 return canonical_pad_type ? canonical_pad_type : type;
1548 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1549 If this is a multi-dimensional array type, do this recursively.
1551 OP may be
1552 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1553 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1554 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1556 void
1557 relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1559 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1560 of a one-dimensional array, since the padding has the same alias set
1561 as the field type, but if it's a multi-dimensional array, we need to
1562 see the inner types. */
1563 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1564 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1565 || TYPE_PADDING_P (gnu_old_type)))
1566 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1568 /* Unconstrained array types are deemed incomplete and would thus be given
1569 alias set 0. Retrieve the underlying array type. */
1570 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1571 gnu_old_type
1572 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1573 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1574 gnu_new_type
1575 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
/* For a multi-dimensional array, first relate the alias sets of the
   inner dimensions recursively.  */
1577 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1578 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1579 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1580 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1582 switch (op)
1584 case ALIAS_SET_COPY:
1585 /* The alias set shouldn't be copied between array types with different
1586 aliasing settings because this can break the aliasing relationship
1587 between the array type and its element type. */
1588 if (flag_checking || flag_strict_aliasing)
1589 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1590 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1591 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1592 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1594 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1595 break;
1597 case ALIAS_SET_SUBSET:
1598 case ALIAS_SET_SUPERSET:
1600 alias_set_type old_set = get_alias_set (gnu_old_type);
1601 alias_set_type new_set = get_alias_set (gnu_new_type);
1603 /* Do nothing if the alias sets conflict. This ensures that we
1604 never call record_alias_subset several times for the same pair
1605 or at all for alias set 0. */
1606 if (!alias_sets_conflict_p (old_set, new_set))
1608 if (op == ALIAS_SET_SUBSET)
1609 record_alias_subset (old_set, new_set);
1610 else
1611 record_alias_subset (new_set, old_set);
1614 break;
1616 default:
1617 gcc_unreachable ();
/* Finally, also compute the alias sets of the components of the new
   type.  */
1620 record_component_aliases (gnu_new_type);
1623 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1624 ARTIFICIAL_P is true if the type was generated by the compiler. */
1626 void
1627 record_builtin_type (const char *name, tree type, bool artificial_p)
1629 tree type_decl = build_decl (input_location,
1630 TYPE_DECL, get_identifier (name), type);
1631 DECL_ARTIFICIAL (type_decl) = artificial_p;
1632 TYPE_ARTIFICIAL (type) = artificial_p;
1633 gnat_pushdecl (type_decl, Empty);
1635 if (debug_hooks->type_decl)
1636 debug_hooks->type_decl (type_decl, false);
1639 /* Finish constructing the character type CHAR_TYPE.
1641 In Ada character types are enumeration types and, as a consequence, are
1642 represented in the front-end by integral types holding the positions of
1643 the enumeration values as defined by the language, which means that the
1644 integral types are unsigned.
1646 Unfortunately the signedness of 'char' in C is implementation-defined
1647 and GCC even has the option -fsigned-char to toggle it at run time.
1648 Since GNAT's philosophy is to be compatible with C by default, to wit
1649 Interfaces.C.char is defined as a mere copy of Character, we may need
1650 to declare character types as signed types in GENERIC and generate the
1651 necessary adjustments to make them behave as unsigned types.
1653 The overall strategy is as follows: if 'char' is unsigned, do nothing;
1654 if 'char' is signed, translate character types of CHAR_TYPE_SIZE and
1655 character subtypes with RM_Size = Esize = CHAR_TYPE_SIZE into signed
1656 types. The idea is to ensure that the bit pattern contained in the
1657 Esize'd objects is not changed, even though the numerical value will
1658 be interpreted differently depending on the signedness. */
1660 void
1661 finish_character_type (tree char_type)
/* Nothing to do if the type is already unsigned.  */
1663 if (TYPE_UNSIGNED (char_type))
1664 return;
1666 /* Make a copy of a generic unsigned version since we'll modify it;
1667 for Character itself, unsigned_char_type_node is used directly. */
1667 tree unsigned_char_type
1668 = (char_type == char_type_node
1669 ? unsigned_char_type_node
1670 : copy_type (gnat_unsigned_type_for (char_type)));
1672 /* Create an unsigned version of the type and set it as debug type. */
1673 TYPE_NAME (unsigned_char_type) = TYPE_NAME (char_type);
1674 TYPE_STRING_FLAG (unsigned_char_type) = TYPE_STRING_FLAG (char_type);
1675 TYPE_ARTIFICIAL (unsigned_char_type) = TYPE_ARTIFICIAL (char_type);
1676 SET_TYPE_DEBUG_TYPE (char_type, unsigned_char_type);
1678 /* If this is a subtype, make the debug type a subtype of the debug type
1679 of the base type and convert literal RM bounds to unsigned. */
1680 if (TREE_TYPE (char_type))
1682 tree base_unsigned_char_type = TYPE_DEBUG_TYPE (TREE_TYPE (char_type));
1683 tree min_value = TYPE_RM_MIN_VALUE (char_type);
1684 tree max_value = TYPE_RM_MAX_VALUE (char_type);
/* Only literal (INTEGER_CST) bounds can be converted here; other bounds
   are left as-is.  */
1686 if (TREE_CODE (min_value) == INTEGER_CST)
1687 min_value = fold_convert (base_unsigned_char_type, min_value);
1688 if (TREE_CODE (max_value) == INTEGER_CST)
1689 max_value = fold_convert (base_unsigned_char_type, max_value);
1691 TREE_TYPE (unsigned_char_type) = base_unsigned_char_type;
1692 SET_TYPE_RM_MIN_VALUE (unsigned_char_type, min_value);
1693 SET_TYPE_RM_MAX_VALUE (unsigned_char_type, max_value);
1696 /* Adjust the RM bounds of the original type to unsigned; that's especially
1697 important for types since they are implicit in this case. */
1698 SET_TYPE_RM_MIN_VALUE (char_type, TYPE_MIN_VALUE (unsigned_char_type));
1699 SET_TYPE_RM_MAX_VALUE (char_type, TYPE_MAX_VALUE (unsigned_char_type));
1702 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1703 finish constructing the record type as a fat pointer type. */
1705 void
1706 finish_fat_pointer_type (tree record_type, tree field_list)
1708 /* Make sure we can put it into a register. */
1709 if (STRICT_ALIGNMENT)
1710 SET_TYPE_ALIGN (record_type, MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE));
1712 /* Show what it really is. */
1713 TYPE_FAT_POINTER_P (record_type) = 1;
1715 /* Do not emit debug info for it since the types of its fields may still be
1716 incomplete at this point. */
1717 finish_record_type (record_type, field_list, 0, false);
1719 /* Force type_contains_placeholder_p to return true on it. Although the
1720 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1721 type but the representation of the unconstrained array. */
1722 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1725 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1726 finish constructing the record or union type. If REP_LEVEL is zero, this
1727 record has no representation clause and so will be entirely laid out here.
1728 If REP_LEVEL is one, this record has a representation clause and has been
1729 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1730 this record is derived from a parent record and thus inherits its layout;
1731 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1732 additional debug info needs to be output for this type. */
void
finish_record_type (tree record_type, tree field_list, int rep_level,
		    bool debug_info_p)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_IDENTIFIER (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  /* Remember which size/alignment data was already present on entry, so we
     can tell apart rep'ed records laid out by the front-end from records we
     must lay out ourselves.  */
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  bool had_align = TYPE_ALIGN (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      SET_TYPE_ALIGN (record_type, MAX (BITS_PER_UNIT,
					TYPE_ALIGN (record_type)));

      if (!had_size_unit)
	TYPE_SIZE_UNIT (record_type) = size_zero_node;

      if (!had_size)
	TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
	 out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
	code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
	 case where there is a rep clause but all fields have errors and
	 no longer have a position.  */
      TYPE_SIZE (record_type) = 0;

      /* Ensure we use the traditional GCC layout for bitfields when we need
	 to pack the record type or have a representation clause.  The other
	 possible layout (Microsoft C compiler), if available, would prevent
	 efficient packing in almost all cases.  */
#ifdef TARGET_MS_BITFIELD_LAYOUT
      if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
	decl_attributes (&record_type,
			 tree_cons (get_identifier ("gcc_struct"),
				    NULL_TREE, NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
#endif

      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      if (RECORD_OR_UNION_TYPE_P (type)
	  && !TYPE_FAT_POINTER_P (type)
	  && !TYPE_CONTAINS_TEMPLATE_P (type)
	  && TYPE_ADA_SIZE (type))
	this_ada_size = TYPE_ADA_SIZE (type);
      else
	this_ada_size = this_size;

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
	{
	  unsigned int align = TYPE_ALIGN (type);

	  /* In the general case, type alignment is required.  */
	  if (value_factor_p (pos, align))
	    {
	      /* The enclosing record type must be sufficiently aligned.
		 Otherwise, if no alignment was specified for it and it
		 has been laid out already, bump its alignment to the
		 desired one if this is compatible with its size and
		 maximum alignment, if any.  */
	      if (TYPE_ALIGN (record_type) >= align)
		{
		  SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
		  DECL_BIT_FIELD (field) = 0;
		}
	      else if (!had_align
		       && rep_level == 0
		       && value_factor_p (TYPE_SIZE (record_type), align)
		       && (!TYPE_MAX_ALIGN (record_type)
			   || TYPE_MAX_ALIGN (record_type) >= align))
		{
		  SET_TYPE_ALIGN (record_type, align);
		  SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
		  DECL_BIT_FIELD (field) = 0;
		}
	    }

	  /* In the non-strict alignment case, only byte alignment is
	     required.  */
	  if (!STRICT_ALIGNMENT
	      && DECL_BIT_FIELD (field)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    DECL_BIT_FIELD (field) = 0;
	}

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
	 field is technically not addressable.  Except that it can actually
	 be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
	  && !(DECL_MODE (field) == BLKmode
	       && value_factor_p (pos, BITS_PER_UNIT)))
	DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
	 a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
	SET_TYPE_ALIGN (record_type,
			MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field)));

      switch (code)
	{
	case UNION_TYPE:
	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
	  size = size_binop (MAX_EXPR, size, this_size);
	  break;

	case QUAL_UNION_TYPE:
	  ada_size
	    = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			   this_ada_size, ada_size);
	  size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			      this_size, size);
	  break;

	case RECORD_TYPE:
	  /* Since we know here that all fields are sorted in order of
	     increasing bit position, the size of the record is one
	     higher than the ending bit of the last field processed
	     unless we have a rep clause, since in that case we might
	     have a field outside a QUAL_UNION_TYPE that has a higher ending
	     position.  So use a MAX in that case.  Also, if this field is a
	     QUAL_UNION_TYPE, we need to take into account the previous size in
	     the case of empty variants.  */
	  ada_size
	    = merge_sizes (ada_size, pos, this_ada_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  size
	    = merge_sizes (size, pos, this_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Restore the original field order, which was reversed above for the
     QUAL_UNION_TYPE pass.  */
  if (code == QUAL_UNION_TYPE)
    nreverse (field_list);

  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
	 than what was specified in it, if any.  */
      if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
	size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_FAT_POINTER_P (record_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (record_type))
	SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
	{
	  tree size_unit = had_size_unit
			   ? TYPE_SIZE_UNIT (record_type)
			   : convert (sizetype,
				      size_binop (CEIL_DIV_EXPR, size,
						  bitsize_unit_node));
	  unsigned int align = TYPE_ALIGN (record_type);

	  TYPE_SIZE (record_type) = variable_size (round_up (size, align));
	  TYPE_SIZE_UNIT (record_type)
	    = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

	  compute_record_mode (record_type);
	}
    }

  /* Reset the TYPE_MAX_ALIGN field since it's private to gigi.  */
  TYPE_MAX_ALIGN (record_type) = 0;

  if (debug_info_p)
    rest_of_record_type_compilation (record_type);
}
1948 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
1949 PARALLEL_TYPE has no context and its computation is not deferred yet, also
1950 propagate TYPE's context to PARALLEL_TYPE's or defer its propagation to the
1951 moment TYPE will get a context. */
1953 void
1954 add_parallel_type (tree type, tree parallel_type)
1956 tree decl = TYPE_STUB_DECL (type);
1958 while (DECL_PARALLEL_TYPE (decl))
1959 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1961 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1963 /* If PARALLEL_TYPE already has a context, we are done. */
1964 if (TYPE_CONTEXT (parallel_type))
1965 return;
1967 /* Otherwise, try to get one from TYPE's context. If so, simply propagate
1968 it to PARALLEL_TYPE. */
1969 if (TYPE_CONTEXT (type))
1970 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
1972 /* Otherwise TYPE has not context yet. We know it will have one thanks to
1973 gnat_pushdecl and then its context will be propagated to PARALLEL_TYPE,
1974 so we have nothing to do in this case. */
1977 /* Return true if TYPE has a parallel type. */
1979 static bool
1980 has_parallel_type (tree type)
1982 tree decl = TYPE_STUB_DECL (type);
1984 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1987 /* Wrap up compilation of RECORD_TYPE, i.e. output additional debug info
1988 associated with it. It need not be invoked directly in most cases as
1989 finish_record_type takes care of doing so. */
void
rest_of_record_type_compilation (tree record_type)
{
  bool var_size = false;
  tree field;

  /* If this is a padded type, the bulk of the debug info has already been
     generated for the field's type.  */
  if (TYPE_IS_PADDING_P (record_type))
    return;

  /* If the type already has a parallel type (XVS type), then we're done.  */
  if (has_parallel_type (record_type))
    return;

  for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
	 whether or not the record does.  For example, if we have a union,
	 it may be that all fields, rounded up to the alignment, have the
	 same size, in which case we'll use that size.  But the debug
	 output routines (except Dwarf2) won't be able to output the fields,
	 so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
	  /* If a field has a non-constant qualifier, the record will have
	     variable size too.  */
	  || (TREE_CODE (record_type) == QUAL_UNION_TYPE
	      && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
	{
	  var_size = true;
	  break;
	}
    }

  /* If this record type is of variable size, make a parallel record type that
     will tell the debugger how the former is laid out (see exp_dbug.ads).  */
  if (var_size && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
    {
      tree new_record_type
	= make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
		     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
      tree last_pos = bitsize_zero_node;
      tree old_field, prev_old_field = NULL_TREE;

      /* The parallel type is named after the original with an XVU (variant
	 union) or XVE (variable encoding) suffix, per exp_dbug.ads.  */
      new_name
	= concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
		       ? "XVU" : "XVE");
      TYPE_NAME (new_record_type) = new_name;
      SET_TYPE_ALIGN (new_record_type, BIGGEST_ALIGNMENT);
      TYPE_STUB_DECL (new_record_type)
	= create_type_stub_decl (new_name, new_record_type);
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
	= DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      gnat_pushdecl (TYPE_STUB_DECL (new_record_type), Empty);
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
	= size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      /* Now scan all the fields, replacing each field with a new field
	 corresponding to the new encoding.  */
      for (old_field = TYPE_FIELDS (record_type); old_field;
	   old_field = DECL_CHAIN (old_field))
	{
	  tree field_type = TREE_TYPE (old_field);
	  tree field_name = DECL_NAME (old_field);
	  tree curpos = fold_bit_position (old_field);
	  tree pos, new_field;
	  bool var = false;
	  unsigned int align = 0;

	  /* See how the position was modified from the last position.

	     There are two basic cases we support: a value was added
	     to the last position or the last position was rounded to
	     a boundary and then something was added.  Check for the
	     first case first.  If not, see if there is any evidence
	     of rounding.  If so, round the last position and retry.

	     If this is a union, the position can be taken as zero.  */
	  if (TREE_CODE (new_record_type) == UNION_TYPE)
	    pos = bitsize_zero_node;
	  else
	    pos = compute_related_constant (curpos, last_pos);

	  if (!pos
	      && TREE_CODE (curpos) == MULT_EXPR
	      && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
	    {
	      tree offset = TREE_OPERAND (curpos, 0);
	      align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align = scale_by_factor_of (offset, align);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  else if (!pos
		   && TREE_CODE (curpos) == PLUS_EXPR
		   && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
		   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
		   && tree_fits_uhwi_p
		      (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
	    {
	      tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
	      unsigned HOST_WIDE_INT addend
	        = tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align
		= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
	      align = scale_by_factor_of (offset, align);
	      /* An addend with fewer trailing zero bits further limits the
		 alignment we may assume (addend & -addend isolates the
		 lowest set bit).  */
	      align = MIN (align, addend & -addend);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  else if (potential_alignment_gap (prev_old_field, old_field, pos))
	    {
	      align = TYPE_ALIGN (field_type);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }

	  /* If we can't compute a position, set it to zero.

	     ??? We really should abort here, but it's too much work
	     to get this correct for all cases.  */
	  if (!pos)
	    pos = bitsize_zero_node;

	  /* See if this type is variable-sized and make a pointer type
	     and indicate the indirection if so.  Beware that the debug
	     back-end may adjust the position computed above according
	     to the alignment of the field type, i.e. the pointer type
	     in this case, if we don't preventively counter that.  */
	  if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
	    {
	      field_type = build_pointer_type (field_type);
	      if (align != 0 && TYPE_ALIGN (field_type) > align)
		{
		  field_type = copy_type (field_type);
		  SET_TYPE_ALIGN (field_type, align);
		}
	      var = true;
	    }

	  /* Make a new field name, if necessary.  */
	  if (var || align != 0)
	    {
	      char suffix[16];

	      /* XVL = variable-length (indirect); XVA = aligned; the number
		 is the alignment in bytes (see exp_dbug.ads).  */
	      if (align != 0)
		sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
			 align / BITS_PER_UNIT);
	      else
		strcpy (suffix, "XVL");

	      field_name = concat_name (field_name, suffix);
	    }

	  new_field
	    = create_field_decl (field_name, field_type, new_record_type,
				 DECL_SIZE (old_field), pos, 0, 0);
	  /* Prepend; the list is put back in order with nreverse below.  */
	  DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
	  TYPE_FIELDS (new_record_type) = new_field;

	  /* If old_field is a QUAL_UNION_TYPE, take its size as being
	     zero.  The only time it's not the last field of the record
	     is when there are other components at fixed positions after
	     it (meaning there was a rep clause for every field) and we
	     want to be able to encode them.  */
	  last_pos = size_binop (PLUS_EXPR, curpos,
				 (TREE_CODE (TREE_TYPE (old_field))
				  == QUAL_UNION_TYPE)
				 ? bitsize_zero_node
				 : DECL_SIZE (old_field));
	  prev_old_field = old_field;
	}

      TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));

      add_parallel_type (record_type, new_record_type);
    }
}
2172 /* Utility function of above to merge LAST_SIZE, the previous size of a record
2173 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2174 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2175 replace a value of zero with the old size. If HAS_REP is true, we take the
2176 MAX of the end position of this field with LAST_SIZE. In all other cases,
2177 we use FIRST_BIT plus SIZE. Return an expression for the size. */
static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool special,
	     bool has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new_size;

  if (!special || TREE_CODE (size) != COND_EXPR)
    {
      new_size = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
	new_size = size_binop (MAX_EXPR, last_size, new_size);
    }

  /* For a QUAL_UNION_TYPE variant, the size is a COND_EXPR; recurse into
     both arms, substituting LAST_SIZE for a zero arm (empty variant).  */
  else
    new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
			    integer_zerop (TREE_OPERAND (size, 1))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 1),
						       1, has_rep),
			    integer_zerop (TREE_OPERAND (size, 2))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 2),
						       1, has_rep));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
    new_size = TREE_OPERAND (new_size, 0);

  return new_size;
}
2213 /* Return the bit position of FIELD, in bits from the start of the record,
2214 and fold it as much as possible. This is a tree of type bitsizetype. */
static tree
fold_bit_position (const_tree field)
{
  tree offset = DECL_FIELD_OFFSET (field);

  /* Fold the byte offset into bitsizetype, distributing the conversion over
     a top-level sum or product to maximize folding opportunities.  */
  if (TREE_CODE (offset) == MULT_EXPR || TREE_CODE (offset) == PLUS_EXPR)
    offset = size_binop (TREE_CODE (offset),
			 fold_convert (bitsizetype, TREE_OPERAND (offset, 0)),
			 fold_convert (bitsizetype, TREE_OPERAND (offset, 1)));
  else
    offset = fold_convert (bitsizetype, offset);

  /* Bit position = bit offset within unit + byte offset * BITS_PER_UNIT.  */
  return size_binop (PLUS_EXPR, DECL_FIELD_BIT_OFFSET (field),
		     size_binop (MULT_EXPR, offset, bitsize_unit_node));
}
2230 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2231 related by the addition of a constant. Return that constant if so. */
static tree
compute_related_constant (tree op0, tree op1)
{
  tree factor, op0_var, op1_var, op0_cst, op1_cst, result;

  /* If both operands are products by the same constant, factor it out and
     compare the multiplicands instead; the result is scaled back below.  */
  if (TREE_CODE (op0) == MULT_EXPR
      && TREE_CODE (op1) == MULT_EXPR
      && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
      && TREE_OPERAND (op1, 1) == TREE_OPERAND (op0, 1))
    {
      factor = TREE_OPERAND (op0, 1);
      op0 = TREE_OPERAND (op0, 0);
      op1 = TREE_OPERAND (op1, 0);
    }
  else
    factor = NULL_TREE;

  /* Split each operand into a constant and a variable part; the operands
     are related iff the variable parts are equal.  */
  op0_cst = split_plus (op0, &op0_var);
  op1_cst = split_plus (op1, &op1_var);
  result = size_binop (MINUS_EXPR, op0_cst, op1_cst);

  if (operand_equal_p (op0_var, op1_var, 0))
    return factor ? size_binop (MULT_EXPR, factor, result) : result;

  return NULL_TREE;
}
2260 /* Utility function of above to split a tree OP which may be a sum, into a
2261 constant part, which is returned, and a variable part, which is stored
2262 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2263 bitsizetype. */
static tree
split_plus (tree in, tree *pvar)
{
  /* Strip conversions in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery.  We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  in = remove_conversions (in, false);

  /* Default: the whole expression is variable and the constant part is
     zero; refined below for constants and sums/differences.  */
  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      /* Neither side had a constant part: keep *pvar as the whole
	 expression and report a zero constant.  */
      if (lhs_var == TREE_OPERAND (in, 0)
	  && rhs_var == TREE_OPERAND (in, 1))
	return bitsize_zero_node;

      /* Recombine the variable and constant parts with the same operator
	 (PLUS_EXPR or MINUS_EXPR).  */
      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}
2298 /* Return a copy of TYPE but safe to modify in any way. */
tree
copy_type (tree type)
{
  tree new_type = copy_node (type);

  /* Unshare the language-specific data.  */
  if (TYPE_LANG_SPECIFIC (type))
    {
      TYPE_LANG_SPECIFIC (new_type) = NULL;
      SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
    }

  /* And the contents of the language-specific slot if needed.  */
  if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
      && TYPE_RM_VALUES (type))
    {
      TYPE_RM_VALUES (new_type) = NULL_TREE;
      SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
    }

  /* copy_node clears this field instead of copying it, because it is
     aliased with TREE_CHAIN.  */
  TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);

  /* Break all sharing with the original so the copy is free to diverge.  */
  TYPE_POINTER_TO (new_type) = NULL_TREE;
  TYPE_REFERENCE_TO (new_type) = NULL_TREE;
  TYPE_MAIN_VARIANT (new_type) = new_type;
  TYPE_NEXT_VARIANT (new_type) = NULL_TREE;
  TYPE_CANONICAL (new_type) = new_type;

  return new_type;
}
2335 /* Return a subtype of sizetype with range MIN to MAX and whose
2336 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2337 of the associated TYPE_DECL. */
tree
create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
{
  /* First build a type for the desired range.  */
  tree type = build_nonshared_range_type (sizetype, min, max);

  /* Then set the index type.  */
  SET_TYPE_INDEX_TYPE (type, index);
  create_type_decl (NULL_TREE, type, true, false, gnat_node);

  return type;
}
2352 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2353 sizetype is used. */
tree
create_range_type (tree type, tree min, tree max)
{
  tree range_type;

  if (!type)
    type = sizetype;

  /* First build a type with the base range.  */
  range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
					   TYPE_MAX_VALUE (type));

  /* Then set the actual range, kept in the RM (Reference Manual) bounds.  */
  SET_TYPE_RM_MIN_VALUE (range_type, min);
  SET_TYPE_RM_MAX_VALUE (range_type, max);

  return range_type;
}
2374 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
2375 NAME gives the name of the type to be used in the declaration. */
2377 tree
2378 create_type_stub_decl (tree name, tree type)
2380 tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
2381 DECL_ARTIFICIAL (type_decl) = 1;
2382 TYPE_ARTIFICIAL (type) = 1;
2383 return type_decl;
2386 /* Return a TYPE_DECL node for TYPE. NAME gives the name of the type to be
2387 used in the declaration. ARTIFICIAL_P is true if the declaration was
2388 generated by the compiler. DEBUG_INFO_P is true if we need to write
2389 debug information about this type. GNAT_NODE is used for the position
2390 of the decl. */
tree
create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
		  Node_Id gnat_node)
{
  enum tree_code code = TREE_CODE (type);
  bool is_named
    = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
  tree type_decl;

  /* Only the builtin TYPE_STUB_DECL should be used for dummy types.  */
  gcc_assert (!TYPE_IS_DUMMY_P (type));

  /* If the type hasn't been named yet, we're naming it; preserve an existing
     TYPE_STUB_DECL that has been attached to it for some purpose.  */
  if (!is_named && TYPE_STUB_DECL (type))
    {
      type_decl = TYPE_STUB_DECL (type);
      DECL_NAME (type_decl) = name;
    }
  else
    type_decl = build_decl (input_location, TYPE_DECL, name, type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;
  TYPE_ARTIFICIAL (type) = artificial_p;

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (type_decl, gnat_node);

  /* If we're naming the type, equate the TYPE_STUB_DECL to the name.  This
     causes the name to be also viewed as a "tag" by the debug back-end, with
     the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
     types in DWARF.

     Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
     from multiple contexts, and "type_decl" references a copy of it: in such a
     case, do not mess TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
     with the mechanism above.  */
  if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
    TYPE_STUB_DECL (type) = type_decl;

  /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
     back-end doesn't support, and for others if we don't need to.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;

  return type_decl;
}
2440 /* Return a VAR_DECL or CONST_DECL node.
2442 NAME gives the name of the variable. ASM_NAME is its assembler name
2443 (if provided). TYPE is its data type (a GCC ..._TYPE node). INIT is
2444 the GCC tree for an optional initial expression; NULL_TREE if none.
2446 CONST_FLAG is true if this variable is constant, in which case we might
2447 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2449 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2450 definition to be made visible outside of the current compilation unit, for
2451 instance variable definitions in a package specification.
2453 EXTERN_FLAG is true when processing an external variable declaration (as
2454 opposed to a definition: no storage is to be allocated for the variable).
2456 STATIC_FLAG is only relevant when not at top level and indicates whether
2457 to always allocate storage to the variable.
2459 VOLATILE_FLAG is true if this variable is declared as volatile.
2461 ARTIFICIAL_P is true if the variable was generated by the compiler.
2463 DEBUG_INFO_P is true if we need to write debug information for it.
2465 ATTR_LIST is the list of attributes to be attached to the variable.
2467 GNAT_NODE is used for the position of the decl. */
tree
create_var_decl (tree name, tree asm_name, tree type, tree init,
		 bool const_flag, bool public_flag, bool extern_flag,
		 bool static_flag, bool volatile_flag, bool artificial_p,
		 bool debug_info_p, struct attrib *attr_list,
		 Node_Id gnat_node, bool const_decl_allowed_p)
{
  /* Whether the object has static storage duration, either explicitly or by
     virtue of being declared at the global level.  */
  const bool static_storage = static_flag || global_bindings_p ();

  /* Whether the initializer is constant: for an external object or an object
     with static storage duration, we check that the initializer is a valid
     constant expression for initializing a static variable; otherwise, we
     only check that it is constant.  */
  const bool init_const
    = (init
       && gnat_types_compatible_p (type, TREE_TYPE (init))
       && (extern_flag || static_storage
	   ? initializer_constant_valid_p (init, TREE_TYPE (init))
	     != NULL_TREE
	   : TREE_CONSTANT (init)));

  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
     case the initializer may be used in lieu of the DECL node (as done in
     Identifier_to_gnu).  This is useful to prevent the need of elaboration
     code when an identifier for which such a DECL is made is in turn used
     as an initializer.  We used to rely on CONST_DECL vs VAR_DECL for this,
     but extra constraints apply to this choice (see below) and they are not
     relevant to the distinction we wish to make.  */
  const bool constant_p = const_flag && init_const;

  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
     and may be used for scalars in general but not for aggregates.  */
  tree var_decl
    = build_decl (input_location,
		  (constant_p
		   && const_decl_allowed_p
		   && !AGGREGATE_TYPE_P (type) ? CONST_DECL : VAR_DECL),
		  name, type);

  /* Detect constants created by the front-end to hold 'reference to function
     calls for stabilization purposes.  This is needed for renaming.  */
  if (const_flag && init && POINTER_TYPE_P (type))
    {
      tree inner = init;
      if (TREE_CODE (inner) == COMPOUND_EXPR)
	inner = TREE_OPERAND (inner, 1);
      inner = remove_conversions (inner, true);
      if (TREE_CODE (inner) == ADDR_EXPR
	  && ((TREE_CODE (TREE_OPERAND (inner, 0)) == CALL_EXPR
	       && !call_is_atomic_load (TREE_OPERAND (inner, 0)))
	      || (TREE_CODE (TREE_OPERAND (inner, 0)) == VAR_DECL
		  && DECL_RETURN_VALUE_P (TREE_OPERAND (inner, 0)))))
	DECL_RETURN_VALUE_P (var_decl) = 1;
    }

  /* If this is external, throw away any initializations (they will be done
     elsewhere) unless this is a constant for which we would like to remain
     able to get the initializer.  If we are defining a global here, leave a
     constant initialization and save any variable elaborations for the
     elaboration routine.  If we are just annotating types, throw away the
     initialization if it isn't a constant.  */
  if ((extern_flag && !constant_p)
      || (type_annotate_only && init && !TREE_CONSTANT (init)))
    init = NULL_TREE;

  /* At the global level, a non-constant initializer generates elaboration
     statements.  Check that such statements are allowed, that is to say,
     not violating a No_Elaboration_Code restriction.  */
  if (init && !init_const && global_bindings_p ())
    Check_Elaboration_Code_Allowed (gnat_node);

  /* Attach the initializer, if any.  */
  DECL_INITIAL (var_decl) = init;

  /* Directly set some flags.  */
  DECL_ARTIFICIAL (var_decl) = artificial_p;
  DECL_EXTERNAL (var_decl) = extern_flag;

  TREE_CONSTANT (var_decl) = constant_p;
  TREE_READONLY (var_decl) = const_flag;

  /* The object is public if it is external or if it is declared public
     and has static storage duration.  */
  TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);

  /* We need to allocate static storage for an object with static storage
     duration if it isn't external.  */
  TREE_STATIC (var_decl) = !extern_flag && static_storage;

  TREE_SIDE_EFFECTS (var_decl)
    = TREE_THIS_VOLATILE (var_decl)
    = TYPE_VOLATILE (type) | volatile_flag;

  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && TREE_PUBLIC (var_decl)
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;

  /* Do not emit debug info for a CONST_DECL if optimization isn't enabled,
     since we will create an associated variable.  Likewise for an external
     constant whose initializer is not absolute, because this would mean a
     global relocation in a read-only section which runs afoul of the PE-COFF
     run-time relocation mechanism.  */
  if (!debug_info_p
      || (TREE_CODE (var_decl) == CONST_DECL && !optimize)
      || (extern_flag
	  && constant_p
	  && init
	  && initializer_constant_valid_p (init, TREE_TYPE (init))
	     != null_pointer_node))
    DECL_IGNORED_P (var_decl) = 1;

  /* ??? Some attributes cannot be applied to CONST_DECLs.  */
  if (TREE_CODE (var_decl) == VAR_DECL)
    process_attributes (&var_decl, &attr_list, true, gnat_node);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
    {
      /* Let the target mangle the name if this isn't a verbatim asm.  */
      if (*IDENTIFIER_POINTER (asm_name) != '*')
	asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);

      SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
    }

  return var_decl;
}
2610 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2612 static bool
2613 aggregate_type_contains_array_p (tree type)
2615 switch (TREE_CODE (type))
2617 case RECORD_TYPE:
2618 case UNION_TYPE:
2619 case QUAL_UNION_TYPE:
2621 tree field;
2622 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2623 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2624 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2625 return true;
2626 return false;
2629 case ARRAY_TYPE:
2630 return true;
2632 default:
2633 gcc_unreachable ();
2637 /* Return a FIELD_DECL node. NAME is the field's name, TYPE is its type and
2638 RECORD_TYPE is the type of the enclosing record. If SIZE is nonzero, it
2639 is the specified size of the field. If POS is nonzero, it is the bit
2640 position. PACKED is 1 if the enclosing record is packed, -1 if it has
2641 Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2642 means we are allowed to take the address of the field; if it is negative,
2643 we should not make a bitfield, which is used by make_aligning_type. */
2645 tree
2646 create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
2647 int packed, int addressable)
2649 tree field_decl = build_decl (input_location, FIELD_DECL, name, type);
2651 DECL_CONTEXT (field_decl) = record_type;
2652 TREE_READONLY (field_decl) = TYPE_READONLY (type);
2654 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2655 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2656 Likewise for an aggregate without specified position that contains an
2657 array, because in this case slices of variable length of this array
2658 must be handled by GCC and variable-sized objects need to be aligned
2659 to at least a byte boundary. */
2660 if (packed && (TYPE_MODE (type) == BLKmode
2661 || (!pos
2662 && AGGREGATE_TYPE_P (type)
2663 && aggregate_type_contains_array_p (type))))
2664 SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);
2666 /* If a size is specified, use it. Otherwise, if the record type is packed
2667 compute a size to use, which may differ from the object's natural size.
2668 We always set a size in this case to trigger the checks for bitfield
2669 creation below, which is typically required when no position has been
2670 specified. */
2671 if (size)
2672 size = convert (bitsizetype, size);
2673 else if (packed == 1)
2675 size = rm_size (type);
2676 if (TYPE_MODE (type) == BLKmode)
2677 size = round_up (size, BITS_PER_UNIT);
2680 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2681 specified for two reasons: first if the size differs from the natural
2682 size. Second, if the alignment is insufficient. There are a number of
2683 ways the latter can be true.
2685 We never make a bitfield if the type of the field has a nonconstant size,
2686 because no such entity requiring bitfield operations should reach here.
2688 We do *preventively* make a bitfield when there might be the need for it
2689 but we don't have all the necessary information to decide, as is the case
2690 of a field with no specified position in a packed record.
2692 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2693 in layout_decl or finish_record_type to clear the bit_field indication if
2694 it is in fact not needed. */
2695 if (addressable >= 0
2696 && size
2697 && TREE_CODE (size) == INTEGER_CST
2698 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2699 && (!tree_int_cst_equal (size, TYPE_SIZE (type))
2700 || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
2701 || packed
2702 || (TYPE_ALIGN (record_type) != 0
2703 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
2705 DECL_BIT_FIELD (field_decl) = 1;
2706 DECL_SIZE (field_decl) = size;
2707 if (!packed && !pos)
2709 if (TYPE_ALIGN (record_type) != 0
2710 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
2711 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type));
2712 else
2713 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
2717 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2719 /* Bump the alignment if need be, either for bitfield/packing purposes or
2720 to satisfy the type requirements if no such consideration applies. When
2721 we get the alignment from the type, indicate if this is from an explicit
2722 user request, which prevents stor-layout from lowering it later on. */
2724 unsigned int bit_align
2725 = (DECL_BIT_FIELD (field_decl) ? 1
2726 : packed && TYPE_MODE (type) != BLKmode ? BITS_PER_UNIT : 0);
2728 if (bit_align > DECL_ALIGN (field_decl))
2729 SET_DECL_ALIGN (field_decl, bit_align);
2730 else if (!bit_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
2732 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
2733 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
2737 if (pos)
2739 /* We need to pass in the alignment the DECL is known to have.
2740 This is the lowest-order bit set in POS, but no more than
2741 the alignment of the record, if one is specified. Note
2742 that an alignment of 0 is taken as infinite. */
2743 unsigned int known_align;
2745 if (tree_fits_uhwi_p (pos))
2746 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
2747 else
2748 known_align = BITS_PER_UNIT;
2750 if (TYPE_ALIGN (record_type)
2751 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2752 known_align = TYPE_ALIGN (record_type);
2754 layout_decl (field_decl, known_align);
2755 SET_DECL_OFFSET_ALIGN (field_decl,
2756 tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
2757 : BITS_PER_UNIT);
2758 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2759 &DECL_FIELD_BIT_OFFSET (field_decl),
2760 DECL_OFFSET_ALIGN (field_decl), pos);
2763 /* In addition to what our caller says, claim the field is addressable if we
2764 know that its type is not suitable.
2766 The field may also be "technically" nonaddressable, meaning that even if
2767 we attempt to take the field's address we will actually get the address
2768 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2769 value we have at this point is not accurate enough, so we don't account
2770 for this here and let finish_record_type decide. */
2771 if (!addressable && !type_for_nonaliased_component_p (type))
2772 addressable = 1;
2774 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2776 return field_decl;
2779 /* Return a PARM_DECL node with NAME and TYPE. */
2781 tree
2782 create_param_decl (tree name, tree type)
2784 tree param_decl = build_decl (input_location, PARM_DECL, name, type);
2786 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2787 can lead to various ABI violations. */
2788 if (targetm.calls.promote_prototypes (NULL_TREE)
2789 && INTEGRAL_TYPE_P (type)
2790 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
2792 /* We have to be careful about biased types here. Make a subtype
2793 of integer_type_node with the proper biasing. */
2794 if (TREE_CODE (type) == INTEGER_TYPE
2795 && TYPE_BIASED_REPRESENTATION_P (type))
2797 tree subtype
2798 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2799 TREE_TYPE (subtype) = integer_type_node;
2800 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2801 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
2802 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
2803 type = subtype;
2805 else
2806 type = integer_type_node;
2809 DECL_ARG_TYPE (param_decl) = type;
2810 return param_decl;
2813 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2814 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2815 changed. GNAT_NODE is used for the position of error messages. */
2817 void
2818 process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2819 Node_Id gnat_node)
2821 struct attrib *attr;
2823 for (attr = *attr_list; attr; attr = attr->next)
2824 switch (attr->type)
2826 case ATTR_MACHINE_ATTRIBUTE:
2827 Sloc_to_locus (Sloc (gnat_node), &input_location);
2828 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2829 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
2830 break;
2832 case ATTR_LINK_ALIAS:
2833 if (!DECL_EXTERNAL (*node))
2835 TREE_STATIC (*node) = 1;
2836 assemble_alias (*node, attr->name);
2838 break;
2840 case ATTR_WEAK_EXTERNAL:
2841 if (SUPPORTS_WEAK)
2842 declare_weak (*node);
2843 else
2844 post_error ("?weak declarations not supported on this target",
2845 attr->error_point);
2846 break;
2848 case ATTR_LINK_SECTION:
2849 if (targetm_common.have_named_sections)
2851 set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
2852 DECL_COMMON (*node) = 0;
2854 else
2855 post_error ("?section attributes are not supported for this target",
2856 attr->error_point);
2857 break;
2859 case ATTR_LINK_CONSTRUCTOR:
2860 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2861 TREE_USED (*node) = 1;
2862 break;
2864 case ATTR_LINK_DESTRUCTOR:
2865 DECL_STATIC_DESTRUCTOR (*node) = 1;
2866 TREE_USED (*node) = 1;
2867 break;
2869 case ATTR_THREAD_LOCAL_STORAGE:
2870 set_decl_tls_model (*node, decl_default_tls_model (*node));
2871 DECL_COMMON (*node) = 0;
2872 break;
2875 *attr_list = NULL;
2878 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2879 a power of 2. */
2881 bool
2882 value_factor_p (tree value, HOST_WIDE_INT factor)
2884 if (tree_fits_uhwi_p (value))
2885 return tree_to_uhwi (value) % factor == 0;
2887 if (TREE_CODE (value) == MULT_EXPR)
2888 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2889 || value_factor_p (TREE_OPERAND (value, 1), factor));
2891 return false;
2894 /* Return whether GNAT_NODE is a defining identifier for a renaming that comes
2895 from the parameter association for the instantiation of a generic. We do
2896 not want to emit source location for them: the code generated for their
2897 initialization is likely to disturb debugging. */
2899 bool
2900 renaming_from_instantiation_p (Node_Id gnat_node)
2902 if (Nkind (gnat_node) != N_Defining_Identifier
2903 || !Is_Object (gnat_node)
2904 || Comes_From_Source (gnat_node)
2905 || !Present (Renamed_Object (gnat_node)))
2906 return false;
2908 /* Get the object declaration of the renamed object, if any and if the
2909 renamed object is a mere identifier. */
2910 gnat_node = Renamed_Object (gnat_node);
2911 if (Nkind (gnat_node) != N_Identifier)
2912 return false;
2914 gnat_node = Entity (gnat_node);
2915 if (!Present (Parent (gnat_node)))
2916 return false;
2918 gnat_node = Parent (gnat_node);
2919 return
2920 (Present (gnat_node)
2921 && Nkind (gnat_node) == N_Object_Declaration
2922 && Present (Corresponding_Generic_Association (gnat_node)));
2925 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
2926 feed it with the elaboration of GNAT_SCOPE. */
2928 static struct deferred_decl_context_node *
2929 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2931 struct deferred_decl_context_node *new_node;
2933 new_node
2934 = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
2935 new_node->decl = decl;
2936 new_node->gnat_scope = gnat_scope;
2937 new_node->force_global = force_global;
2938 new_node->types.create (1);
2939 new_node->next = deferred_decl_context_queue;
2940 deferred_decl_context_queue = new_node;
2941 return new_node;
2944 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
2945 feed it with the DECL_CONTEXT computed as part of N as soon as it is
2946 computed. */
2948 static void
2949 add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
2951 n->types.safe_push (type);
2954 /* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
2955 NULL_TREE if it is not available. */
2957 static tree
2958 compute_deferred_decl_context (Entity_Id gnat_scope)
2960 tree context;
2962 if (present_gnu_tree (gnat_scope))
2963 context = get_gnu_tree (gnat_scope);
2964 else
2965 return NULL_TREE;
2967 if (TREE_CODE (context) == TYPE_DECL)
2969 const tree context_type = TREE_TYPE (context);
2971 /* Skip dummy types: only the final ones can appear in the context
2972 chain. */
2973 if (TYPE_DUMMY_P (context_type))
2974 return NULL_TREE;
2976 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
2977 chain. */
2978 else
2979 context = context_type;
2982 return context;
2985 /* Try to process all deferred nodes in the queue. Keep in the queue the ones
2986 that cannot be processed yet, remove the other ones. If FORCE is true,
2987 force the processing for all nodes, use the global context when nodes don't
2988 have a GNU translation. */
2990 void
2991 process_deferred_decl_context (bool force)
2993 struct deferred_decl_context_node **it = &deferred_decl_context_queue;
2994 struct deferred_decl_context_node *node;
2996 while (*it)
2998 bool processed = false;
2999 tree context = NULL_TREE;
3000 Entity_Id gnat_scope;
3002 node = *it;
3004 /* If FORCE, get the innermost elaborated scope. Otherwise, just try to
3005 get the first scope. */
3006 gnat_scope = node->gnat_scope;
3007 while (Present (gnat_scope))
3009 context = compute_deferred_decl_context (gnat_scope);
3010 if (!force || context)
3011 break;
3012 gnat_scope = get_debug_scope (gnat_scope, NULL);
3015 /* Imported declarations must not be in a local context (i.e. not inside
3016 a function). */
3017 if (context && node->force_global > 0)
3019 tree ctx = context;
3021 while (ctx)
3023 gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
3024 ctx = DECL_P (ctx) ? DECL_CONTEXT (ctx) : TYPE_CONTEXT (ctx);
3028 /* If FORCE, we want to get rid of all nodes in the queue: in case there
3029 was no elaborated scope, use the global context. */
3030 if (force && !context)
3031 context = get_global_context ();
3033 if (context)
3035 tree t;
3036 int i;
3038 DECL_CONTEXT (node->decl) = context;
3040 /* Propagate it to the TYPE_CONTEXT attributes of the requested
3041 ..._TYPE nodes. */
3042 FOR_EACH_VEC_ELT (node->types, i, t)
3044 gnat_set_type_context (t, context);
3046 processed = true;
3049 /* If this node has been successfuly processed, remove it from the
3050 queue. Then move to the next node. */
3051 if (processed)
3053 *it = node->next;
3054 node->types.release ();
3055 free (node);
3057 else
3058 it = &node->next;
3062 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
3064 static unsigned int
3065 scale_by_factor_of (tree expr, unsigned int value)
3067 unsigned HOST_WIDE_INT addend = 0;
3068 unsigned HOST_WIDE_INT factor = 1;
3070 /* Peel conversions around EXPR and try to extract bodies from function
3071 calls: it is possible to get the scale factor from size functions. */
3072 expr = remove_conversions (expr, true);
3073 if (TREE_CODE (expr) == CALL_EXPR)
3074 expr = maybe_inline_call_in_expr (expr);
3076 /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
3077 multiple of the scale factor we are looking for. */
3078 if (TREE_CODE (expr) == PLUS_EXPR
3079 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3080 && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
3082 addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3083 expr = TREE_OPERAND (expr, 0);
3086 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
3087 corresponding to the number of trailing zeros of the mask. */
3088 if (TREE_CODE (expr) == BIT_AND_EXPR
3089 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
3091 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3092 unsigned int i = 0;
3094 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
3096 mask >>= 1;
3097 factor *= 2;
3098 i++;
3102 /* If the addend is not a multiple of the factor we found, give up. In
3103 theory we could find a smaller common factor but it's useless for our
3104 needs. This situation arises when dealing with a field F1 with no
3105 alignment requirement but that is following a field F2 with such
3106 requirements. As long as we have F2's offset, we don't need alignment
3107 information to compute F1's. */
3108 if (addend % factor != 0)
3109 factor = 1;
3111 return factor * value;
3114 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
3115 unless we can prove these 2 fields are laid out in such a way that no gap
3116 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
3117 is the distance in bits between the end of PREV_FIELD and the starting
3118 position of CURR_FIELD. It is ignored if null. */
3120 static bool
3121 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
3123 /* If this is the first field of the record, there cannot be any gap */
3124 if (!prev_field)
3125 return false;
3127 /* If the previous field is a union type, then return false: The only
3128 time when such a field is not the last field of the record is when
3129 there are other components at fixed positions after it (meaning there
3130 was a rep clause for every field), in which case we don't want the
3131 alignment constraint to override them. */
3132 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
3133 return false;
3135 /* If the distance between the end of prev_field and the beginning of
3136 curr_field is constant, then there is a gap if the value of this
3137 constant is not null. */
3138 if (offset && tree_fits_uhwi_p (offset))
3139 return !integer_zerop (offset);
3141 /* If the size and position of the previous field are constant,
3142 then check the sum of this size and position. There will be a gap
3143 iff it is not multiple of the current field alignment. */
3144 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
3145 && tree_fits_uhwi_p (bit_position (prev_field)))
3146 return ((tree_to_uhwi (bit_position (prev_field))
3147 + tree_to_uhwi (DECL_SIZE (prev_field)))
3148 % DECL_ALIGN (curr_field) != 0);
3150 /* If both the position and size of the previous field are multiples
3151 of the current field alignment, there cannot be any gap. */
3152 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
3153 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
3154 return false;
3156 /* Fallback, return that there may be a potential gap */
3157 return true;
3160 /* Return a LABEL_DECL with NAME. GNAT_NODE is used for the position of
3161 the decl. */
3163 tree
3164 create_label_decl (tree name, Node_Id gnat_node)
3166 tree label_decl
3167 = build_decl (input_location, LABEL_DECL, name, void_type_node);
3169 SET_DECL_MODE (label_decl, VOIDmode);
3171 /* Add this decl to the current binding level. */
3172 gnat_pushdecl (label_decl, gnat_node);
3174 return label_decl;
3177 /* Return a FUNCTION_DECL node. NAME is the name of the subprogram, ASM_NAME
3178 its assembler name, TYPE its type (a FUNCTION_TYPE node), PARAM_DECL_LIST
3179 the list of its parameters (a list of PARM_DECL nodes chained through the
3180 DECL_CHAIN field).
3182 INLINE_STATUS describes the inline flags to be set on the FUNCTION_DECL.
3184 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
3185 definition to be made visible outside of the current compilation unit.
3187 EXTERN_FLAG is true when processing an external subprogram declaration.
3189 ARTIFICIAL_P is true if the subprogram was generated by the compiler.
3191 DEBUG_INFO_P is true if we need to write debug information for it.
3193 DEFINITION is true if the subprogram is to be considered as a definition.
3195 ATTR_LIST is the list of attributes to be attached to the subprogram.
3197 GNAT_NODE is used for the position of the decl. */
3199 tree
3200 create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
3201 enum inline_status_t inline_status, bool public_flag,
3202 bool extern_flag, bool artificial_p, bool debug_info_p,
3203 bool definition, struct attrib *attr_list,
3204 Node_Id gnat_node)
3206 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
3207 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
3209 DECL_ARTIFICIAL (subprog_decl) = artificial_p;
3210 DECL_EXTERNAL (subprog_decl) = extern_flag;
3211 TREE_PUBLIC (subprog_decl) = public_flag;
3213 if (!debug_info_p)
3214 DECL_IGNORED_P (subprog_decl) = 1;
3215 if (definition)
3216 DECL_FUNCTION_IS_DEF (subprog_decl) = 1;
3218 switch (inline_status)
3220 case is_suppressed:
3221 DECL_UNINLINABLE (subprog_decl) = 1;
3222 break;
3224 case is_disabled:
3225 break;
3227 case is_required:
3228 if (Back_End_Inlining)
3230 decl_attributes (&subprog_decl,
3231 tree_cons (get_identifier ("always_inline"),
3232 NULL_TREE, NULL_TREE),
3233 ATTR_FLAG_TYPE_IN_PLACE);
3235 /* Inline_Always guarantees that every direct call is inlined and
3236 that there is no indirect reference to the subprogram, so the
3237 instance in the original package (as well as its clones in the
3238 client packages created for inter-unit inlining) can be made
3239 private, which causes the out-of-line body to be eliminated. */
3240 TREE_PUBLIC (subprog_decl) = 0;
3243 /* ... fall through ... */
3245 case is_enabled:
3246 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
3247 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
3248 break;
3250 default:
3251 gcc_unreachable ();
3254 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
3256 /* Once everything is processed, finish the subprogram declaration. */
3257 finish_subprog_decl (subprog_decl, asm_name, type);
3259 /* Add this decl to the current binding level. */
3260 gnat_pushdecl (subprog_decl, gnat_node);
3262 /* Output the assembler code and/or RTL for the declaration. */
3263 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
3265 return subprog_decl;
3268 /* Given a subprogram declaration DECL, its assembler name and its type,
3269 finish constructing the subprogram declaration from ASM_NAME and TYPE. */
3271 void
3272 finish_subprog_decl (tree decl, tree asm_name, tree type)
3274 tree result_decl
3275 = build_decl (DECL_SOURCE_LOCATION (decl), RESULT_DECL, NULL_TREE,
3276 TREE_TYPE (type));
3278 DECL_ARTIFICIAL (result_decl) = 1;
3279 DECL_IGNORED_P (result_decl) = 1;
3280 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
3281 DECL_RESULT (decl) = result_decl;
3283 TREE_READONLY (decl) = TYPE_READONLY (type);
3284 TREE_SIDE_EFFECTS (decl) = TREE_THIS_VOLATILE (decl) = TYPE_VOLATILE (type);
3286 if (asm_name)
3288 /* Let the target mangle the name if this isn't a verbatim asm. */
3289 if (*IDENTIFIER_POINTER (asm_name) != '*')
3290 asm_name = targetm.mangle_decl_assembler_name (decl, asm_name);
3292 SET_DECL_ASSEMBLER_NAME (decl, asm_name);
3294 /* The expand_main_function circuitry expects "main_identifier_node" to
3295 designate the DECL_NAME of the 'main' entry point, in turn expected
3296 to be declared as the "main" function literally by default. Ada
3297 program entry points are typically declared with a different name
3298 within the binder generated file, exported as 'main' to satisfy the
3299 system expectations. Force main_identifier_node in this case. */
3300 if (asm_name == main_identifier_node)
3301 DECL_NAME (decl) = main_identifier_node;
3305 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3306 body. This routine needs to be invoked before processing the declarations
3307 appearing in the subprogram. */
3309 void
3310 begin_subprog_body (tree subprog_decl)
3312 tree param_decl;
3314 announce_function (subprog_decl);
3316 /* This function is being defined. */
3317 TREE_STATIC (subprog_decl) = 1;
3319 /* The failure of this assertion will likely come from a wrong context for
3320 the subprogram body, e.g. another procedure for a procedure declared at
3321 library level. */
3322 gcc_assert (current_function_decl == decl_function_context (subprog_decl));
3324 current_function_decl = subprog_decl;
3326 /* Enter a new binding level and show that all the parameters belong to
3327 this function. */
3328 gnat_pushlevel ();
3330 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
3331 param_decl = DECL_CHAIN (param_decl))
3332 DECL_CONTEXT (param_decl) = subprog_decl;
3334 make_decl_rtl (subprog_decl);
3337 /* Finish translating the current subprogram and set its BODY. */
3339 void
3340 end_subprog_body (tree body)
3342 tree fndecl = current_function_decl;
3344 /* Attach the BLOCK for this level to the function and pop the level. */
3345 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
3346 DECL_INITIAL (fndecl) = current_binding_level->block;
3347 gnat_poplevel ();
3349 /* Mark the RESULT_DECL as being in this subprogram. */
3350 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
3352 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
3353 if (TREE_CODE (body) == BIND_EXPR)
3355 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
3356 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
3359 DECL_SAVED_TREE (fndecl) = body;
3361 current_function_decl = decl_function_context (fndecl);
3364 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
3366 void
3367 rest_of_subprog_body_compilation (tree subprog_decl)
3369 /* We cannot track the location of errors past this point. */
3370 error_gnat_node = Empty;
3372 /* If we're only annotating types, don't actually compile this function. */
3373 if (type_annotate_only)
3374 return;
3376 /* Dump functions before gimplification. */
3377 dump_function (TDI_original, subprog_decl);
3379 if (!decl_function_context (subprog_decl))
3380 cgraph_node::finalize_function (subprog_decl, false);
3381 else
3382 /* Register this function with cgraph just far enough to get it
3383 added to our parent's nested function list. */
3384 (void) cgraph_node::get_create (subprog_decl);
3387 tree
3388 gnat_builtin_function (tree decl)
3390 gnat_pushdecl (decl, Empty);
3391 return decl;
3394 /* Return an integer type with the number of bits of precision given by
3395 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3396 it is a signed type. */
3398 tree
3399 gnat_type_for_size (unsigned precision, int unsignedp)
3401 tree t;
3402 char type_name[20];
3404 if (precision <= 2 * MAX_BITS_PER_WORD
3405 && signed_and_unsigned_types[precision][unsignedp])
3406 return signed_and_unsigned_types[precision][unsignedp];
3408 if (unsignedp)
3409 t = make_unsigned_type (precision);
3410 else
3411 t = make_signed_type (precision);
3412 TYPE_ARTIFICIAL (t) = 1;
3414 if (precision <= 2 * MAX_BITS_PER_WORD)
3415 signed_and_unsigned_types[precision][unsignedp] = t;
3417 if (!TYPE_NAME (t))
3419 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
3420 TYPE_NAME (t) = get_identifier (type_name);
3423 return t;
3426 /* Likewise for floating-point types. */
3428 static tree
3429 float_type_for_precision (int precision, machine_mode mode)
3431 tree t;
3432 char type_name[20];
3434 if (float_types[(int) mode])
3435 return float_types[(int) mode];
3437 float_types[(int) mode] = t = make_node (REAL_TYPE);
3438 TYPE_PRECISION (t) = precision;
3439 layout_type (t);
3441 gcc_assert (TYPE_MODE (t) == mode);
3442 if (!TYPE_NAME (t))
3444 sprintf (type_name, "FLOAT_%d", precision);
3445 TYPE_NAME (t) = get_identifier (type_name);
3448 return t;
3451 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
3452 an unsigned type; otherwise a signed type is returned. */
3454 tree
3455 gnat_type_for_mode (machine_mode mode, int unsignedp)
3457 if (mode == BLKmode)
3458 return NULL_TREE;
3460 if (mode == VOIDmode)
3461 return void_type_node;
3463 if (COMPLEX_MODE_P (mode))
3464 return NULL_TREE;
3466 scalar_float_mode float_mode;
3467 if (is_a <scalar_float_mode> (mode, &float_mode))
3468 return float_type_for_precision (GET_MODE_PRECISION (float_mode),
3469 float_mode);
3471 scalar_int_mode int_mode;
3472 if (is_a <scalar_int_mode> (mode, &int_mode))
3473 return gnat_type_for_size (GET_MODE_BITSIZE (int_mode), unsignedp);
3475 if (VECTOR_MODE_P (mode))
3477 machine_mode inner_mode = GET_MODE_INNER (mode);
3478 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3479 if (inner_type)
3480 return build_vector_type_for_mode (inner_type, mode);
3483 return NULL_TREE;
3486 /* Return the signed or unsigned version of TYPE_NODE, a scalar type, the
3487 signedness being specified by UNSIGNEDP. */
3489 tree
3490 gnat_signed_or_unsigned_type_for (int unsignedp, tree type_node)
3492 if (type_node == char_type_node)
3493 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3495 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), unsignedp);
3497 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3499 type = copy_type (type);
3500 TREE_TYPE (type) = type_node;
3502 else if (TREE_TYPE (type_node)
3503 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3504 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3506 type = copy_type (type);
3507 TREE_TYPE (type) = TREE_TYPE (type_node);
3510 return type;
3513 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
3514 transparently converted to each other. */
3517 gnat_types_compatible_p (tree t1, tree t2)
3519 enum tree_code code;
3521 /* This is the default criterion. */
3522 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
3523 return 1;
3525 /* We only check structural equivalence here. */
3526 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
3527 return 0;
3529 /* Vector types are also compatible if they have the same number of subparts
3530 and the same form of (scalar) element type. */
3531 if (code == VECTOR_TYPE
3532 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
3533 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3534 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
3535 return 1;
3537 /* Array types are also compatible if they are constrained and have the same
3538 domain(s), the same component type and the same scalar storage order. */
3539 if (code == ARRAY_TYPE
3540 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
3541 || (TYPE_DOMAIN (t1)
3542 && TYPE_DOMAIN (t2)
3543 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
3544 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
3545 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
3546 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
3547 && (TREE_TYPE (t1) == TREE_TYPE (t2)
3548 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
3549 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
3550 && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
3551 return 1;
3553 return 0;
3556 /* Return true if EXPR is a useless type conversion. */
3558 bool
3559 gnat_useless_type_conversion (tree expr)
3561 if (CONVERT_EXPR_P (expr)
3562 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3563 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3564 return gnat_types_compatible_p (TREE_TYPE (expr),
3565 TREE_TYPE (TREE_OPERAND (expr, 0)));
3567 return false;
3570 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3572 bool
3573 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3574 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3576 return TYPE_CI_CO_LIST (t) == cico_list
3577 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3578 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3579 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3582 /* EXP is an expression for the size of an object. If this size contains
3583 discriminant references, replace them with the maximum (if MAX_P) or
3584 minimum (if !MAX_P) possible value of the discriminant. */
3586 tree
3587 max_size (tree exp, bool max_p)
3589 enum tree_code code = TREE_CODE (exp);
3590 tree type = TREE_TYPE (exp);
3591 tree op0, op1, op2;
3593 switch (TREE_CODE_CLASS (code))
3595 case tcc_declaration:
3596 case tcc_constant:
3597 return exp;
3599 case tcc_exceptional:
3600 gcc_assert (code == SSA_NAME);
3601 return exp;
3603 case tcc_vl_exp:
3604 if (code == CALL_EXPR)
3606 tree t, *argarray;
3607 int n, i;
3609 t = maybe_inline_call_in_expr (exp);
3610 if (t)
3611 return max_size (t, max_p);
3613 n = call_expr_nargs (exp);
3614 gcc_assert (n > 0);
3615 argarray = XALLOCAVEC (tree, n);
3616 for (i = 0; i < n; i++)
3617 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3618 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3620 break;
3622 case tcc_reference:
3623 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3624 modify. Otherwise, we treat it like a variable. */
3625 if (CONTAINS_PLACEHOLDER_P (exp))
3627 tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
3628 tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
3629 return
3630 convert (type,
3631 max_size (convert (get_base_type (val_type), val), true));
3634 return exp;
3636 case tcc_comparison:
3637 return build_int_cst (type, max_p ? 1 : 0);
3639 case tcc_unary:
3640 if (code == NON_LVALUE_EXPR)
3641 return max_size (TREE_OPERAND (exp, 0), max_p);
3643 op0 = max_size (TREE_OPERAND (exp, 0),
3644 code == NEGATE_EXPR ? !max_p : max_p);
3646 if (op0 == TREE_OPERAND (exp, 0))
3647 return exp;
3649 return fold_build1 (code, type, op0);
3651 case tcc_binary:
3653 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
3654 tree rhs = max_size (TREE_OPERAND (exp, 1),
3655 code == MINUS_EXPR ? !max_p : max_p);
3657 /* Special-case wanting the maximum value of a MIN_EXPR.
3658 In that case, if one side overflows, return the other. */
3659 if (max_p && code == MIN_EXPR)
3661 if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
3662 return lhs;
3664 if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
3665 return rhs;
3668 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3669 overflowing and the RHS a variable. */
3670 if ((code == MINUS_EXPR || code == PLUS_EXPR)
3671 && TREE_CODE (lhs) == INTEGER_CST
3672 && TREE_OVERFLOW (lhs)
3673 && TREE_CODE (rhs) != INTEGER_CST)
3674 return lhs;
3676 /* If we are going to subtract a "negative" value in an unsigned type,
3677 do the operation as an addition of the negated value, in order to
3678 avoid creating a spurious overflow below. */
3679 if (code == MINUS_EXPR
3680 && TYPE_UNSIGNED (type)
3681 && TREE_CODE (rhs) == INTEGER_CST
3682 && !TREE_OVERFLOW (rhs)
3683 && tree_int_cst_sign_bit (rhs) != 0)
3685 rhs = fold_build1 (NEGATE_EXPR, type, rhs);
3686 code = PLUS_EXPR;
3689 if (lhs == TREE_OPERAND (exp, 0) && rhs == TREE_OPERAND (exp, 1))
3690 return exp;
3692 /* We need to detect overflows so we call size_binop here. */
3693 return size_binop (code, lhs, rhs);
3696 case tcc_expression:
3697 switch (TREE_CODE_LENGTH (code))
3699 case 1:
3700 if (code == SAVE_EXPR)
3701 return exp;
3703 op0 = max_size (TREE_OPERAND (exp, 0),
3704 code == TRUTH_NOT_EXPR ? !max_p : max_p);
3706 if (op0 == TREE_OPERAND (exp, 0))
3707 return exp;
3709 return fold_build1 (code, type, op0);
3711 case 2:
3712 if (code == COMPOUND_EXPR)
3713 return max_size (TREE_OPERAND (exp, 1), max_p);
3715 op0 = max_size (TREE_OPERAND (exp, 0), max_p);
3716 op1 = max_size (TREE_OPERAND (exp, 1), max_p);
3718 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3719 return exp;
3721 return fold_build2 (code, type, op0, op1);
3723 case 3:
3724 if (code == COND_EXPR)
3726 op1 = TREE_OPERAND (exp, 1);
3727 op2 = TREE_OPERAND (exp, 2);
3729 if (!op1 || !op2)
3730 return exp;
3732 return
3733 fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
3734 max_size (op1, max_p), max_size (op2, max_p));
3736 break;
3738 default:
3739 break;
3742 /* Other tree classes cannot happen. */
3743 default:
3744 break;
3747 gcc_unreachable ();
3750 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3751 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3752 Return a constructor for the template. */
3754 tree
3755 build_template (tree template_type, tree array_type, tree expr)
3757 vec<constructor_elt, va_gc> *template_elts = NULL;
3758 tree bound_list = NULL_TREE;
3759 tree field;
3761 while (TREE_CODE (array_type) == RECORD_TYPE
3762 && (TYPE_PADDING_P (array_type)
3763 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3764 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3766 if (TREE_CODE (array_type) == ARRAY_TYPE
3767 || (TREE_CODE (array_type) == INTEGER_TYPE
3768 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3769 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3771 /* First make the list for a CONSTRUCTOR for the template. Go down the
3772 field list of the template instead of the type chain because this
3773 array might be an Ada array of arrays and we can't tell where the
3774 nested arrays stop being the underlying object. */
3776 for (field = TYPE_FIELDS (template_type); field;
3777 (bound_list
3778 ? (bound_list = TREE_CHAIN (bound_list))
3779 : (array_type = TREE_TYPE (array_type))),
3780 field = DECL_CHAIN (DECL_CHAIN (field)))
3782 tree bounds, min, max;
3784 /* If we have a bound list, get the bounds from there. Likewise
3785 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3786 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3787 This will give us a maximum range. */
3788 if (bound_list)
3789 bounds = TREE_VALUE (bound_list);
3790 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3791 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3792 else if (expr && TREE_CODE (expr) == PARM_DECL
3793 && DECL_BY_COMPONENT_PTR_P (expr))
3794 bounds = TREE_TYPE (field);
3795 else
3796 gcc_unreachable ();
3798 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
3799 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
3801 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3802 substitute it from OBJECT. */
3803 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3804 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3806 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
3807 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
3810 return gnat_build_constructor (template_type, template_elts);
3813 /* Return true if TYPE is suitable for the element type of a vector. */
3815 static bool
3816 type_for_vector_element_p (tree type)
3818 machine_mode mode;
3820 if (!INTEGRAL_TYPE_P (type)
3821 && !SCALAR_FLOAT_TYPE_P (type)
3822 && !FIXED_POINT_TYPE_P (type))
3823 return false;
3825 mode = TYPE_MODE (type);
3826 if (GET_MODE_CLASS (mode) != MODE_INT
3827 && !SCALAR_FLOAT_MODE_P (mode)
3828 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3829 return false;
3831 return true;
3834 /* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3835 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3836 attribute declaration and want to issue error messages on failure. */
3838 static tree
3839 build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3841 unsigned HOST_WIDE_INT size_int, inner_size_int;
3842 int nunits;
3844 /* Silently punt on variable sizes. We can't make vector types for them,
3845 need to ignore them on front-end generated subtypes of unconstrained
3846 base types, and this attribute is for binding implementors, not end
3847 users, so we should never get there from legitimate explicit uses. */
3848 if (!tree_fits_uhwi_p (size))
3849 return NULL_TREE;
3850 size_int = tree_to_uhwi (size);
3852 if (!type_for_vector_element_p (inner_type))
3854 if (attribute)
3855 error ("invalid element type for attribute %qs",
3856 IDENTIFIER_POINTER (attribute));
3857 return NULL_TREE;
3859 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3861 if (size_int % inner_size_int)
3863 if (attribute)
3864 error ("vector size not an integral multiple of component size");
3865 return NULL_TREE;
3868 if (size_int == 0)
3870 if (attribute)
3871 error ("zero vector size");
3872 return NULL_TREE;
3875 nunits = size_int / inner_size_int;
3876 if (nunits & (nunits - 1))
3878 if (attribute)
3879 error ("number of components of vector not a power of two");
3880 return NULL_TREE;
3883 return build_vector_type (inner_type, nunits);
3886 /* Return a vector type whose representative array type is ARRAY_TYPE, or
3887 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3888 processing the attribute and want to issue error messages on failure. */
3890 static tree
3891 build_vector_type_for_array (tree array_type, tree attribute)
3893 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3894 TYPE_SIZE_UNIT (array_type),
3895 attribute);
3896 if (!vector_type)
3897 return NULL_TREE;
3899 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3900 return vector_type;
3903 /* Build a type to be used to represent an aliased object whose nominal type
3904 is an unconstrained array. This consists of a RECORD_TYPE containing a
3905 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3906 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3907 an arbitrary unconstrained object. Use NAME as the name of the record.
3908 DEBUG_INFO_P is true if we need to write debug information for the type. */
3910 tree
3911 build_unc_object_type (tree template_type, tree object_type, tree name,
3912 bool debug_info_p)
3914 tree decl;
3915 tree type = make_node (RECORD_TYPE);
3916 tree template_field
3917 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3918 NULL_TREE, NULL_TREE, 0, 1);
3919 tree array_field
3920 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3921 NULL_TREE, NULL_TREE, 0, 1);
3923 TYPE_NAME (type) = name;
3924 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3925 DECL_CHAIN (template_field) = array_field;
3926 finish_record_type (type, template_field, 0, true);
3928 /* Declare it now since it will never be declared otherwise. This is
3929 necessary to ensure that its subtrees are properly marked. */
3930 decl = create_type_decl (name, type, true, debug_info_p, Empty);
3932 /* template_type will not be used elsewhere than here, so to keep the debug
3933 info clean and in order to avoid scoping issues, make decl its
3934 context. */
3935 gnat_set_type_context (template_type, decl);
3937 return type;
3940 /* Same, taking a thin or fat pointer type instead of a template type. */
3942 tree
3943 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3944 tree name, bool debug_info_p)
3946 tree template_type;
3948 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3950 template_type
3951 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3952 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3953 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3955 return
3956 build_unc_object_type (template_type, object_type, name, debug_info_p);
3959 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3960 In the normal case this is just two adjustments, but we have more to
3961 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3963 void
3964 update_pointer_to (tree old_type, tree new_type)
3966 tree ptr = TYPE_POINTER_TO (old_type);
3967 tree ref = TYPE_REFERENCE_TO (old_type);
3968 tree t;
3970 /* If this is the main variant, process all the other variants first. */
3971 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3972 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
3973 update_pointer_to (t, new_type);
3975 /* If no pointers and no references, we are done. */
3976 if (!ptr && !ref)
3977 return;
3979 /* Merge the old type qualifiers in the new type.
3981 Each old variant has qualifiers for specific reasons, and the new
3982 designated type as well. Each set of qualifiers represents useful
3983 information grabbed at some point, and merging the two simply unifies
3984 these inputs into the final type description.
3986 Consider for instance a volatile type frozen after an access to constant
3987 type designating it; after the designated type's freeze, we get here with
3988 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3989 when the access type was processed. We will make a volatile and readonly
3990 designated type, because that's what it really is.
3992 We might also get here for a non-dummy OLD_TYPE variant with different
3993 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3994 to private record type elaboration (see the comments around the call to
3995 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3996 the qualifiers in those cases too, to avoid accidentally discarding the
3997 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3998 new_type
3999 = build_qualified_type (new_type,
4000 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
4002 /* If old type and new type are identical, there is nothing to do. */
4003 if (old_type == new_type)
4004 return;
4006 /* Otherwise, first handle the simple case. */
4007 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
4009 tree new_ptr, new_ref;
4011 /* If pointer or reference already points to new type, nothing to do.
4012 This can happen as update_pointer_to can be invoked multiple times
4013 on the same couple of types because of the type variants. */
4014 if ((ptr && TREE_TYPE (ptr) == new_type)
4015 || (ref && TREE_TYPE (ref) == new_type))
4016 return;
4018 /* Chain PTR and its variants at the end. */
4019 new_ptr = TYPE_POINTER_TO (new_type);
4020 if (new_ptr)
4022 while (TYPE_NEXT_PTR_TO (new_ptr))
4023 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
4024 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
4026 else
4027 TYPE_POINTER_TO (new_type) = ptr;
4029 /* Now adjust them. */
4030 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
4031 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
4033 TREE_TYPE (t) = new_type;
4034 if (TYPE_NULL_BOUNDS (t))
4035 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
4038 /* Chain REF and its variants at the end. */
4039 new_ref = TYPE_REFERENCE_TO (new_type);
4040 if (new_ref)
4042 while (TYPE_NEXT_REF_TO (new_ref))
4043 new_ref = TYPE_NEXT_REF_TO (new_ref);
4044 TYPE_NEXT_REF_TO (new_ref) = ref;
4046 else
4047 TYPE_REFERENCE_TO (new_type) = ref;
4049 /* Now adjust them. */
4050 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
4051 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
4052 TREE_TYPE (t) = new_type;
4054 TYPE_POINTER_TO (old_type) = NULL_TREE;
4055 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
4058 /* Now deal with the unconstrained array case. In this case the pointer
4059 is actually a record where both fields are pointers to dummy nodes.
4060 Turn them into pointers to the correct types using update_pointer_to.
4061 Likewise for the pointer to the object record (thin pointer). */
4062 else
4064 tree new_ptr = TYPE_POINTER_TO (new_type);
4066 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
4068 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
4069 since update_pointer_to can be invoked multiple times on the same
4070 couple of types because of the type variants. */
4071 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
4072 return;
4074 update_pointer_to
4075 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
4076 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
4078 update_pointer_to
4079 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
4080 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
4082 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
4083 TYPE_OBJECT_RECORD_TYPE (new_type));
4085 TYPE_POINTER_TO (old_type) = NULL_TREE;
4086 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
4090 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
4091 unconstrained one. This involves making or finding a template. */
4093 static tree
4094 convert_to_fat_pointer (tree type, tree expr)
4096 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
4097 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
4098 tree etype = TREE_TYPE (expr);
4099 tree template_addr;
4100 vec<constructor_elt, va_gc> *v;
4101 vec_alloc (v, 2);
4103 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4104 array (compare_fat_pointers ensures that this is the full discriminant)
4105 and a valid pointer to the bounds. This latter property is necessary
4106 since the compiler can hoist the load of the bounds done through it. */
4107 if (integer_zerop (expr))
4109 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4110 tree null_bounds, t;
4112 if (TYPE_NULL_BOUNDS (ptr_template_type))
4113 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
4114 else
4116 /* The template type can still be dummy at this point so we build an
4117 empty constructor. The middle-end will fill it in with zeros. */
4118 t = build_constructor (template_type, NULL);
4119 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
4120 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
4121 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
4124 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4125 fold_convert (p_array_type, null_pointer_node));
4126 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
4127 t = build_constructor (type, v);
4128 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4129 TREE_CONSTANT (t) = 0;
4130 TREE_STATIC (t) = 1;
4132 return t;
4135 /* If EXPR is a thin pointer, make template and data from the record. */
4136 if (TYPE_IS_THIN_POINTER_P (etype))
4138 tree field = TYPE_FIELDS (TREE_TYPE (etype));
4140 expr = gnat_protect_expr (expr);
4142 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4143 the thin pointer value has been shifted so we shift it back to get
4144 the template address. */
4145 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
4147 template_addr
4148 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
4149 fold_build1 (NEGATE_EXPR, sizetype,
4150 byte_position
4151 (DECL_CHAIN (field))));
4152 template_addr
4153 = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
4154 template_addr);
4157 /* Otherwise we explicitly take the address of the fields. */
4158 else
4160 expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
4161 template_addr
4162 = build_unary_op (ADDR_EXPR, NULL_TREE,
4163 build_component_ref (expr, field, false));
4164 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
4165 build_component_ref (expr, DECL_CHAIN (field),
4166 false));
4170 /* Otherwise, build the constructor for the template. */
4171 else
4172 template_addr
4173 = build_unary_op (ADDR_EXPR, NULL_TREE,
4174 build_template (template_type, TREE_TYPE (etype),
4175 expr));
4177 /* The final result is a constructor for the fat pointer.
4179 If EXPR is an argument of a foreign convention subprogram, the type it
4180 points to is directly the component type. In this case, the expression
4181 type may not match the corresponding FIELD_DECL type at this point, so we
4182 call "convert" here to fix that up if necessary. This type consistency is
4183 required, for instance because it ensures that possible later folding of
4184 COMPONENT_REFs against this constructor always yields something of the
4185 same type as the initial reference.
4187 Note that the call to "build_template" above is still fine because it
4188 will only refer to the provided TEMPLATE_TYPE in this case. */
4189 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
4190 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
4191 return gnat_build_constructor (type, v);
4194 /* Create an expression whose value is that of EXPR,
4195 converted to type TYPE. The TREE_TYPE of the value
4196 is always TYPE. This function implements all reasonable
4197 conversions; callers should filter out those that are
4198 not permitted by the language being compiled. */
4200 tree
4201 convert (tree type, tree expr)
4203 tree etype = TREE_TYPE (expr);
4204 enum tree_code ecode = TREE_CODE (etype);
4205 enum tree_code code = TREE_CODE (type);
4207 /* If the expression is already of the right type, we are done. */
4208 if (etype == type)
4209 return expr;
4211 /* If both input and output have padding and are of variable size, do this
4212 as an unchecked conversion. Likewise if one is a mere variant of the
4213 other, so we avoid a pointless unpad/repad sequence. */
4214 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
4215 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4216 && (!TREE_CONSTANT (TYPE_SIZE (type))
4217 || !TREE_CONSTANT (TYPE_SIZE (etype))
4218 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4219 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4220 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4223 /* If the output type has padding, convert to the inner type and make a
4224 constructor to build the record, unless a variable size is involved. */
4225 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
4227 /* If we previously converted from another type and our type is
4228 of variable size, remove the conversion to avoid the need for
4229 variable-sized temporaries. Likewise for a conversion between
4230 original and packable version. */
4231 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4232 && (!TREE_CONSTANT (TYPE_SIZE (type))
4233 || (ecode == RECORD_TYPE
4234 && TYPE_NAME (etype)
4235 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4236 expr = TREE_OPERAND (expr, 0);
4238 /* If we are just removing the padding from expr, convert the original
4239 object if we have variable size in order to avoid the need for some
4240 variable-sized temporaries. Likewise if the padding is a variant
4241 of the other, so we avoid a pointless unpad/repad sequence. */
4242 if (TREE_CODE (expr) == COMPONENT_REF
4243 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4244 && (!TREE_CONSTANT (TYPE_SIZE (type))
4245 || TYPE_MAIN_VARIANT (type)
4246 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
4247 || (ecode == RECORD_TYPE
4248 && TYPE_NAME (etype)
4249 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4250 return convert (type, TREE_OPERAND (expr, 0));
4252 /* If the inner type is of self-referential size and the expression type
4253 is a record, do this as an unchecked conversion unless both types are
4254 essentially the same. But first pad the expression if possible to
4255 have the same size on both sides. */
4256 if (ecode == RECORD_TYPE
4257 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type)))
4258 && TYPE_MAIN_VARIANT (etype)
4259 != TYPE_MAIN_VARIANT (TREE_TYPE (TYPE_FIELDS (type))))
4261 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
4262 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4263 false, false, false, true),
4264 expr);
4265 return unchecked_convert (type, expr, false);
4268 /* If we are converting between array types with variable size, do the
4269 final conversion as an unchecked conversion, again to avoid the need
4270 for some variable-sized temporaries. If valid, this conversion is
4271 very likely purely technical and without real effects. */
4272 if (ecode == ARRAY_TYPE
4273 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4274 && !TREE_CONSTANT (TYPE_SIZE (etype))
4275 && !TREE_CONSTANT (TYPE_SIZE (type)))
4276 return unchecked_convert (type,
4277 convert (TREE_TYPE (TYPE_FIELDS (type)),
4278 expr),
4279 false);
4281 tree t = convert (TREE_TYPE (TYPE_FIELDS (type)), expr);
4283 /* If converting to the inner type has already created a CONSTRUCTOR with
4284 the right size, then reuse it instead of creating another one. This
4285 can happen for the padding type built to overalign local variables. */
4286 if (TREE_CODE (t) == VIEW_CONVERT_EXPR
4287 && TREE_CODE (TREE_OPERAND (t, 0)) == CONSTRUCTOR
4288 && TREE_CONSTANT (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0))))
4289 && tree_int_cst_equal (TYPE_SIZE (type),
4290 TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0)))))
4291 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (t, 0));
4293 vec<constructor_elt, va_gc> *v;
4294 vec_alloc (v, 1);
4295 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), t);
4296 return gnat_build_constructor (type, v);
4299 /* If the input type has padding, remove it and convert to the output type.
4300 The conditions ordering is arranged to ensure that the output type is not
4301 a padding type here, as it is not clear whether the conversion would
4302 always be correct if this was to happen. */
4303 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4305 tree unpadded;
4307 /* If we have just converted to this padded type, just get the
4308 inner expression. */
4309 if (TREE_CODE (expr) == CONSTRUCTOR)
4310 unpadded = CONSTRUCTOR_ELT (expr, 0)->value;
4312 /* Otherwise, build an explicit component reference. */
4313 else
4314 unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);
4316 return convert (type, unpadded);
4319 /* If the input is a biased type, convert first to the base type and add
4320 the bias. Note that the bias must go through a full conversion to the
4321 base type, lest it is itself a biased value; this happens for subtypes
4322 of biased types. */
4323 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4324 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4325 fold_convert (TREE_TYPE (etype), expr),
4326 convert (TREE_TYPE (etype),
4327 TYPE_MIN_VALUE (etype))));
4329 /* If the input is a justified modular type, we need to extract the actual
4330 object before converting it to any other type with the exceptions of an
4331 unconstrained array or of a mere type variant. It is useful to avoid the
4332 extraction and conversion in the type variant case because it could end
4333 up replacing a VAR_DECL expr by a constructor and we might be about the
4334 take the address of the result. */
4335 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4336 && code != UNCONSTRAINED_ARRAY_TYPE
4337 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4338 return
4339 convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));
4341 /* If converting to a type that contains a template, convert to the data
4342 type and then build the template. */
4343 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4345 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4346 vec<constructor_elt, va_gc> *v;
4347 vec_alloc (v, 2);
4349 /* If the source already has a template, get a reference to the
4350 associated array only, as we are going to rebuild a template
4351 for the target type anyway. */
4352 expr = maybe_unconstrained_array (expr);
4354 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4355 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4356 obj_type, NULL_TREE));
4357 if (expr)
4358 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4359 convert (obj_type, expr));
4360 return gnat_build_constructor (type, v);
4363 /* There are some cases of expressions that we process specially. */
4364 switch (TREE_CODE (expr))
4366 case ERROR_MARK:
4367 return expr;
4369 case NULL_EXPR:
4370 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4371 conversion in gnat_expand_expr. NULL_EXPR does not represent
4372 and actual value, so no conversion is needed. */
4373 expr = copy_node (expr);
4374 TREE_TYPE (expr) = type;
4375 return expr;
4377 case STRING_CST:
4378 /* If we are converting a STRING_CST to another constrained array type,
4379 just make a new one in the proper type. */
4380 if (code == ecode && AGGREGATE_TYPE_P (etype)
4381 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4382 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4384 expr = copy_node (expr);
4385 TREE_TYPE (expr) = type;
4386 return expr;
4388 break;
4390 case VECTOR_CST:
4391 /* If we are converting a VECTOR_CST to a mere type variant, just make
4392 a new one in the proper type. */
4393 if (code == ecode && gnat_types_compatible_p (type, etype))
4395 expr = copy_node (expr);
4396 TREE_TYPE (expr) = type;
4397 return expr;
4399 break;
4401 case CONSTRUCTOR:
4402 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4403 another padding type around the same type, just make a new one in
4404 the proper type. */
4405 if (code == ecode
4406 && (gnat_types_compatible_p (type, etype)
4407 || (code == RECORD_TYPE
4408 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4409 && TREE_TYPE (TYPE_FIELDS (type))
4410 == TREE_TYPE (TYPE_FIELDS (etype)))))
4412 expr = copy_node (expr);
4413 TREE_TYPE (expr) = type;
4414 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4415 return expr;
4418 /* Likewise for a conversion between original and packable version, or
4419 conversion between types of the same size and with the same list of
4420 fields, but we have to work harder to preserve type consistency. */
4421 if (code == ecode
4422 && code == RECORD_TYPE
4423 && (TYPE_NAME (type) == TYPE_NAME (etype)
4424 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4427 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4428 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4429 vec<constructor_elt, va_gc> *v;
4430 vec_alloc (v, len);
4431 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4432 unsigned HOST_WIDE_INT idx;
4433 tree index, value;
4435 /* Whether we need to clear TREE_CONSTANT et al. on the output
4436 constructor when we convert in place. */
4437 bool clear_constant = false;
4439 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4441 /* Skip the missing fields in the CONSTRUCTOR. */
4442 while (efield && field && !SAME_FIELD_P (efield, index))
4444 efield = DECL_CHAIN (efield);
4445 field = DECL_CHAIN (field);
4447 /* The field must be the same. */
4448 if (!(efield && field && SAME_FIELD_P (efield, field)))
4449 break;
4450 constructor_elt elt
4451 = {field, convert (TREE_TYPE (field), value)};
4452 v->quick_push (elt);
4454 /* If packing has made this field a bitfield and the input
4455 value couldn't be emitted statically any more, we need to
4456 clear TREE_CONSTANT on our output. */
4457 if (!clear_constant
4458 && TREE_CONSTANT (expr)
4459 && !CONSTRUCTOR_BITFIELD_P (efield)
4460 && CONSTRUCTOR_BITFIELD_P (field)
4461 && !initializer_constant_valid_for_bitfield_p (value))
4462 clear_constant = true;
4464 efield = DECL_CHAIN (efield);
4465 field = DECL_CHAIN (field);
4468 /* If we have been able to match and convert all the input fields
4469 to their output type, convert in place now. We'll fallback to a
4470 view conversion downstream otherwise. */
4471 if (idx == len)
4473 expr = copy_node (expr);
4474 TREE_TYPE (expr) = type;
4475 CONSTRUCTOR_ELTS (expr) = v;
4476 if (clear_constant)
4477 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4478 return expr;
4482 /* Likewise for a conversion between array type and vector type with a
4483 compatible representative array. */
4484 else if (code == VECTOR_TYPE
4485 && ecode == ARRAY_TYPE
4486 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4487 etype))
4489 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4490 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4491 vec<constructor_elt, va_gc> *v;
4492 unsigned HOST_WIDE_INT ix;
4493 tree value;
4495 /* Build a VECTOR_CST from a *constant* array constructor. */
4496 if (TREE_CONSTANT (expr))
4498 bool constant_p = true;
4500 /* Iterate through elements and check if all constructor
4501 elements are *_CSTs. */
4502 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4503 if (!CONSTANT_CLASS_P (value))
4505 constant_p = false;
4506 break;
4509 if (constant_p)
4510 return build_vector_from_ctor (type,
4511 CONSTRUCTOR_ELTS (expr));
4514 /* Otherwise, build a regular vector constructor. */
4515 vec_alloc (v, len);
4516 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4518 constructor_elt elt = {NULL_TREE, value};
4519 v->quick_push (elt);
4521 expr = copy_node (expr);
4522 TREE_TYPE (expr) = type;
4523 CONSTRUCTOR_ELTS (expr) = v;
4524 return expr;
4526 break;
4528 case UNCONSTRAINED_ARRAY_REF:
4529 /* First retrieve the underlying array. */
4530 expr = maybe_unconstrained_array (expr);
4531 etype = TREE_TYPE (expr);
4532 ecode = TREE_CODE (etype);
4533 break;
4535 case VIEW_CONVERT_EXPR:
4537 /* GCC 4.x is very sensitive to type consistency overall, and view
4538 conversions thus are very frequent. Even though just "convert"ing
4539 the inner operand to the output type is fine in most cases, it
4540 might expose unexpected input/output type mismatches in special
4541 circumstances so we avoid such recursive calls when we can. */
4542 tree op0 = TREE_OPERAND (expr, 0);
4544 /* If we are converting back to the original type, we can just
4545 lift the input conversion. This is a common occurrence with
4546 switches back-and-forth amongst type variants. */
4547 if (type == TREE_TYPE (op0))
4548 return op0;
4550 /* Otherwise, if we're converting between two aggregate or vector
4551 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4552 target type in place or to just convert the inner expression. */
4553 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4554 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4556 /* If we are converting between mere variants, we can just
4557 substitute the VIEW_CONVERT_EXPR in place. */
4558 if (gnat_types_compatible_p (type, etype))
4559 return build1 (VIEW_CONVERT_EXPR, type, op0);
4561 /* Otherwise, we may just bypass the input view conversion unless
4562 one of the types is a fat pointer, which is handled by
4563 specialized code below which relies on exact type matching. */
4564 else if (!TYPE_IS_FAT_POINTER_P (type)
4565 && !TYPE_IS_FAT_POINTER_P (etype))
4566 return convert (type, op0);
4569 break;
4572 default:
4573 break;
4576 /* Check for converting to a pointer to an unconstrained array. */
4577 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4578 return convert_to_fat_pointer (type, expr);
4580 /* If we are converting between two aggregate or vector types that are mere
4581 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4582 to a vector type from its representative array type. */
4583 else if ((code == ecode
4584 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4585 && gnat_types_compatible_p (type, etype))
4586 || (code == VECTOR_TYPE
4587 && ecode == ARRAY_TYPE
4588 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4589 etype)))
4590 return build1 (VIEW_CONVERT_EXPR, type, expr);
4592 /* If we are converting between tagged types, try to upcast properly. */
4593 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4594 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4596 tree child_etype = etype;
4597 do {
4598 tree field = TYPE_FIELDS (child_etype);
4599 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4600 return build_component_ref (expr, field, false);
4601 child_etype = TREE_TYPE (field);
4602 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4605 /* If we are converting from a smaller form of record type back to it, just
4606 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4607 size on both sides. */
4608 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4609 && smaller_form_type_p (etype, type))
4611 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4612 false, false, false, true),
4613 expr);
4614 return build1 (VIEW_CONVERT_EXPR, type, expr);
4617 /* In all other cases of related types, make a NOP_EXPR. */
4618 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4619 return fold_convert (type, expr);
4621 switch (code)
4623 case VOID_TYPE:
4624 return fold_build1 (CONVERT_EXPR, type, expr);
4626 case INTEGER_TYPE:
4627 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4628 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4629 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4630 return unchecked_convert (type, expr, false);
4632 /* If the output is a biased type, convert first to the base type and
4633 subtract the bias. Note that the bias itself must go through a full
4634 conversion to the base type, lest it is a biased value; this happens
4635 for subtypes of biased types. */
4636 if (TYPE_BIASED_REPRESENTATION_P (type))
4637 return fold_convert (type,
4638 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4639 convert (TREE_TYPE (type), expr),
4640 convert (TREE_TYPE (type),
4641 TYPE_MIN_VALUE (type))));
4643 /* ... fall through ... */
4645 case ENUMERAL_TYPE:
4646 case BOOLEAN_TYPE:
4647 /* If we are converting an additive expression to an integer type
4648 with lower precision, be wary of the optimization that can be
4649 applied by convert_to_integer. There are 2 problematic cases:
4650 - if the first operand was originally of a biased type,
4651 because we could be recursively called to convert it
4652 to an intermediate type and thus rematerialize the
4653 additive operator endlessly,
4654 - if the expression contains a placeholder, because an
4655 intermediate conversion that changes the sign could
4656 be inserted and thus introduce an artificial overflow
4657 at compile time when the placeholder is substituted. */
4658 if (code == INTEGER_TYPE
4659 && ecode == INTEGER_TYPE
4660 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4661 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4663 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4665 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4666 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4667 || CONTAINS_PLACEHOLDER_P (expr))
4668 return build1 (NOP_EXPR, type, expr);
4671 return fold (convert_to_integer (type, expr));
4673 case POINTER_TYPE:
4674 case REFERENCE_TYPE:
4675 /* If converting between two thin pointers, adjust if needed to account
4676 for differing offsets from the base pointer, depending on whether
4677 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4678 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4680 tree etype_pos
4681 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype))
4682 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4683 : size_zero_node;
4684 tree type_pos
4685 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))
4686 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4687 : size_zero_node;
4688 tree byte_diff = size_diffop (type_pos, etype_pos);
4690 expr = build1 (NOP_EXPR, type, expr);
4691 if (integer_zerop (byte_diff))
4692 return expr;
4694 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4695 fold_convert (sizetype, byte_diff));
4698 /* If converting fat pointer to normal or thin pointer, get the pointer
4699 to the array and then convert it. */
4700 if (TYPE_IS_FAT_POINTER_P (etype))
4701 expr = build_component_ref (expr, TYPE_FIELDS (etype), false);
4703 return fold (convert_to_pointer (type, expr));
4705 case REAL_TYPE:
4706 return fold (convert_to_real (type, expr));
4708 case RECORD_TYPE:
4709 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4711 vec<constructor_elt, va_gc> *v;
4712 vec_alloc (v, 1);
4714 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4715 convert (TREE_TYPE (TYPE_FIELDS (type)),
4716 expr));
4717 return gnat_build_constructor (type, v);
4720 /* ... fall through ... */
4722 case ARRAY_TYPE:
4723 /* In these cases, assume the front-end has validated the conversion.
4724 If the conversion is valid, it will be a bit-wise conversion, so
4725 it can be viewed as an unchecked conversion. */
4726 return unchecked_convert (type, expr, false);
4728 case UNION_TYPE:
      /* This is either a conversion between a tagged type and some
	 subtype, which we have to mark as a UNION_TYPE because of
	 overlapping fields, or a conversion of an Unchecked_Union.  */
4732 return unchecked_convert (type, expr, false);
4734 case UNCONSTRAINED_ARRAY_TYPE:
4735 /* If the input is a VECTOR_TYPE, convert to the representative
4736 array type first. */
4737 if (ecode == VECTOR_TYPE)
4739 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4740 etype = TREE_TYPE (expr);
4741 ecode = TREE_CODE (etype);
4744 /* If EXPR is a constrained array, take its address, convert it to a
4745 fat pointer, and then dereference it. Likewise if EXPR is a
4746 record containing both a template and a constrained array.
4747 Note that a record representing a justified modular type
4748 always represents a packed constrained array. */
4749 if (ecode == ARRAY_TYPE
4750 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4751 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4752 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4753 return
4754 build_unary_op
4755 (INDIRECT_REF, NULL_TREE,
4756 convert_to_fat_pointer (TREE_TYPE (type),
4757 build_unary_op (ADDR_EXPR,
4758 NULL_TREE, expr)));
4760 /* Do something very similar for converting one unconstrained
4761 array to another. */
4762 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4763 return
4764 build_unary_op (INDIRECT_REF, NULL_TREE,
4765 convert (TREE_TYPE (type),
4766 build_unary_op (ADDR_EXPR,
4767 NULL_TREE, expr)));
4768 else
4769 gcc_unreachable ();
4771 case COMPLEX_TYPE:
4772 return fold (convert_to_complex (type, expr));
4774 default:
4775 gcc_unreachable ();
4779 /* Create an expression whose value is that of EXPR converted to the common
4780 index type, which is sizetype. EXPR is supposed to be in the base type
4781 of the GNAT index type. Calling it is equivalent to doing
4783 convert (sizetype, expr)
4785 but we try to distribute the type conversion with the knowledge that EXPR
4786 cannot overflow in its type. This is a best-effort approach and we fall
4787 back to the above expression as soon as difficulties are encountered.
4789 This is necessary to overcome issues that arise when the GNAT base index
4790 type and the GCC common index type (sizetype) don't have the same size,
4791 which is quite frequent on 64-bit architectures. In this case, and if
4792 the GNAT base index type is signed but the iteration type of the loop has
4793 been forced to unsigned, the loop scalar evolution engine cannot compute
4794 a simple evolution for the general induction variables associated with the
4795 array indices, because it will preserve the wrap-around semantics in the
4796 unsigned type of their "inner" part. As a result, many loop optimizations
4797 are blocked.
4799 The solution is to use a special (basic) induction variable that is at
4800 least as large as sizetype, and to express the aforementioned general
4801 induction variables in terms of this induction variable, eliminating
4802 the problematic intermediate truncation to the GNAT base index type.
4803 This is possible as long as the original expression doesn't overflow
4804 and if the middle-end hasn't introduced artificial overflows in the
4805 course of the various simplification it can make to the expression. */
tree
convert_to_index_type (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  tree type = TREE_TYPE (expr);

  /* If the type is unsigned, overflow is allowed so we cannot be sure that
     EXPR doesn't overflow.  Keep it simple if optimization is disabled.  */
  if (TYPE_UNSIGNED (type) || !optimize)
    return convert (sizetype, expr);

  switch (code)
    {
    case VAR_DECL:
      /* The main effect of the function: replace a loop parameter with its
	 associated special induction variable.  */
      if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
	expr = DECL_INDUCTION_VAR (expr);
      break;

    CASE_CONVERT:
      {
	tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
	/* Bail out as soon as we suspect some sort of type frobbing.
	   Note that this break leaves the switch, so we end up doing
	   the plain conversion at the bottom of the function.  */
	if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
	    || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
	  break;
      }

      /* ... fall through ... */

    case NON_LVALUE_EXPR:
      /* Push the conversion down into the single operand.  */
      return fold_build1 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Distribute the conversion over the arithmetic operation, which is
	 valid because EXPR cannot overflow in its (signed) type here.  */
      return fold_build2 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COMPOUND_EXPR:
      /* Only the value operand needs converting; the first operand is
	 evaluated for its side effects only.  */
      return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COND_EXPR:
      /* Convert both arms; the condition is left untouched.  */
      return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)),
			  convert_to_index_type (TREE_OPERAND (expr, 2)));

    default:
      break;
    }

  /* Fall back to the simple conversion in all other cases.  */
  return convert (sizetype, expr);
}
4865 /* Remove all conversions that are done in EXP. This includes converting
4866 from a padded type or to a justified modular type. If TRUE_ADDRESS
4867 is true, always return the address of the containing object even if
4868 the address is not bit-aligned. */
4870 tree
4871 remove_conversions (tree exp, bool true_address)
4873 switch (TREE_CODE (exp))
4875 case CONSTRUCTOR:
4876 if (true_address
4877 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4878 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4879 return
4880 remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
4881 break;
4883 case COMPONENT_REF:
4884 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4885 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4886 break;
4888 CASE_CONVERT:
4889 case VIEW_CONVERT_EXPR:
4890 case NON_LVALUE_EXPR:
4891 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4893 default:
4894 break;
4897 return exp;
4900 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4901 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4902 likewise return an expression pointing to the underlying array. */
tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE (type))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  /* Remember these flags so they can be restored on the final
	     INDIRECT_REF built below.  */
	  const bool read_only = TREE_READONLY (exp);
	  const bool no_trap = TREE_THIS_NOTRAP (exp);

	  /* Strip the reference and work on the underlying fat pointer.  */
	  exp = TREE_OPERAND (exp, 0);
	  type = TREE_TYPE (exp);

	  if (TREE_CODE (exp) == COND_EXPR)
	    {
	      /* Distribute the dereference over the two arms of the
		 conditional: dereference the array pointer (first field
		 of the fat pointer) in each arm.  */
	      tree op1
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 1),
						       TYPE_FIELDS (type),
						       false));
	      tree op2
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 2),
						       TYPE_FIELDS (type),
						       false));

	      exp = build3 (COND_EXPR,
			    TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
			    TREE_OPERAND (exp, 0), op1, op2);
	    }
	  else
	    {
	      /* Dereference the array pointer held in the first field of
		 the fat pointer and restore the saved flags.  */
	      exp = build_unary_op (INDIRECT_REF, NULL_TREE,
				    build_component_ref (exp,
							 TYPE_FIELDS (type),
							 false));
	      TREE_READONLY (exp) = read_only;
	      TREE_THIS_NOTRAP (exp) = no_trap;
	    }
	}

      /* For a NULL_EXPR, rebuild it with the underlying array type.  */
      else if (code == NULL_EXPR)
	exp = build1 (NULL_EXPR,
		      TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
		      TREE_OPERAND (exp, 0));
      break;

    case RECORD_TYPE:
      /* If this is a padded type and it contains a template, convert to the
	 unpadded type first.  */
      if (TYPE_PADDING_P (type)
	  && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
	  && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
	{
	  exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	  code = TREE_CODE (exp);
	  type = TREE_TYPE (exp);
	}

      if (TYPE_CONTAINS_TEMPLATE_P (type))
	{
	  /* If the array initializer is a box, return NULL_TREE.  */
	  if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
	    return NULL_TREE;

	  /* The array is the second field, after the template.  */
	  exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
				     false);
	  type = TREE_TYPE (exp);

	  /* If the array type is padded, convert to the unpadded type.  */
	  if (TYPE_IS_PADDING_P (type))
	    exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	}
      break;

    default:
      break;
    }

  return exp;
}
4990 /* Return true if EXPR is an expression that can be folded as an operand
4991 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4993 static bool
4994 can_fold_for_view_convert_p (tree expr)
4996 tree t1, t2;
4998 /* The folder will fold NOP_EXPRs between integral types with the same
4999 precision (in the middle-end's sense). We cannot allow it if the
5000 types don't have the same precision in the Ada sense as well. */
5001 if (TREE_CODE (expr) != NOP_EXPR)
5002 return true;
5004 t1 = TREE_TYPE (expr);
5005 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
5007 /* Defer to the folder for non-integral conversions. */
5008 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
5009 return true;
5011 /* Only fold conversions that preserve both precisions. */
5012 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
5013 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
5014 return true;
5016 return false;
5019 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
5020 If NOTRUNC_P is true, truncation operations should be suppressed.
5022 Special care is required with (source or target) integral types whose
5023 precision is not equal to their size, to make sure we fetch or assign
5024 the value bits whose location might depend on the endianness, e.g.
5026 Rmsize : constant := 8;
5027 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5029 type Bit_Array is array (1 .. Rmsize) of Boolean;
5030 pragma Pack (Bit_Array);
5032 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5034 Value : Int := 2#1000_0001#;
5035 Vbits : Bit_Array := To_Bit_Array (Value);
5037 we expect the 8 bits at Vbits'Address to always contain Value, while
5038 their original location depends on the endianness, at Value'Address
5039 on a little-endian architecture but not on a big-endian one.
5041 One pitfall is that we cannot use TYPE_UNSIGNED directly to decide how
5042 the bits between the precision and the size are filled, because of the
5043 trick used in the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.
5044 So we use the special predicate type_unsigned_for_rm above. */
tree
unchecked_convert (tree type, tree expr, bool notrunc_p)
{
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);
  /* Scratch for an intermediate vector type built below.  */
  tree tem;
  /* Result of comparing the sizes of the source and target types.  */
  int c;

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if (((INTEGRAL_TYPE_P (type)
	|| (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
	|| (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
       && (INTEGRAL_TYPE_P (etype)
	   || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
	   || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
      || code == UNCONSTRAINED_ARRAY_TYPE)
    {
      /* A biased source must first be viewed through an unbiased copy of
	 its type, so that the bias is not reapplied by the conversion.  */
      if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
	{
	  tree ntype = copy_type (etype);
	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (NOP_EXPR, ntype, expr);
	}

      /* Likewise, convert to an unbiased copy of a biased target and only
	 then view the result as the biased type.  */
      if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  tree rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	  expr = convert (rtype, expr);
	  expr = build1 (NOP_EXPR, type, expr);
	}
      else
	expr = convert (type, expr);
    }

  /* If we are converting to an integral type whose precision is not equal
     to its size, first unchecked convert to a record type that contains a
     field of the given precision.  Then extract the result from the field.

     There is a subtlety if the source type is an aggregate type with reverse
     storage order because its representation is not contiguous in the native
     storage order, i.e. a direct unchecked conversion to an integral type
     with N bits of precision cannot read the first N bits of the aggregate
     type.  To overcome it, we do an unchecked conversion to an integral type
     with reverse storage order and return the resulting value.  This also
     ensures that the result of the unchecked conversion doesn't depend on
     the endianness of the target machine, but only on the storage order of
     the aggregate type.

     Finally, for the sake of consistency, we do the unchecked conversion
     to an integral type with reverse storage order as soon as the source
     type is an aggregate type with reverse storage order, even if there
     are no considerations of precision or size involved.  */
  else if (INTEGRAL_TYPE_P (type)
	   && TYPE_RM_SIZE (type)
	   && (tree_int_cst_compare (TYPE_RM_SIZE (type),
				     TYPE_SIZE (type)) < 0
	       || (AGGREGATE_TYPE_P (etype)
		   && TYPE_REVERSE_STORAGE_ORDER (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
      tree field_type, field;

      /* Propagate the storage order of the source aggregate, see above.  */
      if (AGGREGATE_TYPE_P (etype))
	TYPE_REVERSE_STORAGE_ORDER (rec_type)
	  = TYPE_REVERSE_STORAGE_ORDER (etype);

      if (type_unsigned_for_rm (type))
	field_type = make_unsigned_type (prec);
      else
	field_type = make_signed_type (prec);
      SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
				 NULL_TREE, bitsize_zero_node, 1, 0);

      finish_record_type (rec_type, field, 1, false);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, field, false);
      expr = fold_build1 (NOP_EXPR, type, expr);
    }

  /* Similarly if we are converting from an integral type whose precision is
     not equal to its size, first copy into a field of the given precision
     and unchecked convert the record type.

     The same considerations as above apply if the target type is an aggregate
     type with reverse storage order and we also proceed similarly.  */
  else if (INTEGRAL_TYPE_P (etype)
	   && TYPE_RM_SIZE (etype)
	   && (tree_int_cst_compare (TYPE_RM_SIZE (etype),
				     TYPE_SIZE (etype)) < 0
	       || (AGGREGATE_TYPE_P (type)
		   && TYPE_REVERSE_STORAGE_ORDER (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, 1);
      tree field_type, field;

      /* Propagate the storage order of the target aggregate, see above.  */
      if (AGGREGATE_TYPE_P (type))
	TYPE_REVERSE_STORAGE_ORDER (rec_type)
	  = TYPE_REVERSE_STORAGE_ORDER (type);

      if (type_unsigned_for_rm (etype))
	field_type = make_unsigned_type (prec);
      else
	field_type = make_signed_type (prec);
      SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
				 NULL_TREE, bitsize_zero_node, 1, 0);

      finish_record_type (rec_type, field, 1, false);

      expr = fold_build1 (NOP_EXPR, field_type, expr);
      CONSTRUCTOR_APPEND_ELT (v, field, expr);
      expr = gnat_build_constructor (rec_type, v);
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting from a scalar type to a type with a different size,
     we need to pad to have the same size on both sides.

     ??? We cannot do it unconditionally because unchecked conversions are
     used liberally by the front-end to implement polymorphism, e.g. in:

       S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
       return p___size__4 (p__object!(S191s.all));

     so we skip all expressions that are references.  */
  else if (!REFERENCE_CLASS_P (expr)
	   && !AGGREGATE_TYPE_P (etype)
	   && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	   && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
    {
      if (c < 0)
	{
	  /* Source is smaller: pad it up to the target size first.  */
	  expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
					  false, false, false, true),
			  expr);
	  expr = unchecked_convert (type, expr, notrunc_p);
	}
      else
	{
	  /* Source is larger: convert to a padded version of the target
	     and extract the target from its field.  */
	  tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
					  false, false, false, true);
	  expr = unchecked_convert (rec_type, expr, notrunc_p);
	  expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
	}
    }

  /* We have a special case when we are converting between two unconstrained
     array types.  In that case, take the address, convert the fat pointer
     types, and dereference.  */
  else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));

  /* Another special case is when we are converting to a vector type from its
     representative array type; this a regular conversion.  */
  else if (code == VECTOR_TYPE
	   && ecode == ARRAY_TYPE
	   && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
				       etype))
    expr = convert (type, expr);

  /* And, if the array type is not the representative, we try to build an
     intermediate vector type of which the array type is the representative
     and to do the unchecked conversion between the vector types, in order
     to enable further simplifications in the middle-end.  */
  else if (code == VECTOR_TYPE
	   && ecode == ARRAY_TYPE
	   && (tem = build_vector_type_for_array (etype, NULL_TREE)))
    {
      expr = convert (tem, expr);
      return unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
     the alignment of the CONSTRUCTOR to speed up the copy operation.  */
  else if (TREE_CODE (expr) == CONSTRUCTOR
	   && code == RECORD_TYPE
	   && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
    {
      expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
				      Empty, false, false, false, true),
		      expr);
      return unchecked_convert (type, expr, notrunc_p);
    }

  /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression.  */
  else
    {
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      if (can_fold_for_view_convert_p (expr))
	expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
      else
	expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is a non-biased integral type whose precision is not equal
     to its size, sign- or zero-extend the result.  But we need not do this
     if the input is also an integral type and both are unsigned or both are
     signed and have the same precision.  */
  if (!notrunc_p
      && INTEGRAL_TYPE_P (type)
      && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
      && TYPE_RM_SIZE (type)
      && tree_int_cst_compare (TYPE_RM_SIZE (type), TYPE_SIZE (type)) < 0
      && !(INTEGRAL_TYPE_P (etype)
	   && type_unsigned_for_rm (type) == type_unsigned_for_rm (etype)
	   && (type_unsigned_for_rm (type)
	       || tree_int_cst_compare (TYPE_RM_SIZE (type),
					TYPE_RM_SIZE (etype)
					? TYPE_RM_SIZE (etype)
					: TYPE_SIZE (etype)) == 0)))
    {
      /* An RM size of zero means the only value is zero.  */
      if (integer_zerop (TYPE_RM_SIZE (type)))
	expr = build_int_cst (type, 0);
      else
	{
	  /* Extend by shifting left then right by SIZE - RM_SIZE bits in
	     the base type, which propagates (or clears) the top bits.  */
	  tree base_type
	    = gnat_type_for_size (TREE_INT_CST_LOW (TYPE_SIZE (type)),
				  type_unsigned_for_rm (type));
	  tree shift_expr
	    = convert (base_type,
		       size_binop (MINUS_EXPR,
				   TYPE_SIZE (type), TYPE_RM_SIZE (type)));
	  expr
	    = convert (type,
		       build_binary_op (RSHIFT_EXPR, base_type,
					build_binary_op (LSHIFT_EXPR, base_type,
							 convert (base_type,
								  expr),
							 shift_expr),
					shift_expr));
	}
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
			   OEP_ONLY_CONST))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
5319 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5320 the latter being a record type as predicated by Is_Record_Type. */
5322 enum tree_code
5323 tree_code_for_record_type (Entity_Id gnat_type)
5325 Node_Id component_list, component;
5327 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5328 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5329 if (!Is_Unchecked_Union (gnat_type))
5330 return RECORD_TYPE;
5332 gnat_type = Implementation_Base_Type (gnat_type);
5333 component_list
5334 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5336 for (component = First_Non_Pragma (Component_Items (component_list));
5337 Present (component);
5338 component = Next_Non_Pragma (component))
5339 if (Ekind (Defining_Entity (component)) == E_Component)
5340 return RECORD_TYPE;
5342 return UNION_TYPE;
5345 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5346 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5347 according to the presence of an alignment clause on the type or, if it
5348 is an array, on the component type. */
5350 bool
5351 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5353 gnat_type = Underlying_Type (gnat_type);
5355 *align_clause = Present (Alignment_Clause (gnat_type));
5357 if (Is_Array_Type (gnat_type))
5359 gnat_type = Underlying_Type (Component_Type (gnat_type));
5360 if (Present (Alignment_Clause (gnat_type)))
5361 *align_clause = true;
5364 if (!Is_Floating_Point_Type (gnat_type))
5365 return false;
5367 if (UI_To_Int (Esize (gnat_type)) != 64)
5368 return false;
5370 return true;
5373 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5374 size is greater or equal to 64 bits, or an array of such a type. Set
5375 ALIGN_CLAUSE according to the presence of an alignment clause on the
5376 type or, if it is an array, on the component type. */
5378 bool
5379 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5381 gnat_type = Underlying_Type (gnat_type);
5383 *align_clause = Present (Alignment_Clause (gnat_type));
5385 if (Is_Array_Type (gnat_type))
5387 gnat_type = Underlying_Type (Component_Type (gnat_type));
5388 if (Present (Alignment_Clause (gnat_type)))
5389 *align_clause = true;
5392 if (!Is_Scalar_Type (gnat_type))
5393 return false;
5395 if (UI_To_Int (Esize (gnat_type)) < 64)
5396 return false;
5398 return true;
5401 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5402 component of an aggregate type. */
5404 bool
5405 type_for_nonaliased_component_p (tree gnu_type)
5407 /* If the type is passed by reference, we may have pointers to the
5408 component so it cannot be made non-aliased. */
5409 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5410 return false;
5412 /* We used to say that any component of aggregate type is aliased
5413 because the front-end may take 'Reference of it. The front-end
5414 has been enhanced in the meantime so as to use a renaming instead
5415 in most cases, but the back-end can probably take the address of
5416 such a component too so we go for the conservative stance.
5418 For instance, we might need the address of any array type, even
5419 if normally passed by copy, to construct a fat pointer if the
5420 component is used as an actual for an unconstrained formal.
5422 Likewise for record types: even if a specific record subtype is
5423 passed by copy, the parent type might be passed by ref (e.g. if
5424 it's of variable size) and we might take the address of a child
5425 component to pass to a parent formal. We have no way to check
5426 for such conditions here. */
5427 if (AGGREGATE_TYPE_P (gnu_type))
5428 return false;
5430 return true;
5433 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5435 bool
5436 smaller_form_type_p (tree type, tree orig_type)
5438 tree size, osize;
5440 /* We're not interested in variants here. */
5441 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5442 return false;
5444 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5445 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5446 return false;
5448 size = TYPE_SIZE (type);
5449 osize = TYPE_SIZE (orig_type);
5451 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5452 return false;
5454 return tree_int_cst_lt (size, osize) != 0;
5457 /* Return whether EXPR, which is the renamed object in an object renaming
5458 declaration, can be materialized as a reference (with a REFERENCE_TYPE).
5459 This should be synchronized with Exp_Dbug.Debug_Renaming_Declaration. */
5461 bool
5462 can_materialize_object_renaming_p (Node_Id expr)
5464 while (true)
5466 expr = Original_Node (expr);
5468 switch Nkind (expr)
5470 case N_Identifier:
5471 case N_Expanded_Name:
5472 if (!Present (Renamed_Object (Entity (expr))))
5473 return true;
5474 expr = Renamed_Object (Entity (expr));
5475 break;
5477 case N_Selected_Component:
5479 if (Is_Packed (Underlying_Type (Etype (Prefix (expr)))))
5480 return false;
5482 const Uint bitpos
5483 = Normalized_First_Bit (Entity (Selector_Name (expr)));
5484 if (!UI_Is_In_Int_Range (bitpos)
5485 || (bitpos != UI_No_Uint && bitpos != UI_From_Int (0)))
5486 return false;
5488 expr = Prefix (expr);
5489 break;
5492 case N_Indexed_Component:
5493 case N_Slice:
5495 const Entity_Id t = Underlying_Type (Etype (Prefix (expr)));
5497 if (Is_Array_Type (t) && Present (Packed_Array_Impl_Type (t)))
5498 return false;
5500 expr = Prefix (expr);
5501 break;
5504 case N_Explicit_Dereference:
5505 expr = Prefix (expr);
5506 break;
5508 default:
5509 return true;
5514 /* Perform final processing on global declarations. */
5516 static GTY (()) tree dummy_global;
5518 void
5519 gnat_write_global_declarations (void)
5521 unsigned int i;
5522 tree iter;
5524 /* If we have declared types as used at the global level, insert them in
5525 the global hash table. We use a dummy variable for this purpose, but
5526 we need to build it unconditionally to avoid -fcompare-debug issues. */
5527 if (first_global_object_name)
5529 struct varpool_node *node;
5530 char *label;
5532 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
5533 dummy_global
5534 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5535 void_type_node);
5536 DECL_HARD_REGISTER (dummy_global) = 1;
5537 TREE_STATIC (dummy_global) = 1;
5538 node = varpool_node::get_create (dummy_global);
5539 node->definition = 1;
5540 node->force_output = 1;
5542 if (types_used_by_cur_var_decl)
5543 while (!types_used_by_cur_var_decl->is_empty ())
5545 tree t = types_used_by_cur_var_decl->pop ();
5546 types_used_by_var_decl_insert (t, dummy_global);
5550 /* Output debug information for all global type declarations first. This
5551 ensures that global types whose compilation hasn't been finalized yet,
5552 for example pointers to Taft amendment types, have their compilation
5553 finalized in the right context. */
5554 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5555 if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
5556 debug_hooks->type_decl (iter, false);
5558 /* Output imported functions. */
5559 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5560 if (TREE_CODE (iter) == FUNCTION_DECL
5561 && DECL_EXTERNAL (iter)
5562 && DECL_INITIAL (iter) == NULL
5563 && !DECL_IGNORED_P (iter)
5564 && DECL_FUNCTION_IS_DEF (iter))
5565 debug_hooks->early_global_decl (iter);
5567 /* Then output the global variables. We need to do that after the debug
5568 information for global types is emitted so that they are finalized. Skip
5569 external global variables, unless we need to emit debug info for them:
5570 this is useful for imported variables, for instance. */
5571 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5572 if (TREE_CODE (iter) == VAR_DECL
5573 && (!DECL_EXTERNAL (iter) || !DECL_IGNORED_P (iter)))
5574 rest_of_decl_compilation (iter, true, 0);
5576 /* Output the imported modules/declarations. In GNAT, these are only
5577 materializing subprogram. */
5578 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5579 if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
5580 debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
5581 DECL_CONTEXT (iter), false, false);
5584 /* ************************************************************************
5585 * * GCC builtins support *
5586 * ************************************************************************ */
/* The general scheme is fairly simple:

   For each builtin function/type to be declared, gnat_install_builtins calls
   internal facilities which eventually get to gnat_pushdecl, which in turn
   tracks the so declared builtin function decls in the 'builtin_decls' global
   data structure.  When an Intrinsic subprogram declaration is processed, we
   search this global data structure to retrieve the associated BUILT_IN DECL
   node.  */
5597 /* Search the chain of currently available builtin declarations for a node
5598 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5599 found, if any, or NULL_TREE otherwise. */
5600 tree
5601 builtin_decl_for (tree name)
5603 unsigned i;
5604 tree decl;
5606 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5607 if (DECL_NAME (decl) == name)
5608 return decl;
5610 return NULL_TREE;
5613 /* The code below eventually exposes gnat_install_builtins, which declares
5614 the builtin types and functions we might need, either internally or as
5615 user accessible facilities.
5617 ??? This is a first implementation shot, still in rough shape. It is
5618 heavily inspired from the "C" family implementation, with chunks copied
5619 verbatim from there.
5621 Two obvious improvement candidates are:
5622 o Use a more efficient name/decl mapping scheme
5623 o Devise a middle-end infrastructure to avoid having to copy
5624 pieces between front-ends. */
5626 /* ----------------------------------------------------------------------- *
5627 * BUILTIN ELEMENTARY TYPES *
5628 * ----------------------------------------------------------------------- */
5630 /* Standard data types to be used in builtin argument declarations. */
5632 enum c_tree_index
5634 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5635 CTI_STRING_TYPE,
5636 CTI_CONST_STRING_TYPE,
5638 CTI_MAX
5641 static tree c_global_trees[CTI_MAX];
5643 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5644 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5645 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5647 /* ??? In addition some attribute handlers, we currently don't support a
5648 (small) number of builtin-types, which in turns inhibits support for a
5649 number of builtin functions. */
5650 #define wint_type_node void_type_node
5651 #define intmax_type_node void_type_node
5652 #define uintmax_type_node void_type_node
5654 /* Used to help initialize the builtin-types.def table. When a type of
5655 the correct size doesn't exist, use error_mark_node instead of NULL.
5656 The later results in segfaults even when a decl using the type doesn't
5657 get invoked. */
5659 static tree
5660 builtin_type_for_size (int size, bool unsignedp)
5662 tree type = gnat_type_for_size (size, unsignedp);
5663 return type ? type : error_mark_node;
5666 /* Build/push the elementary type decls that builtin functions/types
5667 will need. */
5669 static void
5670 install_builtin_elementary_types (void)
5672 signed_size_type_node = gnat_signed_type_for (size_type_node);
5673 pid_type_node = integer_type_node;
5675 string_type_node = build_pointer_type (char_type_node);
5676 const_string_type_node
5677 = build_pointer_type (build_qualified_type
5678 (char_type_node, TYPE_QUAL_CONST));
5681 /* ----------------------------------------------------------------------- *
5682 * BUILTIN FUNCTION TYPES *
5683 * ----------------------------------------------------------------------- */
5685 /* Now, builtin function types per se. */
5687 enum c_builtin_type
5689 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5690 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5691 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5692 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5693 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5694 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5695 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5696 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5697 ARG6) NAME,
5698 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5699 ARG6, ARG7) NAME,
5700 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5701 ARG6, ARG7, ARG8) NAME,
5702 #define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5703 ARG6, ARG7, ARG8, ARG9) NAME,
5704 #define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5705 ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
5706 #define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5707 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
5708 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5709 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5710 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5711 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5712 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5713 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5714 NAME,
5715 #define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5716 ARG6) NAME,
5717 #define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5718 ARG6, ARG7) NAME,
5719 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5720 #include "builtin-types.def"
5721 #undef DEF_PRIMITIVE_TYPE
5722 #undef DEF_FUNCTION_TYPE_0
5723 #undef DEF_FUNCTION_TYPE_1
5724 #undef DEF_FUNCTION_TYPE_2
5725 #undef DEF_FUNCTION_TYPE_3
5726 #undef DEF_FUNCTION_TYPE_4
5727 #undef DEF_FUNCTION_TYPE_5
5728 #undef DEF_FUNCTION_TYPE_6
5729 #undef DEF_FUNCTION_TYPE_7
5730 #undef DEF_FUNCTION_TYPE_8
5731 #undef DEF_FUNCTION_TYPE_9
5732 #undef DEF_FUNCTION_TYPE_10
5733 #undef DEF_FUNCTION_TYPE_11
5734 #undef DEF_FUNCTION_TYPE_VAR_0
5735 #undef DEF_FUNCTION_TYPE_VAR_1
5736 #undef DEF_FUNCTION_TYPE_VAR_2
5737 #undef DEF_FUNCTION_TYPE_VAR_3
5738 #undef DEF_FUNCTION_TYPE_VAR_4
5739 #undef DEF_FUNCTION_TYPE_VAR_5
5740 #undef DEF_FUNCTION_TYPE_VAR_6
5741 #undef DEF_FUNCTION_TYPE_VAR_7
5742 #undef DEF_POINTER_TYPE
5743 BT_LAST
5746 typedef enum c_builtin_type builtin_type;
5748 /* A temporary array used in communication with def_fn_type. */
5749 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5751 /* A helper function for install_builtin_types. Build function type
5752 for DEF with return type RET and N arguments. If VAR is true, then the
5753 function should be variadic after those N arguments.
5755 Takes special care not to ICE if any of the types involved are
5756 error_mark_node, which indicates that said type is not in fact available
5757 (see builtin_type_for_size). In which case the function type as a whole
5758 should be error_mark_node. */
5760 static void
5761 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5763 tree t;
5764 tree *args = XALLOCAVEC (tree, n);
5765 va_list list;
5766 int i;
5768 va_start (list, n);
5769 for (i = 0; i < n; ++i)
5771 builtin_type a = (builtin_type) va_arg (list, int);
5772 t = builtin_types[a];
5773 if (t == error_mark_node)
5774 goto egress;
5775 args[i] = t;
5778 t = builtin_types[ret];
5779 if (t == error_mark_node)
5780 goto egress;
5781 if (var)
5782 t = build_varargs_function_type_array (t, n, args);
5783 else
5784 t = build_function_type_array (t, n, args);
5786 egress:
5787 builtin_types[def] = t;
5788 va_end (list);
5791 /* Build the builtin function types and install them in the builtin_types
5792 array for later use in builtin function decls. */
5794 static void
5795 install_builtin_function_types (void)
5797 tree va_list_ref_type_node;
5798 tree va_list_arg_type_node;
5800 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5802 va_list_arg_type_node = va_list_ref_type_node =
5803 build_pointer_type (TREE_TYPE (va_list_type_node));
5805 else
5807 va_list_arg_type_node = va_list_type_node;
5808 va_list_ref_type_node = build_reference_type (va_list_type_node);
5811 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5812 builtin_types[ENUM] = VALUE;
5813 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5814 def_fn_type (ENUM, RETURN, 0, 0);
5815 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5816 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5817 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5818 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5819 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5820 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5821 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5822 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5823 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5824 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5825 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5826 ARG6) \
5827 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5828 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5829 ARG6, ARG7) \
5830 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5831 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5832 ARG6, ARG7, ARG8) \
5833 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5834 ARG7, ARG8);
5835 #define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5836 ARG6, ARG7, ARG8, ARG9) \
5837 def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5838 ARG7, ARG8, ARG9);
5839 #define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5840 ARG6, ARG7, ARG8, ARG9, ARG10) \
5841 def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5842 ARG7, ARG8, ARG9, ARG10);
5843 #define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5844 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
5845 def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5846 ARG7, ARG8, ARG9, ARG10, ARG11);
5847 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5848 def_fn_type (ENUM, RETURN, 1, 0);
5849 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5850 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5851 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5852 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5853 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5854 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5855 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5856 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5857 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5858 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5859 #define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5860 ARG6) \
5861 def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5862 #define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5863 ARG6, ARG7) \
5864 def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5865 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5866 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5868 #include "builtin-types.def"
5870 #undef DEF_PRIMITIVE_TYPE
5871 #undef DEF_FUNCTION_TYPE_0
5872 #undef DEF_FUNCTION_TYPE_1
5873 #undef DEF_FUNCTION_TYPE_2
5874 #undef DEF_FUNCTION_TYPE_3
5875 #undef DEF_FUNCTION_TYPE_4
5876 #undef DEF_FUNCTION_TYPE_5
5877 #undef DEF_FUNCTION_TYPE_6
5878 #undef DEF_FUNCTION_TYPE_7
5879 #undef DEF_FUNCTION_TYPE_8
5880 #undef DEF_FUNCTION_TYPE_9
5881 #undef DEF_FUNCTION_TYPE_10
5882 #undef DEF_FUNCTION_TYPE_11
5883 #undef DEF_FUNCTION_TYPE_VAR_0
5884 #undef DEF_FUNCTION_TYPE_VAR_1
5885 #undef DEF_FUNCTION_TYPE_VAR_2
5886 #undef DEF_FUNCTION_TYPE_VAR_3
5887 #undef DEF_FUNCTION_TYPE_VAR_4
5888 #undef DEF_FUNCTION_TYPE_VAR_5
5889 #undef DEF_FUNCTION_TYPE_VAR_6
5890 #undef DEF_FUNCTION_TYPE_VAR_7
5891 #undef DEF_POINTER_TYPE
5892 builtin_types[(int) BT_LAST] = NULL_TREE;
5895 /* ----------------------------------------------------------------------- *
5896 * BUILTIN ATTRIBUTES *
5897 * ----------------------------------------------------------------------- */
5899 enum built_in_attribute
5901 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5902 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5903 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
5904 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5905 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5906 #include "builtin-attrs.def"
5907 #undef DEF_ATTR_NULL_TREE
5908 #undef DEF_ATTR_INT
5909 #undef DEF_ATTR_STRING
5910 #undef DEF_ATTR_IDENT
5911 #undef DEF_ATTR_TREE_LIST
5912 ATTR_LAST
5915 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5917 static void
5918 install_builtin_attributes (void)
5920 /* Fill in the built_in_attributes array. */
5921 #define DEF_ATTR_NULL_TREE(ENUM) \
5922 built_in_attributes[(int) ENUM] = NULL_TREE;
5923 #define DEF_ATTR_INT(ENUM, VALUE) \
5924 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5925 #define DEF_ATTR_STRING(ENUM, VALUE) \
5926 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
5927 #define DEF_ATTR_IDENT(ENUM, STRING) \
5928 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5929 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5930 built_in_attributes[(int) ENUM] \
5931 = tree_cons (built_in_attributes[(int) PURPOSE], \
5932 built_in_attributes[(int) VALUE], \
5933 built_in_attributes[(int) CHAIN]);
5934 #include "builtin-attrs.def"
5935 #undef DEF_ATTR_NULL_TREE
5936 #undef DEF_ATTR_INT
5937 #undef DEF_ATTR_STRING
5938 #undef DEF_ATTR_IDENT
5939 #undef DEF_ATTR_TREE_LIST
5942 /* Handle a "const" attribute; arguments as in
5943 struct attribute_spec.handler. */
5945 static tree
5946 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5947 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5948 bool *no_add_attrs)
5950 if (TREE_CODE (*node) == FUNCTION_DECL)
5951 TREE_READONLY (*node) = 1;
5952 else
5953 *no_add_attrs = true;
5955 return NULL_TREE;
5958 /* Handle a "nothrow" attribute; arguments as in
5959 struct attribute_spec.handler. */
5961 static tree
5962 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5963 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5964 bool *no_add_attrs)
5966 if (TREE_CODE (*node) == FUNCTION_DECL)
5967 TREE_NOTHROW (*node) = 1;
5968 else
5969 *no_add_attrs = true;
5971 return NULL_TREE;
5974 /* Handle a "pure" attribute; arguments as in
5975 struct attribute_spec.handler. */
5977 static tree
5978 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5979 int ARG_UNUSED (flags), bool *no_add_attrs)
5981 if (TREE_CODE (*node) == FUNCTION_DECL)
5982 DECL_PURE_P (*node) = 1;
5983 /* TODO: support types. */
5984 else
5986 warning (OPT_Wattributes, "%qs attribute ignored",
5987 IDENTIFIER_POINTER (name));
5988 *no_add_attrs = true;
5991 return NULL_TREE;
5994 /* Handle a "no vops" attribute; arguments as in
5995 struct attribute_spec.handler. */
5997 static tree
5998 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5999 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6000 bool *ARG_UNUSED (no_add_attrs))
6002 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
6003 DECL_IS_NOVOPS (*node) = 1;
6004 return NULL_TREE;
6007 /* Helper for nonnull attribute handling; fetch the operand number
6008 from the attribute argument list. */
6010 static bool
6011 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
6013 /* Verify the arg number is a constant. */
6014 if (!tree_fits_uhwi_p (arg_num_expr))
6015 return false;
6017 *valp = TREE_INT_CST_LOW (arg_num_expr);
6018 return true;
6021 /* Handle the "nonnull" attribute. */
6022 static tree
6023 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
6024 tree args, int ARG_UNUSED (flags),
6025 bool *no_add_attrs)
6027 tree type = *node;
6028 unsigned HOST_WIDE_INT attr_arg_num;
6030 /* If no arguments are specified, all pointer arguments should be
6031 non-null. Verify a full prototype is given so that the arguments
6032 will have the correct types when we actually check them later.
6033 Avoid diagnosing type-generic built-ins since those have no
6034 prototype. */
6035 if (!args)
6037 if (!prototype_p (type)
6038 && (!TYPE_ATTRIBUTES (type)
6039 || !lookup_attribute ("type generic", TYPE_ATTRIBUTES (type))))
6041 error ("nonnull attribute without arguments on a non-prototype");
6042 *no_add_attrs = true;
6044 return NULL_TREE;
6047 /* Argument list specified. Verify that each argument number references
6048 a pointer argument. */
6049 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
6051 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
6053 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
6055 error ("nonnull argument has invalid operand number (argument %lu)",
6056 (unsigned long) attr_arg_num);
6057 *no_add_attrs = true;
6058 return NULL_TREE;
6061 if (prototype_p (type))
6063 function_args_iterator iter;
6064 tree argument;
6066 function_args_iter_init (&iter, type);
6067 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
6069 argument = function_args_iter_cond (&iter);
6070 if (!argument || ck_num == arg_num)
6071 break;
6074 if (!argument
6075 || TREE_CODE (argument) == VOID_TYPE)
6077 error ("nonnull argument with out-of-range operand number "
6078 "(argument %lu, operand %lu)",
6079 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6080 *no_add_attrs = true;
6081 return NULL_TREE;
6084 if (TREE_CODE (argument) != POINTER_TYPE)
6086 error ("nonnull argument references non-pointer operand "
6087 "(argument %lu, operand %lu)",
6088 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6089 *no_add_attrs = true;
6090 return NULL_TREE;
6095 return NULL_TREE;
6098 /* Handle a "sentinel" attribute. */
6100 static tree
6101 handle_sentinel_attribute (tree *node, tree name, tree args,
6102 int ARG_UNUSED (flags), bool *no_add_attrs)
6104 if (!prototype_p (*node))
6106 warning (OPT_Wattributes,
6107 "%qs attribute requires prototypes with named arguments",
6108 IDENTIFIER_POINTER (name));
6109 *no_add_attrs = true;
6111 else
6113 if (!stdarg_p (*node))
6115 warning (OPT_Wattributes,
6116 "%qs attribute only applies to variadic functions",
6117 IDENTIFIER_POINTER (name));
6118 *no_add_attrs = true;
6122 if (args)
6124 tree position = TREE_VALUE (args);
6126 if (TREE_CODE (position) != INTEGER_CST)
6128 warning (0, "requested position is not an integer constant");
6129 *no_add_attrs = true;
6131 else
6133 if (tree_int_cst_lt (position, integer_zero_node))
6135 warning (0, "requested position is less than zero");
6136 *no_add_attrs = true;
6141 return NULL_TREE;
6144 /* Handle a "noreturn" attribute; arguments as in
6145 struct attribute_spec.handler. */
6147 static tree
6148 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6149 int ARG_UNUSED (flags), bool *no_add_attrs)
6151 tree type = TREE_TYPE (*node);
6153 /* See FIXME comment in c_common_attribute_table. */
6154 if (TREE_CODE (*node) == FUNCTION_DECL)
6155 TREE_THIS_VOLATILE (*node) = 1;
6156 else if (TREE_CODE (type) == POINTER_TYPE
6157 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
6158 TREE_TYPE (*node)
6159 = build_pointer_type
6160 (build_type_variant (TREE_TYPE (type),
6161 TYPE_READONLY (TREE_TYPE (type)), 1));
6162 else
6164 warning (OPT_Wattributes, "%qs attribute ignored",
6165 IDENTIFIER_POINTER (name));
6166 *no_add_attrs = true;
6169 return NULL_TREE;
6172 /* Handle a "noinline" attribute; arguments as in
6173 struct attribute_spec.handler. */
6175 static tree
6176 handle_noinline_attribute (tree *node, tree name,
6177 tree ARG_UNUSED (args),
6178 int ARG_UNUSED (flags), bool *no_add_attrs)
6180 if (TREE_CODE (*node) == FUNCTION_DECL)
6182 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
6184 warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6185 "with attribute %qs", name, "always_inline");
6186 *no_add_attrs = true;
6188 else
6189 DECL_UNINLINABLE (*node) = 1;
6191 else
6193 warning (OPT_Wattributes, "%qE attribute ignored", name);
6194 *no_add_attrs = true;
6197 return NULL_TREE;
6200 /* Handle a "noclone" attribute; arguments as in
6201 struct attribute_spec.handler. */
6203 static tree
6204 handle_noclone_attribute (tree *node, tree name,
6205 tree ARG_UNUSED (args),
6206 int ARG_UNUSED (flags), bool *no_add_attrs)
6208 if (TREE_CODE (*node) != FUNCTION_DECL)
6210 warning (OPT_Wattributes, "%qE attribute ignored", name);
6211 *no_add_attrs = true;
6214 return NULL_TREE;
6217 /* Handle a "leaf" attribute; arguments as in
6218 struct attribute_spec.handler. */
6220 static tree
6221 handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6222 int ARG_UNUSED (flags), bool *no_add_attrs)
6224 if (TREE_CODE (*node) != FUNCTION_DECL)
6226 warning (OPT_Wattributes, "%qE attribute ignored", name);
6227 *no_add_attrs = true;
6229 if (!TREE_PUBLIC (*node))
6231 warning (OPT_Wattributes, "%qE attribute has no effect", name);
6232 *no_add_attrs = true;
6235 return NULL_TREE;
6238 /* Handle a "always_inline" attribute; arguments as in
6239 struct attribute_spec.handler. */
6241 static tree
6242 handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6243 int ARG_UNUSED (flags), bool *no_add_attrs)
6245 if (TREE_CODE (*node) == FUNCTION_DECL)
6247 /* Set the attribute and mark it for disregarding inline limits. */
6248 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
6250 else
6252 warning (OPT_Wattributes, "%qE attribute ignored", name);
6253 *no_add_attrs = true;
6256 return NULL_TREE;
6259 /* Handle a "malloc" attribute; arguments as in
6260 struct attribute_spec.handler. */
6262 static tree
6263 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6264 int ARG_UNUSED (flags), bool *no_add_attrs)
6266 if (TREE_CODE (*node) == FUNCTION_DECL
6267 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6268 DECL_IS_MALLOC (*node) = 1;
6269 else
6271 warning (OPT_Wattributes, "%qs attribute ignored",
6272 IDENTIFIER_POINTER (name));
6273 *no_add_attrs = true;
6276 return NULL_TREE;
6279 /* Fake handler for attributes we don't properly support. */
6281 tree
6282 fake_attribute_handler (tree * ARG_UNUSED (node),
6283 tree ARG_UNUSED (name),
6284 tree ARG_UNUSED (args),
6285 int ARG_UNUSED (flags),
6286 bool * ARG_UNUSED (no_add_attrs))
6288 return NULL_TREE;
6291 /* Handle a "type_generic" attribute. */
6293 static tree
6294 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6295 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6296 bool * ARG_UNUSED (no_add_attrs))
6298 /* Ensure we have a function type. */
6299 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
6301 /* Ensure we have a variadic function. */
6302 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
6304 return NULL_TREE;
6307 /* Handle a "vector_size" attribute; arguments as in
6308 struct attribute_spec.handler. */
6310 static tree
6311 handle_vector_size_attribute (tree *node, tree name, tree args,
6312 int ARG_UNUSED (flags), bool *no_add_attrs)
6314 tree type = *node;
6315 tree vector_type;
6317 *no_add_attrs = true;
6319 /* We need to provide for vector pointers, vector arrays, and
6320 functions returning vectors. For example:
6322 __attribute__((vector_size(16))) short *foo;
6324 In this case, the mode is SI, but the type being modified is
6325 HI, so we need to look further. */
6326 while (POINTER_TYPE_P (type)
6327 || TREE_CODE (type) == FUNCTION_TYPE
6328 || TREE_CODE (type) == ARRAY_TYPE)
6329 type = TREE_TYPE (type);
6331 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
6332 if (!vector_type)
6333 return NULL_TREE;
6335 /* Build back pointers if needed. */
6336 *node = reconstruct_complex_type (*node, vector_type);
6338 return NULL_TREE;
6341 /* Handle a "vector_type" attribute; arguments as in
6342 struct attribute_spec.handler. */
6344 static tree
6345 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6346 int ARG_UNUSED (flags), bool *no_add_attrs)
6348 tree type = *node;
6349 tree vector_type;
6351 *no_add_attrs = true;
6353 if (TREE_CODE (type) != ARRAY_TYPE)
6355 error ("attribute %qs applies to array types only",
6356 IDENTIFIER_POINTER (name));
6357 return NULL_TREE;
6360 vector_type = build_vector_type_for_array (type, name);
6361 if (!vector_type)
6362 return NULL_TREE;
6364 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
6365 *node = vector_type;
6367 return NULL_TREE;
6370 /* ----------------------------------------------------------------------- *
6371 * BUILTIN FUNCTIONS *
6372 * ----------------------------------------------------------------------- */
6374 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6375 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
6376 if nonansi_p and flag_no_nonansi_builtin. */
6378 static void
6379 def_builtin_1 (enum built_in_function fncode,
6380 const char *name,
6381 enum built_in_class fnclass,
6382 tree fntype, tree libtype,
6383 bool both_p, bool fallback_p,
6384 bool nonansi_p ATTRIBUTE_UNUSED,
6385 tree fnattrs, bool implicit_p)
6387 tree decl;
6388 const char *libname;
6390 /* Preserve an already installed decl. It most likely was setup in advance
6391 (e.g. as part of the internal builtins) for specific reasons. */
6392 if (builtin_decl_explicit (fncode))
6393 return;
6395 gcc_assert ((!both_p && !fallback_p)
6396 || !strncmp (name, "__builtin_",
6397 strlen ("__builtin_")));
6399 libname = name + strlen ("__builtin_");
6400 decl = add_builtin_function (name, fntype, fncode, fnclass,
6401 (fallback_p ? libname : NULL),
6402 fnattrs);
6403 if (both_p)
6404 /* ??? This is normally further controlled by command-line options
6405 like -fno-builtin, but we don't have them for Ada. */
6406 add_builtin_function (libname, libtype, fncode, fnclass,
6407 NULL, fnattrs);
6409 set_builtin_decl (fncode, decl, implicit_p);
6412 static int flag_isoc94 = 0;
6413 static int flag_isoc99 = 0;
6414 static int flag_isoc11 = 0;
6416 /* Install what the common builtins.def offers. */
6418 static void
6419 install_builtin_functions (void)
6421 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6422 NONANSI_P, ATTRS, IMPLICIT, COND) \
6423 if (NAME && COND) \
6424 def_builtin_1 (ENUM, NAME, CLASS, \
6425 builtin_types[(int) TYPE], \
6426 builtin_types[(int) LIBTYPE], \
6427 BOTH_P, FALLBACK_P, NONANSI_P, \
6428 built_in_attributes[(int) ATTRS], IMPLICIT);
6429 #include "builtins.def"
6432 /* ----------------------------------------------------------------------- *
6433 * BUILTIN FUNCTIONS *
6434 * ----------------------------------------------------------------------- */
6436 /* Install the builtin functions we might need. */
6438 void
6439 gnat_install_builtins (void)
6441 install_builtin_elementary_types ();
6442 install_builtin_function_types ();
6443 install_builtin_attributes ();
6445 /* Install builtins used by generic middle-end pieces first. Some of these
6446 know about internal specificities and control attributes accordingly, for
6447 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
6448 the generic definition from builtins.def. */
6449 build_common_builtin_nodes ();
6451 /* Now, install the target specific builtins, such as the AltiVec family on
6452 ppc, and the common set as exposed by builtins.def. */
6453 targetm.init_builtins ();
6454 install_builtin_functions ();
6457 #include "gt-ada-utils.h"
6458 #include "gtype-ada.h"