/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                                U T I L S                                 *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2014, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have received a copy of the GNU General   *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "attribs.h"
#include "varasm.h"
#include "flags.h"
#include "toplev.h"
#include "diagnostic-core.h"
#include "output.h"
#include "ggc.h"
#include "debug.h"
#include "convert.h"
#include "target.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "hash-map.h"
#include "is-a.h"
#include "plugin-api.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;

/* True if floating-point arithmetic may use wider intermediate results.  */
bool fp_arith_may_widen = true;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging a lot of the common-c front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);

/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "const",        0, 0,  true,  false, false, handle_const_attribute,
    false },
  { "nothrow",      0, 0,  true,  false, false, handle_nothrow_attribute,
    false },
  { "pure",         0, 0,  true,  false, false, handle_pure_attribute,
    false },
  { "no vops",      0, 0,  true,  false, false, handle_novops_attribute,
    false },
  { "nonnull",      0, -1, false, true,  true,  handle_nonnull_attribute,
    false },
  { "sentinel",     0, 1,  false, true,  true,  handle_sentinel_attribute,
    false },
  { "noreturn",     0, 0,  true,  false, false, handle_noreturn_attribute,
    false },
  { "leaf",         0, 0,  true,  false, false, handle_leaf_attribute,
    false },
  { "always_inline",0, 0,  true,  false, false, handle_always_inline_attribute,
    false },
  { "malloc",       0, 0,  true,  false, false, handle_malloc_attribute,
    false },
  { "type generic", 0, 0,  false, true,  true,  handle_type_generic_attribute,
    false },

  { "vector_size",  1, 1,  false, true,  false, handle_vector_size_attribute,
    false },
  { "vector_type",  0, 0,  false, true,  false, handle_vector_type_attribute,
    false },
  { "may_alias",    0, 0,  false, true,  false, NULL, false },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",     3, 3, false, true, true, fake_attribute_handler, false },
  { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },

  { NULL, 0, 0, false, false, false, NULL, false }
};

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY) \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

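/* Editorial note (sketch, not part of the original source): these macros
   index the table by GNAT node id, so a typical lookup/update sequence,
   with GNAT_ENTITY a hypothetical node, reads:

     if (PRESENT_GNU_TREE (gnat_entity))
       gnu_decl = GET_GNU_TREE (gnat_entity);
     else
       SET_GNU_TREE (gnat_entity, gnu_decl);

   The public wrappers save_gnu_tree, get_gnu_tree and present_gnu_tree
   below add the associated consistency checks.  */
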
/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations.  */
static GTY(()) tree global_context;

/* An array of global declarations.  */
static GTY(()) vec<tree, va_gc> *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) vec<tree, va_gc> *builtin_decls;

/* An array of global renaming pointers.  */
static GTY(()) vec<tree, va_gc> *global_renaming_pointers;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

/* A hash table of padded types.  It is modelled on the generic type
   hash table in tree.c, which must thus be used as a reference.  */

struct GTY((for_user)) pad_type_hash {
  unsigned long hash;
  tree type;
};

struct pad_type_hasher : ggc_cache_hasher<pad_type_hash *>
{
  static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
  static bool equal (pad_type_hash *a, pad_type_hash *b);
  static void handle_cache_entry (pad_type_hash *&);
};

static GTY ((cache))
  hash_table<pad_type_hasher> *pad_type_hash_table;

static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static unsigned int scale_by_factor_of (tree, unsigned int);
static bool potential_alignment_gap (tree, tree, tree);

/* A linked list used as a queue to defer the initialization of the
   DECL_CONTEXT attribute of ..._DECL nodes and of the TYPE_CONTEXT attribute
   of ..._TYPE nodes.  */
struct deferred_decl_context_node
{
  tree decl;		    /* The ..._DECL node to work on.  */
  Entity_Id gnat_scope;     /* The corresponding entity's Scope attribute.  */
  int force_global;	    /* force_global value when pushing DECL.  */
  vec<tree, va_heap, vl_ptr> types;  /* A list of ..._TYPE nodes to propagate
					the context to.  */
  struct deferred_decl_context_node *next;  /* The next queue item.  */
};

static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;

/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
   feed it with the elaboration of GNAT_SCOPE.  */
static struct deferred_decl_context_node *
add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);

/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed.  */
static void add_deferred_type_context (struct deferred_decl_context_node *n,
				       tree type);

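/* Editorial sketch (assumption, not from the original source): a client
   such as gnat_pushdecl below enqueues a decl whose GNAT scope is not yet
   elaborated, and optionally attaches types whose TYPE_CONTEXT must follow
   the decl's DECL_CONTEXT once it is known:

     struct deferred_decl_context_node *n
       = add_deferred_decl_context (decl, gnat_scope, force_global);
     add_deferred_type_context (n, some_type);

   The queue is drained when the elaboration of GNAT_SCOPE provides the
   missing context.  */
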
/* Initialize data structures of the utils.c module.  */

void
init_gnat_utils (void)
{
  /* Initialize the association of GNAT nodes to GCC trees.  */
  associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the association of GNAT nodes to GCC trees as dummies.  */
  dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the hash table of padded types.  */
  pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
}

/* Destroy data structures of the utils.c module.  */

void
destroy_gnat_utils (void)
{
  /* Destroy the association of GNAT nodes to GCC trees.  */
  ggc_free (associate_gnat_to_gnu);
  associate_gnat_to_gnu = NULL;

  /* Destroy the association of GNAT nodes to GCC trees as dummies.  */
  ggc_free (dummy_node_table);
  dummy_node_table = NULL;

  /* Destroy the hash table of padded types.  */
  pad_type_hash_table->empty ();
  pad_type_hash_table = NULL;

  /* Invalidate the global renaming pointers.  */
  invalidate_global_renaming_pointers ();
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Associate GNU_DECL, a GCC
   tree node, with GNAT_ENTITY.  If GNU_DECL is not a ..._DECL node, abort.
   If NO_CHECK is true, the latter check is suppressed.

   If GNU_DECL is zero, reset a previous association.  */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  If that is not the case, this usually
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem.  */
  gcc_assert (!(gnu_decl
		&& (PRESENT_GNU_TREE (gnat_entity)
		    || (!no_check && !DECL_P (gnu_decl)))));

  SET_GNU_TREE (gnat_entity, gnu_decl);
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Return the GCC tree node
   that was associated with it.  If there is no such tree node, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}

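/* Editorial usage sketch (hypothetical caller, not from the original
   source), combining the three entry points above:

     if (present_gnu_tree (gnat_entity))
       gnu_decl = get_gnu_tree (gnat_entity);
     else
       save_gnu_tree (gnat_entity, gnu_decl, false);
*/
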
/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
  tree gnu_type;

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_equiv))
    gnat_equiv = gnat_type;

  /* If there is already a dummy type, use that one.  Else make one.  */
  if (PRESENT_DUMMY_NODE (gnat_equiv))
    return GET_DUMMY_NODE (gnat_equiv);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_equiv)
			? tree_code_for_record_type (gnat_equiv)
			: ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_equiv))
    TYPE_BY_REFERENCE_P (gnu_type) = 1;

  SET_DUMMY_NODE (gnat_equiv, gnu_type);

  return gnu_type;
}

/* Return the dummy type that was made for GNAT_TYPE, if any.  */

tree
get_dummy_type (Entity_Id gnat_type)
{
  return GET_DUMMY_NODE (gnat_type);
}

/* Build dummy fat and thin pointer types whose designated type is specified
   by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter.  */

void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;

  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl.  */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
			     gnu_fat_type);
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
			      gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
			 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed.  */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}

/* Return true if we are in the global binding level.  */

bool
global_bindings_p (void)
{
  return force_global || current_function_decl == NULL_TREE;
}

/* Enter a new binding level.  */

void
gnat_pushlevel (void)
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel = ggc_alloc<gnat_binding_level> ();

  /* Use a free BLOCK, if any; otherwise, allocate one.  */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = BLOCK_CHAIN (free_block_chain);
      BLOCK_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent.  */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  BLOCK_VARS (newlevel->block) = NULL_TREE;
  BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of active levels.  */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}

/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  set_block_for_group (current_binding_level->block);
}

/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}

/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}

/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK, don't do anything.  Otherwise, if
     there are no variables, free the block and merge its subblocks into
     those of its parent block.  Otherwise, add it to the list of its
     parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (BLOCK_VARS (block) == NULL_TREE)
    {
      BLOCK_SUBBLOCKS (level->chain->block)
	= block_chainon (BLOCK_SUBBLOCKS (block),
			 BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}

/* Exit a binding level and discard the associated BLOCK.  */

void
gnat_zaplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_CHAIN (block) = free_block_chain;
  free_block_chain = block;

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}

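/* Editorial usage sketch (hypothetical, not from the original source):
   binding levels form a stack, so the entry points above are always used
   in matched pairs:

     gnat_pushlevel ();
     ... declare local objects with gnat_pushdecl, possibly record a
	 setjmp buffer with set_block_jmpbuf_decl ...
     gnat_poplevel ();  (or gnat_zaplevel () to discard the BLOCK)
*/
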
/* Set the context of TYPE and its parallel types (if any) to CONTEXT.  */

static void
gnat_set_type_context (tree type, tree context)
{
  tree decl = TYPE_STUB_DECL (type);

  TYPE_CONTEXT (type) = context;

  while (decl && DECL_PARALLEL_TYPE (decl))
    {
      tree parallel_type = DECL_PARALLEL_TYPE (decl);

      /* Give a context to the parallel types and their stub decl, if any.
	 Some parallel types seem to be present in multiple parallel type
	 chains, so don't mess with their context if they already have one.  */
      if (TYPE_CONTEXT (parallel_type) == NULL_TREE)
	{
	  if (TYPE_STUB_DECL (parallel_type) != NULL_TREE)
	    DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
	  TYPE_CONTEXT (parallel_type) = context;
	}

      decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
    }
}

/* Return the innermost scope, starting at GNAT_NODE, that we are interested
   in for the debug info, or Empty if there is no such scope.  If not NULL,
   set IS_SUBPROGRAM to whether the returned entity is a subprogram.  */

static Entity_Id
get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
{
  Entity_Id gnat_entity;

  if (is_subprogram)
    *is_subprogram = false;

  if (Nkind (gnat_node) == N_Defining_Identifier)
    gnat_entity = Scope (gnat_node);
  else
    return Empty;

  while (Present (gnat_entity))
    {
      switch (Ekind (gnat_entity))
	{
	case E_Function:
	case E_Procedure:
	  if (Present (Protected_Body_Subprogram (gnat_entity)))
	    gnat_entity = Protected_Body_Subprogram (gnat_entity);

	  /* If the scope is a subprogram, then just rely on
	     current_function_decl, so that we don't have to defer
	     anything.  This is needed because other places rely on the
	     validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes.  */
	  if (is_subprogram)
	    *is_subprogram = true;
	  return gnat_entity;

	case E_Record_Type:
	case E_Record_Subtype:
	  return gnat_entity;

	default:
	  /* By default, we are not interested in this particular scope: go to
	     the outer one.  */
	  break;
	}
      gnat_entity = Scope (gnat_entity);
    }
  return Empty;
}

/* If N is NULL, set TYPE's context to CONTEXT.  Defer this to the processing
   of N otherwise.  */

static void
defer_or_set_type_context (tree type,
			   tree context,
			   struct deferred_decl_context_node *n)
{
  if (n)
    add_deferred_type_context (n, type);
  else
    gnat_set_type_context (type, context);
}

/* Return global_context.  Create it if needed, first.  */

static tree
get_global_context (void)
{
  if (!global_context)
    global_context = build_translation_unit_decl (NULL_TREE);
  return global_context;
}

/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  tree context = NULL_TREE;
  struct deferred_decl_context_node *deferred_decl_context = NULL;

  /* If explicitly asked to make DECL global or if it's an imported nested
     object, short-circuit the regular Scope-based context computation.  */
  if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
    {
      /* Rely on the GNAT scope, or fallback to the current_function_decl if
	 the GNAT scope reached the global scope, if it reached a subprogram
	 or the declaration is a subprogram or a variable (for them we skip
	 intermediate context types because the subprogram body elaboration
	 machinery and the inliner both expect a subprogram context).

	 Falling back to current_function_decl is necessary for implicit
	 subprograms created by gigi, such as the elaboration subprograms.  */
      bool context_is_subprogram = false;
      const Entity_Id gnat_scope
	= get_debug_scope (gnat_node, &context_is_subprogram);

      if (Present (gnat_scope)
	  && !context_is_subprogram
	  && TREE_CODE (decl) != FUNCTION_DECL
	  && TREE_CODE (decl) != VAR_DECL)
	/* Always assume the scope has not been elaborated, thus defer the
	   context propagation to the time its elaboration will be
	   available.  */
	deferred_decl_context
	  = add_deferred_decl_context (decl, gnat_scope, force_global);

      /* External declarations (when force_global > 0) may not be in a
	 local context.  */
      else if (current_function_decl != NULL_TREE && force_global == 0)
	context = current_function_decl;
    }

  /* If either we are forced to be in global mode or if both the GNAT scope and
     the current_function_decl did not help determining the context, use the
     global scope.  */
  if (!deferred_decl_context && context == NULL_TREE)
    context = get_global_context ();

  /* Functions imported in another function are not really nested.
     For really nested functions mark them initially as needing
     a static chain for uses of that flag before unnesting;
     lower_nested_functions will then recompute it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && !TREE_PUBLIC (decl)
      && context != NULL_TREE
      && (TREE_CODE (context) == FUNCTION_DECL
	  || decl_function_context (context) != NULL_TREE))
    DECL_STATIC_CHAIN (decl) = 1;

  if (!deferred_decl_context)
    DECL_CONTEXT (decl) = context;

  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node) && !renaming_from_generic_instantiation_p (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
	&& TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      if (DECL_EXTERNAL (decl))
	{
	  if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
	    vec_safe_push (builtin_decls, decl);
	}
      else if (global_bindings_p ())
	vec_safe_push (global_decls, decl);
      else
	{
	  DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
	  BLOCK_VARS (current_binding_level->block) = decl;
	}
    }

  /* For the declaration of a type, set its name if it either is not already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in the
     equivalent function of c-decl.c makes a copy of the type node here, but
     that may cause us trouble with incomplete types.  We make an exception
     for fat pointer types because the compiler automatically builds them
     for unconstrained array types and the debugger uses them to represent
     both these and pointers to these.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	{
	  /* Array and pointer types aren't "tagged" types so we force the
	     type to be associated with its typedef in the DWARF back-end,
	     in order to make sure that the latter is always preserved.  */
	  if (!DECL_ARTIFICIAL (decl)
	      && (TREE_CODE (t) == ARRAY_TYPE
		  || TREE_CODE (t) == POINTER_TYPE))
	    {
	      tree tt = build_distinct_type_copy (t);
	      if (TREE_CODE (t) == POINTER_TYPE)
		TYPE_NEXT_PTR_TO (t) = tt;
	      TYPE_NAME (tt) = DECL_NAME (decl);
	      defer_or_set_type_context (tt,
					 DECL_CONTEXT (decl),
					 deferred_decl_context);
	      TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
	      DECL_ORIGINAL_TYPE (decl) = tt;
	    }
	}
      else if (TYPE_IS_FAT_POINTER_P (t))
	{
	  /* We need a variant for the placeholder machinery to work.  */
	  tree tt = build_variant_type_copy (t);
	  TYPE_NAME (tt) = decl;
	  defer_or_set_type_context (tt,
				     DECL_CONTEXT (decl),
				     deferred_decl_context);
	  TREE_USED (tt) = TREE_USED (t);
	  TREE_TYPE (decl) = tt;
	  if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
	    DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
	  else
	    DECL_ORIGINAL_TYPE (decl) = t;
	  DECL_ARTIFICIAL (decl) = 0;
	  t = NULL_TREE;
	}
      else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
	;
      else
	t = NULL_TREE;

      /* Propagate the name to all the anonymous variants.  This is needed
	 for the type qualifiers machinery to work properly.  Also propagate
	 the context to them.  Note that the context will be propagated to all
	 parallel types too thanks to gnat_set_type_context.  */
      if (t)
	for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
	  if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	    {
	      TYPE_NAME (t) = decl;
	      defer_or_set_type_context (t,
					 DECL_CONTEXT (decl),
					 deferred_decl_context);
	    }
    }
}

/* Create a record type that contains a SIZE bytes long field of TYPE with a
   starting bit position so that it is aligned to ALIGN bits, and leaving at
   least ROOM bytes free before the field.  BASE_ALIGN is the alignment the
   record is guaranteed to get.  GNAT_NODE is used for the position of the
   associated TYPE_DECL.  */

tree
make_aligning_type (tree type, unsigned int align, tree size,
		    unsigned int base_align, int room, Node_Id gnat_node)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes.  We use a
     record placeholder to express record'address.  */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

                    <--------- pos ---------->
                {  +------------+-------------+-----------------+
      record  =>{  |############|     ...     | field (type)    |
                {  +------------+-------------+-----------------+
                   |<-- room -->|<- voffset ->|<---- size ----->|
                    o            o
                    |            |
                    record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record.  */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_IDENTIFIER (type);

  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS.  The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address.  */
  voffset_st = size_binop (BIT_AND_EXPR,
			   fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
			   size_int ((align / BITS_PER_UNIT) - 1));

  /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype.  */
  pos = size_binop (MULT_EXPR,
		    convert (bitsizetype,
			     size_binop (PLUS_EXPR, room_st, voffset_st)),
		    bitsize_unit_node);

  /* Craft the GCC record representation.  We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield.  We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression.  */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
			     pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  TYPE_ALIGN (record_type) = base_align;
  TYPE_USER_ALIGN (record_type) = 1;

  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
		  size_binop (MULT_EXPR, convert (bitsizetype, size),
			      bitsize_unit_node),
		  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
		  size_int (room + align / BITS_PER_UNIT));

  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, record_type, true, false, gnat_node);

  return record_type;
}

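/* Editorial worked example (not from the original source) for the VOFFSET
   computation above: with ALIGN = 32 bits, i.e. 4 bytes, and a vblock
   address of 10, -10 & (4 - 1) = 2, and 10 + 2 = 12 is the next multiple
   of 4, so the field lands 2 bytes past record'address + ROOM.  */
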
/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
   as the field type of a packed record if IN_RECORD is true, or as the
   component type of a packed array if IN_RECORD is false.  See if we can
   rewrite it either as a type that has a non-BLKmode, which we can pack
   tighter in the packed record case, or as a smaller type.  If so, return
   the new type.  If not, return the original type.  */

tree
make_packable_type (tree type, bool in_record)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
  unsigned HOST_WIDE_INT new_size;
  tree new_type, old_field, field_list = NULL_TREE;
  unsigned int align;

  /* No point in doing anything if the size is zero.  */
  if (size == 0)
    return type;

  new_type = make_node (TREE_CODE (type));

  /* Copy the name and flags from the old type to that of the new.
     Note that we rely on the pointer equality created here for
     TYPE_NAME to look through conversions in various places.  */
  TYPE_NAME (new_type) = TYPE_NAME (type);
  TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
  TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
  if (TREE_CODE (type) == RECORD_TYPE)
    TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);

  /* If we are in a record and have a small size, set the alignment to
     try for an integral mode.  Otherwise set it to try for a smaller
     type with BLKmode.  */
  if (in_record && size <= MAX_FIXED_MODE_SIZE)
    {
      align = ceil_pow2 (size);
      TYPE_ALIGN (new_type) = align;
      new_size = (size + align - 1) & -align;
    }
  else
    {
      unsigned HOST_WIDE_INT align;

      /* Do not try to shrink the size if the RM size is not constant.  */
      if (TYPE_CONTAINS_TEMPLATE_P (type)
	  || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
	return type;

      /* Round the RM size up to a unit boundary to get the minimal size
	 for a BLKmode record.  Give up if it's already the size.  */
      new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
      new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
      if (new_size == size)
	return type;

      align = new_size & -new_size;
      TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
    }

  TYPE_USER_ALIGN (new_type) = 1;

  /* Now copy the fields, keeping the position and size as we don't want
     to change the layout by propagating the packedness downwards.  */
  for (old_field = TYPE_FIELDS (type); old_field;
       old_field = DECL_CHAIN (old_field))
    {
      tree new_field_type = TREE_TYPE (old_field);
      tree new_field, new_size;

      if (RECORD_OR_UNION_TYPE_P (new_field_type)
	  && !TYPE_FAT_POINTER_P (new_field_type)
	  && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
	new_field_type = make_packable_type (new_field_type, true);

      /* However, for the last field in a not already packed record type
	 that is of an aggregate type, we need to use the RM size in the
	 packable version of the record type, see finish_record_type.  */
      if (!DECL_CHAIN (old_field)
	  && !TYPE_PACKED (type)
	  && RECORD_OR_UNION_TYPE_P (new_field_type)
	  && !TYPE_FAT_POINTER_P (new_field_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
	  && TYPE_ADA_SIZE (new_field_type))
	new_size = TYPE_ADA_SIZE (new_field_type);
      else
	new_size = DECL_SIZE (old_field);

      new_field
	= create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
			     new_size, bit_position (old_field),
			     TYPE_PACKED (type),
			     !DECL_NONADDRESSABLE_P (old_field));

      DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
      SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
      if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
	DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);

      DECL_CHAIN (new_field) = field_list;
      field_list = new_field;
    }

  finish_record_type (new_type, nreverse (field_list), 2, false);
  relate_alias_sets (new_type, type, ALIAS_SET_COPY);
  if (TYPE_STUB_DECL (type))
    SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
			    DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));

  /* If this is a padding record, we never want to make the size smaller
     than what was specified.  For QUAL_UNION_TYPE, also copy the size.  */
  if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      TYPE_SIZE (new_type) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
      new_size = size;
    }
  else
    {
      TYPE_SIZE (new_type) = bitsize_int (new_size);
      TYPE_SIZE_UNIT (new_type)
	= size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
    }

  if (!TYPE_CONTAINS_TEMPLATE_P (type))
    SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));

  compute_record_mode (new_type);

  /* Try harder to get a packable type if necessary, for example
     in case the record itself contains a BLKmode field.  */
  if (in_record && TYPE_MODE (new_type) == BLKmode)
    SET_TYPE_MODE (new_type,
		   mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));

  /* If neither the mode nor the size has shrunk, return the old type.  */
  if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
    return type;

  return new_type;
}

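/* Editorial worked example (not from the original source) for the two
   alignment strategies above: in a record, a 24-bit field type gets
   align = ceil_pow2 (24) = 32 and new_size = (24 + 31) & -32 = 32, i.e.
   an integral mode is attempted; outside a record, a 48-bit RM size kept
   in BLKmode gets align = 48 & -48 = 16, the lowest set bit of the
   size.  */
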
/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
   If TYPE is the best type, return it.  Otherwise, make a new type.  We
   only support new integral and pointer types.  FOR_BIASED is true if
   we are making a biased type.  */

tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error.  Likewise if it's too large to represent.  */
  if (!size_tree || !tree_fits_uhwi_p (size_tree))
    return type;

  size = tree_to_uhwi (size_tree);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
		  && TYPE_BIASED_REPRESENTATION_P (type));

      /* Integer types with precision 0 are forbidden.  */
      if (size == 0)
	size = 1;

      /* Only do something if the type isn't a packed array type and doesn't
	 already have the proper size and the size isn't too large.  */
      if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
	  || (TYPE_PRECISION (type) == size && biased_p == for_biased)
	  || size > LONG_LONG_TYPE_SIZE)
	break;

      biased_p |= for_biased;
      if (TYPE_UNSIGNED (type) || biased_p)
	new_type = make_unsigned_type (size);
      else
	new_type = make_signed_type (size);
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
      /* Copy the name to show that it's essentially the same type and
	 not a subrange type.  */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Do something if this is a fat pointer, in which case we
	 may need to return the thin pointer.  */
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
	{
	  machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
	  if (!targetm.valid_pointer_mode (p_mode))
	    p_mode = ptr_mode;
	  return
	    build_pointer_type_for_mode
	      (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
	       p_mode, 0);
	}
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
	 may need to return the fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
	return
	  build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  return type;
}

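/* Editorial usage sketch (hypothetical values, not from the original
   source): shrinking a 32-bit integer type to an 8-bit one for a size
   clause would read

     tree gnu_small_type = make_type_from_size (gnu_type, bitsize_int (8),
						false);

   and yields a fresh type with TYPE_PRECISION and RM size both set
   to 8.  */
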
/* See if the data pointed to by the hash table slot is marked.  */

void
pad_type_hasher::handle_cache_entry (pad_type_hash *&t)
{
  extern void gt_ggc_mx (pad_type_hash *&);
  if (t == HTAB_EMPTY_ENTRY || t == HTAB_DELETED_ENTRY)
    return;
  else if (ggc_marked_p (t->type))
    gt_ggc_mx (t);
  else
    t = static_cast<pad_type_hash *> (HTAB_DELETED_ENTRY);
}

/* Return true iff the padded types are equivalent.  */

bool
pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
{
  tree type1, type2;

  if (t1->hash != t2->hash)
    return 0;

  type1 = t1->type;
  type2 = t2->type;

  /* We consider that the padded types are equivalent if they pad the same
     type and have the same size, alignment and RM size.  Taking the mode
     into account is redundant since it is determined by the others.  */
  return
    TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
    && TYPE_SIZE (type1) == TYPE_SIZE (type2)
    && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
    && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2);
}

/* Look up the padded TYPE in the hash table and return its canonical version
   if it exists; otherwise, insert it into the hash table.  */

static tree
lookup_and_insert_pad_type (tree type)
{
  hashval_t hashcode;
  struct pad_type_hash in, *h;

  hashcode
    = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
  hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
  hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
  hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);

  in.hash = hashcode;
  in.type = type;
  h = pad_type_hash_table->find_with_hash (&in, hashcode);
  if (h)
    return h->type;

  h = ggc_alloc<pad_type_hash> ();
  h->hash = hashcode;
  h->type = type;
  *pad_type_hash_table->find_slot_with_hash (h, hashcode, INSERT) = h;
  return NULL_TREE;
}

/* Ensure that TYPE has SIZE and ALIGN.  Make and return a new padded type
   if needed.  We have already verified that SIZE and ALIGN are large enough.
   GNAT_ENTITY is used to name the resulting record and to issue a warning.
   IS_COMPONENT_TYPE is true if this is being done for the component type of
   an array.  IS_USER_TYPE is true if the original type needs to be completed.
   DEFINITION is true if this type is being defined.  SET_RM_SIZE is true if
   the RM size of the resulting type is to be set to SIZE too.  */

tree
maybe_pad_type (tree type, tree size, unsigned int align,
		Entity_Id gnat_entity, bool is_component_type,
		bool is_user_type, bool definition, bool set_rm_size)
{
  tree orig_size = TYPE_SIZE (type);
  unsigned int orig_align = TYPE_ALIGN (type);
  tree record, field;

  /* If TYPE is a padded type, see if it agrees with any size and alignment
     we were given.  If so, return the original type.  Otherwise, strip
     off the padding, since we will either be returning the inner type
     or repadding it.  If no size or alignment is specified, use that of
     the original padded type.  */
  if (TYPE_IS_PADDING_P (type))
    {
      if ((!size
	   || operand_equal_p (round_up (size, orig_align), orig_size, 0))
	  && (align == 0 || align == orig_align))
	return type;

      if (!size)
	size = orig_size;
      if (align == 0)
	align = orig_align;

      type = TREE_TYPE (TYPE_FIELDS (type));
      orig_size = TYPE_SIZE (type);
      orig_align = TYPE_ALIGN (type);
    }

  /* If the size is either not being changed or is being made smaller (which
     is not done here and is only valid for bitfields anyway), show the size
     isn't changing.  Likewise, clear the alignment if it isn't being
     changed.  Then return if we aren't doing anything.  */
  if (size
      && (operand_equal_p (size, orig_size, 0)
	  || (TREE_CODE (orig_size) == INTEGER_CST
	      && tree_int_cst_lt (size, orig_size))))
    size = NULL_TREE;

  if (align == orig_align)
    align = 0;

  if (align == 0 && !size)
    return type;

  /* If requested, complete the original type and give it a name.  */
  if (is_user_type)
    create_type_decl (get_entity_name (gnat_entity), type,
		      !Comes_From_Source (gnat_entity),
		      !(TYPE_NAME (type)
			&& TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
			&& DECL_IGNORED_P (TYPE_NAME (type))),
		      gnat_entity);

  /* We used to modify the record in place in some cases, but that could
     generate incorrect debugging information.  So make a new record
     type and name.  */
  record = make_node (RECORD_TYPE);
  TYPE_PADDING_P (record) = 1;

  if (Present (gnat_entity))
    TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");

  TYPE_ALIGN (record) = align ? align : orig_align;
  TYPE_SIZE (record) = size ? size : orig_size;
  TYPE_SIZE_UNIT (record)
    = convert (sizetype,
	       size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
			   bitsize_unit_node));

  /* If we are changing the alignment and the input type is a record with
     BLKmode and a small constant size, try to make a form that has an
     integral mode.  This might allow the padding record to also have an
     integral mode, which will be much more efficient.  There is no point
     in doing so if a size is specified unless it is also a small constant
     size and it is incorrect to do so if we cannot guarantee that the mode
     will be naturally aligned since the field must always be addressable.

     ??? This might not always be a win when done for a stand-alone object:
     since the nominal and the effective type of the object will now have
     different modes, a VIEW_CONVERT_EXPR will be required for converting
     between them and it might be hard to overcome afterwards, including
     at the RTL level when the stand-alone object is accessed as a whole.  */
  if (align != 0
      && RECORD_OR_UNION_TYPE_P (type)
      && TYPE_MODE (type) == BLKmode
      && !TYPE_BY_REFERENCE_P (type)
      && TREE_CODE (orig_size) == INTEGER_CST
      && !TREE_OVERFLOW (orig_size)
      && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
      && (!size
	  || (TREE_CODE (size) == INTEGER_CST
	      && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
    {
      tree packable_type = make_packable_type (type, true);
      if (TYPE_MODE (packable_type) != BLKmode
	  && align >= TYPE_ALIGN (packable_type))
	type = packable_type;
    }

  /* Now create the field with the original size.  */
  field = create_field_decl (get_identifier ("F"), type, record, orig_size,
			     bitsize_zero_node, 0, 1);
  DECL_INTERNAL_P (field) = 1;

  /* Do not emit debug info until after the auxiliary record is built.  */
  finish_record_type (record, field, 1, false);

  /* Set the RM size if requested.  */
  if (set_rm_size)
    {
      tree canonical_pad_type;

      SET_TYPE_ADA_SIZE (record, size ? size : orig_size);

      /* If the padded type is complete and has constant size, we canonicalize
	 it by means of the hash table.  This is consistent with the language
	 semantics and ensures that gigi and the middle-end have a common view
	 of these padded types.  */
      if (TREE_CONSTANT (TYPE_SIZE (record))
	  && (canonical_pad_type = lookup_and_insert_pad_type (record)))
	{
	  record = canonical_pad_type;
	  goto built;
	}
    }

  /* Unless debugging information isn't being written for the input type,
     write a record that shows what we are a subtype of and also make a
     variable that indicates our size, if still variable.  */
  if (TREE_CODE (orig_size) != INTEGER_CST
      && TYPE_NAME (record)
      && TYPE_NAME (type)
      && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	   && DECL_IGNORED_P (TYPE_NAME (type))))
    {
      tree marker = make_node (RECORD_TYPE);
      tree name = TYPE_IDENTIFIER (record);
      tree orig_name = TYPE_IDENTIFIER (type);

      TYPE_NAME (marker) = concat_name (name, "XVS");
      finish_record_type (marker,
			  create_field_decl (orig_name,
					     build_reference_type (type),
					     marker, NULL_TREE, NULL_TREE,
					     0, 0),
			  0, true);

      add_parallel_type (record, marker);

      if (definition && size && TREE_CODE (size) != INTEGER_CST)
	TYPE_SIZE_UNIT (marker)
	  = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
			     TYPE_SIZE_UNIT (record), false, false, false,
			     false, NULL, gnat_entity);
    }

  rest_of_record_type_compilation (record);

built:
  /* If the size was widened explicitly, maybe give a warning.  Take the
     original size as the maximum size of the input if there was an
     unconstrained record involved and round it up to the specified alignment,
     if one was specified.  But don't do it if we are just annotating types
     and the type is tagged, since tagged types aren't fully laid out in this
     mode.  */
  if (!size
      || TREE_CODE (size) == COND_EXPR
      || TREE_CODE (size) == MAX_EXPR
      || No (gnat_entity)
      || (type_annotate_only && Is_Tagged_Type (Etype (gnat_entity))))
    return record;

  if (CONTAINS_PLACEHOLDER_P (orig_size))
    orig_size = max_size (orig_size, true);

  if (align)
    orig_size = round_up (orig_size, align);

  if (!operand_equal_p (size, orig_size, 0)
      && !(TREE_CODE (size) == INTEGER_CST
	   && TREE_CODE (orig_size) == INTEGER_CST
	   && (TREE_OVERFLOW (size)
	       || TREE_OVERFLOW (orig_size)
	       || tree_int_cst_lt (size, orig_size))))
    {
      Node_Id gnat_error_node = Empty;

      /* For a packed array, post the message on the original array type.  */
      if (Is_Packed_Array_Impl_Type (gnat_entity))
	gnat_entity = Original_Array_Type (gnat_entity);

      if ((Ekind (gnat_entity) == E_Component
	   || Ekind (gnat_entity) == E_Discriminant)
	  && Present (Component_Clause (gnat_entity)))
	gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
      else if (Present (Size_Clause (gnat_entity)))
	gnat_error_node = Expression (Size_Clause (gnat_entity));

      /* Generate message only for entities that come from source, since
	 if we have an entity created by expansion, the message will be
	 generated for some other corresponding source entity.  */
      if (Comes_From_Source (gnat_entity))
	{
	  if (Present (gnat_error_node))
	    post_error_ne_tree ("{^ }bits of & unused?",
				gnat_error_node, gnat_entity,
				size_diffop (size, orig_size));
	  else if (is_component_type)
	    post_error_ne_tree ("component of& padded{ by ^ bits}?",
				gnat_entity, gnat_entity,
				size_diffop (size, orig_size));
	}
    }

  return record;
}

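/* Editorial usage sketch (hypothetical values, not from the original
   source): padding a type out to 16 bytes (128 bits) with a 64-bit
   alignment would be requested as

     gnu_type = maybe_pad_type (gnu_type, bitsize_int (128), 64,
				gnat_entity, false, false, definition,
				true);

   which wraps the input in a PAD record whose single field F keeps the
   original size.  */
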
/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
   If this is a multi-dimensional array type, do this recursively.

   OP may be
   - ALIAS_SET_COPY:     the new set is made a copy of the old one.
   - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
   - ALIAS_SET_SUBSET:   the new set is made a subset of the old one.  */

void
relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
{
  /* Remove any padding from GNU_OLD_TYPE.  It doesn't matter in the case
     of a one-dimensional array, since the padding has the same alias set
     as the field type, but if it's a multi-dimensional array, we need to
     see the inner types.  */
  while (TREE_CODE (gnu_old_type) == RECORD_TYPE
	 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
	     || TYPE_PADDING_P (gnu_old_type)))
    gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));

  /* Unconstrained array types are deemed incomplete and would thus be given
     alias set 0.  Retrieve the underlying array type.  */
  if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_old_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
  if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_new_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));

  if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
      && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
    relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);

  switch (op)
    {
    case ALIAS_SET_COPY:
      /* The alias set shouldn't be copied between array types with different
	 aliasing settings because this can break the aliasing relationship
	 between the array type and its element type.  */
#ifndef ENABLE_CHECKING
      if (flag_strict_aliasing)
#endif
	gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
		      && TREE_CODE (gnu_old_type) == ARRAY_TYPE
		      && TYPE_NONALIASED_COMPONENT (gnu_new_type)
			 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));

      TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
      break;

    case ALIAS_SET_SUBSET:
    case ALIAS_SET_SUPERSET:
      {
	alias_set_type old_set = get_alias_set (gnu_old_type);
	alias_set_type new_set = get_alias_set (gnu_new_type);

	/* Do nothing if the alias sets conflict.  This ensures that we
	   never call record_alias_subset several times for the same pair
	   or at all for alias set 0.  */
	if (!alias_sets_conflict_p (old_set, new_set))
	  {
	    if (op == ALIAS_SET_SUBSET)
	      record_alias_subset (old_set, new_set);
	    else
	      record_alias_subset (new_set, old_set);
	  }
      }
      break;

    default:
      gcc_unreachable ();
    }

  record_component_aliases (gnu_new_type);
}

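/* Editorial note (not from the original source): make_packable_type and
   maybe_pad_type above both invoke

     relate_alias_sets (new_type, type, ALIAS_SET_COPY);

   so that the repacked or padded copy conflicts with exactly the same
   accesses as the original type.  */
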
/* Record TYPE as a builtin type for Ada.  NAME is the name of the type.
   ARTIFICIAL_P is true if it's a type that was generated by the compiler.  */

void
record_builtin_type (const char *name, tree type, bool artificial_p)
{
  tree type_decl = build_decl (input_location,
			       TYPE_DECL, get_identifier (name), type);
  DECL_ARTIFICIAL (type_decl) = artificial_p;
  TYPE_ARTIFICIAL (type) = artificial_p;
  gnat_pushdecl (type_decl, Empty);

  if (debug_hooks->type_decl)
    debug_hooks->type_decl (type_decl, false);
}

/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record type as a fat pointer type.  */

void
finish_fat_pointer_type (tree record_type, tree field_list)
{
  /* Make sure we can put it into a register.  */
  if (STRICT_ALIGNMENT)
    TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);

  /* Show what it really is.  */
  TYPE_FAT_POINTER_P (record_type) = 1;

  /* Do not emit debug info for it since the types of its fields may still be
     incomplete at this point.  */
  finish_record_type (record_type, field_list, 0, false);

  /* Force type_contains_placeholder_p to return true on it.  Although the
     PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
     type but the representation of the unconstrained array.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
}

1567 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1568 finish constructing the record or union type. If REP_LEVEL is zero, this
1569 record has no representation clause and so will be entirely laid out here.
1570 If REP_LEVEL is one, this record has a representation clause and has been
1571 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1572 this record is derived from a parent record and thus inherits its layout;
1573 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1574 we need to write debug information about this type. */
1576 void
1577 finish_record_type (tree record_type, tree field_list, int rep_level,
1578 bool debug_info_p)
1580 enum tree_code code = TREE_CODE (record_type);
1581 tree name = TYPE_IDENTIFIER (record_type);
1582 tree ada_size = bitsize_zero_node;
1583 tree size = bitsize_zero_node;
1584 bool had_size = TYPE_SIZE (record_type) != 0;
1585 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1586 bool had_align = TYPE_ALIGN (record_type) != 0;
1587 tree field;
1589 TYPE_FIELDS (record_type) = field_list;
1591 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1592 generate debug info and have a parallel type. */
1593 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
1595 /* Globally initialize the record first. If this is a rep'ed record,
1596 that just means some initializations; otherwise, lay out the record. */
1597 if (rep_level > 0)
1599 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
1601 if (!had_size_unit)
1602 TYPE_SIZE_UNIT (record_type) = size_zero_node;
1604 if (!had_size)
1605 TYPE_SIZE (record_type) = bitsize_zero_node;
1607 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1608 out just like a UNION_TYPE, since the size will be fixed. */
1609 else if (code == QUAL_UNION_TYPE)
1610 code = UNION_TYPE;
1612 else
1614 /* Ensure there isn't a size already set. One can be left in an error
1615 case where there is a rep clause but all fields have errors and
1616 no longer have a position. */
1617 TYPE_SIZE (record_type) = 0;
1619 /* Ensure we use the traditional GCC layout for bitfields when we need
1620 to pack the record type or have a representation clause. The other
1621 possible layout (Microsoft C compiler), if available, would prevent
1622 efficient packing in almost all cases. */
1623 #ifdef TARGET_MS_BITFIELD_LAYOUT
1624 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1625 decl_attributes (&record_type,
1626 tree_cons (get_identifier ("gcc_struct"),
1627 NULL_TREE, NULL_TREE),
1628 ATTR_FLAG_TYPE_IN_PLACE);
1629 #endif
1631 layout_type (record_type);
1634 /* At this point, the position and size of each field is known. It was
1635 either set before entry by a rep clause, or by laying out the type above.
1637 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1638 to compute the Ada size; the GCC size and alignment (for rep'ed records
1639 that are not padding types); and the mode (for rep'ed records). We also
1640 clear the DECL_BIT_FIELD indication for the cases we know have not been
1641 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1643 if (code == QUAL_UNION_TYPE)
1644 field_list = nreverse (field_list);
1646 for (field = field_list; field; field = DECL_CHAIN (field))
1648 tree type = TREE_TYPE (field);
1649 tree pos = bit_position (field);
1650 tree this_size = DECL_SIZE (field);
1651 tree this_ada_size;
1653 if (RECORD_OR_UNION_TYPE_P (type)
1654 && !TYPE_FAT_POINTER_P (type)
1655 && !TYPE_CONTAINS_TEMPLATE_P (type)
1656 && TYPE_ADA_SIZE (type))
1657 this_ada_size = TYPE_ADA_SIZE (type);
1658 else
1659 this_ada_size = this_size;
1661 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1662 if (DECL_BIT_FIELD (field)
1663 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1665 unsigned int align = TYPE_ALIGN (type);
1667 /* In the general case, type alignment is required. */
1668 if (value_factor_p (pos, align))
1670 /* The enclosing record type must be sufficiently aligned.
1671 Otherwise, if no alignment was specified for it and it
1672 has been laid out already, bump its alignment to the
1673 desired one if this is compatible with its size. */
1674 if (TYPE_ALIGN (record_type) >= align)
1676 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1677 DECL_BIT_FIELD (field) = 0;
1679 else if (!had_align
1680 && rep_level == 0
1681 && value_factor_p (TYPE_SIZE (record_type), align))
1683 TYPE_ALIGN (record_type) = align;
1684 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1685 DECL_BIT_FIELD (field) = 0;
1689 /* In the non-strict alignment case, only byte alignment is required. */
1690 if (!STRICT_ALIGNMENT
1691 && DECL_BIT_FIELD (field)
1692 && value_factor_p (pos, BITS_PER_UNIT))
1693 DECL_BIT_FIELD (field) = 0;
1696 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1697 field is technically not addressable. Except that it can actually
1698 be addressed if it is BLKmode and happens to be properly aligned. */
1699 if (DECL_BIT_FIELD (field)
1700 && !(DECL_MODE (field) == BLKmode
1701 && value_factor_p (pos, BITS_PER_UNIT)))
1702 DECL_NONADDRESSABLE_P (field) = 1;
1704 /* A type must be as aligned as its most aligned field that is not
1705 a bit-field. But this is already enforced by layout_type. */
1706 if (rep_level > 0 && !DECL_BIT_FIELD (field))
1707 TYPE_ALIGN (record_type)
1708 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
1710 switch (code)
1712 case UNION_TYPE:
1713 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1714 size = size_binop (MAX_EXPR, size, this_size);
1715 break;
1717 case QUAL_UNION_TYPE:
1718 ada_size
1719 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1720 this_ada_size, ada_size);
1721 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1722 this_size, size);
1723 break;
1725 case RECORD_TYPE:
1726 /* Since we know here that all fields are sorted in order of
1727 increasing bit position, the size of the record is one
1728 higher than the ending bit of the last field processed
1729 unless we have a rep clause, since in that case we might
1730 have a field outside a QUAL_UNION_TYPE that has a higher ending
1731 position. So use a MAX in that case. Also, if this field is a
1732 QUAL_UNION_TYPE, we need to take into account the previous size in
1733 the case of empty variants. */
1734 ada_size
1735 = merge_sizes (ada_size, pos, this_ada_size,
1736 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1737 size
1738 = merge_sizes (size, pos, this_size,
1739 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1740 break;
1742 default:
1743 gcc_unreachable ();
1747 if (code == QUAL_UNION_TYPE)
1748 nreverse (field_list);
1750 if (rep_level < 2)
1752 /* If this is a padding record, we never want to make the size smaller
1753 than what was specified in it, if any. */
1754 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
1755 size = TYPE_SIZE (record_type);
1757 /* Now set any of the values we've just computed that apply. */
1758 if (!TYPE_FAT_POINTER_P (record_type)
1759 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1760 SET_TYPE_ADA_SIZE (record_type, ada_size);
1762 if (rep_level > 0)
1764 tree size_unit = had_size_unit
1765 ? TYPE_SIZE_UNIT (record_type)
1766 : convert (sizetype,
1767 size_binop (CEIL_DIV_EXPR, size,
1768 bitsize_unit_node));
1769 unsigned int align = TYPE_ALIGN (record_type);
1771 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1772 TYPE_SIZE_UNIT (record_type)
1773 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1775 compute_record_mode (record_type);
1779 if (debug_info_p)
1780 rest_of_record_type_compilation (record_type);
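/* Illustrative sketch of the REP_LEVEL 0 path (field names and types
   are hypothetical): chain two fields and let the code above lay the
   record out and emit its debug info.

     tree rec = make_node (RECORD_TYPE);
     tree f1 = create_field_decl (get_identifier ("f1"), integer_type_node,
                                  rec, NULL_TREE, NULL_TREE, 0, 0);
     tree f2 = create_field_decl (get_identifier ("f2"), integer_type_node,
                                  rec, NULL_TREE, NULL_TREE, 0, 0);
     DECL_CHAIN (f1) = f2;
     finish_record_type (rec, f1, 0, true);

   With REP_LEVEL 0, the size, alignment and mode all come from
   layout_type.  */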
1783 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
1784 PARALLEL_TYPE has no context and its computation is not deferred yet, also
1785 propagate TYPE's context to PARALLEL_TYPE, or defer the propagation to the
1786 moment TYPE gets a context. */
1788 void
1789 add_parallel_type (tree type, tree parallel_type)
1791 tree decl = TYPE_STUB_DECL (type);
1793 while (DECL_PARALLEL_TYPE (decl))
1794 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1796 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1798 /* If PARALLEL_TYPE already has a context, we are done. */
1799 if (TYPE_CONTEXT (parallel_type) != NULL_TREE)
1800 return;
1802 /* Otherwise, try to get one from TYPE's context. */
1803 if (TYPE_CONTEXT (type) != NULL_TREE)
1804 /* TYPE already has a context, so simply propagate it to PARALLEL_TYPE. */
1805 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
1807 /* ... otherwise TYPE has no context yet. We know it will get one thanks to
1808 gnat_pushdecl, and then its context will be propagated to PARALLEL_TYPE.
1809 So we have nothing to do in this case. */
1812 /* Return true if TYPE has a parallel type. */
1814 static bool
1815 has_parallel_type (tree type)
1817 tree decl = TYPE_STUB_DECL (type);
1819 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1822 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1823 associated with it. It need not be invoked directly in most cases since
1824 finish_record_type takes care of doing so, but this can be necessary if
1825 a parallel type is to be attached to the record type. */
1827 void
1828 rest_of_record_type_compilation (tree record_type)
1830 bool var_size = false;
1831 tree field;
1833 /* If this is a padded type, the bulk of the debug info has already been
1834 generated for the field's type. */
1835 if (TYPE_IS_PADDING_P (record_type))
1836 return;
1838 /* If the type already has a parallel type (XVS type), then we're done. */
1839 if (has_parallel_type (record_type))
1840 return;
1842 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
1844 /* We need to make an XVE/XVU record if any field has variable size,
1845 whether or not the record does. For example, if we have a union,
1846 it may be that all fields, rounded up to the alignment, have the
1847 same size, in which case we'll use that size. But the debug
1848 output routines (except Dwarf2) won't be able to output the fields,
1849 so we need to make the special record. */
1850 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1851 /* If a field has a non-constant qualifier, the record will have
1852 variable size too. */
1853 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
1854 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1856 var_size = true;
1857 break;
1861 /* If this record type is of variable size, make a parallel record type that
1862 will tell the debugger how the former is laid out (see exp_dbug.ads). */
1863 if (var_size)
1865 tree new_record_type
1866 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1867 ? UNION_TYPE : TREE_CODE (record_type));
1868 tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
1869 tree last_pos = bitsize_zero_node;
1870 tree old_field, prev_old_field = NULL_TREE;
1872 new_name
1873 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
1874 ? "XVU" : "XVE");
1875 TYPE_NAME (new_record_type) = new_name;
1876 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1877 TYPE_STUB_DECL (new_record_type)
1878 = create_type_stub_decl (new_name, new_record_type);
1879 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1880 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1881 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1882 TYPE_SIZE_UNIT (new_record_type)
1883 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1885 /* Now scan all the fields, replacing each field with a new field
1886 corresponding to the new encoding. */
1887 for (old_field = TYPE_FIELDS (record_type); old_field;
1888 old_field = DECL_CHAIN (old_field))
1890 tree field_type = TREE_TYPE (old_field);
1891 tree field_name = DECL_NAME (old_field);
1892 tree curpos = bit_position (old_field);
1893 tree pos, new_field;
1894 bool var = false;
1895 unsigned int align = 0;
1897 /* We're going to do some pattern matching below so remove as many
1898 conversions as possible. */
1899 curpos = remove_conversions (curpos, true);
1901 /* See how the position was modified from the last position.
1903 There are two basic cases we support: a value was added
1904 to the last position or the last position was rounded to
1905 a boundary and then something was added. Check for the
1906 first case first. If not, see if there is any evidence
1907 of rounding. If so, round the last position and retry.
1909 If this is a union, the position can be taken as zero. */
1910 if (TREE_CODE (new_record_type) == UNION_TYPE)
1911 pos = bitsize_zero_node;
1912 else
1913 pos = compute_related_constant (curpos, last_pos);
1915 if (!pos
1916 && TREE_CODE (curpos) == MULT_EXPR
1917 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
1919 tree offset = TREE_OPERAND (curpos, 0);
1920 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
1921 align = scale_by_factor_of (offset, align);
1922 last_pos = round_up (last_pos, align);
1923 pos = compute_related_constant (curpos, last_pos);
1925 else if (!pos
1926 && TREE_CODE (curpos) == PLUS_EXPR
1927 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
1928 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1929 && tree_fits_uhwi_p
1930 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
1932 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
1933 unsigned HOST_WIDE_INT addend
1934 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
1935 align
1936 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
1937 align = scale_by_factor_of (offset, align);
1938 align = MIN (align, addend & -addend);
1939 last_pos = round_up (last_pos, align);
1940 pos = compute_related_constant (curpos, last_pos);
1942 else if (potential_alignment_gap (prev_old_field, old_field, pos))
1944 align = TYPE_ALIGN (field_type);
1945 last_pos = round_up (last_pos, align);
1946 pos = compute_related_constant (curpos, last_pos);
1949 /* If we can't compute a position, set it to zero.
1951 ??? We really should abort here, but it's too much work
1952 to get this correct for all cases. */
1953 if (!pos)
1954 pos = bitsize_zero_node;
1956 /* If this type is variable-sized, make a pointer type for it and
1957 indicate the indirection. Beware that the debug
1958 back-end may adjust the position computed above according
1959 to the alignment of the field type, i.e. the pointer type
1960 in this case, if we don't preventively counter that. */
1961 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1963 field_type = build_pointer_type (field_type);
1964 if (align != 0 && TYPE_ALIGN (field_type) > align)
1966 field_type = copy_node (field_type);
1967 TYPE_ALIGN (field_type) = align;
1969 var = true;
1972 /* Make a new field name, if necessary. */
1973 if (var || align != 0)
1975 char suffix[16];
1977 if (align != 0)
1978 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1979 align / BITS_PER_UNIT);
1980 else
1981 strcpy (suffix, "XVL");
1983 field_name = concat_name (field_name, suffix);
1986 new_field
1987 = create_field_decl (field_name, field_type, new_record_type,
1988 DECL_SIZE (old_field), pos, 0, 0);
1989 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1990 TYPE_FIELDS (new_record_type) = new_field;
1992 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1993 zero. The only time it's not the last field of the record
1994 is when there are other components at fixed positions after
1995 it (meaning there was a rep clause for every field) and we
1996 want to be able to encode them. */
1997 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1998 (TREE_CODE (TREE_TYPE (old_field))
1999 == QUAL_UNION_TYPE)
2000 ? bitsize_zero_node
2001 : DECL_SIZE (old_field));
2002 prev_old_field = old_field;
2005 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
2007 add_parallel_type (record_type, new_record_type);
2011 /* Utility function of above to merge LAST_SIZE, the previous size of a record
2012 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2013 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2014 replace a value of zero with the old size. If HAS_REP is true, we take the
2015 MAX of the end position of this field with LAST_SIZE. In all other cases,
2016 we use FIRST_BIT plus SIZE. Return an expression for the size. */
2018 static tree
2019 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
2020 bool has_rep)
2022 tree type = TREE_TYPE (last_size);
2023 tree new_size;
2025 if (!special || TREE_CODE (size) != COND_EXPR)
2027 new_size = size_binop (PLUS_EXPR, first_bit, size);
2028 if (has_rep)
2029 new_size = size_binop (MAX_EXPR, last_size, new_size);
2032 else
2033 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
2034 integer_zerop (TREE_OPERAND (size, 1))
2035 ? last_size : merge_sizes (last_size, first_bit,
2036 TREE_OPERAND (size, 1),
2037 1, has_rep),
2038 integer_zerop (TREE_OPERAND (size, 2))
2039 ? last_size : merge_sizes (last_size, first_bit,
2040 TREE_OPERAND (size, 2),
2041 1, has_rep));
2043 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
2044 when fed through substitute_in_expr) into thinking that a constant
2045 size is not constant. */
2046 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
2047 new_size = TREE_OPERAND (new_size, 0);
2049 return new_size;
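/* For instance, with LAST_SIZE = 32, FIRST_BIT = 32, SIZE = 16 and both
   SPECIAL and HAS_REP false, the result is 32 + 16 = 48 bits, i.e. the
   end position of the new field.  With HAS_REP true, the result is
   MAX (LAST_SIZE, 48), so a field placed before the current end of
   record by a rep clause cannot shrink the size.  */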
2052 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2053 related by the addition of a constant. Return that constant if so. */
2055 static tree
2056 compute_related_constant (tree op0, tree op1)
2058 tree op0_var, op1_var;
2059 tree op0_con = split_plus (op0, &op0_var);
2060 tree op1_con = split_plus (op1, &op1_var);
2061 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
2063 if (operand_equal_p (op0_var, op1_var, 0))
2064 return result;
2065 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
2066 return result;
2067 else
2068 return 0;
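/* For example, if OP0 is VAR + 16 and OP1 is VAR + 8, the variable
   parts match and the constant 8 is returned; if the variable parts
   differ, 0 is returned to signal failure.  */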
2071 /* Utility function of above to split a tree IN, which may be a sum, into a
2072 constant part, which is returned, and a variable part, which is stored
2073 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2074 bitsizetype. */
2076 static tree
2077 split_plus (tree in, tree *pvar)
2079 /* Strip conversions in order to ease the tree traversal and maximize the
2080 potential for constant or plus/minus discovery. We need to be careful
2081 to always return and set *pvar to bitsizetype trees, but it's worth
2082 the effort. */
2083 in = remove_conversions (in, false);
2085 *pvar = convert (bitsizetype, in);
2087 if (TREE_CODE (in) == INTEGER_CST)
2089 *pvar = bitsize_zero_node;
2090 return convert (bitsizetype, in);
2092 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2094 tree lhs_var, rhs_var;
2095 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2096 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2098 if (lhs_var == TREE_OPERAND (in, 0)
2099 && rhs_var == TREE_OPERAND (in, 1))
2100 return bitsize_zero_node;
2102 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2103 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2105 else
2106 return bitsize_zero_node;
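/* For example, splitting VAR + 24 returns the constant 24 and sets
   *PVAR to VAR, while splitting a lone VAR returns bitsize_zero_node
   and sets *PVAR to VAR itself.  */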
2109 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
2110 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
2111 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
2112 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
2113 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
2114 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
2115 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
2116 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
2117 invisible reference. */
2119 tree
2120 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
2121 bool return_unconstrained_p, bool return_by_direct_ref_p,
2122 bool return_by_invisi_ref_p)
2124 /* A list of the data type nodes of the subprogram formal parameters.
2125 This list is generated by traversing the input list of PARM_DECL
2126 nodes. */
2127 vec<tree, va_gc> *param_type_list = NULL;
2128 tree t, type;
2130 for (t = param_decl_list; t; t = DECL_CHAIN (t))
2131 vec_safe_push (param_type_list, TREE_TYPE (t));
2133 type = build_function_type_vec (return_type, param_type_list);
2135 /* TYPE may have been shared since GCC hashes types. If it has a different
2136 CICO_LIST, make a copy. Likewise for the various flags. */
2137 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
2138 return_by_direct_ref_p, return_by_invisi_ref_p))
2140 type = copy_type (type);
2141 TYPE_CI_CO_LIST (type) = cico_list;
2142 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
2143 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
2144 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
2147 return type;
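/* Minimal sketch (a hypothetical parameterless procedure): no formals,
   no copy-in/copy-out list and none of the return flags set.

     tree proc_type
       = create_subprog_type (void_type_node, NULL_TREE, NULL_TREE,
                              false, false, false);

   Since build_function_type_vec hashes types, PROC_TYPE may be shared
   with other subprogram types carrying the same flags.  */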
2150 /* Return a copy of TYPE but safe to modify in any way. */
2152 tree
2153 copy_type (tree type)
2155 tree new_type = copy_node (type);
2157 /* Unshare the language-specific data. */
2158 if (TYPE_LANG_SPECIFIC (type))
2160 TYPE_LANG_SPECIFIC (new_type) = NULL;
2161 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2164 /* And the contents of the language-specific slot if needed. */
2165 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2166 && TYPE_RM_VALUES (type))
2168 TYPE_RM_VALUES (new_type) = NULL_TREE;
2169 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2170 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2171 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2174 /* copy_node clears this field instead of copying it, because it is
2175 aliased with TREE_CHAIN. */
2176 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
2178 TYPE_POINTER_TO (new_type) = 0;
2179 TYPE_REFERENCE_TO (new_type) = 0;
2180 TYPE_MAIN_VARIANT (new_type) = new_type;
2181 TYPE_NEXT_VARIANT (new_type) = 0;
2183 return new_type;
2186 /* Return a subtype of sizetype with range MIN to MAX and whose
2187 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2188 of the associated TYPE_DECL. */
2190 tree
2191 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2193 /* First build a type for the desired range. */
2194 tree type = build_nonshared_range_type (sizetype, min, max);
2196 /* Then set the index type. */
2197 SET_TYPE_INDEX_TYPE (type, index);
2198 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2200 return type;
2203 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2204 sizetype is used. */
2206 tree
2207 create_range_type (tree type, tree min, tree max)
2209 tree range_type;
2211 if (type == NULL_TREE)
2212 type = sizetype;
2214 /* First build a type with the base range. */
2215 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2216 TYPE_MAX_VALUE (type));
2218 /* Then set the actual range. */
2219 SET_TYPE_RM_MIN_VALUE (range_type, min);
2220 SET_TYPE_RM_MAX_VALUE (range_type, max);
2222 return range_type;
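/* Minimal sketch: a sizetype-based subtype constrained to 1 .. 10.

     tree r = create_range_type (NULL_TREE, size_int (1), size_int (10));

   The base range of R stays that of sizetype; only the RM bounds are
   narrowed, which matches the Ada view of a subtype.  */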
2225 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2226 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2227 its data type. */
2229 tree
2230 create_type_stub_decl (tree type_name, tree type)
2232 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2233 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2234 emitted in DWARF. */
2235 tree type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
2236 DECL_ARTIFICIAL (type_decl) = 1;
2237 TYPE_ARTIFICIAL (type) = 1;
2238 return type_decl;
2241 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
2242 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
2243 is a declaration that was generated by the compiler. DEBUG_INFO_P is
2244 true if we need to write debug information about this type. GNAT_NODE
2245 is used for the position of the decl. */
2247 tree
2248 create_type_decl (tree type_name, tree type, bool artificial_p,
2249 bool debug_info_p, Node_Id gnat_node)
2251 enum tree_code code = TREE_CODE (type);
2252 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2253 tree type_decl;
2255 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2256 gcc_assert (!TYPE_IS_DUMMY_P (type));
2258 /* If the type hasn't been named yet, we're naming it; preserve an existing
2259 TYPE_STUB_DECL that has been attached to it for some purpose. */
2260 if (!named && TYPE_STUB_DECL (type))
2262 type_decl = TYPE_STUB_DECL (type);
2263 DECL_NAME (type_decl) = type_name;
2265 else
2266 type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
2268 DECL_ARTIFICIAL (type_decl) = artificial_p;
2269 TYPE_ARTIFICIAL (type) = artificial_p;
2271 /* Add this decl to the current binding level. */
2272 gnat_pushdecl (type_decl, gnat_node);
2274 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
2275 This causes the name to be also viewed as a "tag" by the debug
2276 back-end, with the advantage that no DW_TAG_typedef is emitted
2277 for artificial "tagged" types in DWARF. */
2278 if (!named)
2279 TYPE_STUB_DECL (type) = type_decl;
2281 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2282 back-end doesn't support, and for others if we don't need to. */
2283 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2284 DECL_IGNORED_P (type_decl) = 1;
2286 return type_decl;
2289 /* Return a VAR_DECL or CONST_DECL node.
2291 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
2292 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
2293 the GCC tree for an optional initial expression; NULL_TREE if none.
2295 CONST_FLAG is true if this variable is constant, in which case we might
2296 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2298 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2299 definition to be made visible outside of the current compilation unit, for
2300 instance variable definitions in a package specification.
2302 EXTERN_FLAG is true when processing an external variable declaration (as
2303 opposed to a definition: no storage is to be allocated for the variable).
2305 STATIC_FLAG is only relevant when not at top level. In that case
2306 it indicates whether to always allocate storage to the variable.
2308 GNAT_NODE is used for the position of the decl. */
2310 tree
2311 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
2312 bool const_flag, bool public_flag, bool extern_flag,
2313 bool static_flag, bool const_decl_allowed_p,
2314 struct attrib *attr_list, Node_Id gnat_node)
2316 /* Whether the object has static storage duration, either explicitly or by
2317 virtue of being declared at the global level. */
2318 const bool static_storage = static_flag || global_bindings_p ();
2320 /* Whether the initializer is constant: for an external object or an object
2321 with static storage duration, we check that the initializer is a valid
2322 constant expression for initializing a static variable; otherwise, we
2323 only check that it is constant. */
2324 const bool init_const
2325 = (var_init
2326 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
2327 && (extern_flag || static_storage
2328 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
2329 != NULL_TREE
2330 : TREE_CONSTANT (var_init)));
2332 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2333 case the initializer may be used in lieu of the DECL node (as done in
2334 Identifier_to_gnu). This is useful to prevent the need of elaboration
2335 code when an identifier for which such a DECL is made is in turn used
2336 as an initializer. We used to rely on CONST_DECL vs VAR_DECL for this,
2337 but extra constraints apply to this choice (see below) and they are not
2338 relevant to the distinction we wish to make. */
2339 const bool constant_p = const_flag && init_const;
2341 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2342 and may be used for scalars in general but not for aggregates. */
2343 tree var_decl
2344 = build_decl (input_location,
2345 (constant_p && const_decl_allowed_p
2346 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
2347 var_name, type);
2349 /* If this is external, throw away any initializations (they will be done
2350 elsewhere) unless this is a constant for which we would like to remain
2351 able to get the initializer. If we are defining a global here, leave a
2352 constant initialization and save any variable elaborations for the
2353 elaboration routine. If we are just annotating types, throw away the
2354 initialization if it isn't a constant. */
2355 if ((extern_flag && !constant_p)
2356 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
2357 var_init = NULL_TREE;
2359 /* At the global level, a non-constant initializer generates elaboration
2360 statements. Check that such statements are allowed, that is to say,
2361 not violating a No_Elaboration_Code restriction. */
2362 if (var_init && !init_const && global_bindings_p ())
2363 Check_Elaboration_Code_Allowed (gnat_node);
2365 DECL_INITIAL (var_decl) = var_init;
2366 TREE_READONLY (var_decl) = const_flag;
2367 DECL_EXTERNAL (var_decl) = extern_flag;
2368 TREE_CONSTANT (var_decl) = constant_p;
2370 /* We need to allocate static storage for an object with static storage
2371 duration if it isn't external. */
2372 TREE_STATIC (var_decl) = !extern_flag && static_storage;
2374 /* The object is public if it is external or if it is declared public
2375 and has static storage duration. */
2376 TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);
2378 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2379 try to fiddle with DECL_COMMON. However, on platforms that don't
2380 support global BSS sections, uninitialized global variables would
2381 go in DATA instead, thus increasing the size of the executable. */
2382 if (!flag_no_common
2383 && TREE_CODE (var_decl) == VAR_DECL
2384 && TREE_PUBLIC (var_decl)
2385 && !have_global_bss_p ())
2386 DECL_COMMON (var_decl) = 1;
2388 /* For an external constant whose initializer is not absolute, do not emit
2389 debug info. In DWARF this would mean a global relocation in a read-only
2390 section which runs afoul of the PE-COFF run-time relocation mechanism. */
2391 if (extern_flag
2392 && constant_p
2393 && var_init
2394 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
2395 != null_pointer_node)
2396 DECL_IGNORED_P (var_decl) = 1;
2398 if (TYPE_VOLATILE (type))
2399 TREE_SIDE_EFFECTS (var_decl) = TREE_THIS_VOLATILE (var_decl) = 1;
2401 if (TREE_SIDE_EFFECTS (var_decl))
2402 TREE_ADDRESSABLE (var_decl) = 1;
2404 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2405 if (TREE_CODE (var_decl) == VAR_DECL)
2406 process_attributes (&var_decl, &attr_list, true, gnat_node);
2408 /* Add this decl to the current binding level. */
2409 gnat_pushdecl (var_decl, gnat_node);
2411 if (TREE_CODE (var_decl) == VAR_DECL)
2413 if (asm_name)
2414 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
2416 if (global_bindings_p ())
2417 rest_of_decl_compilation (var_decl, true, 0);
2420 return var_decl;
2423 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2425 static bool
2426 aggregate_type_contains_array_p (tree type)
2428 switch (TREE_CODE (type))
2430 case RECORD_TYPE:
2431 case UNION_TYPE:
2432 case QUAL_UNION_TYPE:
2434 tree field;
2435 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2436 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2437 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2438 return true;
2439 return false;
2442 case ARRAY_TYPE:
2443 return true;
2445 default:
2446 gcc_unreachable ();
2450 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
2451 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
2452 nonzero, it is the specified size of the field. If POS is nonzero, it is
2453 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
2454 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2455 means we are allowed to take the address of the field; if it is negative,
2456 we should not make a bitfield, which is used by make_aligning_type. */
2458 tree
2459 create_field_decl (tree field_name, tree field_type, tree record_type,
2460 tree size, tree pos, int packed, int addressable)
2462 tree field_decl = build_decl (input_location,
2463 FIELD_DECL, field_name, field_type);
2465 DECL_CONTEXT (field_decl) = record_type;
2466 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
2468 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2469 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2470 Likewise for an aggregate without specified position that contains an
2471 array, because in this case slices of variable length of this array
2472 must be handled by GCC and variable-sized objects need to be aligned
2473 to at least a byte boundary. */
2474 if (packed && (TYPE_MODE (field_type) == BLKmode
2475 || (!pos
2476 && AGGREGATE_TYPE_P (field_type)
2477 && aggregate_type_contains_array_p (field_type))))
2478 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
2480 /* If a size is specified, use it. Otherwise, if the record type is packed
2481 compute a size to use, which may differ from the object's natural size.
2482 We always set a size in this case to trigger the checks for bitfield
2483 creation below, which is typically required when no position has been
2484 specified. */
2485 if (size)
2486 size = convert (bitsizetype, size);
2487 else if (packed == 1)
2489 size = rm_size (field_type);
2490 if (TYPE_MODE (field_type) == BLKmode)
2491 size = round_up (size, BITS_PER_UNIT);
2494 /* If ADDRESSABLE allows it, make a bitfield when a size is specified,
2495 for two reasons: first, if the size differs from the natural size;
2496 second, if the alignment is insufficient. There are a number of
2497 ways the latter can be true.
2499 We never make a bitfield if the type of the field has a nonconstant size,
2500 because no such entity requiring bitfield operations should reach here.
2502 We do *preventively* make a bitfield when there might be the need for it
2503 but we don't have all the necessary information to decide, as is the case
2504 for a field with no specified position in a packed record.
2506 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2507 in layout_decl or finish_record_type to clear the bit_field indication if
2508 it is in fact not needed. */
2509 if (addressable >= 0
2510 && size
2511 && TREE_CODE (size) == INTEGER_CST
2512 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
2513 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
2514 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
2515 || packed
2516 || (TYPE_ALIGN (record_type) != 0
2517 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
2519 DECL_BIT_FIELD (field_decl) = 1;
2520 DECL_SIZE (field_decl) = size;
2521 if (!packed && !pos)
2523 if (TYPE_ALIGN (record_type) != 0
2524 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
2525 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
2526 else
2527 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2531 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2533 /* Bump the alignment if need be, either for bitfield/packing purposes or
2534 to satisfy the type requirements if no such consideration applies. When
2535 we get the alignment from the type, indicate if this is from an explicit
2536 user request, which prevents stor-layout from lowering it later on. */
2538 unsigned int bit_align
2539 = (DECL_BIT_FIELD (field_decl) ? 1
2540 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
2542 if (bit_align > DECL_ALIGN (field_decl))
2543 DECL_ALIGN (field_decl) = bit_align;
2544 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
2546 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2547 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
2551 if (pos)
2553 /* We need to pass in the alignment the DECL is known to have.
2554 This is the lowest-order bit set in POS, but no more than
2555 the alignment of the record, if one is specified. Note
2556 that an alignment of 0 is taken as infinite. */
2557 unsigned int known_align;
2559 if (tree_fits_uhwi_p (pos))
2560 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
2561 else
2562 known_align = BITS_PER_UNIT;
2564 if (TYPE_ALIGN (record_type)
2565 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2566 known_align = TYPE_ALIGN (record_type);
2568 layout_decl (field_decl, known_align);
2569 SET_DECL_OFFSET_ALIGN (field_decl,
2570 tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
2571 : BITS_PER_UNIT);
2572 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2573 &DECL_FIELD_BIT_OFFSET (field_decl),
2574 DECL_OFFSET_ALIGN (field_decl), pos);
2577 /* In addition to what our caller says, claim the field is addressable if we
2578 know that its type is not suitable.
2580 The field may also be "technically" nonaddressable, meaning that even if
2581 we attempt to take the field's address we will actually get the address
2582 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2583 value we have at this point is not accurate enough, so we don't account
2584 for this here and let finish_record_type decide. */
2585 if (!addressable && !type_for_nonaliased_component_p (field_type))
2586 addressable = 1;
2588 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2590 return field_decl;
2593 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
2594 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
2595 (either an In parameter or an address of a pass-by-ref parameter). */
2597 tree
2598 create_param_decl (tree param_name, tree param_type, bool readonly)
2600 tree param_decl = build_decl (input_location,
2601 PARM_DECL, param_name, param_type);
2603 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2604 can lead to various ABI violations. */
2605 if (targetm.calls.promote_prototypes (NULL_TREE)
2606 && INTEGRAL_TYPE_P (param_type)
2607 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
2609 /* We have to be careful about biased types here. Make a subtype
2610 of integer_type_node with the proper biasing. */
2611 if (TREE_CODE (param_type) == INTEGER_TYPE
2612 && TYPE_BIASED_REPRESENTATION_P (param_type))
2614 tree subtype
2615 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2616 TREE_TYPE (subtype) = integer_type_node;
2617 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2618 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
2619 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
2620 param_type = subtype;
2622 else
2623 param_type = integer_type_node;
2626 DECL_ARG_TYPE (param_decl) = param_type;
2627 TREE_READONLY (param_decl) = readonly;
2628 return param_decl;
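/* Usage sketch (hypothetical In parameter of an integer type):

     tree parm
       = create_param_decl (get_identifier ("x"), integer_type_node, true);

   Had the type been narrower than int on a target that promotes
   prototypes, DECL_ARG_TYPE would have been widened accordingly.  */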
2631 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2632 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2633 changed. GNAT_NODE is used for the position of error messages. */
2635 void
2636 process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2637 Node_Id gnat_node)
2639 struct attrib *attr;
2641 for (attr = *attr_list; attr; attr = attr->next)
2642 switch (attr->type)
2644 case ATTR_MACHINE_ATTRIBUTE:
2645 Sloc_to_locus (Sloc (gnat_node), &input_location);
2646 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2647 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
2648 break;
2650 case ATTR_LINK_ALIAS:
2651 if (!DECL_EXTERNAL (*node))
2653 TREE_STATIC (*node) = 1;
2654 assemble_alias (*node, attr->name);
2656 break;
2658 case ATTR_WEAK_EXTERNAL:
2659 if (SUPPORTS_WEAK)
2660 declare_weak (*node);
2661 else
2662 post_error ("?weak declarations not supported on this target",
2663 attr->error_point);
2664 break;
2666 case ATTR_LINK_SECTION:
2667 if (targetm_common.have_named_sections)
2669 set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
2670 DECL_COMMON (*node) = 0;
2672 else
2673 post_error ("?section attributes are not supported for this target",
2674 attr->error_point);
2675 break;
2677 case ATTR_LINK_CONSTRUCTOR:
2678 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2679 TREE_USED (*node) = 1;
2680 break;
2682 case ATTR_LINK_DESTRUCTOR:
2683 DECL_STATIC_DESTRUCTOR (*node) = 1;
2684 TREE_USED (*node) = 1;
2685 break;
2687 case ATTR_THREAD_LOCAL_STORAGE:
2688 set_decl_tls_model (*node, decl_default_tls_model (*node));
2689 DECL_COMMON (*node) = 0;
2690 break;
2693 *attr_list = NULL;
2696 /* Record DECL as a global renaming pointer. */
2698 void
2699 record_global_renaming_pointer (tree decl)
2701 gcc_assert (!DECL_LOOP_PARM_P (decl) && DECL_RENAMED_OBJECT (decl));
2702 vec_safe_push (global_renaming_pointers, decl);
2705 /* Invalidate the global renaming pointers that are not constant, lest their
2706 renamed object contain SAVE_EXPRs tied to an elaboration routine. Note
2707 that we should not blindly invalidate everything here because of the need
2708 to propagate constant values through renaming. */
2710 void
2711 invalidate_global_renaming_pointers (void)
2713 unsigned int i;
2714 tree iter;
2716 if (global_renaming_pointers == NULL)
2717 return;
2719 FOR_EACH_VEC_ELT (*global_renaming_pointers, i, iter)
2720 if (!TREE_CONSTANT (DECL_RENAMED_OBJECT (iter)))
2721 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
2723 vec_free (global_renaming_pointers);
2726 /* Return true if VALUE is known to be a multiple of FACTOR, which must be
2727 a power of 2. */
2729 bool
2730 value_factor_p (tree value, HOST_WIDE_INT factor)
2732 if (tree_fits_uhwi_p (value))
2733 return tree_to_uhwi (value) % factor == 0;
2735 if (TREE_CODE (value) == MULT_EXPR)
2736 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2737 || value_factor_p (TREE_OPERAND (value, 1), factor));
2739 return false;
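/* For example, this returns true for the constant 24 with FACTOR 8,
   and for X * 16 with FACTOR 8 since one operand of the product is a
   multiple of 8; for a lone variable it conservatively returns
   false.  */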
2742 /* Return whether GNAT_NODE is a defining identifier for a renaming that comes
2743 from the parameter association for the instantiation of a generic. We do
2744 not want to emit source locations for them: the code generated for their
2745 initialization is likely to disturb debugging. */
2747 bool
2748 renaming_from_generic_instantiation_p (Node_Id gnat_node)
2750 if (Nkind (gnat_node) != N_Defining_Identifier
2751 || !IN (Ekind (gnat_node), Object_Kind)
2752 || Comes_From_Source (gnat_node)
2753 || !Present (Renamed_Object (gnat_node)))
2754 return false;
2756 /* Get the object declaration of the renamed object, if any and if the
2757 renamed object is a mere identifier. */
2758 gnat_node = Renamed_Object (gnat_node);
2759 if (Nkind (gnat_node) != N_Identifier)
2760 return false;
2762 gnat_node = Entity (gnat_node);
2763 if (!Present (Parent (gnat_node)))
2764 return false;
2766 gnat_node = Parent (gnat_node);
2767 return
2768 (Present (gnat_node)
2769 && Nkind (gnat_node) == N_Object_Declaration
2770 && Present (Corresponding_Generic_Association (gnat_node)));
2773 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling it
2774 to be set once GNAT_SCOPE is elaborated. */
2776 static struct deferred_decl_context_node *
2777 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2779 struct deferred_decl_context_node *new_node;
2781 new_node
2782 = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
2783 new_node->decl = decl;
2784 new_node->gnat_scope = gnat_scope;
2785 new_node->force_global = force_global;
2786 new_node->types.create (1);
2787 new_node->next = deferred_decl_context_queue;
2788 deferred_decl_context_queue = new_node;
2789 return new_node;
2792 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling it
2793 to be set from the DECL_CONTEXT computed as part of N as soon as that
2794 context is available. */
2796 static void
2797 add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
2799 n->types.safe_push (type);
2802 /* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
2803 NULL_TREE if it is not available. */
2805 static tree
2806 compute_deferred_decl_context (Entity_Id gnat_scope)
2808 tree context;
2810 if (present_gnu_tree (gnat_scope))
2811 context = get_gnu_tree (gnat_scope);
2812 else
2813 return NULL_TREE;
2815 if (TREE_CODE (context) == TYPE_DECL)
2817 const tree context_type = TREE_TYPE (context);
2819 /* Skip dummy types: only the final ones can appear in the context
2820 chain. */
2821 if (TYPE_DUMMY_P (context_type))
2822 return NULL_TREE;
2824 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
2825 chain. */
2826 else
2827 context = context_type;
2830 return context;
2833 /* Try to process all deferred nodes in the queue. Keep in the queue the ones
2834 that cannot be processed yet; remove the others. If FORCE is true,
2835 process all nodes, using the global context when a node doesn't
2836 have a GNU translation.
2838 void
2839 process_deferred_decl_context (bool force)
2841 struct deferred_decl_context_node **it = &deferred_decl_context_queue;
2842 struct deferred_decl_context_node *node;
2844 while (*it != NULL)
2846 bool processed = false;
2847 tree context = NULL_TREE;
2848 Entity_Id gnat_scope;
2850 node = *it;
2852 /* If FORCE, get the innermost elaborated scope. Otherwise, just try to
2853 get the first scope. */
2854 gnat_scope = node->gnat_scope;
2855 while (Present (gnat_scope))
2857 context = compute_deferred_decl_context (gnat_scope);
2858 if (!force || context != NULL_TREE)
2859 break;
2860 gnat_scope = get_debug_scope (gnat_scope, NULL);
2863 /* Imported declarations must not be in a local context (i.e. not inside
2864 a function). */
2865 if (context != NULL_TREE && node->force_global > 0)
2867 tree ctx = context;
2869 while (ctx != NULL_TREE)
2871 gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
2872 ctx = (DECL_P (ctx))
2873 ? DECL_CONTEXT (ctx)
2874 : TYPE_CONTEXT (ctx);
2878 /* If FORCE, we want to get rid of all nodes in the queue: in case there
2879 was no elaborated scope, use the global context. */
2880 if (force && context == NULL_TREE)
2881 context = get_global_context ();
2883 if (context != NULL_TREE)
2885 tree t;
2886 int i;
2888 DECL_CONTEXT (node->decl) = context;
2890 /* Propagate it to the TYPE_CONTEXT attributes of the requested
2891 ..._TYPE nodes. */
2892 FOR_EACH_VEC_ELT (node->types, i, t)
2894 gnat_set_type_context (t, context);
2896 processed = true;
2899 /* If this node has been successfully processed, remove it from the
2900 queue. Then move to the next node. */
2901 if (processed)
2903 *it = node->next;
2904 node->types.release ();
2905 free (node);
2907 else
2908 it = &node->next;
2913 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
2915 static unsigned int
2916 scale_by_factor_of (tree expr, unsigned int value)
2918 expr = remove_conversions (expr, true);
2920 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
2921 corresponding to the number of trailing zeros of the mask. */
2922 if (TREE_CODE (expr) == BIT_AND_EXPR
2923 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
2925 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
2926 unsigned int i = 0;
2928 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
2930 mask >>= 1;
2931 value *= 2;
2932 i++;
2936 return value;
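/* For example, if EXPR is OFFSET & -8, a mask with three trailing zero
   bits, and VALUE is 1, the result is 8 because the expression is then
   known to be a multiple of 8.  */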
2939 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
2940 unless we can prove these two fields are laid out in such a way that no gap
2941 exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
2942 is the distance in bits between the end of PREV_FIELD and the starting
2943 position of CURR_FIELD. It is ignored if null. */
2945 static bool
2946 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
2948 /* If this is the first field of the record, there cannot be any gap. */
2949 if (!prev_field)
2950 return false;
2952 /* If the previous field is a union type, then return false: The only
2953 time when such a field is not the last field of the record is when
2954 there are other components at fixed positions after it (meaning there
2955 was a rep clause for every field), in which case we don't want the
2956 alignment constraint to override them. */
2957 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
2958 return false;
2960 /* If the distance between the end of prev_field and the beginning of
2961 curr_field is constant, then there is a gap if the value of this
2962 constant is not null. */
2963 if (offset && tree_fits_uhwi_p (offset))
2964 return !integer_zerop (offset);
2966 /* If the size and position of the previous field are constant,
2967 then check the sum of this size and position. There will be a gap
2968 iff it is not a multiple of the current field alignment.
2969 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
2970 && tree_fits_uhwi_p (bit_position (prev_field)))
2971 return ((tree_to_uhwi (bit_position (prev_field))
2972 + tree_to_uhwi (DECL_SIZE (prev_field)))
2973 % DECL_ALIGN (curr_field) != 0);
2975 /* If both the position and size of the previous field are multiples
2976 of the current field alignment, there cannot be any gap. */
2977 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
2978 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
2979 return false;
2981 /* Fall back to returning that there may be a potential gap. */
2982 return true;
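/* For example, if PREV_FIELD is a 32-bit field at constant bit
   position 0 and CURR_FIELD requires 64-bit alignment, then 0 + 32 is
   not a multiple of 64 and a potential gap is reported.  */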
2985 /* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
2986 of the decl. */
2988 tree
2989 create_label_decl (tree label_name, Node_Id gnat_node)
2991 tree label_decl
2992 = build_decl (input_location, LABEL_DECL, label_name, void_type_node);
2994 DECL_MODE (label_decl) = VOIDmode;
2996 /* Add this decl to the current binding level. */
2997 gnat_pushdecl (label_decl, gnat_node);
2999 return label_decl;
3002 /* Return a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
3003 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
3004 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
3005 PARM_DECL nodes chained through the DECL_CHAIN field).
3007 INLINE_STATUS, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
3008 used to set the appropriate fields in the FUNCTION_DECL. GNAT_NODE is
3009 used for the position of the decl. */
3011 tree
3012 create_subprog_decl (tree subprog_name, tree asm_name, tree subprog_type,
3013 tree param_decl_list, enum inline_status_t inline_status,
3014 bool public_flag, bool extern_flag, bool artificial_flag,
3015 struct attrib *attr_list, Node_Id gnat_node)
3017 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
3018 subprog_type);
3019 tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
3020 TREE_TYPE (subprog_type));
3021 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
3023 DECL_ARTIFICIAL (subprog_decl) = artificial_flag;
3024 DECL_EXTERNAL (subprog_decl) = extern_flag;
3026 switch (inline_status)
3028 case is_suppressed:
3029 DECL_UNINLINABLE (subprog_decl) = 1;
3030 break;
3032 case is_disabled:
3033 break;
3035 case is_required:
3036 if (Back_End_Inlining)
3037 decl_attributes (&subprog_decl,
3038 tree_cons (get_identifier ("always_inline"),
3039 NULL_TREE, NULL_TREE),
3040 ATTR_FLAG_TYPE_IN_PLACE);
3042 /* ... fall through ... */
3044 case is_enabled:
3045 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
3046 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_flag;
3047 break;
3049 default:
3050 gcc_unreachable ();
3053 TREE_PUBLIC (subprog_decl) = public_flag;
3054 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
3055 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
3056 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
3058 DECL_ARTIFICIAL (result_decl) = 1;
3059 DECL_IGNORED_P (result_decl) = 1;
3060 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
3061 DECL_RESULT (subprog_decl) = result_decl;
3063 if (asm_name)
3065 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
3067 /* The expand_main_function circuitry expects "main_identifier_node" to
3068 designate the DECL_NAME of the 'main' entry point, in turn expected
3069 to be declared as the "main" function literally by default. Ada
3070 program entry points are typically declared with a different name
3071 within the binder generated file, exported as 'main' to satisfy the
3072 system expectations. Force main_identifier_node in this case. */
3073 if (asm_name == main_identifier_node)
3074 DECL_NAME (subprog_decl) = main_identifier_node;
3077 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
3079 /* Add this decl to the current binding level. */
3080 gnat_pushdecl (subprog_decl, gnat_node);
3082 /* Output the assembler code and/or RTL for the declaration. */
3083 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
3085 return subprog_decl;
3088 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3089 body. This routine needs to be invoked before processing the declarations
3090 appearing in the subprogram. */
3092 void
3093 begin_subprog_body (tree subprog_decl)
3095 tree param_decl;
3097 announce_function (subprog_decl);
3099 /* This function is being defined. */
3100 TREE_STATIC (subprog_decl) = 1;
3102 current_function_decl = subprog_decl;
3104 /* Enter a new binding level and show that all the parameters belong to
3105 this function. */
3106 gnat_pushlevel ();
3108 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
3109 param_decl = DECL_CHAIN (param_decl))
3110 DECL_CONTEXT (param_decl) = subprog_decl;
3112 make_decl_rtl (subprog_decl);
3115 /* Finish translating the current subprogram and set its BODY. */
3117 void
3118 end_subprog_body (tree body)
3120 tree fndecl = current_function_decl;
3122 /* Attach the BLOCK for this level to the function and pop the level. */
3123 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
3124 DECL_INITIAL (fndecl) = current_binding_level->block;
3125 gnat_poplevel ();
3127 /* Mark the RESULT_DECL as being in this subprogram. */
3128 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
3130 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
3131 if (TREE_CODE (body) == BIND_EXPR)
3133 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
3134 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
3137 DECL_SAVED_TREE (fndecl) = body;
3139 current_function_decl = decl_function_context (fndecl);
3142 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
3144 void
3145 rest_of_subprog_body_compilation (tree subprog_decl)
3147 /* We cannot track the location of errors past this point. */
3148 error_gnat_node = Empty;
3150 /* If we're only annotating types, don't actually compile this function. */
3151 if (type_annotate_only)
3152 return;
3154 /* Dump functions before gimplification. */
3155 dump_function (TDI_original, subprog_decl);
3157 if (!decl_function_context (subprog_decl))
3158 cgraph_node::finalize_function (subprog_decl, false);
3159 else
3160 /* Register this function with cgraph just far enough to get it
3161 added to our parent's nested function list. */
3162 (void) cgraph_node::get_create (subprog_decl);
3165 tree
3166 gnat_builtin_function (tree decl)
3168 gnat_pushdecl (decl, Empty);
3169 return decl;
3172 /* Return an integer type with the number of bits of precision given by
3173 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3174 it is a signed type. */
3176 tree
3177 gnat_type_for_size (unsigned precision, int unsignedp)
3179 tree t;
3180 char type_name[20];
3182 if (precision <= 2 * MAX_BITS_PER_WORD
3183 && signed_and_unsigned_types[precision][unsignedp])
3184 return signed_and_unsigned_types[precision][unsignedp];
3186 if (unsignedp)
3187 t = make_unsigned_type (precision);
3188 else
3189 t = make_signed_type (precision);
3191 if (precision <= 2 * MAX_BITS_PER_WORD)
3192 signed_and_unsigned_types[precision][unsignedp] = t;
3194 if (!TYPE_NAME (t))
3196 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
3197 TYPE_NAME (t) = get_identifier (type_name);
3200 return t;
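/* For example, gnat_type_for_size (8, 1) returns an unsigned 8-bit
   integer type named "UNSIGNED_8" and caches it, so subsequent calls
   with the same arguments return the same node.  */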
3203 /* Likewise for floating-point types. */
3205 static tree
3206 float_type_for_precision (int precision, machine_mode mode)
3208 tree t;
3209 char type_name[20];
3211 if (float_types[(int) mode])
3212 return float_types[(int) mode];
3214 float_types[(int) mode] = t = make_node (REAL_TYPE);
3215 TYPE_PRECISION (t) = precision;
3216 layout_type (t);
3218 gcc_assert (TYPE_MODE (t) == mode);
3219 if (!TYPE_NAME (t))
3221 sprintf (type_name, "FLOAT_%d", precision);
3222 TYPE_NAME (t) = get_identifier (type_name);
3225 return t;
3228 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
3229 an unsigned type; otherwise a signed type is returned. */
3231 tree
3232 gnat_type_for_mode (machine_mode mode, int unsignedp)
3234 if (mode == BLKmode)
3235 return NULL_TREE;
3237 if (mode == VOIDmode)
3238 return void_type_node;
3240 if (COMPLEX_MODE_P (mode))
3241 return NULL_TREE;
3243 if (SCALAR_FLOAT_MODE_P (mode))
3244 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
3246 if (SCALAR_INT_MODE_P (mode))
3247 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
3249 if (VECTOR_MODE_P (mode))
3251 machine_mode inner_mode = GET_MODE_INNER (mode);
3252 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3253 if (inner_type)
3254 return build_vector_type_for_mode (inner_type, mode);
3257 return NULL_TREE;
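/* Illustrative examples (assumed target modes, not from the original
   source):

     gnat_type_for_mode (SImode, 0)     32-bit signed integer type
     gnat_type_for_mode (DFmode, 0)     64-bit "FLOAT_64" real type
     gnat_type_for_mode (V4SImode, 1)   vector of 4 unsigned 32-bit ints,
                                        if the target supports the mode
     gnat_type_for_mode (BLKmode, 0)    NULL_TREE

   Complex modes are rejected above and yield NULL_TREE as well.  */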
3260 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
3262 tree
3263 gnat_unsigned_type (tree type_node)
3265 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
3267 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3269 type = copy_node (type);
3270 TREE_TYPE (type) = type_node;
3272 else if (TREE_TYPE (type_node)
3273 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3274 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3276 type = copy_node (type);
3277 TREE_TYPE (type) = TREE_TYPE (type_node);
3280 return type;
3283 /* Return the signed version of a TYPE_NODE, a scalar type. */
3285 tree
3286 gnat_signed_type (tree type_node)
3288 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
3290 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3292 type = copy_node (type);
3293 TREE_TYPE (type) = type_node;
3295 else if (TREE_TYPE (type_node)
3296 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3297 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3299 type = copy_node (type);
3300 TREE_TYPE (type) = TREE_TYPE (type_node);
3303 return type;
3306 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
3307 transparently converted to each other. */
3309 int
3310 gnat_types_compatible_p (tree t1, tree t2)
3312 enum tree_code code;
3314 /* This is the default criterion. */
3315 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
3316 return 1;
3318 /* We only check structural equivalence here. */
3319 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
3320 return 0;
3322 /* Vector types are also compatible if they have the same number of subparts
3323 and the same form of (scalar) element type. */
3324 if (code == VECTOR_TYPE
3325 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
3326 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3327 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
3328 return 1;
3330 /* Array types are also compatible if they are constrained and have the same
3331 domain(s) and the same component type. */
3332 if (code == ARRAY_TYPE
3333 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
3334 || (TYPE_DOMAIN (t1)
3335 && TYPE_DOMAIN (t2)
3336 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
3337 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
3338 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
3339 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
3340 && (TREE_TYPE (t1) == TREE_TYPE (t2)
3341 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
3342 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
3343 return 1;
3345 return 0;
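/* Illustrative case (not part of the original source): two array
   types built independently for

     type A1 is array (1 .. 10) of Integer;
     subtype A2 is A1;

   may be distinct nodes, yet they have equal constant domain bounds
   and the same component type, so the ARRAY_TYPE test above reports
   them compatible and a conversion between them can be done with a
   simple VIEW_CONVERT_EXPR (see gnat_useless_type_conversion).  */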
3348 /* Return true if EXPR is a useless type conversion. */
3350 bool
3351 gnat_useless_type_conversion (tree expr)
3353 if (CONVERT_EXPR_P (expr)
3354 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3355 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3356 return gnat_types_compatible_p (TREE_TYPE (expr),
3357 TREE_TYPE (TREE_OPERAND (expr, 0)));
3359 return false;
3362 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3364 bool
3365 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3366 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3368 return TYPE_CI_CO_LIST (t) == cico_list
3369 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3370 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3371 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3374 /* EXP is an expression for the size of an object. If this size contains
3375 discriminant references, replace them with the maximum (if MAX_P) or
3376 minimum (if !MAX_P) possible value of the discriminant. */
3378 tree
3379 max_size (tree exp, bool max_p)
3381 enum tree_code code = TREE_CODE (exp);
3382 tree type = TREE_TYPE (exp);
3384 switch (TREE_CODE_CLASS (code))
3386 case tcc_declaration:
3387 case tcc_constant:
3388 return exp;
3390 case tcc_vl_exp:
3391 if (code == CALL_EXPR)
3393 tree t, *argarray;
3394 int n, i;
3396 t = maybe_inline_call_in_expr (exp);
3397 if (t)
3398 return max_size (t, max_p);
3400 n = call_expr_nargs (exp);
3401 gcc_assert (n > 0);
3402 argarray = XALLOCAVEC (tree, n);
3403 for (i = 0; i < n; i++)
3404 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3405 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3407 break;
3409 case tcc_reference:
3410 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3411 modify. Otherwise, we treat it like a variable. */
3412 if (CONTAINS_PLACEHOLDER_P (exp))
3414 tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
3415 tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
3416 return max_size (convert (get_base_type (val_type), val), true);
3419 return exp;
3421 case tcc_comparison:
3422 return max_p ? size_one_node : size_zero_node;
3424 case tcc_unary:
3425 if (code == NON_LVALUE_EXPR)
3426 return max_size (TREE_OPERAND (exp, 0), max_p);
3428 return fold_build1 (code, type,
3429 max_size (TREE_OPERAND (exp, 0),
3430 code == NEGATE_EXPR ? !max_p : max_p));
3432 case tcc_binary:
3434 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
3435 tree rhs = max_size (TREE_OPERAND (exp, 1),
3436 code == MINUS_EXPR ? !max_p : max_p);
3438 /* Special-case wanting the maximum value of a MIN_EXPR.
3439 In that case, if one side overflows, return the other. */
3440 if (max_p && code == MIN_EXPR)
3442 if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
3443 return lhs;
3445 if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
3446 return rhs;
3449 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3450 overflowing and the RHS a variable. */
3451 if ((code == MINUS_EXPR || code == PLUS_EXPR)
3452 && TREE_CODE (lhs) == INTEGER_CST
3453 && TREE_OVERFLOW (lhs)
3454 && !TREE_CONSTANT (rhs))
3455 return lhs;
3457 return size_binop (code, lhs, rhs);
3460 case tcc_expression:
3461 switch (TREE_CODE_LENGTH (code))
3463 case 1:
3464 if (code == SAVE_EXPR)
3465 return exp;
3467 return fold_build1 (code, type,
3468 max_size (TREE_OPERAND (exp, 0), max_p));
3470 case 2:
3471 if (code == COMPOUND_EXPR)
3472 return max_size (TREE_OPERAND (exp, 1), max_p);
3474 return fold_build2 (code, type,
3475 max_size (TREE_OPERAND (exp, 0), max_p),
3476 max_size (TREE_OPERAND (exp, 1), max_p));
3478 case 3:
3479 if (code == COND_EXPR)
3480 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
3481 max_size (TREE_OPERAND (exp, 1), max_p),
3482 max_size (TREE_OPERAND (exp, 2), max_p));
3484 default:
3485 break;
3488 /* Other tree classes cannot happen. */
3489 default:
3490 break;
3493 gcc_unreachable ();
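/* Worked example (illustrative): for the discriminated record

     type Buf (Len : Natural := 0) is record
       Data : String (1 .. Len);
     end record;

   the size of Data contains a discriminant reference that ends up as
   a PLACEHOLDER_EXPR inside a COMPONENT_REF.  Calling max_size with
   MAX_P true hits the tcc_reference case above and substitutes the
   maximum value of the discriminant subtype, yielding an upper bound
   usable for allocating any value of type Buf; MAX_P false gives the
   minimum size instead.  */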
3496 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3497 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3498 Return a constructor for the template. */
3500 tree
3501 build_template (tree template_type, tree array_type, tree expr)
3503 vec<constructor_elt, va_gc> *template_elts = NULL;
3504 tree bound_list = NULL_TREE;
3505 tree field;
3507 while (TREE_CODE (array_type) == RECORD_TYPE
3508 && (TYPE_PADDING_P (array_type)
3509 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3510 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3512 if (TREE_CODE (array_type) == ARRAY_TYPE
3513 || (TREE_CODE (array_type) == INTEGER_TYPE
3514 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3515 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3517 /* First make the list for a CONSTRUCTOR for the template. Go down the
3518 field list of the template instead of the type chain because this
3519 array might be an Ada array of arrays and we can't tell where the
3520 nested arrays stop being the underlying object. */
3522 for (field = TYPE_FIELDS (template_type); field;
3523 (bound_list
3524 ? (bound_list = TREE_CHAIN (bound_list))
3525 : (array_type = TREE_TYPE (array_type))),
3526 field = DECL_CHAIN (DECL_CHAIN (field)))
3528 tree bounds, min, max;
3530 /* If we have a bound list, get the bounds from there. Likewise
3531 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3532 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3533 This will give us a maximum range. */
3534 if (bound_list)
3535 bounds = TREE_VALUE (bound_list);
3536 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3537 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3538 else if (expr && TREE_CODE (expr) == PARM_DECL
3539 && DECL_BY_COMPONENT_PTR_P (expr))
3540 bounds = TREE_TYPE (field);
3541 else
3542 gcc_unreachable ();
3544 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
3545 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
3547 /* If either MIN or MAX involves a PLACEHOLDER_EXPR, we must
3548 substitute it from EXPR. */
3549 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3550 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3552 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
3553 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
3556 return gnat_build_constructor (template_type, template_elts);
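/* Resulting shape (illustrative sketch): for a 2-dimensional array,
   TEMPLATE_TYPE has two pairs of (low bound, high bound) fields,
   along the lines of

     LB0, UB0, LB1, UB1

   and the constructor built above fills each pair from the matching
   index, which is why the loop advances FIELD by two declarations per
   iteration.  */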
3559 /* Return true if TYPE is suitable for the element type of a vector. */
3561 static bool
3562 type_for_vector_element_p (tree type)
3564 machine_mode mode;
3566 if (!INTEGRAL_TYPE_P (type)
3567 && !SCALAR_FLOAT_TYPE_P (type)
3568 && !FIXED_POINT_TYPE_P (type))
3569 return false;
3571 mode = TYPE_MODE (type);
3572 if (GET_MODE_CLASS (mode) != MODE_INT
3573 && !SCALAR_FLOAT_MODE_P (mode)
3574 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3575 return false;
3577 return true;
3580 /* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3581 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3582 attribute declaration and want to issue error messages on failure. */
3584 static tree
3585 build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3587 unsigned HOST_WIDE_INT size_int, inner_size_int;
3588 int nunits;
3590 /* Silently punt on variable sizes. We can't make vector types for them,
3591 we need to ignore them on front-end generated subtypes of unconstrained
3592 base types, and this attribute is for binding implementors, not end
3593 users, so we should never get there from legitimate explicit uses. */
3594 if (!tree_fits_uhwi_p (size))
3595 return NULL_TREE;
3596 size_int = tree_to_uhwi (size);
3598 if (!type_for_vector_element_p (inner_type))
3600 if (attribute)
3601 error ("invalid element type for attribute %qs",
3602 IDENTIFIER_POINTER (attribute));
3603 return NULL_TREE;
3605 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3607 if (size_int % inner_size_int)
3609 if (attribute)
3610 error ("vector size not an integral multiple of component size");
3611 return NULL_TREE;
3614 if (size_int == 0)
3616 if (attribute)
3617 error ("zero vector size");
3618 return NULL_TREE;
3621 nunits = size_int / inner_size_int;
3622 if (nunits & (nunits - 1))
3624 if (attribute)
3625 error ("number of components of vector not a power of two");
3626 return NULL_TREE;
3629 return build_vector_type (inner_type, nunits);
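/* Example of the checks above (illustrative): a 4-byte component with
   SIZE equal to 16 bytes gives nunits == 4, a power of two, hence a
   4-element vector type; SIZE equal to 12 bytes gives nunits == 3,
   which fails the power-of-two test and silently returns NULL_TREE
   unless ATTRIBUTE requests diagnostics.  */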
3632 /* Return a vector type whose representative array type is ARRAY_TYPE, or
3633 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3634 processing the attribute and want to issue error messages on failure. */
3636 static tree
3637 build_vector_type_for_array (tree array_type, tree attribute)
3639 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3640 TYPE_SIZE_UNIT (array_type),
3641 attribute);
3642 if (!vector_type)
3643 return NULL_TREE;
3645 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3646 return vector_type;
3649 /* Build a type to be used to represent an aliased object whose nominal type
3650 is an unconstrained array. This consists of a RECORD_TYPE containing a
3651 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3652 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3653 an arbitrary unconstrained object. Use NAME as the name of the record.
3654 DEBUG_INFO_P is true if we need to write debug information for the type. */
3656 tree
3657 build_unc_object_type (tree template_type, tree object_type, tree name,
3658 bool debug_info_p)
3660 tree decl;
3661 tree type = make_node (RECORD_TYPE);
3662 tree template_field
3663 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3664 NULL_TREE, NULL_TREE, 0, 1);
3665 tree array_field
3666 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3667 NULL_TREE, NULL_TREE, 0, 1);
3669 TYPE_NAME (type) = name;
3670 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3671 DECL_CHAIN (template_field) = array_field;
3672 finish_record_type (type, template_field, 0, true);
3674 /* Declare it now since it will never be declared otherwise. This is
3675 necessary to ensure that its subtrees are properly marked. */
3676 decl = create_type_decl (name, type, true, debug_info_p, Empty);
3678 /* template_type will not be used elsewhere than here, so to keep the debug
3679 info clean and in order to avoid scoping issues, make decl its
3680 context. */
3681 gnat_set_type_context (template_type, decl);
3683 return type;
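/* Layout of the result (illustrative sketch):

     RECORD_TYPE NAME
       BOUNDS : TEMPLATE_TYPE     the bounds come first
       ARRAY  : OBJECT_TYPE       then the array data

   A thin pointer to such an object can therefore reach both the
   bounds and the data from one address; see convert_to_fat_pointer
   below for how the two addresses are recovered.  */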
3686 /* Same, taking a thin or fat pointer type instead of a template type. */
3688 tree
3689 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3690 tree name, bool debug_info_p)
3692 tree template_type;
3694 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3696 template_type
3697 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3698 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3699 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3701 return
3702 build_unc_object_type (template_type, object_type, name, debug_info_p);
3705 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3706 In the normal case this is just two adjustments, but we have more to
3707 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3709 void
3710 update_pointer_to (tree old_type, tree new_type)
3712 tree ptr = TYPE_POINTER_TO (old_type);
3713 tree ref = TYPE_REFERENCE_TO (old_type);
3714 tree t;
3716 /* If this is the main variant, process all the other variants first. */
3717 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3718 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
3719 update_pointer_to (t, new_type);
3721 /* If no pointers and no references, we are done. */
3722 if (!ptr && !ref)
3723 return;
3725 /* Merge the old type qualifiers in the new type.
3727 Each old variant has qualifiers for specific reasons, and the new
3728 designated type as well. Each set of qualifiers represents useful
3729 information grabbed at some point, and merging the two simply unifies
3730 these inputs into the final type description.
3732 Consider for instance a volatile type frozen after an access to constant
3733 type designating it; after the designated type's freeze, we get here with
3734 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3735 when the access type was processed. We will make a volatile and readonly
3736 designated type, because that's what it really is.
3738 We might also get here for a non-dummy OLD_TYPE variant with different
3739 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3740 to private record type elaboration (see the comments around the call to
3741 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3742 the qualifiers in those cases too, to avoid accidentally discarding the
3743 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3744 new_type
3745 = build_qualified_type (new_type,
3746 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3748 /* If old type and new type are identical, there is nothing to do. */
3749 if (old_type == new_type)
3750 return;
3752 /* Otherwise, first handle the simple case. */
3753 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3755 tree new_ptr, new_ref;
3757 /* If pointer or reference already points to new type, nothing to do.
3758 This can happen as update_pointer_to can be invoked multiple times
3759 on the same couple of types because of the type variants. */
3760 if ((ptr && TREE_TYPE (ptr) == new_type)
3761 || (ref && TREE_TYPE (ref) == new_type))
3762 return;
3764 /* Chain PTR and its variants at the end. */
3765 new_ptr = TYPE_POINTER_TO (new_type);
3766 if (new_ptr)
3768 while (TYPE_NEXT_PTR_TO (new_ptr))
3769 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
3770 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
3772 else
3773 TYPE_POINTER_TO (new_type) = ptr;
3775 /* Now adjust them. */
3776 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3777 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
3779 TREE_TYPE (t) = new_type;
3780 if (TYPE_NULL_BOUNDS (t))
3781 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
3784 /* Chain REF and its variants at the end. */
3785 new_ref = TYPE_REFERENCE_TO (new_type);
3786 if (new_ref)
3788 while (TYPE_NEXT_REF_TO (new_ref))
3789 new_ref = TYPE_NEXT_REF_TO (new_ref);
3790 TYPE_NEXT_REF_TO (new_ref) = ref;
3792 else
3793 TYPE_REFERENCE_TO (new_type) = ref;
3795 /* Now adjust them. */
3796 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3797 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
3798 TREE_TYPE (t) = new_type;
3800 TYPE_POINTER_TO (old_type) = NULL_TREE;
3801 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
3804 /* Now deal with the unconstrained array case. In this case the pointer
3805 is actually a record where both fields are pointers to dummy nodes.
3806 Turn them into pointers to the correct types using update_pointer_to.
3807 Likewise for the pointer to the object record (thin pointer). */
3808 else
3810 tree new_ptr = TYPE_POINTER_TO (new_type);
3812 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
3814 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
3815 since update_pointer_to can be invoked multiple times on the same
3816 couple of types because of the type variants. */
3817 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
3818 return;
3820 update_pointer_to
3821 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
3822 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
3824 update_pointer_to
3825 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
3826 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
3828 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
3829 TYPE_OBJECT_RECORD_TYPE (new_type));
3831 TYPE_POINTER_TO (old_type) = NULL_TREE;
3835 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3836 unconstrained one. This involves making or finding a template. */
3838 static tree
3839 convert_to_fat_pointer (tree type, tree expr)
3841 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
3842 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3843 tree etype = TREE_TYPE (expr);
3844 tree template_addr;
3845 vec<constructor_elt, va_gc> *v;
3846 vec_alloc (v, 2);
3848 /* If EXPR is null, make a fat pointer that contains a null pointer to the
3849 array (compare_fat_pointers ensures that this is the full discriminant)
3850 and a valid pointer to the bounds. This latter property is necessary
3851 since the compiler can hoist the load of the bounds done through it. */
3852 if (integer_zerop (expr))
3854 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
3855 tree null_bounds, t;
3857 if (TYPE_NULL_BOUNDS (ptr_template_type))
3858 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
3859 else
3861 /* The template type can still be dummy at this point so we build an
3862 empty constructor. The middle-end will fill it in with zeros. */
3863 t = build_constructor (template_type, NULL);
3864 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
3865 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
3866 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
3869 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
3870 fold_convert (p_array_type, null_pointer_node));
3871 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
3872 t = build_constructor (type, v);
3873 /* Do not set TREE_CONSTANT so as to force T to static memory. */
3874 TREE_CONSTANT (t) = 0;
3875 TREE_STATIC (t) = 1;
3877 return t;
3880 /* If EXPR is a thin pointer, make template and data from the record. */
3881 if (TYPE_IS_THIN_POINTER_P (etype))
3883 tree field = TYPE_FIELDS (TREE_TYPE (etype));
3885 expr = gnat_protect_expr (expr);
3887 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
3888 the thin pointer value has been shifted so we shift it back to get
3889 the template address. */
3890 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
3892 template_addr
3893 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
3894 fold_build1 (NEGATE_EXPR, sizetype,
3895 byte_position
3896 (DECL_CHAIN (field))));
3897 template_addr
3898 = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
3899 template_addr);
3902 /* Otherwise we explicitly take the address of the fields. */
3903 else
3905 expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
3906 template_addr
3907 = build_unary_op (ADDR_EXPR, NULL_TREE,
3908 build_component_ref (expr, NULL_TREE, field,
3909 false));
3910 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3911 build_component_ref (expr, NULL_TREE,
3912 DECL_CHAIN (field),
3913 false));
3917 /* Otherwise, build the constructor for the template. */
3918 else
3919 template_addr
3920 = build_unary_op (ADDR_EXPR, NULL_TREE,
3921 build_template (template_type, TREE_TYPE (etype),
3922 expr));
3924 /* The final result is a constructor for the fat pointer.
3926 If EXPR is an argument of a foreign convention subprogram, the type it
3927 points to is directly the component type. In this case, the expression
3928 type may not match the corresponding FIELD_DECL type at this point, so we
3929 call "convert" here to fix that up if necessary. This type consistency is
3930 required, for instance because it ensures that possible later folding of
3931 COMPONENT_REFs against this constructor always yields something of the
3932 same type as the initial reference.
3934 Note that the call to "build_template" above is still fine because it
3935 will only refer to the provided TEMPLATE_TYPE in this case. */
3936 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
3937 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
3938 return gnat_build_constructor (type, v);
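/* Shape of the result (illustrative): a fat pointer is a two-field
   record, conventionally

     P_ARRAY  : access to the array data
     P_BOUNDS : access to the template carrying the bounds

   Note that for a null EXPR the code above still installs a valid
   P_BOUNDS, so that a load of the bounds hoisted by the optimizers
   cannot fault.  */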
3941 /* Create an expression whose value is that of EXPR,
3942 converted to type TYPE. The TREE_TYPE of the value
3943 is always TYPE. This function implements all reasonable
3944 conversions; callers should filter out those that are
3945 not permitted by the language being compiled. */
3947 tree
3948 convert (tree type, tree expr)
3950 tree etype = TREE_TYPE (expr);
3951 enum tree_code ecode = TREE_CODE (etype);
3952 enum tree_code code = TREE_CODE (type);
3954 /* If the expression is already of the right type, we are done. */
3955 if (etype == type)
3956 return expr;
3958 /* If both input and output have padding and are of variable size, do this
3959 as an unchecked conversion. Likewise if one is a mere variant of the
3960 other, so we avoid a pointless unpad/repad sequence. */
3961 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3962 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
3963 && (!TREE_CONSTANT (TYPE_SIZE (type))
3964 || !TREE_CONSTANT (TYPE_SIZE (etype))
3965 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
3966 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3967 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3970 /* If the output type has padding, convert to the inner type and make a
3971 constructor to build the record, unless a variable size is involved. */
3972 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
3974 vec<constructor_elt, va_gc> *v;
3976 /* If we previously converted from another type and our type is
3977 of variable size, remove the conversion to avoid the need for
3978 variable-sized temporaries. Likewise for a conversion between
3979 original and packable version. */
3980 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3981 && (!TREE_CONSTANT (TYPE_SIZE (type))
3982 || (ecode == RECORD_TYPE
3983 && TYPE_NAME (etype)
3984 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3985 expr = TREE_OPERAND (expr, 0);
3987 /* If we are just removing the padding from expr, convert the original
3988 object if we have variable size in order to avoid the need for some
3989 variable-sized temporaries. Likewise if the padding is a variant
3990 of the other, so we avoid a pointless unpad/repad sequence. */
3991 if (TREE_CODE (expr) == COMPONENT_REF
3992 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3993 && (!TREE_CONSTANT (TYPE_SIZE (type))
3994 || TYPE_MAIN_VARIANT (type)
3995 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
3996 || (ecode == RECORD_TYPE
3997 && TYPE_NAME (etype)
3998 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3999 return convert (type, TREE_OPERAND (expr, 0));
4001 /* If the inner type is of self-referential size and the expression type
4002 is a record, do this as an unchecked conversion. But first pad the
4003 expression if possible to have the same size on both sides. */
4004 if (ecode == RECORD_TYPE
4005 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
4007 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
4008 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4009 false, false, false, true),
4010 expr);
4011 return unchecked_convert (type, expr, false);
4014 /* If we are converting between array types with variable size, do the
4015 final conversion as an unchecked conversion, again to avoid the need
4016 for some variable-sized temporaries. If valid, this conversion is
4017 very likely purely technical and without real effects. */
4018 if (ecode == ARRAY_TYPE
4019 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4020 && !TREE_CONSTANT (TYPE_SIZE (etype))
4021 && !TREE_CONSTANT (TYPE_SIZE (type)))
4022 return unchecked_convert (type,
4023 convert (TREE_TYPE (TYPE_FIELDS (type)),
4024 expr),
4025 false);
4027 vec_alloc (v, 1);
4028 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4029 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4030 return gnat_build_constructor (type, v);
4033 /* If the input type has padding, remove it and convert to the output type.
4034 The ordering of the conditions is arranged to ensure that the output type
4035 is not a padding type here, as it is not clear whether the conversion
4036 would always be correct if this were to happen. */
4037 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4039 tree unpadded;
4041 /* If we have just converted to this padded type, just get the
4042 inner expression. */
4043 if (TREE_CODE (expr) == CONSTRUCTOR
4044 && !vec_safe_is_empty (CONSTRUCTOR_ELTS (expr))
4045 && (*CONSTRUCTOR_ELTS (expr))[0].index == TYPE_FIELDS (etype))
4046 unpadded = (*CONSTRUCTOR_ELTS (expr))[0].value;
4048 /* Otherwise, build an explicit component reference. */
4049 else
4050 unpadded
4051 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4053 return convert (type, unpadded);
4056 /* If the input is a biased type, adjust first. */
4057 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4058 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4059 fold_convert (TREE_TYPE (etype), expr),
4060 fold_convert (TREE_TYPE (etype),
4061 TYPE_MIN_VALUE (etype))));
4063 /* If the input is a justified modular type, we need to extract the actual
4064 object before converting it to any other type with the exceptions of an
4065 unconstrained array or of a mere type variant. It is useful to avoid the
4066 extraction and conversion in the type variant case because it could end
4067 up replacing a VAR_DECL expr by a constructor and we might be about to
4068 take the address of the result. */
4069 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4070 && code != UNCONSTRAINED_ARRAY_TYPE
4071 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4072 return convert (type, build_component_ref (expr, NULL_TREE,
4073 TYPE_FIELDS (etype), false));
4075 /* If converting to a type that contains a template, convert to the data
4076 type and then build the template. */
4077 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4079 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4080 vec<constructor_elt, va_gc> *v;
4081 vec_alloc (v, 2);
4083 /* If the source already has a template, get a reference to the
4084 associated array only, as we are going to rebuild a template
4085 for the target type anyway. */
4086 expr = maybe_unconstrained_array (expr);
4088 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4089 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4090 obj_type, NULL_TREE));
4091 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4092 convert (obj_type, expr));
4093 return gnat_build_constructor (type, v);
4096 /* There are some cases of expressions that we process specially. */
4097 switch (TREE_CODE (expr))
4099 case ERROR_MARK:
4100 return expr;
4102 case NULL_EXPR:
4103 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4104 conversion in gnat_expand_expr. NULL_EXPR does not represent
4105 an actual value, so no conversion is needed. */
4106 expr = copy_node (expr);
4107 TREE_TYPE (expr) = type;
4108 return expr;
4110 case STRING_CST:
4111 /* If we are converting a STRING_CST to another constrained array type,
4112 just make a new one in the proper type. */
4113 if (code == ecode && AGGREGATE_TYPE_P (etype)
4114 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4115 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4117 expr = copy_node (expr);
4118 TREE_TYPE (expr) = type;
4119 return expr;
4121 break;
4123 case VECTOR_CST:
4124 /* If we are converting a VECTOR_CST to a mere type variant, just make
4125 a new one in the proper type. */
4126 if (code == ecode && gnat_types_compatible_p (type, etype))
4128 expr = copy_node (expr);
4129 TREE_TYPE (expr) = type;
4130 return expr;
4133 case CONSTRUCTOR:
4134 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4135 another padding type around the same type, just make a new one in
4136 the proper type. */
4137 if (code == ecode
4138 && (gnat_types_compatible_p (type, etype)
4139 || (code == RECORD_TYPE
4140 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4141 && TREE_TYPE (TYPE_FIELDS (type))
4142 == TREE_TYPE (TYPE_FIELDS (etype)))))
4144 expr = copy_node (expr);
4145 TREE_TYPE (expr) = type;
4146 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4147 return expr;
4150 /* Likewise for a conversion between original and packable version, or
4151 conversion between types of the same size and with the same list of
4152 fields, but we have to work harder to preserve type consistency. */
4153 if (code == ecode
4154 && code == RECORD_TYPE
4155 && (TYPE_NAME (type) == TYPE_NAME (etype)
4156 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4159 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4160 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4161 vec<constructor_elt, va_gc> *v;
4162 vec_alloc (v, len);
4163 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4164 unsigned HOST_WIDE_INT idx;
4165 tree index, value;
4167 /* Whether we need to clear TREE_CONSTANT et al. on the output
4168 constructor when we convert in place. */
4169 bool clear_constant = false;
4171 FOR_EACH_CONSTRUCTOR_ELT (e, idx, index, value)
4173 /* Skip the missing fields in the CONSTRUCTOR. */
4174 while (efield && field && !SAME_FIELD_P (efield, index))
4176 efield = DECL_CHAIN (efield);
4177 field = DECL_CHAIN (field);
4179 /* The field must be the same. */
4180 if (!(efield && field && SAME_FIELD_P (efield, field)))
4181 break;
4182 constructor_elt elt
4183 = {field, convert (TREE_TYPE (field), value)};
4184 v->quick_push (elt);
4186 /* If packing has made this field a bitfield and the input
4187 value couldn't be emitted statically any more, we need to
4188 clear TREE_CONSTANT on our output. */
4189 if (!clear_constant
4190 && TREE_CONSTANT (expr)
4191 && !CONSTRUCTOR_BITFIELD_P (efield)
4192 && CONSTRUCTOR_BITFIELD_P (field)
4193 && !initializer_constant_valid_for_bitfield_p (value))
4194 clear_constant = true;
4196 efield = DECL_CHAIN (efield);
4197 field = DECL_CHAIN (field);
4200 /* If we have been able to match and convert all the input fields
4201 to their output type, convert in place now. We'll fall back to a
4202 view conversion downstream otherwise. */
4203 if (idx == len)
4205 expr = copy_node (expr);
4206 TREE_TYPE (expr) = type;
4207 CONSTRUCTOR_ELTS (expr) = v;
4208 if (clear_constant)
4209 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4210 return expr;
4214 /* Likewise for a conversion between array type and vector type with a
4215 compatible representative array. */
4216 else if (code == VECTOR_TYPE
4217 && ecode == ARRAY_TYPE
4218 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4219 etype))
4221 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4222 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4223 vec<constructor_elt, va_gc> *v;
4224 unsigned HOST_WIDE_INT ix;
4225 tree value;
4227 /* Build a VECTOR_CST from a *constant* array constructor. */
4228 if (TREE_CONSTANT (expr))
4230 bool constant_p = true;
4232 /* Iterate through elements and check if all constructor
4233 elements are *_CSTs. */
4234 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4235 if (!CONSTANT_CLASS_P (value))
4237 constant_p = false;
4238 break;
4241 if (constant_p)
4242 return build_vector_from_ctor (type,
4243 CONSTRUCTOR_ELTS (expr));
4246 /* Otherwise, build a regular vector constructor. */
4247 vec_alloc (v, len);
4248 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4250 constructor_elt elt = {NULL_TREE, value};
4251 v->quick_push (elt);
4253 expr = copy_node (expr);
4254 TREE_TYPE (expr) = type;
4255 CONSTRUCTOR_ELTS (expr) = v;
4256 return expr;
4258 break;
4260 case UNCONSTRAINED_ARRAY_REF:
4261 /* First retrieve the underlying array. */
4262 expr = maybe_unconstrained_array (expr);
4263 etype = TREE_TYPE (expr);
4264 ecode = TREE_CODE (etype);
4265 break;
4267 case VIEW_CONVERT_EXPR:
4269 /* GCC 4.x is very sensitive to type consistency overall, and view
4270 conversions thus are very frequent. Even though just "convert"ing
4271 the inner operand to the output type is fine in most cases, it
4272 might expose unexpected input/output type mismatches in special
4273 circumstances so we avoid such recursive calls when we can. */
4274 tree op0 = TREE_OPERAND (expr, 0);
4276 /* If we are converting back to the original type, we can just
4277 lift the input conversion. This is a common occurrence with
4278 switches back-and-forth amongst type variants. */
4279 if (type == TREE_TYPE (op0))
4280 return op0;
4282 /* Otherwise, if we're converting between two aggregate or vector
4283 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4284 target type in place or to just convert the inner expression. */
4285 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4286 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4288 /* If we are converting between mere variants, we can just
4289 substitute the VIEW_CONVERT_EXPR in place. */
4290 if (gnat_types_compatible_p (type, etype))
4291 return build1 (VIEW_CONVERT_EXPR, type, op0);
4293 /* Otherwise, we may just bypass the input view conversion unless
4294 one of the types is a fat pointer, which is handled by
4295 specialized code below which relies on exact type matching. */
4296 else if (!TYPE_IS_FAT_POINTER_P (type)
4297 && !TYPE_IS_FAT_POINTER_P (etype))
4298 return convert (type, op0);
4301 break;
4304 default:
4305 break;
4308 /* Check for converting to a pointer to an unconstrained array. */
4309 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4310 return convert_to_fat_pointer (type, expr);
4312 /* If we are converting between two aggregate or vector types that are mere
4313 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4314 to a vector type from its representative array type. */
4315 else if ((code == ecode
4316 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4317 && gnat_types_compatible_p (type, etype))
4318 || (code == VECTOR_TYPE
4319 && ecode == ARRAY_TYPE
4320 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4321 etype)))
4322 return build1 (VIEW_CONVERT_EXPR, type, expr);
4324 /* If we are converting between tagged types, try to upcast properly. */
4325 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4326 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4328 tree child_etype = etype;
4329 do {
4330 tree field = TYPE_FIELDS (child_etype);
4331 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4332 return build_component_ref (expr, NULL_TREE, field, false);
4333 child_etype = TREE_TYPE (field);
4334 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4337 /* If we are converting from a smaller form of record type back to it, just
4338 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4339 size on both sides. */
4340 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4341 && smaller_form_type_p (etype, type))
4343 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4344 false, false, false, true),
4345 expr);
4346 return build1 (VIEW_CONVERT_EXPR, type, expr);
4349 /* In all other cases of related types, make a NOP_EXPR. */
4350 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4351 return fold_convert (type, expr);
4353 switch (code)
4355 case VOID_TYPE:
4356 return fold_build1 (CONVERT_EXPR, type, expr);
4358 case INTEGER_TYPE:
4359 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4360 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4361 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4362 return unchecked_convert (type, expr, false);
4363 else if (TYPE_BIASED_REPRESENTATION_P (type))
4364 return fold_convert (type,
4365 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4366 convert (TREE_TYPE (type), expr),
4367 convert (TREE_TYPE (type),
4368 TYPE_MIN_VALUE (type))));
4370 /* ... fall through ... */
4372 case ENUMERAL_TYPE:
4373 case BOOLEAN_TYPE:
4374 /* If we are converting an additive expression to an integer type
4375 with lower precision, be wary of the optimization that can be
4376 applied by convert_to_integer. There are 2 problematic cases:
4377 - if the first operand was originally of a biased type,
4378 because we could be recursively called to convert it
4379 to an intermediate type and thus rematerialize the
4380 additive operator endlessly,
4381 - if the expression contains a placeholder, because an
4382 intermediate conversion that changes the sign could
4383 be inserted and thus introduce an artificial overflow
4384 at compile time when the placeholder is substituted. */
4385 if (code == INTEGER_TYPE
4386 && ecode == INTEGER_TYPE
4387 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4388 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4390 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4392 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4393 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4394 || CONTAINS_PLACEHOLDER_P (expr))
4395 return build1 (NOP_EXPR, type, expr);
4398 return fold (convert_to_integer (type, expr));
4400 case POINTER_TYPE:
4401 case REFERENCE_TYPE:
4402 /* If converting between two thin pointers, adjust if needed to account
4403 for differing offsets from the base pointer, depending on whether
4404 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4405 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4407 tree etype_pos
4408 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)) != NULL_TREE
4409 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4410 : size_zero_node;
4411 tree type_pos
4412 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != NULL_TREE
4413 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4414 : size_zero_node;
4415 tree byte_diff = size_diffop (type_pos, etype_pos);
4417 expr = build1 (NOP_EXPR, type, expr);
4418 if (integer_zerop (byte_diff))
4419 return expr;
4421 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4422 fold_convert (sizetype, byte_diff));
4425 /* If converting fat pointer to normal or thin pointer, get the pointer
4426 to the array and then convert it. */
4427 if (TYPE_IS_FAT_POINTER_P (etype))
4428 expr
4429 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4431 return fold (convert_to_pointer (type, expr));
4433 case REAL_TYPE:
4434 return fold (convert_to_real (type, expr));
4436 case RECORD_TYPE:
4437 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4439 vec<constructor_elt, va_gc> *v;
4440 vec_alloc (v, 1);
4442 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4443 convert (TREE_TYPE (TYPE_FIELDS (type)),
4444 expr));
4445 return gnat_build_constructor (type, v);
4448 /* ... fall through ... */
4450 case ARRAY_TYPE:
4451 /* In these cases, assume the front-end has validated the conversion.
4452 If the conversion is valid, it will be a bit-wise conversion, so
4453 it can be viewed as an unchecked conversion. */
4454 return unchecked_convert (type, expr, false);
4456 case UNION_TYPE:
4457 /* This is either a conversion between a tagged type and some
4458 subtype, which we have to mark as a UNION_TYPE because of
4459 overlapping fields, or a conversion of an Unchecked_Union. */
4460 return unchecked_convert (type, expr, false);
4462 case UNCONSTRAINED_ARRAY_TYPE:
4463 /* If the input is a VECTOR_TYPE, convert to the representative
4464 array type first. */
4465 if (ecode == VECTOR_TYPE)
4467 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4468 etype = TREE_TYPE (expr);
4469 ecode = TREE_CODE (etype);
4472 /* If EXPR is a constrained array, take its address, convert it to a
4473 fat pointer, and then dereference it. Likewise if EXPR is a
4474 record containing both a template and a constrained array.
4475 Note that a record representing a justified modular type
4476 always represents a packed constrained array. */
4477 if (ecode == ARRAY_TYPE
4478 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4479 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4480 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4481 return
4482 build_unary_op
4483 (INDIRECT_REF, NULL_TREE,
4484 convert_to_fat_pointer (TREE_TYPE (type),
4485 build_unary_op (ADDR_EXPR,
4486 NULL_TREE, expr)));
4488 /* Do something very similar for converting one unconstrained
4489 array to another. */
4490 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4491 return
4492 build_unary_op (INDIRECT_REF, NULL_TREE,
4493 convert (TREE_TYPE (type),
4494 build_unary_op (ADDR_EXPR,
4495 NULL_TREE, expr)));
4496 else
4497 gcc_unreachable ();
4499 case COMPLEX_TYPE:
4500 return fold (convert_to_complex (type, expr));
4502 default:
4503 gcc_unreachable ();
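/* Worked example for the biased cases above (illustrative): given

     type Small is range 100 .. 107;
     for Small'Size use 3;

   a biased value stores Value - 100 in 3 bits.  Converting such an
   EXPR to Integer goes through the "input is a biased type" code near
   the top, which adds TYPE_MIN_VALUE (100) back; converting an
   Integer to Small goes through the INTEGER_TYPE output case of the
   switch, which subtracts 100 before the final conversion.  */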
4507 /* Create an expression whose value is that of EXPR converted to the common
4508 index type, which is sizetype. EXPR is supposed to be in the base type
4509 of the GNAT index type. Calling it is equivalent to doing
4511 convert (sizetype, expr)
4513 but we try to distribute the type conversion with the knowledge that EXPR
4514 cannot overflow in its type. This is a best-effort approach and we fall
4515 back to the above expression as soon as difficulties are encountered.
4517 This is necessary to overcome issues that arise when the GNAT base index
4518 type and the GCC common index type (sizetype) don't have the same size,
4519 which is quite frequent on 64-bit architectures. In this case, and if
4520 the GNAT base index type is signed but the iteration type of the loop has
4521 been forced to unsigned, the loop scalar evolution engine cannot compute
4522 a simple evolution for the general induction variables associated with the
4523 array indices, because it will preserve the wrap-around semantics in the
4524 unsigned type of their "inner" part. As a result, many loop optimizations
4525 are blocked.
4527 The solution is to use a special (basic) induction variable that is at
4528 least as large as sizetype, and to express the aforementioned general
4529 induction variables in terms of this induction variable, eliminating
4530 the problematic intermediate truncation to the GNAT base index type.
4531 This is possible as long as the original expression doesn't overflow
4532 and if the middle-end hasn't introduced artificial overflows in the
4533 course of the various simplifications it can make to the expression. */
4535 tree
4536 convert_to_index_type (tree expr)
4538 enum tree_code code = TREE_CODE (expr);
4539 tree type = TREE_TYPE (expr);
4541 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4542 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4543 if (TYPE_UNSIGNED (type) || !optimize)
4544 return convert (sizetype, expr);
4546 switch (code)
4548 case VAR_DECL:
4549 /* The main effect of the function: replace a loop parameter with its
4550 associated special induction variable. */
4551 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
4552 expr = DECL_INDUCTION_VAR (expr);
4553 break;
4555 CASE_CONVERT:
4557 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
4558 /* Bail out as soon as we suspect some sort of type frobbing. */
4559 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
4560 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
4561 break;
4564 /* ... fall through ... */
4566 case NON_LVALUE_EXPR:
4567 return fold_build1 (code, sizetype,
4568 convert_to_index_type (TREE_OPERAND (expr, 0)));
4570 case PLUS_EXPR:
4571 case MINUS_EXPR:
4572 case MULT_EXPR:
4573 return fold_build2 (code, sizetype,
4574 convert_to_index_type (TREE_OPERAND (expr, 0)),
4575 convert_to_index_type (TREE_OPERAND (expr, 1)));
4577 case COMPOUND_EXPR:
4578 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
4579 convert_to_index_type (TREE_OPERAND (expr, 1)));
4581 case COND_EXPR:
4582 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
4583 convert_to_index_type (TREE_OPERAND (expr, 1)),
4584 convert_to_index_type (TREE_OPERAND (expr, 2)));
4586 default:
4587 break;
4590 return convert (sizetype, expr);
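/* Illustrative effect (not from the original source): if I is a loop
   parameter with an associated induction variable IV at least as wide
   as sizetype, then for the index expression I + 1 the function
   returns

     (sizetype) IV + (sizetype) 1

   computed wholly in sizetype, instead of (sizetype) (I + 1) whose
   truncated inner addition would block scalar evolution analysis.  */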
4593 /* Remove all conversions that are done in EXP. This includes converting
4594 from a padded type or to a justified modular type. If TRUE_ADDRESS
4595 is true, always return the address of the containing object even if
4596 the address is not bit-aligned. */
4598 tree
4599 remove_conversions (tree exp, bool true_address)
4601 switch (TREE_CODE (exp))
4603 case CONSTRUCTOR:
4604 if (true_address
4605 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4606 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4607 return
4608 remove_conversions ((*CONSTRUCTOR_ELTS (exp))[0].value, true);
4609 break;
4611 case COMPONENT_REF:
4612 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4613 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4614 break;
4616 CASE_CONVERT:
4617 case VIEW_CONVERT_EXPR:
4618 case NON_LVALUE_EXPR:
4619 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4621 default:
4622 break;
4625 return exp;
4628 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4629 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4630 likewise return an expression pointing to the underlying array. */
4632 tree
4633 maybe_unconstrained_array (tree exp)
4635 enum tree_code code = TREE_CODE (exp);
4636 tree type = TREE_TYPE (exp);
4638 switch (TREE_CODE (type))
4640 case UNCONSTRAINED_ARRAY_TYPE:
4641 if (code == UNCONSTRAINED_ARRAY_REF)
4643 const bool read_only = TREE_READONLY (exp);
4644 const bool no_trap = TREE_THIS_NOTRAP (exp);
4646 exp = TREE_OPERAND (exp, 0);
4647 type = TREE_TYPE (exp);
4649 if (TREE_CODE (exp) == COND_EXPR)
4651 tree op1
4652 = build_unary_op (INDIRECT_REF, NULL_TREE,
4653 build_component_ref (TREE_OPERAND (exp, 1),
4654 NULL_TREE,
4655 TYPE_FIELDS (type),
4656 false));
4657 tree op2
4658 = build_unary_op (INDIRECT_REF, NULL_TREE,
4659 build_component_ref (TREE_OPERAND (exp, 2),
4660 NULL_TREE,
4661 TYPE_FIELDS (type),
4662 false));
4664 exp = build3 (COND_EXPR,
4665 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
4666 TREE_OPERAND (exp, 0), op1, op2);
4668 else
4670 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
4671 build_component_ref (exp, NULL_TREE,
4672 TYPE_FIELDS (type),
4673 false));
4674 TREE_READONLY (exp) = read_only;
4675 TREE_THIS_NOTRAP (exp) = no_trap;
4679 else if (code == NULL_EXPR)
4680 exp = build1 (NULL_EXPR,
4681 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
4682 TREE_OPERAND (exp, 0));
4683 break;
4685 case RECORD_TYPE:
4686 /* If this is a padded type and it contains a template, convert to the
4687 unpadded type first. */
4688 if (TYPE_PADDING_P (type)
4689 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
4690 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
4692 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4693 type = TREE_TYPE (exp);
4696 if (TYPE_CONTAINS_TEMPLATE_P (type))
4698 exp = build_component_ref (exp, NULL_TREE,
4699 DECL_CHAIN (TYPE_FIELDS (type)),
4700 false);
4701 type = TREE_TYPE (exp);
4703 /* If the array type is padded, convert to the unpadded type. */
4704 if (TYPE_IS_PADDING_P (type))
4705 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4707 break;
4709 default:
4710 break;
4713 return exp;
4716 /* Return true if EXPR is an expression that can be folded as an operand
4717 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4719 static bool
4720 can_fold_for_view_convert_p (tree expr)
4722 tree t1, t2;
4724 /* The folder will fold NOP_EXPRs between integral types with the same
4725 precision (in the middle-end's sense). We cannot allow it if the
4726 types don't have the same precision in the Ada sense as well. */
4727 if (TREE_CODE (expr) != NOP_EXPR)
4728 return true;
4730 t1 = TREE_TYPE (expr);
4731 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4733 /* Defer to the folder for non-integral conversions. */
4734 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4735 return true;
4737 /* Only fold conversions that preserve both precisions. */
4738 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4739 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4740 return true;
4742 return false;
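/* Example (illustrative): a NOP_EXPR converting from a 32-bit integer
   type with an RM size of 8 to a plain 32-bit integer type has equal
   precision for the middle-end, so the folder would elide it; but the
   Ada precisions (8 versus 32 value bits) differ, so we return false
   and build1 is used instead of fold_build1 for the VIEW_CONVERT_EXPR
   in unchecked_convert below.  */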
4745 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4746 If NOTRUNC_P is true, truncation operations should be suppressed.
4748 Special care is required with (source or target) integral types whose
4749 precision is not equal to their size, to make sure we fetch or assign
4750 the value bits whose location might depend on the endianness, e.g.
4752 Rmsize : constant := 8;
4753 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4755 type Bit_Array is array (1 .. Rmsize) of Boolean;
4756 pragma Pack (Bit_Array);
4758 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4760 Value : Int := 2#1000_0001#;
4761 Vbits : Bit_Array := To_Bit_Array (Value);
4763 we expect the 8 bits at Vbits'Address to always contain Value, while
4764 their original location depends on the endianness, at Value'Address
4765 on a little-endian architecture but not on a big-endian one. */
4767 tree
4768 unchecked_convert (tree type, tree expr, bool notrunc_p)
4770 tree etype = TREE_TYPE (expr);
4771 enum tree_code ecode = TREE_CODE (etype);
4772 enum tree_code code = TREE_CODE (type);
4773 tree tem;
4774 int c;
4776 /* If the expression is already of the right type, we are done. */
4777 if (etype == type)
4778 return expr;
4780 /* If both types are integral just do a normal conversion.
4781 Likewise for a conversion to an unconstrained array. */
4782 if (((INTEGRAL_TYPE_P (type)
4783 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
4784 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
4785 && (INTEGRAL_TYPE_P (etype)
4786 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
4787 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
4788 || code == UNCONSTRAINED_ARRAY_TYPE)
4790 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4792 tree ntype = copy_type (etype);
4793 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4794 TYPE_MAIN_VARIANT (ntype) = ntype;
4795 expr = build1 (NOP_EXPR, ntype, expr);
4798 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4800 tree rtype = copy_type (type);
4801 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4802 TYPE_MAIN_VARIANT (rtype) = rtype;
4803 expr = convert (rtype, expr);
4804 expr = build1 (NOP_EXPR, type, expr);
4806 else
4807 expr = convert (type, expr);
4810 /* If we are converting to an integral type whose precision is not equal
4811 to its size, first unchecked convert to a record type that contains a
4812 field of the given precision. Then extract the field. */
4813 else if (INTEGRAL_TYPE_P (type)
4814 && TYPE_RM_SIZE (type)
4815 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4816 GET_MODE_BITSIZE (TYPE_MODE (type))))
4818 tree rec_type = make_node (RECORD_TYPE);
4819 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
4820 tree field_type, field;
4822 if (TYPE_UNSIGNED (type))
4823 field_type = make_unsigned_type (prec);
4824 else
4825 field_type = make_signed_type (prec);
4826 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
4828 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
4829 NULL_TREE, bitsize_zero_node, 1, 0);
4831 finish_record_type (rec_type, field, 1, false);
4833 expr = unchecked_convert (rec_type, expr, notrunc_p);
4834 expr = build_component_ref (expr, NULL_TREE, field, false);
4835 expr = fold_build1 (NOP_EXPR, type, expr);
4838 /* Similarly if we are converting from an integral type whose precision is
4839 not equal to its size, first copy into a field of the given precision
4840 and unchecked convert the record type. */
4841 else if (INTEGRAL_TYPE_P (etype)
4842 && TYPE_RM_SIZE (etype)
4843 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4844 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4846 tree rec_type = make_node (RECORD_TYPE);
4847 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
4848 vec<constructor_elt, va_gc> *v;
4849 vec_alloc (v, 1);
4850 tree field_type, field;
4852 if (TYPE_UNSIGNED (etype))
4853 field_type = make_unsigned_type (prec);
4854 else
4855 field_type = make_signed_type (prec);
4856 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
4858 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
4859 NULL_TREE, bitsize_zero_node, 1, 0);
4861 finish_record_type (rec_type, field, 1, false);
4863 expr = fold_build1 (NOP_EXPR, field_type, expr);
4864 CONSTRUCTOR_APPEND_ELT (v, field, expr);
4865 expr = gnat_build_constructor (rec_type, v);
4866 expr = unchecked_convert (type, expr, notrunc_p);
4869 /* If we are converting from a scalar type to a type with a different size,
4870 we need to pad to have the same size on both sides.
4872 ??? We cannot do it unconditionally because unchecked conversions are
4873 used liberally by the front-end to implement polymorphism, e.g. in:
4875 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
4876 return p___size__4 (p__object!(S191s.all));
4878 so we skip all expressions that are references. */
4879 else if (!REFERENCE_CLASS_P (expr)
4880 && !AGGREGATE_TYPE_P (etype)
4881 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4882 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
4884 if (c < 0)
4886 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4887 false, false, false, true),
4888 expr);
4889 expr = unchecked_convert (type, expr, notrunc_p);
4891 else
4893 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
4894 false, false, false, true);
4895 expr = unchecked_convert (rec_type, expr, notrunc_p);
4896 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
4897 false);
4901 /* We have a special case when we are converting between two unconstrained
4902 array types. In that case, take the address, convert the fat pointer
4903 types, and dereference. */
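/* In other words, the result is morally *(TYPE's fat pointer type) &EXPR:
   taking the address of EXPR yields a fat pointer, which is view-converted
   to the fat pointer type associated with TYPE and then dereferenced.  */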
4904 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
4905 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4906 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4907 build_unary_op (ADDR_EXPR, NULL_TREE,
4908 expr)));
4910 /* Another special case is when we are converting to a vector type from its
4911 representative array type; this is a regular conversion. */
4912 else if (code == VECTOR_TYPE
4913 && ecode == ARRAY_TYPE
4914 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4915 etype))
4916 expr = convert (type, expr);
4918 /* And, if the array type is not the representative, we try to build an
4919 intermediate vector type of which the array type is the representative
4920 and to do the unchecked conversion between the vector types, in order
4921 to enable further simplifications in the middle-end. */
4922 else if (code == VECTOR_TYPE
4923 && ecode == ARRAY_TYPE
4924 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
4926 expr = convert (tem, expr);
4927 return unchecked_convert (type, expr, notrunc_p);
4930 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
4931 the alignment of the CONSTRUCTOR to speed up the copy operation. */
4932 else if (TREE_CODE (expr) == CONSTRUCTOR
4933 && code == RECORD_TYPE
4934 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
4936 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
4937 Empty, false, false, false, true),
4938 expr);
4939 return unchecked_convert (type, expr, notrunc_p);
4942 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
4943 else
4945 expr = maybe_unconstrained_array (expr);
4946 etype = TREE_TYPE (expr);
4947 ecode = TREE_CODE (etype);
4948 if (can_fold_for_view_convert_p (expr))
4949 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4950 else
4951 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4954 /* If the result is an integral type whose precision is not equal to its
4955 size, sign- or zero-extend the result. We need not do this if the input
4956 is an integral type of the same precision and signedness or if the output
4957 is a biased type or if both the input and output are unsigned. */
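/* Concretely, for an RM size of S bits within a mode of B bits, the
   extension below computes

     (TYPE) (((BASE_TYPE) EXPR << (B - S)) >> (B - S))

   where BASE_TYPE has the signedness of TYPE, so the right shift
   replicates the sign bit or fills with zeros as appropriate.  */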
4958 if (!notrunc_p
4959 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4960 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4961 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4962 GET_MODE_BITSIZE (TYPE_MODE (type)))
4963 && !(INTEGRAL_TYPE_P (etype)
4964 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4965 && operand_equal_p (TYPE_RM_SIZE (type),
4966 (TYPE_RM_SIZE (etype) != 0
4967 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4968 0))
4969 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4971 tree base_type
4972 = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
4973 tree shift_expr
4974 = convert (base_type,
4975 size_binop (MINUS_EXPR,
4976 bitsize_int
4977 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4978 TYPE_RM_SIZE (type)));
4979 expr
4980 = convert (type,
4981 build_binary_op (RSHIFT_EXPR, base_type,
4982 build_binary_op (LSHIFT_EXPR, base_type,
4983 convert (base_type, expr),
4984 shift_expr),
4985 shift_expr));
4988 /* An unchecked conversion should never raise Constraint_Error. The code
4989 below assumes that GCC's conversion routines overflow the same way that
4990 the underlying hardware does. This is probably true. In the rare case
4991 when it is false, we can rely on the fact that such conversions are
4992 erroneous anyway. */
4993 if (TREE_CODE (expr) == INTEGER_CST)
4994 TREE_OVERFLOW (expr) = 0;
4996 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
4997 mark the expression as no longer constant. */
4998 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4999 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5000 OEP_ONLY_CONST))
5001 TREE_CONSTANT (expr) = 0;
5003 return expr;
5006 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5007 the latter being a record type as predicated by Is_Record_Type. */
5009 enum tree_code
5010 tree_code_for_record_type (Entity_Id gnat_type)
5012 Node_Id component_list, component;
5014 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5015 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
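/* For instance, a hypothetical declaration along the lines of

     type Num (Kind : Boolean := False) is record
        case Kind is
           when False => F : Float;
           when True  => I : Integer;
        end case;
     end record;
     pragma Unchecked_Union (Num);

   has no component outside its variant part and therefore maps to a
   UNION_TYPE.  */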
5016 if (!Is_Unchecked_Union (gnat_type))
5017 return RECORD_TYPE;
5019 gnat_type = Implementation_Base_Type (gnat_type);
5020 component_list
5021 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5023 for (component = First_Non_Pragma (Component_Items (component_list));
5024 Present (component);
5025 component = Next_Non_Pragma (component))
5026 if (Ekind (Defining_Entity (component)) == E_Component)
5027 return RECORD_TYPE;
5029 return UNION_TYPE;
5032 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5033 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5034 according to the presence of an alignment clause on the type or, if it
5035 is an array, on the component type. */
5037 bool
5038 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5040 gnat_type = Underlying_Type (gnat_type);
5042 *align_clause = Present (Alignment_Clause (gnat_type));
5044 if (Is_Array_Type (gnat_type))
5046 gnat_type = Underlying_Type (Component_Type (gnat_type));
5047 if (Present (Alignment_Clause (gnat_type)))
5048 *align_clause = true;
5051 if (!Is_Floating_Point_Type (gnat_type))
5052 return false;
5054 if (UI_To_Int (Esize (gnat_type)) != 64)
5055 return false;
5057 return true;
5060 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5061 size is greater or equal to 64 bits, or an array of such a type. Set
5062 ALIGN_CLAUSE according to the presence of an alignment clause on the
5063 type or, if it is an array, on the component type. */
5065 bool
5066 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5068 gnat_type = Underlying_Type (gnat_type);
5070 *align_clause = Present (Alignment_Clause (gnat_type));
5072 if (Is_Array_Type (gnat_type))
5074 gnat_type = Underlying_Type (Component_Type (gnat_type));
5075 if (Present (Alignment_Clause (gnat_type)))
5076 *align_clause = true;
5079 if (!Is_Scalar_Type (gnat_type))
5080 return false;
5082 if (UI_To_Int (Esize (gnat_type)) < 64)
5083 return false;
5085 return true;
5088 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5089 component of an aggregate type. */
5091 bool
5092 type_for_nonaliased_component_p (tree gnu_type)
5094 /* If the type is passed by reference, we may have pointers to the
5095 component so it cannot be made non-aliased. */
5096 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5097 return false;
5099 /* We used to say that any component of aggregate type is aliased
5100 because the front-end may take 'Reference of it. The front-end
5101 has been enhanced in the meantime so as to use a renaming instead
5102 in most cases, but the back-end can probably take the address of
5103 such a component too so we go for the conservative stance.
5105 For instance, we might need the address of any array type, even
5106 if normally passed by copy, to construct a fat pointer if the
5107 component is used as an actual for an unconstrained formal.
5109 Likewise for record types: even if a specific record subtype is
5110 passed by copy, the parent type might be passed by ref (e.g. if
5111 it's of variable size) and we might take the address of a child
5112 component to pass to a parent formal. We have no way to check
5113 for such conditions here. */
5114 if (AGGREGATE_TYPE_P (gnu_type))
5115 return false;
5117 return true;
5120 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5122 bool
5123 smaller_form_type_p (tree type, tree orig_type)
5125 tree size, osize;
5127 /* We're not interested in variants here. */
5128 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5129 return false;
5131 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5132 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5133 return false;
5135 size = TYPE_SIZE (type);
5136 osize = TYPE_SIZE (orig_type);
5138 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5139 return false;
5141 return tree_int_cst_lt (size, osize) != 0;
5144 /* Perform final processing on global variables. */
5146 static GTY (()) tree dummy_global;
5148 void
5149 gnat_write_global_declarations (void)
5151 unsigned int i;
5152 tree iter;
5154 /* If we have declared types as used at the global level, insert them in
5155 the global hash table. We use a dummy variable for this purpose. */
5156 if (types_used_by_cur_var_decl && !types_used_by_cur_var_decl->is_empty ())
5158 struct varpool_node *node;
5159 char *label;
5161 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
5162 dummy_global
5163 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5164 void_type_node);
5165 DECL_HARD_REGISTER (dummy_global) = 1;
5166 TREE_STATIC (dummy_global) = 1;
5167 node = varpool_node::get_create (dummy_global);
5168 node->definition = 1;
5169 node->force_output = 1;
5171 while (!types_used_by_cur_var_decl->is_empty ())
5173 tree t = types_used_by_cur_var_decl->pop ();
5174 types_used_by_var_decl_insert (t, dummy_global);
5178 /* Output debug information for all global type declarations first. This
5179 ensures that global types whose compilation hasn't been finalized yet,
5180 for example pointers to Taft amendment types, have their compilation
5181 finalized in the right context. */
5182 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5183 if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
5184 debug_hooks->global_decl (iter);
5186 /* Proceed to optimize and emit assembly. */
5187 symtab->finalize_compilation_unit ();
5189 /* After cgraph has had a chance to emit everything that's going to
5190 be emitted, output debug information for the remaining globals. */
5191 if (!seen_error ())
5193 timevar_push (TV_SYMOUT);
5194 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5195 if (TREE_CODE (iter) != TYPE_DECL && !DECL_IGNORED_P (iter))
5196 debug_hooks->global_decl (iter);
5197 timevar_pop (TV_SYMOUT);
5201 /* ************************************************************************
5202 * * GCC builtins support *
5203 * ************************************************************************ */
5205 /* The general scheme is fairly simple:
5207 For each builtin function/type to be declared, gnat_install_builtins calls
5208 internal facilities which eventually get to gnat_push_decl, which in turn
5209 tracks the builtin function decls thus declared in the 'builtin_decls'
5210 global data structure. When an Intrinsic subprogram declaration is
5211 processed, we search this data structure to retrieve the associated
5212 BUILT_IN DECL node. */
5214 /* Search the chain of currently available builtin declarations for a node
5215 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5216 found, if any, or NULL_TREE otherwise. */
5217 tree
5218 builtin_decl_for (tree name)
5220 unsigned i;
5221 tree decl;
5223 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5224 if (DECL_NAME (decl) == name)
5225 return decl;
5227 return NULL_TREE;
5230 /* The code below eventually exposes gnat_install_builtins, which declares
5231 the builtin types and functions we might need, either internally or as
5232 user-accessible facilities.
5234 ??? This is a first implementation shot, still in rough shape. It is
5235 heavily inspired by the "C" family implementation, with chunks copied
5236 verbatim from there.
5238 Two obvious TODO candidates are
5239 o Use a more efficient name/decl mapping scheme
5240 o Devise a middle-end infrastructure to avoid having to copy
5241 pieces between front-ends. */
5243 /* ----------------------------------------------------------------------- *
5244 * BUILTIN ELEMENTARY TYPES *
5245 * ----------------------------------------------------------------------- */
5247 /* Standard data types to be used in builtin argument declarations. */
5249 enum c_tree_index
5251 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5252 CTI_STRING_TYPE,
5253 CTI_CONST_STRING_TYPE,
5255 CTI_MAX
5258 static tree c_global_trees[CTI_MAX];
5260 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5261 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5262 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5264 /* ??? In addition to some attribute handlers, we currently don't support
5265 a (small) number of builtin-types, which in turn inhibits support for a
5266 number of builtin functions. */
5267 #define wint_type_node void_type_node
5268 #define intmax_type_node void_type_node
5269 #define uintmax_type_node void_type_node
5271 /* Build the void_list_node (void_type_node having been created). */
5273 static tree
5274 build_void_list_node (void)
5276 tree t = build_tree_list (NULL_TREE, void_type_node);
5277 return t;
5280 /* Used to help initialize the builtin-types.def table. When a type of
5281 the correct size doesn't exist, use error_mark_node instead of NULL.
5282 The latter results in segfaults even when a decl using the type doesn't
5283 get invoked. */
5285 static tree
5286 builtin_type_for_size (int size, bool unsignedp)
5288 tree type = gnat_type_for_size (size, unsignedp);
5289 return type ? type : error_mark_node;
5292 /* Build/push the elementary type decls that builtin functions/types
5293 will need. */
5295 static void
5296 install_builtin_elementary_types (void)
5298 signed_size_type_node = gnat_signed_type (size_type_node);
5299 pid_type_node = integer_type_node;
5300 void_list_node = build_void_list_node ();
5302 string_type_node = build_pointer_type (char_type_node);
5303 const_string_type_node
5304 = build_pointer_type (build_qualified_type
5305 (char_type_node, TYPE_QUAL_CONST));
5308 /* ----------------------------------------------------------------------- *
5309 * BUILTIN FUNCTION TYPES *
5310 * ----------------------------------------------------------------------- */
5312 /* Now, builtin function types per se. */
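/* The enum below is generated by expanding builtin-types.def with each
   DEF_* macro reduced to its NAME operand, so that e.g. an entry like

     DEF_FUNCTION_TYPE_1 (BT_FN_INT_DOUBLE, BT_INT, BT_DOUBLE)

   contributes the single enumerator BT_FN_INT_DOUBLE.  */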
5314 enum c_builtin_type
5316 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5317 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5318 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5319 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5320 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5321 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5322 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5323 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5324 ARG6) NAME,
5325 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5326 ARG6, ARG7) NAME,
5327 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5328 ARG6, ARG7, ARG8) NAME,
5329 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5330 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5331 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5332 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5333 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5334 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5335 NAME,
5336 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5337 #include "builtin-types.def"
5338 #undef DEF_PRIMITIVE_TYPE
5339 #undef DEF_FUNCTION_TYPE_0
5340 #undef DEF_FUNCTION_TYPE_1
5341 #undef DEF_FUNCTION_TYPE_2
5342 #undef DEF_FUNCTION_TYPE_3
5343 #undef DEF_FUNCTION_TYPE_4
5344 #undef DEF_FUNCTION_TYPE_5
5345 #undef DEF_FUNCTION_TYPE_6
5346 #undef DEF_FUNCTION_TYPE_7
5347 #undef DEF_FUNCTION_TYPE_8
5348 #undef DEF_FUNCTION_TYPE_VAR_0
5349 #undef DEF_FUNCTION_TYPE_VAR_1
5350 #undef DEF_FUNCTION_TYPE_VAR_2
5351 #undef DEF_FUNCTION_TYPE_VAR_3
5352 #undef DEF_FUNCTION_TYPE_VAR_4
5353 #undef DEF_FUNCTION_TYPE_VAR_5
5354 #undef DEF_POINTER_TYPE
5355 BT_LAST
5358 typedef enum c_builtin_type builtin_type;
5360 /* A temporary array used in communication with def_fn_type. */
5361 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5363 /* A helper function for install_builtin_function_types. Build the function type
5364 for DEF with return type RET and N arguments. If VAR is true, then the
5365 function should be variadic after those N arguments.
5367 Takes special care not to ICE if any of the types involved are
5368 error_mark_node, which indicates that said type is not in fact available
5369 (see builtin_type_for_size), in which case the function type as a whole
5370 should be error_mark_node. */
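/* For instance, the expansion of the DEF_FUNCTION_TYPE_1 macro below
   issues calls such as

     def_fn_type (BT_FN_INT_DOUBLE, BT_INT, 0, 1, BT_DOUBLE);

   which records in builtin_types[BT_FN_INT_DOUBLE] the type of a
   function taking a double and returning an int.  */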
5372 static void
5373 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5375 tree t;
5376 tree *args = XALLOCAVEC (tree, n);
5377 va_list list;
5378 int i;
5380 va_start (list, n);
5381 for (i = 0; i < n; ++i)
5383 builtin_type a = (builtin_type) va_arg (list, int);
5384 t = builtin_types[a];
5385 if (t == error_mark_node)
5386 goto egress;
5387 args[i] = t;
5390 t = builtin_types[ret];
5391 if (t == error_mark_node)
5392 goto egress;
5393 if (var)
5394 t = build_varargs_function_type_array (t, n, args);
5395 else
5396 t = build_function_type_array (t, n, args);
5398 egress:
5399 builtin_types[def] = t;
5400 va_end (list);
5403 /* Build the builtin function types and install them in the builtin_types
5404 array for later use in builtin function decls. */
5406 static void
5407 install_builtin_function_types (void)
5409 tree va_list_ref_type_node;
5410 tree va_list_arg_type_node;
5412 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5414 va_list_arg_type_node = va_list_ref_type_node =
5415 build_pointer_type (TREE_TYPE (va_list_type_node));
5417 else
5419 va_list_arg_type_node = va_list_type_node;
5420 va_list_ref_type_node = build_reference_type (va_list_type_node);
5423 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5424 builtin_types[ENUM] = VALUE;
5425 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5426 def_fn_type (ENUM, RETURN, 0, 0);
5427 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5428 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5429 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5430 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5431 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5432 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5433 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5434 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5435 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5436 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5437 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5438 ARG6) \
5439 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5440 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5441 ARG6, ARG7) \
5442 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5443 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5444 ARG6, ARG7, ARG8) \
5445 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5446 ARG7, ARG8);
5447 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5448 def_fn_type (ENUM, RETURN, 1, 0);
5449 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5450 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5451 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5452 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5453 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5454 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5455 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5456 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5457 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5458 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5459 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5460 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5462 #include "builtin-types.def"
5464 #undef DEF_PRIMITIVE_TYPE
5465 #undef DEF_FUNCTION_TYPE_0
5466 #undef DEF_FUNCTION_TYPE_1
5467 #undef DEF_FUNCTION_TYPE_2
5468 #undef DEF_FUNCTION_TYPE_3
5469 #undef DEF_FUNCTION_TYPE_4
5470 #undef DEF_FUNCTION_TYPE_5
5471 #undef DEF_FUNCTION_TYPE_6
5472 #undef DEF_FUNCTION_TYPE_7
5473 #undef DEF_FUNCTION_TYPE_8
5474 #undef DEF_FUNCTION_TYPE_VAR_0
5475 #undef DEF_FUNCTION_TYPE_VAR_1
5476 #undef DEF_FUNCTION_TYPE_VAR_2
5477 #undef DEF_FUNCTION_TYPE_VAR_3
5478 #undef DEF_FUNCTION_TYPE_VAR_4
5479 #undef DEF_FUNCTION_TYPE_VAR_5
5480 #undef DEF_POINTER_TYPE
5481 builtin_types[(int) BT_LAST] = NULL_TREE;
5484 /* ----------------------------------------------------------------------- *
5485 * BUILTIN ATTRIBUTES *
5486 * ----------------------------------------------------------------------- */
5488 enum built_in_attribute
5490 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5491 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5492 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
5493 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5494 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5495 #include "builtin-attrs.def"
5496 #undef DEF_ATTR_NULL_TREE
5497 #undef DEF_ATTR_INT
5498 #undef DEF_ATTR_STRING
5499 #undef DEF_ATTR_IDENT
5500 #undef DEF_ATTR_TREE_LIST
5501 ATTR_LAST
5504 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5506 static void
5507 install_builtin_attributes (void)
5509 /* Fill in the built_in_attributes array. */
5510 #define DEF_ATTR_NULL_TREE(ENUM) \
5511 built_in_attributes[(int) ENUM] = NULL_TREE;
5512 #define DEF_ATTR_INT(ENUM, VALUE) \
5513 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5514 #define DEF_ATTR_STRING(ENUM, VALUE) \
5515 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
5516 #define DEF_ATTR_IDENT(ENUM, STRING) \
5517 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5518 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5519 built_in_attributes[(int) ENUM] \
5520 = tree_cons (built_in_attributes[(int) PURPOSE], \
5521 built_in_attributes[(int) VALUE], \
5522 built_in_attributes[(int) CHAIN]);
5523 #include "builtin-attrs.def"
5524 #undef DEF_ATTR_NULL_TREE
5525 #undef DEF_ATTR_INT
5526 #undef DEF_ATTR_STRING
5527 #undef DEF_ATTR_IDENT
5528 #undef DEF_ATTR_TREE_LIST
5531 /* Handle a "const" attribute; arguments as in
5532 struct attribute_spec.handler. */
5534 static tree
5535 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5536 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5537 bool *no_add_attrs)
5539 if (TREE_CODE (*node) == FUNCTION_DECL)
5540 TREE_READONLY (*node) = 1;
5541 else
5542 *no_add_attrs = true;
5544 return NULL_TREE;
5547 /* Handle a "nothrow" attribute; arguments as in
5548 struct attribute_spec.handler. */
5550 static tree
5551 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5552 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5553 bool *no_add_attrs)
5555 if (TREE_CODE (*node) == FUNCTION_DECL)
5556 TREE_NOTHROW (*node) = 1;
5557 else
5558 *no_add_attrs = true;
5560 return NULL_TREE;
5563 /* Handle a "pure" attribute; arguments as in
5564 struct attribute_spec.handler. */
5566 static tree
5567 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5568 int ARG_UNUSED (flags), bool *no_add_attrs)
5570 if (TREE_CODE (*node) == FUNCTION_DECL)
5571 DECL_PURE_P (*node) = 1;
5572 /* ??? TODO: Support types. */
5573 else
5575 warning (OPT_Wattributes, "%qs attribute ignored",
5576 IDENTIFIER_POINTER (name));
5577 *no_add_attrs = true;
5580 return NULL_TREE;
5583 /* Handle a "no vops" attribute; arguments as in
5584 struct attribute_spec.handler. */
5586 static tree
5587 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5588 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5589 bool *ARG_UNUSED (no_add_attrs))
5591 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5592 DECL_IS_NOVOPS (*node) = 1;
5593 return NULL_TREE;
5596 /* Helper for nonnull attribute handling; fetch the operand number
5597 from the attribute argument list. */
5599 static bool
5600 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5602 /* Verify the arg number is a constant. */
5603 if (!tree_fits_uhwi_p (arg_num_expr))
5604 return false;
5606 *valp = TREE_INT_CST_LOW (arg_num_expr);
5607 return true;
5610 /* Handle the "nonnull" attribute. */
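/* For instance, given a hypothetical declaration

     extern int f (int *, int *) __attribute__ ((nonnull (1, 2)));

   each operand number must designate a pointer parameter of the
   prototype, which is what the checks below enforce.  */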
5611 static tree
5612 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5613 tree args, int ARG_UNUSED (flags),
5614 bool *no_add_attrs)
5616 tree type = *node;
5617 unsigned HOST_WIDE_INT attr_arg_num;
5619 /* If no arguments are specified, all pointer arguments should be
5620 non-null. Verify a full prototype is given so that the arguments
5621 will have the correct types when we actually check them later. */
5622 if (!args)
5624 if (!prototype_p (type))
5626 error ("nonnull attribute without arguments on a non-prototype");
5627 *no_add_attrs = true;
5629 return NULL_TREE;
5632 /* Argument list specified. Verify that each argument number references
5633 a pointer argument. */
5634 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5636 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5638 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5640 error ("nonnull argument has invalid operand number (argument %lu)",
5641 (unsigned long) attr_arg_num);
5642 *no_add_attrs = true;
5643 return NULL_TREE;
5646 if (prototype_p (type))
5648 function_args_iterator iter;
5649 tree argument;
5651 function_args_iter_init (&iter, type);
5652 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
5654 argument = function_args_iter_cond (&iter);
5655 if (!argument || ck_num == arg_num)
5656 break;
5659 if (!argument
5660 || TREE_CODE (argument) == VOID_TYPE)
5662 error ("nonnull argument with out-of-range operand number "
5663 "(argument %lu, operand %lu)",
5664 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5665 *no_add_attrs = true;
5666 return NULL_TREE;
5669 if (TREE_CODE (argument) != POINTER_TYPE)
5671 error ("nonnull argument references non-pointer operand "
5672 "(argument %lu, operand %lu)",
5673 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5674 *no_add_attrs = true;
5675 return NULL_TREE;
5680 return NULL_TREE;
5683 /* Handle a "sentinel" attribute. */
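/* For instance, a hypothetical declaration

     extern void fv (const char *, ...) __attribute__ ((sentinel));

   requires variadic calls to end with a NULL pointer; an optional
   integer argument selects a position counted from the end instead.  */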
5685 static tree
5686 handle_sentinel_attribute (tree *node, tree name, tree args,
5687 int ARG_UNUSED (flags), bool *no_add_attrs)
5689 if (!prototype_p (*node))
5691 warning (OPT_Wattributes,
5692 "%qs attribute requires prototypes with named arguments",
5693 IDENTIFIER_POINTER (name));
5694 *no_add_attrs = true;
5696 else
5698 if (!stdarg_p (*node))
5700 warning (OPT_Wattributes,
5701 "%qs attribute only applies to variadic functions",
5702 IDENTIFIER_POINTER (name));
5703 *no_add_attrs = true;
5707 if (args)
5709 tree position = TREE_VALUE (args);
5711 if (TREE_CODE (position) != INTEGER_CST)
5713 warning (0, "requested position is not an integer constant");
5714 *no_add_attrs = true;
5716 else
5718 if (tree_int_cst_lt (position, integer_zero_node))
5720 warning (0, "requested position is less than zero");
5721 *no_add_attrs = true;
5726 return NULL_TREE;
5729 /* Handle a "noreturn" attribute; arguments as in
5730 struct attribute_spec.handler. */
5732 static tree
5733 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5734 int ARG_UNUSED (flags), bool *no_add_attrs)
5736 tree type = TREE_TYPE (*node);
5738 /* See FIXME comment in c_common_attribute_table. */
5739 if (TREE_CODE (*node) == FUNCTION_DECL)
5740 TREE_THIS_VOLATILE (*node) = 1;
5741 else if (TREE_CODE (type) == POINTER_TYPE
5742 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5743 TREE_TYPE (*node)
5744 = build_pointer_type
5745 (build_type_variant (TREE_TYPE (type),
5746 TYPE_READONLY (TREE_TYPE (type)), 1));
5747 else
5749 warning (OPT_Wattributes, "%qs attribute ignored",
5750 IDENTIFIER_POINTER (name));
5751 *no_add_attrs = true;
5754 return NULL_TREE;
5757 /* Handle a "leaf" attribute; arguments as in
5758 struct attribute_spec.handler. */
5760 static tree
5761 handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5762 int ARG_UNUSED (flags), bool *no_add_attrs)
5764 if (TREE_CODE (*node) != FUNCTION_DECL)
5766 warning (OPT_Wattributes, "%qE attribute ignored", name);
5767 *no_add_attrs = true;
5769 if (!TREE_PUBLIC (*node))
5771 warning (OPT_Wattributes, "%qE attribute has no effect", name);
5772 *no_add_attrs = true;
5775 return NULL_TREE;
5778 /* Handle a "always_inline" attribute; arguments as in
5779 struct attribute_spec.handler. */
5781 static tree
5782 handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5783 int ARG_UNUSED (flags), bool *no_add_attrs)
5785 if (TREE_CODE (*node) == FUNCTION_DECL)
5787 /* Set the attribute and mark it for disregarding inline limits. */
5788 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
5790 else
5792 warning (OPT_Wattributes, "%qE attribute ignored", name);
5793 *no_add_attrs = true;
5796 return NULL_TREE;
5799 /* Handle a "malloc" attribute; arguments as in
5800 struct attribute_spec.handler. */
5802 static tree
5803 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5804 int ARG_UNUSED (flags), bool *no_add_attrs)
5806 if (TREE_CODE (*node) == FUNCTION_DECL
5807 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5808 DECL_IS_MALLOC (*node) = 1;
5809 else
5811 warning (OPT_Wattributes, "%qs attribute ignored",
5812 IDENTIFIER_POINTER (name));
5813 *no_add_attrs = true;
5816 return NULL_TREE;
5819 /* Fake handler for attributes we don't properly support. */
5821 tree
5822 fake_attribute_handler (tree * ARG_UNUSED (node),
5823 tree ARG_UNUSED (name),
5824 tree ARG_UNUSED (args),
5825 int ARG_UNUSED (flags),
5826 bool * ARG_UNUSED (no_add_attrs))
5828 return NULL_TREE;
5831 /* Handle a "type_generic" attribute. */
5833 static tree
5834 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5835 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5836 bool * ARG_UNUSED (no_add_attrs))
5838 /* Ensure we have a function type. */
5839 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5841 /* Ensure we have a variadic function. */
5842 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
5844 return NULL_TREE;
5847 /* Handle a "vector_size" attribute; arguments as in
5848 struct attribute_spec.handler. */
5850 static tree
5851 handle_vector_size_attribute (tree *node, tree name, tree args,
5852 int ARG_UNUSED (flags), bool *no_add_attrs)
5854 tree type = *node;
5855 tree vector_type;
5857 *no_add_attrs = true;
5859 /* We need to provide for vector pointers, vector arrays, and
5860 functions returning vectors. For example:
5862 __attribute__((vector_size(16))) short *foo;
5864 In this case, the mode is SI, but the type being modified is
5865 HI, so we need to look further. */
5866 while (POINTER_TYPE_P (type)
5867 || TREE_CODE (type) == FUNCTION_TYPE
5868 || TREE_CODE (type) == ARRAY_TYPE)
5869 type = TREE_TYPE (type);
5871 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
5872 if (!vector_type)
5873 return NULL_TREE;
5875 /* Build back pointers if needed. */
5876 *node = reconstruct_complex_type (*node, vector_type);
5878 return NULL_TREE;
5881 /* Handle a "vector_type" attribute; arguments as in
5882 struct attribute_spec.handler. */
5884 static tree
5885 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5886 int ARG_UNUSED (flags), bool *no_add_attrs)
5888 tree type = *node;
5889 tree vector_type;
5891 *no_add_attrs = true;
5893 if (TREE_CODE (type) != ARRAY_TYPE)
5895 error ("attribute %qs applies to array types only",
5896 IDENTIFIER_POINTER (name));
5897 return NULL_TREE;
5900 vector_type = build_vector_type_for_array (type, name);
5901 if (!vector_type)
5902 return NULL_TREE;
5904 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
5905 *node = vector_type;
5907 return NULL_TREE;
5910 /* ----------------------------------------------------------------------- *
5911 * BUILTIN FUNCTIONS *
5912 * ----------------------------------------------------------------------- */
5914 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5915 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5916 if nonansi_p and flag_no_nonansi_builtin. */
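/* For instance, for memcpy this can declare __builtin_memcpy and, when
   BOTH_P is set, also plain memcpy as an alternate entry point for it.  */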
5918 static void
5919 def_builtin_1 (enum built_in_function fncode,
5920 const char *name,
5921 enum built_in_class fnclass,
5922 tree fntype, tree libtype,
5923 bool both_p, bool fallback_p,
5924 bool nonansi_p ATTRIBUTE_UNUSED,
5925 tree fnattrs, bool implicit_p)
5927 tree decl;
5928 const char *libname;
5930 /* Preserve an already installed decl. It most likely was set up in advance
5931 (e.g. as part of the internal builtins) for specific reasons. */
5932 if (builtin_decl_explicit (fncode) != NULL_TREE)
5933 return;
5935 gcc_assert ((!both_p && !fallback_p)
5936 || !strncmp (name, "__builtin_",
5937 strlen ("__builtin_")));
5939 libname = name + strlen ("__builtin_");
5940 decl = add_builtin_function (name, fntype, fncode, fnclass,
5941 (fallback_p ? libname : NULL),
5942 fnattrs);
5943 if (both_p)
5944 /* ??? This is normally further controlled by command-line options
5945 like -fno-builtin, but we don't have them for Ada. */
5946 add_builtin_function (libname, libtype, fncode, fnclass,
5947 NULL, fnattrs);
5949 set_builtin_decl (fncode, decl, implicit_p);
5952 static int flag_isoc94 = 0;
5953 static int flag_isoc99 = 0;
5954 static int flag_isoc11 = 0;
5956 /* Install what the common builtins.def offers. */
5958 static void
5959 install_builtin_functions (void)
5961 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
5962 NONANSI_P, ATTRS, IMPLICIT, COND) \
5963 if (NAME && COND) \
5964 def_builtin_1 (ENUM, NAME, CLASS, \
5965 builtin_types[(int) TYPE], \
5966 builtin_types[(int) LIBTYPE], \
5967 BOTH_P, FALLBACK_P, NONANSI_P, \
5968 built_in_attributes[(int) ATTRS], IMPLICIT);
5969 #include "builtins.def"
5970 #undef DEF_BUILTIN
5973 /* ----------------------------------------------------------------------- *
5974 * BUILTIN FUNCTIONS *
5975 * ----------------------------------------------------------------------- */
5977 /* Install the builtin functions we might need. */
5979 void
5980 gnat_install_builtins (void)
5982 install_builtin_elementary_types ();
5983 install_builtin_function_types ();
5984 install_builtin_attributes ();
5986 /* Install builtins used by generic middle-end pieces first. Some of these
5987 know about internal specificities and control attributes accordingly, for
5988 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
5989 the generic definition from builtins.def. */
5990 build_common_builtin_nodes ();
5992 /* Now, install the target specific builtins, such as the AltiVec family on
5993 ppc, and the common set as exposed by builtins.def. */
5994 targetm.init_builtins ();
5995 install_builtin_functions ();
5998 #include "gt-ada-utils.h"
5999 #include "gtype-ada.h"