2014-11-18 Christophe Lyon <christophe.lyon@linaro.org>
[official-gcc.git] / gcc / ada / gcc-interface / utils.c
blob4d350605552b29f5f7a8f796efcbdbf270a2b685
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2014, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "tree.h"
31 #include "stringpool.h"
32 #include "stor-layout.h"
33 #include "attribs.h"
34 #include "varasm.h"
35 #include "flags.h"
36 #include "toplev.h"
37 #include "diagnostic-core.h"
38 #include "output.h"
39 #include "ggc.h"
40 #include "debug.h"
41 #include "convert.h"
42 #include "target.h"
43 #include "common/common-target.h"
44 #include "langhooks.h"
45 #include "hash-map.h"
46 #include "is-a.h"
47 #include "plugin-api.h"
48 #include "vec.h"
49 #include "hashtab.h"
50 #include "hash-set.h"
51 #include "machmode.h"
52 #include "hard-reg-set.h"
53 #include "input.h"
54 #include "function.h"
55 #include "ipa-ref.h"
56 #include "cgraph.h"
57 #include "diagnostic.h"
58 #include "timevar.h"
59 #include "tree-dump.h"
60 #include "tree-inline.h"
61 #include "tree-iterator.h"
63 #include "ada.h"
64 #include "types.h"
65 #include "atree.h"
66 #include "elists.h"
67 #include "namet.h"
68 #include "nlists.h"
69 #include "stringt.h"
70 #include "uintp.h"
71 #include "fe.h"
72 #include "sinfo.h"
73 #include "einfo.h"
74 #include "ada-tree.h"
75 #include "gigi.h"
77 /* If nonzero, pretend we are allocating at global level. */
78 int force_global;
80 /* The default alignment of "double" floating-point types, i.e. floating
81 point types whose size is equal to 64 bits, or 0 if this alignment is
82 not specifically capped. */
83 int double_float_alignment;
85 /* The default alignment of "double" or larger scalar types, i.e. scalar
86 types whose size is greater or equal to 64 bits, or 0 if this alignment
87 is not specifically capped. */
88 int double_scalar_alignment;
90 /* True if floating-point arithmetics may use wider intermediate results. */
91 bool fp_arith_may_widen = true;
93 /* Tree nodes for the various types and decls we create. */
94 tree gnat_std_decls[(int) ADT_LAST];
96 /* Functions to call for each of the possible raise reasons. */
97 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
99 /* Likewise, but with extra info for each of the possible raise reasons. */
100 tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
102 /* Forward declarations for handlers of attributes. */
103 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
104 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
105 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
106 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
107 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
108 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
109 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
110 static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
111 static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
112 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
113 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
114 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
115 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
117 /* Fake handler for attributes we don't properly support, typically because
118 they'd require dragging a lot of the common-c front-end circuitry. */
119 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
121 /* Table of machine-independent internal attributes for Ada. We support
122 this minimal set of attributes to accommodate the needs of builtins. */
123 const struct attribute_spec gnat_internal_attribute_table[] =
125 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
126 affects_type_identity } */
127 { "const", 0, 0, true, false, false, handle_const_attribute,
128 false },
129 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute,
130 false },
131 { "pure", 0, 0, true, false, false, handle_pure_attribute,
132 false },
133 { "no vops", 0, 0, true, false, false, handle_novops_attribute,
134 false },
135 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute,
136 false },
137 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute,
138 false },
139 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute,
140 false },
141 { "leaf", 0, 0, true, false, false, handle_leaf_attribute,
142 false },
143 { "always_inline",0, 0, true, false, false, handle_always_inline_attribute,
144 false },
145 { "malloc", 0, 0, true, false, false, handle_malloc_attribute,
146 false },
147 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute,
148 false },
150 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute,
151 false },
152 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute,
153 false },
154 { "may_alias", 0, 0, false, true, false, NULL, false },
156 /* ??? format and format_arg are heavy and not supported, which actually
157 prevents support for stdio builtins, which we however declare as part
158 of the common builtins.def contents. */
159 { "format", 3, 3, false, true, true, fake_attribute_handler, false },
160 { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },
162 { NULL, 0, 0, false, false, false, NULL, false }
165 /* Associates a GNAT tree node to a GCC tree node. It is used in
166 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
167 of `save_gnu_tree' for more info. */
168 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
170 #define GET_GNU_TREE(GNAT_ENTITY) \
171 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
173 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
174 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
176 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
177 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
179 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
180 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
182 #define GET_DUMMY_NODE(GNAT_ENTITY) \
183 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
185 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
186 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
188 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
189 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
191 /* This variable keeps a table for types for each precision so that we only
192 allocate each of them once. Signed and unsigned types are kept separate.
194 Note that these types are only used when fold-const requests something
195 special. Perhaps we should NOT share these types; we'll see how it
196 goes later. */
197 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
199 /* Likewise for float types, but record these by mode. */
200 static GTY(()) tree float_types[NUM_MACHINE_MODES];
202 /* For each binding contour we allocate a binding_level structure to indicate
203 the binding depth. */
205 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
206 /* The binding level containing this one (the enclosing binding level). */
207 struct gnat_binding_level *chain;
208 /* The BLOCK node for this level. */
209 tree block;
210 /* If nonzero, the setjmp buffer that needs to be updated for any
211 variable-sized definition within this context. */
212 tree jmpbuf_decl;
215 /* The binding level currently in effect. */
216 static GTY(()) struct gnat_binding_level *current_binding_level;
218 /* A chain of gnat_binding_level structures awaiting reuse. */
219 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
221 /* The context to be used for global declarations. */
222 static GTY(()) tree global_context;
224 /* An array of global declarations. */
225 static GTY(()) vec<tree, va_gc> *global_decls;
227 /* An array of builtin function declarations. */
228 static GTY(()) vec<tree, va_gc> *builtin_decls;
230 /* An array of global renaming pointers. */
231 static GTY(()) vec<tree, va_gc> *global_renaming_pointers;
233 /* A chain of unused BLOCK nodes. */
234 static GTY((deletable)) tree free_block_chain;
236 static int pad_type_hash_marked_p (const void *p);
237 static hashval_t pad_type_hash_hash (const void *p);
238 static int pad_type_hash_eq (const void *p1, const void *p2);
240 /* A hash table of padded types. It is modelled on the generic type
241 hash table in tree.c, which must thus be used as a reference. */
242 struct GTY(()) pad_type_hash {
243 unsigned long hash;
244 tree type;
247 static GTY ((if_marked ("pad_type_hash_marked_p"),
248 param_is (struct pad_type_hash)))
249 htab_t pad_type_hash_table;
251 static tree merge_sizes (tree, tree, tree, bool, bool);
252 static tree compute_related_constant (tree, tree);
253 static tree split_plus (tree, tree *);
254 static tree float_type_for_precision (int, machine_mode);
255 static tree convert_to_fat_pointer (tree, tree);
256 static unsigned int scale_by_factor_of (tree, unsigned int);
257 static bool potential_alignment_gap (tree, tree, tree);
259 /* A linked list used as a queue to defer the initialization of the
260 DECL_CONTEXT attribute of ..._DECL nodes and of the TYPE_CONTEXT attribute
261 of ..._TYPE nodes. */
262 struct deferred_decl_context_node
264 tree decl; /* The ..._DECL node to work on. */
265 Entity_Id gnat_scope; /* The corresponding entity's Scope attribute. */
266 int force_global; /* force_global value when pushing DECL. */
267 vec<tree, va_heap, vl_ptr> types; /* A list of ..._TYPE nodes to propagate the
268 context to. */
269 struct deferred_decl_context_node *next; /* The next queue item. */
272 static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;
274 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
275 feed it with the elaboration of GNAT_SCOPE. */
276 static struct deferred_decl_context_node *
277 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);
279 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
280 feed it with the DECL_CONTEXT computed as part of N as soon as it is
281 computed. */
282 static void add_deferred_type_context (struct deferred_decl_context_node *n,
283 tree type);
285 /* Initialize data structures of the utils.c module. */
287 void
288 init_gnat_utils (void)
290 /* Initialize the association of GNAT nodes to GCC trees. */
291 associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
293 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
294 dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
296 /* Initialize the hash table of padded types. */
297 pad_type_hash_table
298 = htab_create_ggc (512, pad_type_hash_hash, pad_type_hash_eq, 0);
301 /* Destroy data structures of the utils.c module. */
303 void
304 destroy_gnat_utils (void)
306 /* Destroy the association of GNAT nodes to GCC trees. */
307 ggc_free (associate_gnat_to_gnu);
308 associate_gnat_to_gnu = NULL;
310 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
311 ggc_free (dummy_node_table);
312 dummy_node_table = NULL;
314 /* Destroy the hash table of padded types. */
315 htab_delete (pad_type_hash_table);
316 pad_type_hash_table = NULL;
318 /* Invalidate the global renaming pointers. */
319 invalidate_global_renaming_pointers ();
322 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
323 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
324 If NO_CHECK is true, the latter check is suppressed.
326 If GNU_DECL is zero, reset a previous association. */
328 void
329 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
331 /* Check that GNAT_ENTITY is not already defined and that it is being set
332 to something which is a decl. If that is not the case, this usually
333 means GNAT_ENTITY is defined twice, but occasionally is due to some
334 Gigi problem. */
335 gcc_assert (!(gnu_decl
336 && (PRESENT_GNU_TREE (gnat_entity)
337 || (!no_check && !DECL_P (gnu_decl)))));
339 SET_GNU_TREE (gnat_entity, gnu_decl);
342 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
343 that was associated with it. If there is no such tree node, abort.
345 In some cases, such as delayed elaboration or expressions that need to
346 be elaborated only once, GNAT_ENTITY is really not an entity. */
348 tree
349 get_gnu_tree (Entity_Id gnat_entity)
351 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
352 return GET_GNU_TREE (gnat_entity);
355 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
357 bool
358 present_gnu_tree (Entity_Id gnat_entity)
360 return PRESENT_GNU_TREE (gnat_entity);
363 /* Make a dummy type corresponding to GNAT_TYPE. */
365 tree
366 make_dummy_type (Entity_Id gnat_type)
368 Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
369 tree gnu_type;
371 /* If there was no equivalent type (can only happen when just annotating
372 types) or underlying type, go back to the original type. */
373 if (No (gnat_equiv))
374 gnat_equiv = gnat_type;
376 /* If it there already a dummy type, use that one. Else make one. */
377 if (PRESENT_DUMMY_NODE (gnat_equiv))
378 return GET_DUMMY_NODE (gnat_equiv);
380 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
381 an ENUMERAL_TYPE. */
382 gnu_type = make_node (Is_Record_Type (gnat_equiv)
383 ? tree_code_for_record_type (gnat_equiv)
384 : ENUMERAL_TYPE);
385 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
386 TYPE_DUMMY_P (gnu_type) = 1;
387 TYPE_STUB_DECL (gnu_type)
388 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
389 if (Is_By_Reference_Type (gnat_equiv))
390 TYPE_BY_REFERENCE_P (gnu_type) = 1;
392 SET_DUMMY_NODE (gnat_equiv, gnu_type);
394 return gnu_type;
397 /* Return the dummy type that was made for GNAT_TYPE, if any. */
399 tree
400 get_dummy_type (Entity_Id gnat_type)
402 return GET_DUMMY_NODE (gnat_type);
405 /* Build dummy fat and thin pointer types whose designated type is specified
406 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
408 void
409 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
411 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
412 tree gnu_fat_type, fields, gnu_object_type;
414 gnu_template_type = make_node (RECORD_TYPE);
415 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
416 TYPE_DUMMY_P (gnu_template_type) = 1;
417 gnu_ptr_template = build_pointer_type (gnu_template_type);
419 gnu_array_type = make_node (ENUMERAL_TYPE);
420 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
421 TYPE_DUMMY_P (gnu_array_type) = 1;
422 gnu_ptr_array = build_pointer_type (gnu_array_type);
424 gnu_fat_type = make_node (RECORD_TYPE);
425 /* Build a stub DECL to trigger the special processing for fat pointer types
426 in gnat_pushdecl. */
427 TYPE_NAME (gnu_fat_type)
428 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
429 gnu_fat_type);
430 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
431 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
432 DECL_CHAIN (fields)
433 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
434 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
435 finish_fat_pointer_type (gnu_fat_type, fields);
436 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
437 /* Suppress debug info until after the type is completed. */
438 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
440 gnu_object_type = make_node (RECORD_TYPE);
441 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
442 TYPE_DUMMY_P (gnu_object_type) = 1;
444 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
445 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
448 /* Return true if we are in the global binding level. */
450 bool
451 global_bindings_p (void)
453 return force_global || current_function_decl == NULL_TREE;
456 /* Enter a new binding level. */
458 void
459 gnat_pushlevel (void)
461 struct gnat_binding_level *newlevel = NULL;
463 /* Reuse a struct for this binding level, if there is one. */
464 if (free_binding_level)
466 newlevel = free_binding_level;
467 free_binding_level = free_binding_level->chain;
469 else
470 newlevel = ggc_alloc<gnat_binding_level> ();
472 /* Use a free BLOCK, if any; otherwise, allocate one. */
473 if (free_block_chain)
475 newlevel->block = free_block_chain;
476 free_block_chain = BLOCK_CHAIN (free_block_chain);
477 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
479 else
480 newlevel->block = make_node (BLOCK);
482 /* Point the BLOCK we just made to its parent. */
483 if (current_binding_level)
484 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
486 BLOCK_VARS (newlevel->block) = NULL_TREE;
487 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
488 TREE_USED (newlevel->block) = 1;
490 /* Add this level to the front of the chain (stack) of active levels. */
491 newlevel->chain = current_binding_level;
492 newlevel->jmpbuf_decl = NULL_TREE;
493 current_binding_level = newlevel;
496 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
497 and point FNDECL to this BLOCK. */
499 void
500 set_current_block_context (tree fndecl)
502 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
503 DECL_INITIAL (fndecl) = current_binding_level->block;
504 set_block_for_group (current_binding_level->block);
507 /* Set the jmpbuf_decl for the current binding level to DECL. */
509 void
510 set_block_jmpbuf_decl (tree decl)
512 current_binding_level->jmpbuf_decl = decl;
515 /* Get the jmpbuf_decl, if any, for the current binding level. */
517 tree
518 get_block_jmpbuf_decl (void)
520 return current_binding_level->jmpbuf_decl;
523 /* Exit a binding level. Set any BLOCK into the current code group. */
525 void
526 gnat_poplevel (void)
528 struct gnat_binding_level *level = current_binding_level;
529 tree block = level->block;
531 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
532 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
534 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
535 are no variables free the block and merge its subblocks into those of its
536 parent block. Otherwise, add it to the list of its parent. */
537 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
539 else if (BLOCK_VARS (block) == NULL_TREE)
541 BLOCK_SUBBLOCKS (level->chain->block)
542 = block_chainon (BLOCK_SUBBLOCKS (block),
543 BLOCK_SUBBLOCKS (level->chain->block));
544 BLOCK_CHAIN (block) = free_block_chain;
545 free_block_chain = block;
547 else
549 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
550 BLOCK_SUBBLOCKS (level->chain->block) = block;
551 TREE_USED (block) = 1;
552 set_block_for_group (block);
555 /* Free this binding structure. */
556 current_binding_level = level->chain;
557 level->chain = free_binding_level;
558 free_binding_level = level;
561 /* Exit a binding level and discard the associated BLOCK. */
563 void
564 gnat_zaplevel (void)
566 struct gnat_binding_level *level = current_binding_level;
567 tree block = level->block;
569 BLOCK_CHAIN (block) = free_block_chain;
570 free_block_chain = block;
572 /* Free this binding structure. */
573 current_binding_level = level->chain;
574 level->chain = free_binding_level;
575 free_binding_level = level;
578 /* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
580 static void
581 gnat_set_type_context (tree type, tree context)
583 tree decl = TYPE_STUB_DECL (type);
585 TYPE_CONTEXT (type) = context;
587 while (decl && DECL_PARALLEL_TYPE (decl))
589 tree parallel_type = DECL_PARALLEL_TYPE (decl);
591 /* Give a context to the parallel types and their stub decl, if any.
592 Some parallel types seems to be present in multiple parallel type
593 chains, so don't mess with their context if they already have one. */
594 if (TYPE_CONTEXT (parallel_type) == NULL_TREE)
596 if (TYPE_STUB_DECL (parallel_type) != NULL_TREE)
597 DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
598 TYPE_CONTEXT (parallel_type) = context;
601 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
605 /* Return the innermost scope, starting at GNAT_NODE, we are be interested in
606 the debug info, or Empty if there is no such scope. If not NULL, set
607 IS_SUBPROGRAM to whether the returned entity is a subprogram. */
609 static Entity_Id
610 get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
612 Entity_Id gnat_entity;
614 if (is_subprogram)
615 *is_subprogram = false;
617 if (Nkind (gnat_node) == N_Defining_Identifier)
618 gnat_entity = Scope (gnat_node);
619 else
620 return Empty;
622 while (Present (gnat_entity))
624 switch (Ekind (gnat_entity))
626 case E_Function:
627 case E_Procedure:
628 if (Present (Protected_Body_Subprogram (gnat_entity)))
629 gnat_entity = Protected_Body_Subprogram (gnat_entity);
631 /* If the scope is a subprogram, then just rely on
632 current_function_decl, so that we don't have to defer
633 anything. This is needed because other places rely on the
634 validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes. */
635 if (is_subprogram)
636 *is_subprogram = true;
637 return gnat_entity;
639 case E_Record_Type:
640 case E_Record_Subtype:
641 return gnat_entity;
643 default:
644 /* By default, we are not interested in this particular scope: go to
645 the outer one. */
646 break;
648 gnat_entity = Scope (gnat_entity);
650 return Empty;
653 /* If N is NULL, set TYPE's context to CONTEXT. Defer this to the processing of
654 N otherwise. */
656 static void
657 defer_or_set_type_context (tree type,
658 tree context,
659 struct deferred_decl_context_node *n)
661 if (n)
662 add_deferred_type_context (n, type);
663 else
664 gnat_set_type_context (type, context);
667 /* Return global_context. Create it if needed, first. */
669 static tree
670 get_global_context (void)
672 if (!global_context)
673 global_context = build_translation_unit_decl (NULL_TREE);
674 return global_context;
677 /* Record DECL as belonging to the current lexical scope and use GNAT_NODE
678 for location information and flag propagation. */
680 void
681 gnat_pushdecl (tree decl, Node_Id gnat_node)
683 tree context = NULL_TREE;
684 struct deferred_decl_context_node *deferred_decl_context = NULL;
686 /* If explicitely asked to make DECL global or if it's an imported nested
687 object, short-circuit the regular Scope-based context computation. */
688 if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
690 /* Rely on the GNAT scope, or fallback to the current_function_decl if
691 the GNAT scope reached the global scope, if it reached a subprogram
692 or the declaration is a subprogram or a variable (for them we skip
693 intermediate context types because the subprogram body elaboration
694 machinery and the inliner both expect a subprogram context).
696 Falling back to current_function_decl is necessary for implicit
697 subprograms created by gigi, such as the elaboration subprograms. */
698 bool context_is_subprogram = false;
699 const Entity_Id gnat_scope
700 = get_debug_scope (gnat_node, &context_is_subprogram);
702 if (Present (gnat_scope)
703 && !context_is_subprogram
704 && TREE_CODE (decl) != FUNCTION_DECL
705 && TREE_CODE (decl) != VAR_DECL)
706 /* Always assume the scope has not been elaborated, thus defer the
707 context propagation to the time its elaboration will be
708 available. */
709 deferred_decl_context
710 = add_deferred_decl_context (decl, gnat_scope, force_global);
712 /* External declarations (when force_global > 0) may not be in a
713 local context. */
714 else if (current_function_decl != NULL_TREE && force_global == 0)
715 context = current_function_decl;
718 /* If either we are forced to be in global mode or if both the GNAT scope and
719 the current_function_decl did not help determining the context, use the
720 global scope. */
721 if (!deferred_decl_context && context == NULL_TREE)
722 context = get_global_context ();
724 /* Functions imported in another function are not really nested.
725 For really nested functions mark them initially as needing
726 a static chain for uses of that flag before unnesting;
727 lower_nested_functions will then recompute it. */
728 if (TREE_CODE (decl) == FUNCTION_DECL
729 && !TREE_PUBLIC (decl)
730 && context != NULL_TREE
731 && (TREE_CODE (context) == FUNCTION_DECL
732 || decl_function_context (context) != NULL_TREE))
733 DECL_STATIC_CHAIN (decl) = 1;
735 if (!deferred_decl_context)
736 DECL_CONTEXT (decl) = context;
738 TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));
740 /* Set the location of DECL and emit a declaration for it. */
741 if (Present (gnat_node) && !renaming_from_generic_instantiation_p (gnat_node))
742 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
744 add_decl_expr (decl, gnat_node);
746 /* Put the declaration on the list. The list of declarations is in reverse
747 order. The list will be reversed later. Put global declarations in the
748 globals list and local ones in the current block. But skip TYPE_DECLs
749 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
750 with the debugger and aren't needed anyway. */
751 if (!(TREE_CODE (decl) == TYPE_DECL
752 && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
754 if (DECL_EXTERNAL (decl))
756 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
757 vec_safe_push (builtin_decls, decl);
759 else if (global_bindings_p ())
760 vec_safe_push (global_decls, decl);
761 else
763 DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
764 BLOCK_VARS (current_binding_level->block) = decl;
768 /* For the declaration of a type, set its name if it either is not already
769 set or if the previous type name was not derived from a source name.
770 We'd rather have the type named with a real name and all the pointer
771 types to the same object have the same POINTER_TYPE node. Code in the
772 equivalent function of c-decl.c makes a copy of the type node here, but
773 that may cause us trouble with incomplete types. We make an exception
774 for fat pointer types because the compiler automatically builds them
775 for unconstrained array types and the debugger uses them to represent
776 both these and pointers to these. */
777 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
779 tree t = TREE_TYPE (decl);
781 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
783 /* Array and pointer types aren't "tagged" types so we force the
784 type to be associated with its typedef in the DWARF back-end,
785 in order to make sure that the latter is always preserved. */
786 if (!DECL_ARTIFICIAL (decl)
787 && (TREE_CODE (t) == ARRAY_TYPE
788 || TREE_CODE (t) == POINTER_TYPE))
790 tree tt = build_distinct_type_copy (t);
791 if (TREE_CODE (t) == POINTER_TYPE)
792 TYPE_NEXT_PTR_TO (t) = tt;
793 TYPE_NAME (tt) = DECL_NAME (decl);
794 defer_or_set_type_context (tt,
795 DECL_CONTEXT (decl),
796 deferred_decl_context);
797 TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
798 DECL_ORIGINAL_TYPE (decl) = tt;
801 else if (TYPE_IS_FAT_POINTER_P (t))
803 /* We need a variant for the placeholder machinery to work. */
804 tree tt = build_variant_type_copy (t);
805 TYPE_NAME (tt) = decl;
806 defer_or_set_type_context (tt,
807 DECL_CONTEXT (decl),
808 deferred_decl_context);
809 TREE_USED (tt) = TREE_USED (t);
810 TREE_TYPE (decl) = tt;
811 if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
812 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
813 else
814 DECL_ORIGINAL_TYPE (decl) = t;
815 DECL_ARTIFICIAL (decl) = 0;
816 t = NULL_TREE;
818 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
820 else
821 t = NULL_TREE;
823 /* Propagate the name to all the anonymous variants. This is needed
824 for the type qualifiers machinery to work properly. Also propagate
825 the context to them. Note that the context will be propagated to all
826 parallel types too thanks to gnat_set_type_context. */
827 if (t)
828 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
829 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
831 TYPE_NAME (t) = decl;
832 defer_or_set_type_context (t,
833 DECL_CONTEXT (decl),
834 deferred_decl_context);
839 /* Create a record type that contains a SIZE bytes long field of TYPE with a
840 starting bit position so that it is aligned to ALIGN bits, and leaving at
841 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
842 record is guaranteed to get. GNAT_NODE is used for the position of the
843 associated TYPE_DECL. */
845 tree
make_aligning_type (tree type, unsigned int align, tree size,
		    unsigned int base_align, int room, Node_Id gnat_node)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes.  We use a
     record placeholder to express record'address.  */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  /* The record's own address, as a sizetype byte offset expression.  It is
     only resolved when the placeholder is later substituted for an actual
     object.  */
  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

                    <--------- pos ---------->
                {     +------------+-------------+-----------------+
      record  =>{     |############|     ...     | field (type)    |
                {     +------------+-------------+-----------------+
		      |<-- room -->|<- voffset ->|<---- size ----->|
		       o            o
		       |            |
		       record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record.  */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_IDENTIFIER (type);

  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS.  The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address.  */
  voffset_st = size_binop (BIT_AND_EXPR,
			   fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
			   size_int ((align / BITS_PER_UNIT) - 1));

  /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype.  */
  pos = size_binop (MULT_EXPR,
		    convert (bitsizetype,
			     size_binop (PLUS_EXPR, room_st, voffset_st)),
		    bitsize_unit_node);

  /* Craft the GCC record representation.  We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield.  We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression.  */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
			     pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  /* The caller-specified BASE_ALIGN is the alignment of the whole carrier
     record, distinct from ALIGN which governs the field position.  */
  TYPE_ALIGN (record_type) = base_align;
  TYPE_USER_ALIGN (record_type) = 1;

  /* The allocated size covers the worst-case voffset (ALIGN bits) plus the
     ROOM prefix, on top of the field's own SIZE.  */
  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
		  size_binop (MULT_EXPR, convert (bitsizetype, size),
			      bitsize_unit_node),
		  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
		  size_int (room + align / BITS_PER_UNIT));

  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, record_type, true, false, gnat_node);

  return record_type;
}
932 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
933 as the field type of a packed record if IN_RECORD is true, or as the
934 component type of a packed array if IN_RECORD is false. See if we can
935 rewrite it either as a type that has a non-BLKmode, which we can pack
936 tighter in the packed record case, or as a smaller type. If so, return
937 the new type. If not, return the original type. */
tree
make_packable_type (tree type, bool in_record)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
  unsigned HOST_WIDE_INT new_size;
  tree new_type, old_field, field_list = NULL_TREE;
  unsigned int align;

  /* No point in doing anything if the size is zero.  */
  if (size == 0)
    return type;

  new_type = make_node (TREE_CODE (type));

  /* Copy the name and flags from the old type to that of the new.
     Note that we rely on the pointer equality created here for
     TYPE_NAME to look through conversions in various places.  */
  TYPE_NAME (new_type) = TYPE_NAME (type);
  TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
  TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
  if (TREE_CODE (type) == RECORD_TYPE)
    TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);

  /* If we are in a record and have a small size, set the alignment to
     try for an integral mode.  Otherwise set it to try for a smaller
     type with BLKmode.  */
  if (in_record && size <= MAX_FIXED_MODE_SIZE)
    {
      align = ceil_pow2 (size);
      TYPE_ALIGN (new_type) = align;
      new_size = (size + align - 1) & -align;
    }
  else
    {
      /* NOTE(review): this ALIGN deliberately shadows the outer one; the
	 outer ALIGN is only used in the branch above.  */
      unsigned HOST_WIDE_INT align;

      /* Do not try to shrink the size if the RM size is not constant.  */
      if (TYPE_CONTAINS_TEMPLATE_P (type)
	  || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
	return type;

      /* Round the RM size up to a unit boundary to get the minimal size
	 for a BLKmode record.  Give up if it's already the size.  */
      new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
      new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
      if (new_size == size)
	return type;

      /* Keep the largest power of 2 dividing NEW_SIZE as the alignment,
	 capped by the original type's alignment.  */
      align = new_size & -new_size;
      TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
    }

  TYPE_USER_ALIGN (new_type) = 1;

  /* Now copy the fields, keeping the position and size as we don't want
     to change the layout by propagating the packedness downwards.  */
  for (old_field = TYPE_FIELDS (type); old_field;
       old_field = DECL_CHAIN (old_field))
    {
      tree new_field_type = TREE_TYPE (old_field);
      tree new_field, new_size;

      /* Recursively make aggregate field types packable as well.  */
      if (RECORD_OR_UNION_TYPE_P (new_field_type)
	  && !TYPE_FAT_POINTER_P (new_field_type)
	  && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
	new_field_type = make_packable_type (new_field_type, true);

      /* However, for the last field in a not already packed record type
	 that is of an aggregate type, we need to use the RM size in the
	 packable version of the record type, see finish_record_type.  */
      if (!DECL_CHAIN (old_field)
	  && !TYPE_PACKED (type)
	  && RECORD_OR_UNION_TYPE_P (new_field_type)
	  && !TYPE_FAT_POINTER_P (new_field_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
	  && TYPE_ADA_SIZE (new_field_type))
	new_size = TYPE_ADA_SIZE (new_field_type);
      else
	new_size = DECL_SIZE (old_field);

      new_field
	= create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
			     new_size, bit_position (old_field),
			     TYPE_PACKED (type),
			     !DECL_NONADDRESSABLE_P (old_field));

      DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
      SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
      if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
	DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);

      /* Fields are prepended here, then put back in order by the
	 nreverse below.  */
      DECL_CHAIN (new_field) = field_list;
      field_list = new_field;
    }

  /* Rep level 2: inherit the layout as-is, only finalize the fields.  */
  finish_record_type (new_type, nreverse (field_list), 2, false);
  relate_alias_sets (new_type, type, ALIAS_SET_COPY);
  if (TYPE_STUB_DECL (type))
    SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
			    DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));

  /* If this is a padding record, we never want to make the size smaller
     than what was specified.  For QUAL_UNION_TYPE, also copy the size.  */
  if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      TYPE_SIZE (new_type) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
      new_size = size;
    }
  else
    {
      TYPE_SIZE (new_type) = bitsize_int (new_size);
      TYPE_SIZE_UNIT (new_type)
	= size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
    }

  if (!TYPE_CONTAINS_TEMPLATE_P (type))
    SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));

  compute_record_mode (new_type);

  /* Try harder to get a packable type if necessary, for example
     in case the record itself contains a BLKmode field.  */
  if (in_record && TYPE_MODE (new_type) == BLKmode)
    SET_TYPE_MODE (new_type,
		   mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));

  /* If neither the mode nor the size has shrunk, return the old type.  */
  if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
    return type;

  return new_type;
}
1073 /* Given a type TYPE, return a new type whose size is appropriate for SIZE.
1074 If TYPE is the best type, return it. Otherwise, make a new type. We
1075 only support new integral and pointer types. FOR_BIASED is true if
1076 we are making a biased type. */
tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error.  Likewise if it's too large to represent.  */
  if (!size_tree || !tree_fits_uhwi_p (size_tree))
    return type;

  size = tree_to_uhwi (size_tree);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* Biased representation is only meaningful for INTEGER_TYPE.  */
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
		  && TYPE_BIASED_REPRESENTATION_P (type));

      /* Integer types with precision 0 are forbidden.  */
      if (size == 0)
	size = 1;

      /* Only do something if the type isn't a packed array type and doesn't
	 already have the proper size and the size isn't too large.  */
      if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
	  || (TYPE_PRECISION (type) == size && biased_p == for_biased)
	  || size > LONG_LONG_TYPE_SIZE)
	break;

      /* A biased type is laid out as unsigned since its values are offsets
	 from the bias.  */
      biased_p |= for_biased;
      if (TYPE_UNSIGNED (type) || biased_p)
	new_type = make_unsigned_type (size);
      else
	new_type = make_signed_type (size);
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
      /* Copy the name to show that it's essentially the same type and
	 not a subrange type.  */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Do something if this is a fat pointer, in which case we
	 may need to return the thin pointer.  */
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
	{
	  /* Fall back to the generic pointer mode if the target rejects
	     a mode of exactly SIZE bits.  */
	  machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
	  if (!targetm.valid_pointer_mode (p_mode))
	    p_mode = ptr_mode;
	  return
	    build_pointer_type_for_mode
	      (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
	       p_mode, 0);
	}
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
	 may need to return the fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
	return
	  build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  return type;
}
1156 /* See if the data pointed to by the hash table slot is marked. */
1158 static int
1159 pad_type_hash_marked_p (const void *p)
1161 const_tree const type = ((const struct pad_type_hash *) p)->type;
1163 return ggc_marked_p (type);
1166 /* Return the cached hash value. */
1168 static hashval_t
1169 pad_type_hash_hash (const void *p)
1171 return ((const struct pad_type_hash *) p)->hash;
1174 /* Return 1 iff the padded types are equivalent. */
1176 static int
1177 pad_type_hash_eq (const void *p1, const void *p2)
1179 const struct pad_type_hash *const t1 = (const struct pad_type_hash *) p1;
1180 const struct pad_type_hash *const t2 = (const struct pad_type_hash *) p2;
1181 tree type1, type2;
1183 if (t1->hash != t2->hash)
1184 return 0;
1186 type1 = t1->type;
1187 type2 = t2->type;
1189 /* We consider that the padded types are equivalent if they pad the same
1190 type and have the same size, alignment and RM size. Taking the mode
1191 into account is redundant since it is determined by the others. */
1192 return
1193 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1194 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1195 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1196 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2);
1199 /* Look up the padded TYPE in the hash table and return its canonical version
1200 if it exists; otherwise, insert it into the hash table. */
static tree
lookup_and_insert_pad_type (tree type)
{
  hashval_t hashcode;
  struct pad_type_hash in, *h;
  void **loc;

  /* Hash on the padded type's inner type, size, alignment and RM size,
     mirroring the equivalence criteria used by pad_type_hash_eq.  */
  hashcode
    = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
  hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
  hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
  hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);

  /* Probe the table with a stack-allocated key first.  */
  in.hash = hashcode;
  in.type = type;
  h = (struct pad_type_hash *)
      htab_find_with_hash (pad_type_hash_table, &in, hashcode);
  if (h)
    return h->type;

  /* Not found: record TYPE as the canonical version for this key and
     return NULL_TREE to signal the insertion to the caller.  */
  h = ggc_alloc<pad_type_hash> ();
  h->hash = hashcode;
  h->type = type;
  loc = htab_find_slot_with_hash (pad_type_hash_table, h, hashcode, INSERT);
  *loc = (void *)h;
  return NULL_TREE;
}
1230 /* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
1231 if needed. We have already verified that SIZE and ALIGN are large enough.
1232 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1233 IS_COMPONENT_TYPE is true if this is being done for the component type of
1234 an array. IS_USER_TYPE is true if the original type needs to be completed.
1235 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
1236 the RM size of the resulting type is to be set to SIZE too. */
tree
maybe_pad_type (tree type, tree size, unsigned int align,
		Entity_Id gnat_entity, bool is_component_type,
		bool is_user_type, bool definition, bool set_rm_size)
{
  tree orig_size = TYPE_SIZE (type);
  unsigned int orig_align = TYPE_ALIGN (type);
  tree record, field;

  /* If TYPE is a padded type, see if it agrees with any size and alignment
     we were given.  If so, return the original type.  Otherwise, strip
     off the padding, since we will either be returning the inner type
     or repadding it.  If no size or alignment is specified, use that of
     the original padded type.  */
  if (TYPE_IS_PADDING_P (type))
    {
      if ((!size
	   || operand_equal_p (round_up (size, orig_align), orig_size, 0))
	  && (align == 0 || align == orig_align))
	return type;

      if (!size)
	size = orig_size;
      if (align == 0)
	align = orig_align;

      type = TREE_TYPE (TYPE_FIELDS (type));
      orig_size = TYPE_SIZE (type);
      orig_align = TYPE_ALIGN (type);
    }

  /* If the size is either not being changed or is being made smaller (which
     is not done here and is only valid for bitfields anyway), show the size
     isn't changing.  Likewise, clear the alignment if it isn't being
     changed.  Then return if we aren't doing anything.  */
  if (size
      && (operand_equal_p (size, orig_size, 0)
	  || (TREE_CODE (orig_size) == INTEGER_CST
	      && tree_int_cst_lt (size, orig_size))))
    size = NULL_TREE;

  if (align == orig_align)
    align = 0;

  if (align == 0 && !size)
    return type;

  /* If requested, complete the original type and give it a name.  */
  if (is_user_type)
    create_type_decl (get_entity_name (gnat_entity), type,
		      !Comes_From_Source (gnat_entity),
		      !(TYPE_NAME (type)
			&& TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
			&& DECL_IGNORED_P (TYPE_NAME (type))),
		      gnat_entity);

  /* We used to modify the record in place in some cases, but that could
     generate incorrect debugging information.  So make a new record
     type and name.  */
  record = make_node (RECORD_TYPE);
  TYPE_PADDING_P (record) = 1;

  if (Present (gnat_entity))
    TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");

  /* NULL_TREE size or zero align mean "keep the original value" here.  */
  TYPE_ALIGN (record) = align ? align : orig_align;
  TYPE_SIZE (record) = size ? size : orig_size;
  TYPE_SIZE_UNIT (record)
    = convert (sizetype,
	       size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
			   bitsize_unit_node));

  /* If we are changing the alignment and the input type is a record with
     BLKmode and a small constant size, try to make a form that has an
     integral mode.  This might allow the padding record to also have an
     integral mode, which will be much more efficient.  There is no point
     in doing so if a size is specified unless it is also a small constant
     size and it is incorrect to do so if we cannot guarantee that the mode
     will be naturally aligned since the field must always be addressable.

     ??? This might not always be a win when done for a stand-alone object:
     since the nominal and the effective type of the object will now have
     different modes, a VIEW_CONVERT_EXPR will be required for converting
     between them and it might be hard to overcome afterwards, including
     at the RTL level when the stand-alone object is accessed as a whole.  */
  if (align != 0
      && RECORD_OR_UNION_TYPE_P (type)
      && TYPE_MODE (type) == BLKmode
      && !TYPE_BY_REFERENCE_P (type)
      && TREE_CODE (orig_size) == INTEGER_CST
      && !TREE_OVERFLOW (orig_size)
      && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
      && (!size
	  || (TREE_CODE (size) == INTEGER_CST
	      && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
    {
      tree packable_type = make_packable_type (type, true);
      if (TYPE_MODE (packable_type) != BLKmode
	  && align >= TYPE_ALIGN (packable_type))
	type = packable_type;
    }

  /* Now create the field with the original size.  */
  field = create_field_decl (get_identifier ("F"), type, record, orig_size,
			     bitsize_zero_node, 0, 1);
  DECL_INTERNAL_P (field) = 1;

  /* Do not emit debug info until after the auxiliary record is built.  */
  finish_record_type (record, field, 1, false);

  /* Set the RM size if requested.  */
  if (set_rm_size)
    {
      tree canonical_pad_type;

      SET_TYPE_ADA_SIZE (record, size ? size : orig_size);

      /* If the padded type is complete and has constant size, we canonicalize
	 it by means of the hash table.  This is consistent with the language
	 semantics and ensures that gigi and the middle-end have a common view
	 of these padded types.  */
      if (TREE_CONSTANT (TYPE_SIZE (record))
	  && (canonical_pad_type = lookup_and_insert_pad_type (record)))
	{
	  /* An equivalent padded type already exists: reuse it and skip
	     the debug-info machinery below.  */
	  record = canonical_pad_type;
	  goto built;
	}
    }

  /* Unless debugging information isn't being written for the input type,
     write a record that shows what we are a subtype of and also make a
     variable that indicates our size, if still variable.  */
  if (TREE_CODE (orig_size) != INTEGER_CST
      && TYPE_NAME (record)
      && TYPE_NAME (type)
      && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	   && DECL_IGNORED_P (TYPE_NAME (type))))
    {
      tree marker = make_node (RECORD_TYPE);
      tree name = TYPE_IDENTIFIER (record);
      tree orig_name = TYPE_IDENTIFIER (type);

      /* The XVS/XVZ encodings are the debug-info conventions used by the
	 Ada tools for variable-sized types.  */
      TYPE_NAME (marker) = concat_name (name, "XVS");
      finish_record_type (marker,
			  create_field_decl (orig_name,
					     build_reference_type (type),
					     marker, NULL_TREE, NULL_TREE,
					     0, 0),
			  0, true);

      add_parallel_type (record, marker);

      if (definition && size && TREE_CODE (size) != INTEGER_CST)
	TYPE_SIZE_UNIT (marker)
	  = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
			     TYPE_SIZE_UNIT (record), false, false, false,
			     false, NULL, gnat_entity);
    }

  rest_of_record_type_compilation (record);

built:
  /* If the size was widened explicitly, maybe give a warning.  Take the
     original size as the maximum size of the input if there was an
     unconstrained record involved and round it up to the specified alignment,
     if one was specified.  But don't do it if we are just annotating types
     and the type is tagged, since tagged types aren't fully laid out in this
     mode.  */
  if (!size
      || TREE_CODE (size) == COND_EXPR
      || TREE_CODE (size) == MAX_EXPR
      || No (gnat_entity)
      || (type_annotate_only && Is_Tagged_Type (Etype (gnat_entity))))
    return record;

  if (CONTAINS_PLACEHOLDER_P (orig_size))
    orig_size = max_size (orig_size, true);

  if (align)
    orig_size = round_up (orig_size, align);

  if (!operand_equal_p (size, orig_size, 0)
      && !(TREE_CODE (size) == INTEGER_CST
	   && TREE_CODE (orig_size) == INTEGER_CST
	   && (TREE_OVERFLOW (size)
	       || TREE_OVERFLOW (orig_size)
	       || tree_int_cst_lt (size, orig_size))))
    {
      Node_Id gnat_error_node = Empty;

      /* For a packed array, post the message on the original array type.  */
      if (Is_Packed_Array_Impl_Type (gnat_entity))
	gnat_entity = Original_Array_Type (gnat_entity);

      if ((Ekind (gnat_entity) == E_Component
	   || Ekind (gnat_entity) == E_Discriminant)
	  && Present (Component_Clause (gnat_entity)))
	gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
      else if (Present (Size_Clause (gnat_entity)))
	gnat_error_node = Expression (Size_Clause (gnat_entity));

      /* Generate message only for entities that come from source, since
	 if we have an entity created by expansion, the message will be
	 generated for some other corresponding source entity.  */
      if (Comes_From_Source (gnat_entity))
	{
	  if (Present (gnat_error_node))
	    post_error_ne_tree ("{^ }bits of & unused?",
				gnat_error_node, gnat_entity,
				size_diffop (size, orig_size));
	  else if (is_component_type)
	    post_error_ne_tree ("component of& padded{ by ^ bits}?",
				gnat_entity, gnat_entity,
				size_diffop (size, orig_size));
	}
    }

  return record;
}
1458 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1459 If this is a multi-dimensional array type, do this recursively.
1461 OP may be
1462 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1463 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1464 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
void
relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
{
  /* Remove any padding from GNU_OLD_TYPE.  It doesn't matter in the case
     of a one-dimensional array, since the padding has the same alias set
     as the field type, but if it's a multi-dimensional array, we need to
     see the inner types.  */
  while (TREE_CODE (gnu_old_type) == RECORD_TYPE
	 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
	     || TYPE_PADDING_P (gnu_old_type)))
    gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));

  /* Unconstrained array types are deemed incomplete and would thus be given
     alias set 0.  Retrieve the underlying array type.  */
  if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_old_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
  if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_new_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));

  /* For a multi-dimensional array, relate the inner dimensions as well.  */
  if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
      && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
    relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);

  switch (op)
    {
    case ALIAS_SET_COPY:
      /* The alias set shouldn't be copied between array types with different
	 aliasing settings because this can break the aliasing relationship
	 between the array type and its element type.  */
#ifndef ENABLE_CHECKING
      if (flag_strict_aliasing)
#endif
	gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
		      && TREE_CODE (gnu_old_type) == ARRAY_TYPE
		      && TYPE_NONALIASED_COMPONENT (gnu_new_type)
			 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));

      TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
      break;

    case ALIAS_SET_SUBSET:
    case ALIAS_SET_SUPERSET:
      {
	alias_set_type old_set = get_alias_set (gnu_old_type);
	alias_set_type new_set = get_alias_set (gnu_new_type);

	/* Do nothing if the alias sets conflict.  This ensures that we
	   never call record_alias_subset several times for the same pair
	   or at all for alias set 0.  */
	if (!alias_sets_conflict_p (old_set, new_set))
	  {
	    if (op == ALIAS_SET_SUBSET)
	      record_alias_subset (old_set, new_set);
	    else
	      record_alias_subset (new_set, old_set);
	  }
      }
      break;

    default:
      gcc_unreachable ();
    }

  record_component_aliases (gnu_new_type);
}
1535 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1536 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
1538 void
1539 record_builtin_type (const char *name, tree type, bool artificial_p)
1541 tree type_decl = build_decl (input_location,
1542 TYPE_DECL, get_identifier (name), type);
1543 DECL_ARTIFICIAL (type_decl) = artificial_p;
1544 TYPE_ARTIFICIAL (type) = artificial_p;
1545 gnat_pushdecl (type_decl, Empty);
1547 if (debug_hooks->type_decl)
1548 debug_hooks->type_decl (type_decl, false);
1551 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1552 finish constructing the record type as a fat pointer type. */
void
finish_fat_pointer_type (tree record_type, tree field_list)
{
  /* Make sure we can put it into a register.  */
  if (STRICT_ALIGNMENT)
    TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);

  /* Show what it really is.  */
  TYPE_FAT_POINTER_P (record_type) = 1;

  /* Do not emit debug info for it since the types of its fields may still be
     incomplete at this point.  */
  finish_record_type (record_type, field_list, 0, false);

  /* Force type_contains_placeholder_p to return true on it.  Although the
     PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
     type but the representation of the unconstrained array.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
}
1574 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1575 finish constructing the record or union type. If REP_LEVEL is zero, this
1576 record has no representation clause and so will be entirely laid out here.
1577 If REP_LEVEL is one, this record has a representation clause and has been
1578 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1579 this record is derived from a parent record and thus inherits its layout;
1580 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1581 we need to write debug information about this type. */
void
finish_record_type (tree record_type, tree field_list, int rep_level,
		    bool debug_info_p)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_IDENTIFIER (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  /* Remember which layout values were set before entry, so that we do not
     clobber caller-specified ones below.  */
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  bool had_align = TYPE_ALIGN (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));

      if (!had_size_unit)
	TYPE_SIZE_UNIT (record_type) = size_zero_node;

      if (!had_size)
	TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
	 out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
	code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
	 case where there is a rep clause but all fields have errors and
	 no longer have a position.  */
      TYPE_SIZE (record_type) = 0;

      /* Ensure we use the traditional GCC layout for bitfields when we need
	 to pack the record type or have a representation clause.  The other
	 possible layout (Microsoft C compiler), if available, would prevent
	 efficient packing in almost all cases.  */
#ifdef TARGET_MS_BITFIELD_LAYOUT
      if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
	decl_attributes (&record_type,
			 tree_cons (get_identifier ("gcc_struct"),
				    NULL_TREE, NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
#endif

      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      /* Use the RM size of an aggregate field when available, so that the
	 Ada size of the record excludes trailing padding of the field.  */
      if (RECORD_OR_UNION_TYPE_P (type)
	  && !TYPE_FAT_POINTER_P (type)
	  && !TYPE_CONTAINS_TEMPLATE_P (type)
	  && TYPE_ADA_SIZE (type))
	this_ada_size = TYPE_ADA_SIZE (type);
      else
	this_ada_size = this_size;

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
	{
	  unsigned int align = TYPE_ALIGN (type);

	  /* In the general case, type alignment is required.  */
	  if (value_factor_p (pos, align))
	    {
	      /* The enclosing record type must be sufficiently aligned.
		 Otherwise, if no alignment was specified for it and it
		 has been laid out already, bump its alignment to the
		 desired one if this is compatible with its size.  */
	      if (TYPE_ALIGN (record_type) >= align)
		{
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	      else if (!had_align
		       && rep_level == 0
		       && value_factor_p (TYPE_SIZE (record_type), align))
		{
		  TYPE_ALIGN (record_type) = align;
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	    }

	  /* In the non-strict alignment case, only byte alignment is.  */
	  if (!STRICT_ALIGNMENT
	      && DECL_BIT_FIELD (field)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    DECL_BIT_FIELD (field) = 0;
	}

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
	 field is technically not addressable.  Except that it can actually
	 be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
	  && !(DECL_MODE (field) == BLKmode
	       && value_factor_p (pos, BITS_PER_UNIT)))
	DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
	 a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
	TYPE_ALIGN (record_type)
	  = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

      switch (code)
	{
	case UNION_TYPE:
	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
	  size = size_binop (MAX_EXPR, size, this_size);
	  break;

	case QUAL_UNION_TYPE:
	  ada_size
	    = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			   this_ada_size, ada_size);
	  size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			      this_size, size);
	  break;

	case RECORD_TYPE:
	  /* Since we know here that all fields are sorted in order of
	     increasing bit position, the size of the record is one
	     higher than the ending bit of the last field processed
	     unless we have a rep clause, since in that case we might
	     have a field outside a QUAL_UNION_TYPE that has a higher ending
	     position.  So use a MAX in that case.  Also, if this field is a
	     QUAL_UNION_TYPE, we need to take into account the previous size in
	     the case of empty variants.  */
	  ada_size
	    = merge_sizes (ada_size, pos, this_ada_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  size
	    = merge_sizes (size, pos, this_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Restore the original field order that the pre-loop nreverse undid.  */
  if (code == QUAL_UNION_TYPE)
    nreverse (field_list);

  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
	 than what was specified in it, if any.  */
      if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
	size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_FAT_POINTER_P (record_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (record_type))
	SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
	{
	  tree size_unit = had_size_unit
			   ? TYPE_SIZE_UNIT (record_type)
			   : convert (sizetype,
				      size_binop (CEIL_DIV_EXPR, size,
						  bitsize_unit_node));
	  unsigned int align = TYPE_ALIGN (record_type);

	  TYPE_SIZE (record_type) = variable_size (round_up (size, align));
	  TYPE_SIZE_UNIT (record_type)
	    = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

	  compute_record_mode (record_type);
	}
    }

  if (debug_info_p)
    rest_of_record_type_compilation (record_type);
}
/* Append PARALLEL_TYPE on the chain of parallel types of TYPE.  If
   PARALLEL_TYPE has no context and its computation is not deferred yet, also
   propagate TYPE's context to PARALLEL_TYPE's or defer its propagation to the
   moment TYPE will get a context.  */

void
add_parallel_type (tree type, tree parallel_type)
{
  tree decl = TYPE_STUB_DECL (type);

  /* Walk to the end of the chain of parallel types hanging off TYPE's
     stub decl and attach PARALLEL_TYPE there.  */
  while (DECL_PARALLEL_TYPE (decl))
    decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));

  SET_DECL_PARALLEL_TYPE (decl, parallel_type);

  /* If PARALLEL_TYPE already has a context, we are done.  */
  if (TYPE_CONTEXT (parallel_type) != NULL_TREE)
    return;

  /* Otherwise, try to get one from TYPE's context.  */
  if (TYPE_CONTEXT (type) != NULL_TREE)
    /* TYPE already has a context, so simply propagate it to PARALLEL_TYPE.  */
    gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));

  /* ... otherwise TYPE has no context yet.  We know it will get one thanks
     to gnat_pushdecl, and then its context will be propagated to
     PARALLEL_TYPE.  So we have nothing to do in this case.  */
}
1819 /* Return true if TYPE has a parallel type. */
1821 static bool
1822 has_parallel_type (tree type)
1824 tree decl = TYPE_STUB_DECL (type);
1826 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
/* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
   associated with it.  It need not be invoked directly in most cases since
   finish_record_type takes care of doing so, but this can be necessary if
   a parallel type is to be attached to the record type.  */

void
rest_of_record_type_compilation (tree record_type)
{
  bool var_size = false;
  tree field;

  /* If this is a padded type, the bulk of the debug info has already been
     generated for the field's type.  */
  if (TYPE_IS_PADDING_P (record_type))
    return;

  /* If the type already has a parallel type (XVS type), then we're done.  */
  if (has_parallel_type (record_type))
    return;

  for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
	 whether or not the record does.  For example, if we have a union,
	 it may be that all fields, rounded up to the alignment, have the
	 same size, in which case we'll use that size.  But the debug
	 output routines (except Dwarf2) won't be able to output the fields,
	 so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
	  /* If a field has a non-constant qualifier, the record will have
	     variable size too.  */
	  || (TREE_CODE (record_type) == QUAL_UNION_TYPE
	      && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
	{
	  var_size = true;
	  break;
	}
    }

  /* If this record type is of variable size, make a parallel record type that
     will tell the debugger how the former is laid out (see exp_dbug.ads).  */
  if (var_size)
    {
      tree new_record_type
	= make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
		     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
      tree last_pos = bitsize_zero_node;
      tree old_field, prev_old_field = NULL_TREE;

      /* The parallel type is named after the original with an XVU (variant
	 part) or XVE suffix, per the exp_dbug.ads encoding.  */
      new_name
	= concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
		       ? "XVU" : "XVE");
      TYPE_NAME (new_record_type) = new_name;
      TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
      TYPE_STUB_DECL (new_record_type)
	= create_type_stub_decl (new_name, new_record_type);
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
	= DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
	= size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      /* Now scan all the fields, replacing each field with a new field
	 corresponding to the new encoding.  */
      for (old_field = TYPE_FIELDS (record_type); old_field;
	   old_field = DECL_CHAIN (old_field))
	{
	  tree field_type = TREE_TYPE (old_field);
	  tree field_name = DECL_NAME (old_field);
	  tree curpos = bit_position (old_field);
	  tree pos, new_field;
	  bool var = false;
	  unsigned int align = 0;

	  /* We're going to do some pattern matching below so remove as many
	     conversions as possible.  */
	  curpos = remove_conversions (curpos, true);

	  /* See how the position was modified from the last position.

	     There are two basic cases we support: a value was added
	     to the last position or the last position was rounded to
	     a boundary and then something was added.  Check for the
	     first case first.  If not, see if there is any evidence
	     of rounding.  If so, round the last position and retry.

	     If this is a union, the position can be taken as zero.  */
	  if (TREE_CODE (new_record_type) == UNION_TYPE)
	    pos = bitsize_zero_node;
	  else
	    pos = compute_related_constant (curpos, last_pos);

	  /* curpos is offset * factor: the field was aligned to FACTOR
	     (possibly refined by what divides OFFSET).  */
	  if (!pos
	      && TREE_CODE (curpos) == MULT_EXPR
	      && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
	    {
	      tree offset = TREE_OPERAND (curpos, 0);
	      align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align = scale_by_factor_of (offset, align);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  /* curpos is offset * factor + addend: alignment is further
	     limited by the low-order bits of ADDEND.  */
	  else if (!pos
		   && TREE_CODE (curpos) == PLUS_EXPR
		   && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
		   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
		   && tree_fits_uhwi_p
		      (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
	    {
	      tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
	      unsigned HOST_WIDE_INT addend
		= tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align
		= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
	      align = scale_by_factor_of (offset, align);
	      /* ADDEND & -ADDEND is the largest power of 2 dividing it.  */
	      align = MIN (align, addend & -addend);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  else if (potential_alignment_gap (prev_old_field, old_field, pos))
	    {
	      align = TYPE_ALIGN (field_type);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }

	  /* If we can't compute a position, set it to zero.

	     ??? We really should abort here, but it's too much work
	     to get this correct for all cases.  */
	  if (!pos)
	    pos = bitsize_zero_node;

	  /* See if this type is variable-sized and make a pointer type
	     and indicate the indirection if so.  Beware that the debug
	     back-end may adjust the position computed above according
	     to the alignment of the field type, i.e. the pointer type
	     in this case, if we don't preventively counter that.  */
	  if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
	    {
	      field_type = build_pointer_type (field_type);
	      if (align != 0 && TYPE_ALIGN (field_type) > align)
		{
		  field_type = copy_node (field_type);
		  TYPE_ALIGN (field_type) = align;
		}
	      var = true;
	    }

	  /* Make a new field name, if necessary.  */
	  if (var || align != 0)
	    {
	      char suffix[16];

	      /* XVLnn/XVAnn encode indirection and/or alignment for the
		 debugger; plain XVL encodes indirection only.  */
	      if (align != 0)
		sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
			 align / BITS_PER_UNIT);
	      else
		strcpy (suffix, "XVL");

	      field_name = concat_name (field_name, suffix);
	    }

	  new_field
	    = create_field_decl (field_name, field_type, new_record_type,
				 DECL_SIZE (old_field), pos, 0, 0);
	  /* Prepend; the list is reversed once at the end.  */
	  DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
	  TYPE_FIELDS (new_record_type) = new_field;

	  /* If old_field is a QUAL_UNION_TYPE, take its size as being
	     zero.  The only time it's not the last field of the record
	     is when there are other components at fixed positions after
	     it (meaning there was a rep clause for every field) and we
	     want to be able to encode them.  */
	  last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
				 (TREE_CODE (TREE_TYPE (old_field))
				  == QUAL_UNION_TYPE)
				 ? bitsize_zero_node
				 : DECL_SIZE (old_field));
	  prev_old_field = old_field;
	}

      TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));

      add_parallel_type (record_type, new_record_type);
    }
}
/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is true if this
   represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
   replace a value of zero with the old size.  If HAS_REP is true, we take the
   MAX of the end position of this field with LAST_SIZE.  In all other cases,
   we use FIRST_BIT plus SIZE.  Return an expression for the size.  */

static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool special,
	     bool has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new_size;

  if (!special || TREE_CODE (size) != COND_EXPR)
    {
      /* Ordinary field: its end position is FIRST_BIT + SIZE.  With a rep
	 clause fields may be laid out in any order, so keep the MAX with
	 the previously computed size.  */
      new_size = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
	new_size = size_binop (MAX_EXPR, last_size, new_size);
    }

  else
    /* Variant part: distribute the merge over both arms of the COND_EXPR,
       substituting the previous size for a zero size (empty variant).  */
    new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
			    integer_zerop (TREE_OPERAND (size, 1))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 1),
						       1, has_rep),
			    integer_zerop (TREE_OPERAND (size, 2))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 2),
						       1, has_rep));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
    new_size = TREE_OPERAND (new_size, 0);

  return new_size;
}
2059 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2060 related by the addition of a constant. Return that constant if so. */
2062 static tree
2063 compute_related_constant (tree op0, tree op1)
2065 tree op0_var, op1_var;
2066 tree op0_con = split_plus (op0, &op0_var);
2067 tree op1_con = split_plus (op1, &op1_var);
2068 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
2070 if (operand_equal_p (op0_var, op1_var, 0))
2071 return result;
2072 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
2073 return result;
2074 else
2075 return 0;
/* Utility function of above to split a tree OP which may be a sum, into a
   constant part, which is returned, and a variable part, which is stored
   in *PVAR.  *PVAR may be bitsize_zero_node.  All operations must be of
   bitsizetype.  */

static tree
split_plus (tree in, tree *pvar)
{
  /* Strip conversions in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery.  We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  in = remove_conversions (in, false);

  /* Default: the whole expression is the variable part, constant is 0.  */
  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      /* Pure constant: no variable part at all.  */
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      /* Recurse on both operands, then recombine the variable parts and
	 the constant parts with the same PLUS/MINUS operation.  */
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      /* Neither operand yielded a constant part: keep the default *PVAR
	 (all of IN) and report a zero constant.  */
      if (lhs_var == TREE_OPERAND (in, 0)
	  && rhs_var == TREE_OPERAND (in, 1))
	return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}
/* Return a FUNCTION_TYPE node.  RETURN_TYPE is the type returned by the
   subprogram.  If it is VOID_TYPE, then we are dealing with a procedure,
   otherwise we are dealing with a function.  PARAM_DECL_LIST is a list of
   PARM_DECL nodes that are the subprogram parameters.  CICO_LIST is the
   copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
   RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
   object.  RETURN_BY_DIRECT_REF_P is true if the function returns by direct
   reference.  RETURN_BY_INVISI_REF_P is true if the function returns by
   invisible reference.  */

tree
create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
		     bool return_unconstrained_p, bool return_by_direct_ref_p,
		     bool return_by_invisi_ref_p)
{
  /* A list of the data type nodes of the subprogram formal parameters.
     This list is generated by traversing the input list of PARM_DECL
     nodes.  */
  vec<tree, va_gc> *param_type_list = NULL;
  tree t, type;

  for (t = param_decl_list; t; t = DECL_CHAIN (t))
    vec_safe_push (param_type_list, TREE_TYPE (t));

  type = build_function_type_vec (return_type, param_type_list);

  /* TYPE may have been shared since GCC hashes types.  If it has a different
     CICO_LIST, make a copy.  Likewise for the various flags.  */
  if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
			    return_by_direct_ref_p, return_by_invisi_ref_p))
    {
      type = copy_type (type);
      TYPE_CI_CO_LIST (type) = cico_list;
      TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
      TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
      /* Return by invisible reference is recorded via the addressability
	 flag of the function type.  */
      TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
    }

  return type;
}
2157 /* Return a copy of TYPE but safe to modify in any way. */
2159 tree
2160 copy_type (tree type)
2162 tree new_type = copy_node (type);
2164 /* Unshare the language-specific data. */
2165 if (TYPE_LANG_SPECIFIC (type))
2167 TYPE_LANG_SPECIFIC (new_type) = NULL;
2168 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2171 /* And the contents of the language-specific slot if needed. */
2172 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2173 && TYPE_RM_VALUES (type))
2175 TYPE_RM_VALUES (new_type) = NULL_TREE;
2176 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2177 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2178 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2181 /* copy_node clears this field instead of copying it, because it is
2182 aliased with TREE_CHAIN. */
2183 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
2185 TYPE_POINTER_TO (new_type) = 0;
2186 TYPE_REFERENCE_TO (new_type) = 0;
2187 TYPE_MAIN_VARIANT (new_type) = new_type;
2188 TYPE_NEXT_VARIANT (new_type) = 0;
2190 return new_type;
2193 /* Return a subtype of sizetype with range MIN to MAX and whose
2194 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2195 of the associated TYPE_DECL. */
2197 tree
2198 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2200 /* First build a type for the desired range. */
2201 tree type = build_nonshared_range_type (sizetype, min, max);
2203 /* Then set the index type. */
2204 SET_TYPE_INDEX_TYPE (type, index);
2205 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2207 return type;
2210 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2211 sizetype is used. */
2213 tree
2214 create_range_type (tree type, tree min, tree max)
2216 tree range_type;
2218 if (type == NULL_TREE)
2219 type = sizetype;
2221 /* First build a type with the base range. */
2222 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2223 TYPE_MAX_VALUE (type));
2225 /* Then set the actual range. */
2226 SET_TYPE_RM_MIN_VALUE (range_type, min);
2227 SET_TYPE_RM_MAX_VALUE (range_type, max);
2229 return range_type;
2232 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2233 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2234 its data type. */
2236 tree
2237 create_type_stub_decl (tree type_name, tree type)
2239 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2240 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2241 emitted in DWARF. */
2242 tree type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
2243 DECL_ARTIFICIAL (type_decl) = 1;
2244 TYPE_ARTIFICIAL (type) = 1;
2245 return type_decl;
/* Return a TYPE_DECL node.  TYPE_NAME gives the name of the type and TYPE
   is a ..._TYPE node giving its data type.  ARTIFICIAL_P is true if this
   is a declaration that was generated by the compiler.  DEBUG_INFO_P is
   true if we need to write debug information about this type.  GNAT_NODE
   is used for the position of the decl.  */

tree
create_type_decl (tree type_name, tree type, bool artificial_p,
		  bool debug_info_p, Node_Id gnat_node)
{
  enum tree_code code = TREE_CODE (type);
  /* Whether TYPE has already been named by a TYPE_DECL.  */
  bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
  tree type_decl;

  /* Only the builtin TYPE_STUB_DECL should be used for dummy types.  */
  gcc_assert (!TYPE_IS_DUMMY_P (type));

  /* If the type hasn't been named yet, we're naming it; preserve an existing
     TYPE_STUB_DECL that has been attached to it for some purpose.  */
  if (!named && TYPE_STUB_DECL (type))
    {
      type_decl = TYPE_STUB_DECL (type);
      DECL_NAME (type_decl) = type_name;
    }
  else
    type_decl = build_decl (input_location, TYPE_DECL, type_name, type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;
  TYPE_ARTIFICIAL (type) = artificial_p;

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (type_decl, gnat_node);

  /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
     This causes the name to be also viewed as a "tag" by the debug
     back-end, with the advantage that no DW_TAG_typedef is emitted
     for artificial "tagged" types in DWARF.  */
  if (!named)
    TYPE_STUB_DECL (type) = type_decl;

  /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
     back-end doesn't support, and for others if we don't need to.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;

  return type_decl;
}
/* Return a VAR_DECL or CONST_DECL node.

   VAR_NAME gives the name of the variable.  ASM_NAME is its assembler name
   (if provided).  TYPE is its data type (a GCC ..._TYPE node).  VAR_INIT is
   the GCC tree for an optional initial expression; NULL_TREE if none.

   CONST_FLAG is true if this variable is constant, in which case we might
   return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.

   PUBLIC_FLAG is true if this is for a reference to a public entity or for a
   definition to be made visible outside of the current compilation unit, for
   instance variable definitions in a package specification.

   EXTERN_FLAG is true when processing an external variable declaration (as
   opposed to a definition: no storage is to be allocated for the variable).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.

   ATTR_LIST is the list of attributes to be attached to the variable.

   GNAT_NODE is used for the position of the decl.  */

tree
create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
		   bool const_flag, bool public_flag, bool extern_flag,
		   bool static_flag, bool const_decl_allowed_p,
		   struct attrib *attr_list, Node_Id gnat_node)
{
  /* Whether the object has static storage duration, either explicitly or by
     virtue of being declared at the global level.  */
  const bool static_storage = static_flag || global_bindings_p ();

  /* Whether the initializer is constant: for an external object or an object
     with static storage duration, we check that the initializer is a valid
     constant expression for initializing a static variable; otherwise, we
     only check that it is constant.  */
  const bool init_const
    = (var_init
       && gnat_types_compatible_p (type, TREE_TYPE (var_init))
       && (extern_flag || static_storage
	   ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
	     != NULL_TREE
	   : TREE_CONSTANT (var_init)));

  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
     case the initializer may be used in lieu of the DECL node (as done in
     Identifier_to_gnu).  This is useful to prevent the need of elaboration
     code when an identifier for which such a DECL is made is in turn used
     as an initializer.  We used to rely on CONST_DECL vs VAR_DECL for this,
     but extra constraints apply to this choice (see below) and they are not
     relevant to the distinction we wish to make.  */
  const bool constant_p = const_flag && init_const;

  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
     and may be used for scalars in general but not for aggregates.  */
  tree var_decl
    = build_decl (input_location,
		  (constant_p && const_decl_allowed_p
		   && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
		  var_name, type);

  /* If this is external, throw away any initializations (they will be done
     elsewhere) unless this is a constant for which we would like to remain
     able to get the initializer.  If we are defining a global here, leave a
     constant initialization and save any variable elaborations for the
     elaboration routine.  If we are just annotating types, throw away the
     initialization if it isn't a constant.  */
  if ((extern_flag && !constant_p)
      || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
    var_init = NULL_TREE;

  /* At the global level, a non-constant initializer generates elaboration
     statements.  Check that such statements are allowed, that is to say,
     not violating a No_Elaboration_Code restriction.  */
  if (var_init && !init_const && global_bindings_p ())
    Check_Elaboration_Code_Allowed (gnat_node);

  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_CONSTANT (var_decl) = constant_p;

  /* We need to allocate static storage for an object with static storage
     duration if it isn't external.  */
  TREE_STATIC (var_decl) = !extern_flag && static_storage;

  /* The object is public if it is external or if it is declared public
     and has static storage duration.  */
  TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && TREE_PUBLIC (var_decl)
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;

  /* For an external constant whose initializer is not absolute, do not emit
     debug info.  In DWARF this would mean a global relocation in a read-only
     section which runs afoul of the PE-COFF run-time relocation mechanism.  */
  if (extern_flag
      && constant_p
      && var_init
      && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
	 != null_pointer_node)
    DECL_IGNORED_P (var_decl) = 1;

  /* A volatile object has side effects on every access.  */
  if (TYPE_VOLATILE (type))
    TREE_SIDE_EFFECTS (var_decl) = TREE_THIS_VOLATILE (var_decl) = 1;

  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  /* ??? Some attributes cannot be applied to CONST_DECLs.  */
  if (TREE_CODE (var_decl) == VAR_DECL)
    process_attributes (&var_decl, &attr_list, true, gnat_node);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  if (TREE_CODE (var_decl) == VAR_DECL)
    {
      if (asm_name)
	SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

      /* Global variables are fully compiled right away.  */
      if (global_bindings_p ())
	rest_of_decl_compilation (var_decl, true, 0);
    }

  return var_decl;
}
2430 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2432 static bool
2433 aggregate_type_contains_array_p (tree type)
2435 switch (TREE_CODE (type))
2437 case RECORD_TYPE:
2438 case UNION_TYPE:
2439 case QUAL_UNION_TYPE:
2441 tree field;
2442 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2443 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2444 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2445 return true;
2446 return false;
2449 case ARRAY_TYPE:
2450 return true;
2452 default:
2453 gcc_unreachable ();
/* Return a FIELD_DECL node.  FIELD_NAME is the field's name, FIELD_TYPE is
   its type and RECORD_TYPE is the type of the enclosing record.  If SIZE is
   nonzero, it is the specified size of the field.  If POS is nonzero, it is
   the bit position.  PACKED is 1 if the enclosing record is packed, -1 if it
   has Component_Alignment of Storage_Unit.  If ADDRESSABLE is nonzero, it
   means we are allowed to take the address of the field; if it is negative,
   we should not make a bitfield, which is used by make_aligning_type.  */

tree
create_field_decl (tree field_name, tree field_type, tree record_type,
		   tree size, tree pos, int packed, int addressable)
{
  tree field_decl = build_decl (input_location,
				FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
     Likewise for an aggregate without specified position that contains an
     array, because in this case slices of variable length of this array
     must be handled by GCC and variable-sized objects need to be aligned
     to at least a byte boundary.  */
  if (packed && (TYPE_MODE (field_type) == BLKmode
		 || (!pos
		     && AGGREGATE_TYPE_P (field_type)
		     && aggregate_type_contains_array_p (field_type))))
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);
      if (TYPE_MODE (field_type) == BLKmode)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* If we may, according to ADDRESSABLE, make a bitfield if a size is
     specified for two reasons: first if the size differs from the natural
     size.  Second, if the alignment is insufficient.  There are a number of
     ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     because no such entity requiring bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (addressable >= 0
      && size
      && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
	  || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (!packed && !pos)
	{
	  /* With no packing and no position, cap the field's alignment at
	     the record's alignment when the latter is smaller.  */
	  if (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
	    DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
	  else
	    DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
	}
    }

  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;

  /* Bump the alignment if need be, either for bitfield/packing purposes or
     to satisfy the type requirements if no such consideration applies.  When
     we get the alignment from the type, indicate if this is from an explicit
     user request, which prevents stor-layout from lowering it later on.  */
  unsigned int bit_align
    = (DECL_BIT_FIELD (field_decl) ? 1
       : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);

  if (bit_align > DECL_ALIGN (field_decl))
    DECL_ALIGN (field_decl) = bit_align;
  else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
    {
      DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
      DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
    }

  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (tree_fits_uhwi_p (pos))
	known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);
    }

  /* In addition to what our caller says, claim the field is addressable if we
     know that its type is not suitable.

     The field may also be "technically" nonaddressable, meaning that even if
     we attempt to take the field's address we will actually get the address
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
     value we have at this point is not accurate enough, so we don't account
     for this here and let finish_record_type decide.  */
  if (!addressable && !type_for_nonaliased_component_p (field_type))
    addressable = 1;

  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}
/* Return a PARM_DECL node.  PARAM_NAME is the name of the parameter and
   PARAM_TYPE is its type.  READONLY is true if the parameter is readonly
   (either an In parameter or an address of a pass-by-ref parameter).  */

tree
create_param_decl (tree param_name, tree param_type, bool readonly)
{
  tree param_decl = build_decl (input_location,
				PARM_DECL, param_name, param_type);

  /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
     can lead to various ABI violations.  */
  if (targetm.calls.promote_prototypes (NULL_TREE)
      && INTEGRAL_TYPE_P (param_type)
      && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
    {
      /* We have to be careful about biased types here.  Make a subtype
	 of integer_type_node with the proper biasing.  */
      if (TREE_CODE (param_type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (param_type))
	{
	  tree subtype
	    = make_unsigned_type (TYPE_PRECISION (integer_type_node));
	  TREE_TYPE (subtype) = integer_type_node;
	  TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
	  SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
	  SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
	  param_type = subtype;
	}
      else
	param_type = integer_type_node;
    }

  /* The promoted type is used for argument passing only; the declared
     type stays in TREE_TYPE of the PARM_DECL.  */
  DECL_ARG_TYPE (param_decl) = param_type;
  TREE_READONLY (param_decl) = readonly;
  return param_decl;
}
/* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
   a TYPE.  If IN_PLACE is true, the tree pointed to by NODE should not be
   changed.  GNAT_NODE is used for the position of error messages.  */

void
process_attributes (tree *node, struct attrib **attr_list, bool in_place,
		    Node_Id gnat_node)
{
  struct attrib *attr;

  for (attr = *attr_list; attr; attr = attr->next)
    switch (attr->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
	/* Point error reporting at the GNAT node before dispatching to
	   the common attribute machinery.  */
	Sloc_to_locus (Sloc (gnat_node), &input_location);
	decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
			 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
	break;

      case ATTR_LINK_ALIAS:
	/* An alias can only be emitted for a definition, not for an
	   external declaration.  */
	if (!DECL_EXTERNAL (*node))
	  {
	    TREE_STATIC (*node) = 1;
	    assemble_alias (*node, attr->name);
	  }
	break;

      case ATTR_WEAK_EXTERNAL:
	if (SUPPORTS_WEAK)
	  declare_weak (*node);
	else
	  post_error ("?weak declarations not supported on this target",
		      attr->error_point);
	break;

      case ATTR_LINK_SECTION:
	if (targetm_common.have_named_sections)
	  {
	    set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
	    /* A decl placed in an explicit section cannot be common.  */
	    DECL_COMMON (*node) = 0;
	  }
	else
	  post_error ("?section attributes are not supported for this target",
		      attr->error_point);
	break;

      case ATTR_LINK_CONSTRUCTOR:
	DECL_STATIC_CONSTRUCTOR (*node) = 1;
	TREE_USED (*node) = 1;
	break;

      case ATTR_LINK_DESTRUCTOR:
	DECL_STATIC_DESTRUCTOR (*node) = 1;
	TREE_USED (*node) = 1;
	break;

      case ATTR_THREAD_LOCAL_STORAGE:
	set_decl_tls_model (*node, decl_default_tls_model (*node));
	/* Thread-local storage is incompatible with common storage.  */
	DECL_COMMON (*node) = 0;
	break;
      }

  /* The whole list has been consumed: clear it for the caller.  */
  *attr_list = NULL;
}
/* Record DECL as a global renaming pointer.  */

void
record_global_renaming_pointer (tree decl)
{
  /* Loop parameters are handled separately; DECL must actually rename
     an object.  */
  gcc_assert (!DECL_LOOP_PARM_P (decl) && DECL_RENAMED_OBJECT (decl));
  vec_safe_push (global_renaming_pointers, decl);
}
/* Invalidate the global renaming pointers that are not constant, lest their
   renamed object contains SAVE_EXPRs tied to an elaboration routine.  Note
   that we should not blindly invalidate everything here because of the need
   to propagate constant values through renaming.  */

void
invalidate_global_renaming_pointers (void)
{
  unsigned int i;
  tree iter;

  /* Nothing to do if no renaming was ever recorded.  */
  if (global_renaming_pointers == NULL)
    return;

  FOR_EACH_VEC_ELT (*global_renaming_pointers, i, iter)
    if (!TREE_CONSTANT (DECL_RENAMED_OBJECT (iter)))
      SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);

  /* The vector is only needed once, at the end of elaboration.  */
  vec_free (global_renaming_pointers);
}
2733 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2734 a power of 2. */
2736 bool
2737 value_factor_p (tree value, HOST_WIDE_INT factor)
2739 if (tree_fits_uhwi_p (value))
2740 return tree_to_uhwi (value) % factor == 0;
2742 if (TREE_CODE (value) == MULT_EXPR)
2743 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2744 || value_factor_p (TREE_OPERAND (value, 1), factor));
2746 return false;
/* Return whether GNAT_NODE is a defining identifier for a renaming that comes
   from the parameter association for the instantiation of a generic.  We do
   not want to emit source location for them: the code generated for their
   initialization is likely to disturb debugging.  */

bool
renaming_from_generic_instantiation_p (Node_Id gnat_node)
{
  /* Only compiler-generated object renamings are of interest here.  */
  if (Nkind (gnat_node) != N_Defining_Identifier
      || !IN (Ekind (gnat_node), Object_Kind)
      || Comes_From_Source (gnat_node)
      || !Present (Renamed_Object (gnat_node)))
    return false;

  /* Get the object declaration of the renamed object, if any and if the
     renamed object is a mere identifier.  */
  gnat_node = Renamed_Object (gnat_node);
  if (Nkind (gnat_node) != N_Identifier)
    return false;

  gnat_node = Entity (gnat_node);
  if (!Present (Parent (gnat_node)))
    return false;

  /* The renaming comes from a generic instantiation iff the declaration of
     the renamed entity carries a generic association.  */
  gnat_node = Parent (gnat_node);
  return
   (Present (gnat_node)
    && Nkind (gnat_node) == N_Object_Declaration
    && Present (Corresponding_Generic_Association (gnat_node)));
}
/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
   feed it with the elaboration of GNAT_SCOPE.  */

static struct deferred_decl_context_node *
add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
{
  struct deferred_decl_context_node *new_node;

  new_node
    = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
  new_node->decl = decl;
  new_node->gnat_scope = gnat_scope;
  new_node->force_global = force_global;
  new_node->types.create (1);
  /* Push onto the global queue drained by process_deferred_decl_context;
     the node is freed there once processed.  */
  new_node->next = deferred_decl_context_queue;
  deferred_decl_context_queue = new_node;
  return new_node;
}
/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed.  */

static void
add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
{
  n->types.safe_push (type);
}
/* Get the GENERIC node corresponding to GNAT_SCOPE, if available.  Return
   NULL_TREE if it is not available.  */

static tree
compute_deferred_decl_context (Entity_Id gnat_scope)
{
  tree context;

  /* The scope must already have been elaborated.  */
  if (present_gnu_tree (gnat_scope))
    context = get_gnu_tree (gnat_scope);
  else
    return NULL_TREE;

  if (TREE_CODE (context) == TYPE_DECL)
    {
      const tree context_type = TREE_TYPE (context);

      /* Skip dummy types: only the final ones can appear in the context
         chain.  */
      if (TYPE_DUMMY_P (context_type))
        return NULL_TREE;

      /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
         chain.  */
      else
        context = context_type;
    }

  return context;
}
/* Try to process all deferred nodes in the queue.  Keep in the queue the ones
   that cannot be processed yet, remove the other ones.  If FORCE is true,
   force the processing for all nodes, use the global context when nodes don't
   have a GNU translation.  */

void
process_deferred_decl_context (bool force)
{
  /* IT is a pointer-to-pointer so that unlinking a node from the singly
     linked queue needs no special case for the head.  */
  struct deferred_decl_context_node **it = &deferred_decl_context_queue;
  struct deferred_decl_context_node *node;

  while (*it != NULL)
    {
      bool processed = false;
      tree context = NULL_TREE;
      Entity_Id gnat_scope;

      node = *it;

      /* If FORCE, get the innermost elaborated scope.  Otherwise, just try to
         get the first scope.  */
      gnat_scope = node->gnat_scope;
      while (Present (gnat_scope))
        {
          context = compute_deferred_decl_context (gnat_scope);
          if (!force || context != NULL_TREE)
            break;
          gnat_scope = get_debug_scope (gnat_scope, NULL);
        }

      /* Imported declarations must not be in a local context (i.e. not inside
         a function): walk the context chain and assert none is a function.  */
      if (context != NULL_TREE && node->force_global > 0)
        {
          tree ctx = context;

          while (ctx != NULL_TREE)
            {
              gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
              ctx = (DECL_P (ctx))
                    ? DECL_CONTEXT (ctx)
                    : TYPE_CONTEXT (ctx);
            }
        }

      /* If FORCE, we want to get rid of all nodes in the queue: in case there
         was no elaborated scope, use the global context.  */
      if (force && context == NULL_TREE)
        context = get_global_context ();

      if (context != NULL_TREE)
        {
          tree t;
          int i;

          DECL_CONTEXT (node->decl) = context;

          /* Propagate it to the TYPE_CONTEXT attributes of the requested
             ..._TYPE nodes.  */
          FOR_EACH_VEC_ELT (node->types, i, t)
            {
              gnat_set_type_context (t, context);
            }
          processed = true;
        }

      /* If this node has been successfully processed, remove it from the
         queue.  Then move to the next node.  */
      if (processed)
        {
          *it = node->next;
          node->types.release ();
          free (node);
        }
      else
        it = &node->next;
    }
}
/* Return VALUE scaled by the biggest power-of-2 factor of EXPR.  */

static unsigned int
scale_by_factor_of (tree expr, unsigned int value)
{
  /* Look through conversions to find the underlying expression.  */
  expr = remove_conversions (expr, true);

  /* An expression which is a bitwise AND with a mask has a power-of-2 factor
     corresponding to the number of trailing zeros of the mask.  */
  if (TREE_CODE (expr) == BIT_AND_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
      unsigned int i = 0;

      /* The bound on I guards against an all-zero mask looping forever.  */
      while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
        {
          mask >>= 1;
          value *= 2;
          i++;
        }
    }

  return value;
}
/* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these 2 fields are laid out in such a way that no gap
   exist between the end of PREV_FIELD and the beginning of CURR_FIELD.  OFFSET
   is the distance in bits between the end of PREV_FIELD and the starting
   position of CURR_FIELD.  It is ignored if null.  */

static bool
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap */
  if (!prev_field)
    return false;

  /* If the previous field is a union type, then return false: The only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return false;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not null.  */
  if (offset && tree_fits_uhwi_p (offset))
    return !integer_zerop (offset);

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not multiple of the current field alignment.  */
  if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
      && tree_fits_uhwi_p (bit_position (prev_field)))
    return ((tree_to_uhwi (bit_position (prev_field))
             + tree_to_uhwi (DECL_SIZE (prev_field)))
            % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return false;

  /* Fallback, return that there may be a potential gap */
  return true;
}
2992 /* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
2993 of the decl. */
2995 tree
2996 create_label_decl (tree label_name, Node_Id gnat_node)
2998 tree label_decl
2999 = build_decl (input_location, LABEL_DECL, label_name, void_type_node);
3001 DECL_MODE (label_decl) = VOIDmode;
3003 /* Add this decl to the current binding level. */
3004 gnat_pushdecl (label_decl, gnat_node);
3006 return label_decl;
/* Return a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the DECL_CHAIN field).

   INLINE_STATUS, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
   used to set the appropriate fields in the FUNCTION_DECL.  GNAT_NODE is
   used for the position of the decl.  */

tree
create_subprog_decl (tree subprog_name, tree asm_name, tree subprog_type,
                     tree param_decl_list, enum inline_status_t inline_status,
                     bool public_flag, bool extern_flag, bool artificial_flag,
                     struct attrib *attr_list, Node_Id gnat_node)
{
  tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
                                  subprog_type);
  tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
                                 TREE_TYPE (subprog_type));
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;

  DECL_ARTIFICIAL (subprog_decl) = artificial_flag;
  DECL_EXTERNAL (subprog_decl) = extern_flag;

  /* Translate the four Ada inlining states into the GCC flags.  */
  switch (inline_status)
    {
    case is_suppressed:
      DECL_UNINLINABLE (subprog_decl) = 1;
      break;

    case is_disabled:
      break;

    case is_required:
      /* With back-end inlining, a required inline becomes always_inline.  */
      if (Back_End_Inlining)
        decl_attributes (&subprog_decl,
                         tree_cons (get_identifier ("always_inline"),
                                    NULL_TREE, NULL_TREE),
                         ATTR_FLAG_TYPE_IN_PLACE);

      /* ... fall through ... */

    case is_enabled:
      DECL_DECLARED_INLINE_P (subprog_decl) = 1;
      /* Do not warn when an artificial subprogram cannot be inlined.  */
      DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_flag;
      break;

    default:
      gcc_unreachable ();
    }

  TREE_PUBLIC (subprog_decl) = public_flag;
  /* Pure/const-ness and volatility are carried by the FUNCTION_TYPE.  */
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);

  DECL_ARTIFICIAL (result_decl) = 1;
  DECL_IGNORED_P (result_decl) = 1;
  DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
  DECL_RESULT (subprog_decl) = result_decl;

  if (asm_name)
    {
      SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

      /* The expand_main_function circuitry expects "main_identifier_node" to
         designate the DECL_NAME of the 'main' entry point, in turn expected
         to be declared as the "main" function literally by default.  Ada
         program entry points are typically declared with a different name
         within the binder generated file, exported as 'main' to satisfy the
         system expectations.  Force main_identifier_node in this case.  */
      if (asm_name == main_identifier_node)
        DECL_NAME (subprog_decl) = main_identifier_node;
    }

  process_attributes (&subprog_decl, &attr_list, true, gnat_node);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
}
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

void
begin_subprog_body (tree subprog_decl)
{
  tree param_decl;

  announce_function (subprog_decl);

  /* This function is being defined.  */
  TREE_STATIC (subprog_decl) = 1;

  /* The code generated below expects current_function_decl to be set.  */
  current_function_decl = subprog_decl;

  /* Enter a new binding level and show that all the parameters belong to
     this function.  */
  gnat_pushlevel ();

  for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
       param_decl = DECL_CHAIN (param_decl))
    DECL_CONTEXT (param_decl) = subprog_decl;

  make_decl_rtl (subprog_decl);
}
/* Finish translating the current subprogram and set its BODY.  */

void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Attach the BLOCK for this level to the function and pop the level.  */
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* Mark the RESULT_DECL as being in this subprogram.  */
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  /* The body should be a BIND_EXPR whose BLOCK is the top-level one.  */
  if (TREE_CODE (body) == BIND_EXPR)
    {
      BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
      DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
    }

  DECL_SAVED_TREE (fndecl) = body;

  /* Restore the enclosing function, if any, as current.  */
  current_function_decl = decl_function_context (fndecl);
}
/* Wrap up compilation of SUBPROG_DECL, a subprogram body.  */

void
rest_of_subprog_body_compilation (tree subprog_decl)
{
  /* We cannot track the location of errors past this point.  */
  error_gnat_node = Empty;

  /* If we're only annotating types, don't actually compile this function.  */
  if (type_annotate_only)
    return;

  /* Dump functions before gimplification.  */
  dump_function (TDI_original, subprog_decl);

  /* Hand the function over to the cgraph machinery for code generation.  */
  if (!decl_function_context (subprog_decl))
    cgraph_node::finalize_function (subprog_decl, false);
  else
    /* Register this function with cgraph just far enough to get it
       added to our parent's nested function list.  */
    (void) cgraph_node::get_create (subprog_decl);
}
/* Record DECL, a builtin function, in the current binding level and
   return it.  Implements the lang hook for builtin registration.  */
tree
gnat_builtin_function (tree decl)
{
  gnat_pushdecl (decl, Empty);
  return decl;
}
/* Return an integer type with the number of bits of precision given by
   PRECISION.  UNSIGNEDP is nonzero if the type is unsigned; otherwise
   it is a signed type.  */

tree
gnat_type_for_size (unsigned precision, int unsignedp)
{
  tree t;
  char type_name[20];

  /* Types up to 2 machine words are cached; return the cached node.  */
  if (precision <= 2 * MAX_BITS_PER_WORD
      && signed_and_unsigned_types[precision][unsignedp])
    return signed_and_unsigned_types[precision][unsignedp];

  if (unsignedp)
    t = make_unsigned_type (precision);
  else
    t = make_signed_type (precision);

  if (precision <= 2 * MAX_BITS_PER_WORD)
    signed_and_unsigned_types[precision][unsignedp] = t;

  /* Give the new type a synthetic name, e.g. "UNSIGNED_32".  */
  if (!TYPE_NAME (t))
    {
      sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
/* Likewise for floating-point types.  */

static tree
float_type_for_precision (int precision, machine_mode mode)
{
  tree t;
  char type_name[20];

  /* One cached type per machine mode.  */
  if (float_types[(int) mode])
    return float_types[(int) mode];

  float_types[(int) mode] = t = make_node (REAL_TYPE);
  TYPE_PRECISION (t) = precision;
  layout_type (t);

  /* PRECISION must select exactly MODE once laid out.  */
  gcc_assert (TYPE_MODE (t) == mode);
  if (!TYPE_NAME (t))
    {
      sprintf (type_name, "FLOAT_%d", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
/* Return a data type that has machine mode MODE.  UNSIGNEDP selects
   an unsigned type; otherwise a signed type is returned.  */

tree
gnat_type_for_mode (machine_mode mode, int unsignedp)
{
  if (mode == BLKmode)
    return NULL_TREE;

  if (mode == VOIDmode)
    return void_type_node;

  /* Complex modes are not supported by the Ada front-end.  */
  if (COMPLEX_MODE_P (mode))
    return NULL_TREE;

  if (SCALAR_FLOAT_MODE_P (mode))
    return float_type_for_precision (GET_MODE_PRECISION (mode), mode);

  if (SCALAR_INT_MODE_P (mode))
    return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);

  /* For a vector mode, build the vector type over the element type.  */
  if (VECTOR_MODE_P (mode))
    {
      machine_mode inner_mode = GET_MODE_INNER (mode);
      tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
      if (inner_type)
        return build_vector_type_for_mode (inner_type, mode);
    }

  return NULL_TREE;
}
/* Return the unsigned version of a TYPE_NODE, a scalar type.  */

tree
gnat_unsigned_type (tree type_node)
{
  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);

  /* For modular types (or subtypes thereof), point TREE_TYPE of the
     result at the modular type so the modularity is not lost.  */
  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
    {
      type = copy_node (type);
      TREE_TYPE (type) = type_node;
    }
  else if (TREE_TYPE (type_node)
           && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
           && TYPE_MODULAR_P (TREE_TYPE (type_node)))
    {
      type = copy_node (type);
      TREE_TYPE (type) = TREE_TYPE (type_node);
    }

  return type;
}
3290 /* Return the signed version of a TYPE_NODE, a scalar type. */
3292 tree
3293 gnat_signed_type (tree type_node)
3295 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
3297 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3299 type = copy_node (type);
3300 TREE_TYPE (type) = type_node;
3302 else if (TREE_TYPE (type_node)
3303 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3304 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3306 type = copy_node (type);
3307 TREE_TYPE (type) = TREE_TYPE (type_node);
3310 return type;
/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
   transparently converted to each other.  */

int
gnat_types_compatible_p (tree t1, tree t2)
{
  enum tree_code code;

  /* This is the default criterion.  */
  if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
    return 1;

  /* We only check structural equivalence here.  */
  if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
    return 0;

  /* Vector types are also compatible if they have the same number of subparts
     and the same form of (scalar) element type.  */
  if (code == VECTOR_TYPE
      && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
      && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
      && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
    return 1;

  /* Array types are also compatible if they are constrained and have the same
     domain(s) and the same component type.  */
  if (code == ARRAY_TYPE
      && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
          || (TYPE_DOMAIN (t1)
              && TYPE_DOMAIN (t2)
              && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
                                     TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
              && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
                                     TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
      && (TREE_TYPE (t1) == TREE_TYPE (t2)
          || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
              && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
    return 1;

  return 0;
}
3355 /* Return true if EXPR is a useless type conversion. */
3357 bool
3358 gnat_useless_type_conversion (tree expr)
3360 if (CONVERT_EXPR_P (expr)
3361 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3362 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3363 return gnat_types_compatible_p (TREE_TYPE (expr),
3364 TREE_TYPE (TREE_OPERAND (expr, 0)));
3366 return false;
3369 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3371 bool
3372 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3373 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3375 return TYPE_CI_CO_LIST (t) == cico_list
3376 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3377 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3378 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
        {
          tree t, *argarray;
          int n, i;

          /* Prefer inlining the call so the bounds can be analyzed.  */
          t = maybe_inline_call_in_expr (exp);
          if (t)
            return max_size (t, max_p);

          /* Otherwise rebuild the call with maximized arguments.  */
          n = call_expr_nargs (exp);
          gcc_assert (n > 0);
          argarray = XALLOCAVEC (tree, n);
          for (i = 0; i < n; i++)
            argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
          return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
        }
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
         modify.  Otherwise, we treat it like a variable.  */
      if (CONTAINS_PLACEHOLDER_P (exp))
        {
          tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
          tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
          return max_size (convert (get_base_type (val_type), val), true);
        }

      return exp;

    case tcc_comparison:
      /* A comparison contributes at most 1 to a size expression.  */
      return max_p ? size_one_node : size_zero_node;

    case tcc_unary:
      if (code == NON_LVALUE_EXPR)
        return max_size (TREE_OPERAND (exp, 0), max_p);

      /* Negation flips which bound maximizes the result.  */
      return fold_build1 (code, type,
                          max_size (TREE_OPERAND (exp, 0),
                                    code == NEGATE_EXPR ? !max_p : max_p));

    case tcc_binary:
      {
        tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
        tree rhs = max_size (TREE_OPERAND (exp, 1),
                             code == MINUS_EXPR ? !max_p : max_p);

        /* Special-case wanting the maximum value of a MIN_EXPR.
           In that case, if one side overflows, return the other.  */
        if (max_p && code == MIN_EXPR)
          {
            if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
              return lhs;

            if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
              return rhs;
          }

        /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
           overflowing and the RHS a variable.  */
        if ((code == MINUS_EXPR || code == PLUS_EXPR)
            && TREE_CODE (lhs) == INTEGER_CST
            && TREE_OVERFLOW (lhs)
            && !TREE_CONSTANT (rhs))
          return lhs;

        return size_binop (code, lhs, rhs);
      }

    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
        {
        case 1:
          if (code == SAVE_EXPR)
            return exp;

          return fold_build1 (code, type,
                              max_size (TREE_OPERAND (exp, 0), max_p));

        case 2:
          if (code == COMPOUND_EXPR)
            return max_size (TREE_OPERAND (exp, 1), max_p);

          return fold_build2 (code, type,
                              max_size (TREE_OPERAND (exp, 0), max_p),
                              max_size (TREE_OPERAND (exp, 1), max_p));

        case 3:
          /* The bound of a conditional is the extremum of both arms.  */
          if (code == COND_EXPR)
            return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
                                max_size (TREE_OPERAND (exp, 1), max_p),
                                max_size (TREE_OPERAND (exp, 2), max_p));

        default:
          break;
        }

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  vec<constructor_elt, va_gc> *template_elts = NULL;
  tree bound_list = NULL_TREE;
  tree field;

  /* Look through padding and justified-modular wrappers to find the
     underlying array type.  */
  while (TREE_CODE (array_type) == RECORD_TYPE
         && (TYPE_PADDING_P (array_type)
             || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
          && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.  Template fields come
     in (min, max) pairs, hence the double DECL_CHAIN step.  */

  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
        ? (bound_list = TREE_CHAIN (bound_list))
        : (array_type = TREE_TYPE (array_type))),
       field = DECL_CHAIN (DECL_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
         for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
         DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
         This will give us a maximum range.  */
      if (bound_list)
        bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
        bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
               && DECL_BY_COMPONENT_PTR_P (expr))
        bounds = TREE_TYPE (field);
      else
        gcc_unreachable ();

      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
         substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
      CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
    }

  return gnat_build_constructor (template_type, template_elts);
}
3566 /* Return true if TYPE is suitable for the element type of a vector. */
3568 static bool
3569 type_for_vector_element_p (tree type)
3571 machine_mode mode;
3573 if (!INTEGRAL_TYPE_P (type)
3574 && !SCALAR_FLOAT_TYPE_P (type)
3575 && !FIXED_POINT_TYPE_P (type))
3576 return false;
3578 mode = TYPE_MODE (type);
3579 if (GET_MODE_CLASS (mode) != MODE_INT
3580 && !SCALAR_FLOAT_MODE_P (mode)
3581 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3582 return false;
3584 return true;
/* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
   this is not possible.  If ATTRIBUTE is non-zero, we are processing the
   attribute declaration and want to issue error messages on failure.  */

static tree
build_vector_type_for_size (tree inner_type, tree size, tree attribute)
{
  unsigned HOST_WIDE_INT size_int, inner_size_int;
  int nunits;

  /* Silently punt on variable sizes.  We can't make vector types for them,
     need to ignore them on front-end generated subtypes of unconstrained
     base types, and this attribute is for binding implementors, not end
     users, so we should never get there from legitimate explicit uses.  */
  if (!tree_fits_uhwi_p (size))
    return NULL_TREE;
  size_int = tree_to_uhwi (size);

  if (!type_for_vector_element_p (inner_type))
    {
      if (attribute)
        error ("invalid element type for attribute %qs",
               IDENTIFIER_POINTER (attribute));
      return NULL_TREE;
    }
  inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));

  if (size_int % inner_size_int)
    {
      if (attribute)
        error ("vector size not an integral multiple of component size");
      return NULL_TREE;
    }

  if (size_int == 0)
    {
      if (attribute)
        error ("zero vector size");
      return NULL_TREE;
    }

  /* The number of elements must be a power of two.  */
  nunits = size_int / inner_size_int;
  if (nunits & (nunits - 1))
    {
      if (attribute)
        error ("number of components of vector not a power of two");
      return NULL_TREE;
    }

  return build_vector_type (inner_type, nunits);
}
/* Return a vector type whose representative array type is ARRAY_TYPE, or
   NULL_TREE if this is not possible.  If ATTRIBUTE is non-zero, we are
   processing the attribute and want to issue error messages on failure.  */

static tree
build_vector_type_for_array (tree array_type, tree attribute)
{
  tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
                                                 TYPE_SIZE_UNIT (array_type),
                                                 attribute);
  if (!vector_type)
    return NULL_TREE;

  /* Remember the array type so array views of the vector can be made.  */
  TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
  return vector_type;
}
/* Build a type to be used to represent an aliased object whose nominal type
   is an unconstrained array.  This consists of a RECORD_TYPE containing a
   field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
   If ARRAY_TYPE is that of an unconstrained array, this is used to represent
   an arbitrary unconstrained object.  Use NAME as the name of the record.
   DEBUG_INFO_P is true if we need to write debug information for the type.  */

tree
build_unc_object_type (tree template_type, tree object_type, tree name,
                       bool debug_info_p)
{
  tree decl;
  tree type = make_node (RECORD_TYPE);
  /* The bounds (template) come first, then the array data.  */
  tree template_field
    = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
                         NULL_TREE, NULL_TREE, 0, 1);
  tree array_field
    = create_field_decl (get_identifier ("ARRAY"), object_type, type,
                         NULL_TREE, NULL_TREE, 0, 1);

  TYPE_NAME (type) = name;
  TYPE_CONTAINS_TEMPLATE_P (type) = 1;
  DECL_CHAIN (template_field) = array_field;
  finish_record_type (type, template_field, 0, true);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  decl = create_type_decl (name, type, true, debug_info_p, Empty);

  /* template_type will not be used elsewhere than here, so to keep the debug
     info clean and in order to avoid scoping issues, make decl its
     context.  */
  gnat_set_type_context (template_type, decl);

  return type;
}
/* Same, taking a thin or fat pointer type instead of a template type.  */

tree
build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
                                tree name, bool debug_info_p)
{
  tree template_type;

  gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));

  /* For a fat pointer, the template is designated by the second field;
     for a thin pointer, it is the first field of the designated record.  */
  template_type
    = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
       ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
       : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));

  return
    build_unc_object_type (template_type, object_type, name, debug_info_p);
}
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
   In the normal case this is just two adjustments, but we have more to
   do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree t;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
      update_pointer_to (t, new_type);

  /* If no pointers and no references, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it; after the designated type's freeze, we get here with
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
     when the access type was processed.  We will make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy OLD_TYPE variant with different
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
     the qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
  new_type
    = build_qualified_type (new_type,
			    TYPE_QUALS (old_type) | TYPE_QUALS (new_type));

  /* If old type and new type are identical, there is nothing to do.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      tree new_ptr, new_ref;

      /* If pointer or reference already points to new type, nothing to do.
	 This can happen as update_pointer_to can be invoked multiple times
	 on the same couple of types because of the type variants.  */
      if ((ptr && TREE_TYPE (ptr) == new_type)
	  || (ref && TREE_TYPE (ref) == new_type))
	return;

      /* Chain PTR and its variants at the end.  */
      new_ptr = TYPE_POINTER_TO (new_type);
      if (new_ptr)
	{
	  while (TYPE_NEXT_PTR_TO (new_ptr))
	    new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
	  TYPE_NEXT_PTR_TO (new_ptr) = ptr;
	}
      else
	TYPE_POINTER_TO (new_type) = ptr;

      /* Now adjust them.  A null-bounds constant hanging off a pointer
	 variant designates an object of the pointed-to type, so retarget
	 it as well.  */
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
	  {
	    TREE_TYPE (t) = new_type;
	    if (TYPE_NULL_BOUNDS (t))
	      TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
	  }

      /* Chain REF and its variants at the end.  */
      new_ref = TYPE_REFERENCE_TO (new_type);
      if (new_ref)
	{
	  while (TYPE_NEXT_REF_TO (new_ref))
	    new_ref = TYPE_NEXT_REF_TO (new_ref);
	  TYPE_NEXT_REF_TO (new_ref) = ref;
	}
      else
	TYPE_REFERENCE_TO (new_type) = ref;

      /* Now adjust them.  */
      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
	for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
	  TREE_TYPE (t) = new_type;

      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }

  /* Now deal with the unconstrained array case.  In this case the pointer
     is actually a record where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.
     Likewise for the pointer to the object record (thin pointer).  */
  else
    {
      tree new_ptr = TYPE_POINTER_TO (new_type);

      gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));

      /* If PTR already points to NEW_TYPE, nothing to do.  This can happen
	 since update_pointer_to can be invoked multiple times on the same
	 couple of types because of the type variants.  */
      if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
	return;

      /* First field of the fat pointer: pointer to the array data.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));

      /* Second field of the fat pointer: pointer to the bounds template.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
	 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
			 TYPE_OBJECT_RECORD_TYPE (new_type));

      TYPE_POINTER_TO (old_type) = NULL_TREE;
    }
}
/* Convert EXPR, a pointer to a constrained array, into a pointer to an
   unconstrained one.  This involves making or finding a template.  TYPE is
   the fat pointer type to build: its first field is the pointer to the
   array data and its second field is the pointer to the template.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
  tree etype = TREE_TYPE (expr);
  tree template_addr;
  vec<constructor_elt, va_gc> *v;
  vec_alloc (v, 2);

  /* If EXPR is null, make a fat pointer that contains a null pointer to the
     array (compare_fat_pointers ensures that this is the full discriminant)
     and a valid pointer to the bounds.  This latter property is necessary
     since the compiler can hoist the load of the bounds done through it.  */
  if (integer_zerop (expr))
    {
      tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      tree null_bounds, t;

      /* Reuse the cached null bounds for this pointer-to-template type if
	 they have already been built.  */
      if (TYPE_NULL_BOUNDS (ptr_template_type))
	null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
      else
	{
	  /* The template type can still be dummy at this point so we build an
	     empty constructor.  The middle-end will fill it in with zeros.  */
	  t = build_constructor (template_type, NULL);
	  TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
	  null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
	  SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
	}

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      fold_convert (p_array_type, null_pointer_node));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
      t = build_constructor (type, v);
      /* Do not set TREE_CONSTANT so as to force T to static memory.  */
      TREE_CONSTANT (t) = 0;
      TREE_STATIC (t) = 1;

      return t;
    }

  /* If EXPR is a thin pointer, make template and data from the record.  */
  if (TYPE_IS_THIN_POINTER_P (etype))
    {
      tree field = TYPE_FIELDS (TREE_TYPE (etype));

      /* EXPR is used twice below, so protect it against double
	 evaluation.  */
      expr = gnat_protect_expr (expr);

      /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
	 the thin pointer value has been shifted so we shift it back to get
	 the template address.  */
      if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
	{
	  template_addr
	    = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
			       fold_build1 (NEGATE_EXPR, sizetype,
					    byte_position
					    (DECL_CHAIN (field))));
	  template_addr
	    = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
			    template_addr);
	}

      /* Otherwise we explicitly take the address of the fields.  */
      else
	{
	  expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
	  template_addr
	    = build_unary_op (ADDR_EXPR, NULL_TREE,
			      build_component_ref (expr, NULL_TREE, field,
						   false));
	  expr = build_unary_op (ADDR_EXPR, NULL_TREE,
				 build_component_ref (expr, NULL_TREE,
						      DECL_CHAIN (field),
						      false));
	}
    }

  /* Otherwise, build the constructor for the template.  */
  else
    template_addr
      = build_unary_op (ADDR_EXPR, NULL_TREE,
			build_template (template_type, TREE_TYPE (etype),
					expr));

  /* The final result is a constructor for the fat pointer.

     If EXPR is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     COMPONENT_REFs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine because it
     will only refer to the provided TEMPLATE_TYPE in this case.  */
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
  return gnat_build_constructor (type, v);
}
/* Create an expression whose value is that of EXPR,
   converted to type TYPE.  The TREE_TYPE of the value
   is always TYPE.  This function implements all reasonable
   conversions; callers should filter out those that are
   not permitted by the language being compiled.  */

tree
convert (tree type, tree expr)
{
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both input and output have padding and are of variable size, do this
     as an unchecked conversion.  Likewise if one is a mere variant of the
     other, so we avoid a pointless unpad/repad sequence.  */
  else if (code == RECORD_TYPE && ecode == RECORD_TYPE
	   && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
	   && (!TREE_CONSTANT (TYPE_SIZE (type))
	       || !TREE_CONSTANT (TYPE_SIZE (etype))
	       || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
	       || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
		  == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
    /* Intentionally empty: since both types are RECORD_TYPEs, control
       reaches the RECORD_TYPE case of the final switch below, which does
       the unchecked conversion.  */
    ;

  /* If the output type has padding, convert to the inner type and make a
     constructor to build the record, unless a variable size is involved.  */
  else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
    {
      vec<constructor_elt, va_gc> *v;

      /* If we previously converted from another type and our type is
	 of variable size, remove the conversion to avoid the need for
	 variable-sized temporaries.  Likewise for a conversion between
	 original and packable version.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
	  && (!TREE_CONSTANT (TYPE_SIZE (type))
	      || (ecode == RECORD_TYPE
		  && TYPE_NAME (etype)
		     == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
	expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from expr, convert the original
	 object if we have variable size in order to avoid the need for some
	 variable-sized temporaries.  Likewise if the padding is a variant
	 of the other, so we avoid a pointless unpad/repad sequence.  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && (!TREE_CONSTANT (TYPE_SIZE (type))
	      || TYPE_MAIN_VARIANT (type)
		 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
	      || (ecode == RECORD_TYPE
		  && TYPE_NAME (etype)
		     == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
	return convert (type, TREE_OPERAND (expr, 0));

      /* If the inner type is of self-referential size and the expression type
	 is a record, do this as an unchecked conversion.  But first pad the
	 expression if possible to have the same size on both sides.  */
      if (ecode == RECORD_TYPE
	  && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
	{
	  if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
	    expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
					    false, false, false, true),
			    expr);
	  return unchecked_convert (type, expr, false);
	}

      /* If we are converting between array types with variable size, do the
	 final conversion as an unchecked conversion, again to avoid the need
	 for some variable-sized temporaries.  If valid, this conversion is
	 very likely purely technical and without real effects.  */
      if (ecode == ARRAY_TYPE
	  && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
	  && !TREE_CONSTANT (TYPE_SIZE (etype))
	  && !TREE_CONSTANT (TYPE_SIZE (type)))
	return unchecked_convert (type,
				  convert (TREE_TYPE (TYPE_FIELDS (type)),
					   expr),
				  false);

      /* Default: wrap the converted inner object in a one-field
	 constructor for the padding record.  */
      vec_alloc (v, 1);
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
      return gnat_build_constructor (type, v);
    }

  /* If the input type has padding, remove it and convert to the output type.
     The conditions ordering is arranged to ensure that the output type is not
     a padding type here, as it is not clear whether the conversion would
     always be correct if this was to happen.  */
  else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
    {
      tree unpadded;

      /* If we have just converted to this padded type, just get the
	 inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR
	  && !vec_safe_is_empty (CONSTRUCTOR_ELTS (expr))
	  && (*CONSTRUCTOR_ELTS (expr))[0].index == TYPE_FIELDS (etype))
	unpadded = (*CONSTRUCTOR_ELTS (expr))[0].value;

      /* Otherwise, build an explicit component reference.  */
      else
	unpadded
	  = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);

      return convert (type, unpadded);
    }

  /* If the input is a biased type, adjust first.  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
				       fold_convert (TREE_TYPE (etype), expr),
				       fold_convert (TREE_TYPE (etype),
						     TYPE_MIN_VALUE (etype))));

  /* If the input is a justified modular type, we need to extract the actual
     object before converting it to any other type with the exceptions of an
     unconstrained array or of a mere type variant.  It is useful to avoid the
     extraction and conversion in the type variant case because it could end
     up replacing a VAR_DECL expr by a constructor and we might be about the
     take the address of the result.  */
  if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
      && code != UNCONSTRAINED_ARRAY_TYPE
      && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
    return convert (type, build_component_ref (expr, NULL_TREE,
						TYPE_FIELDS (etype), false));

  /* If converting to a type that contains a template, convert to the data
     type and then build the template.  */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
    {
      tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, 2);

      /* If the source already has a template, get a reference to the
	 associated array only, as we are going to rebuild a template
	 for the target type anyway.  */
      expr = maybe_unconstrained_array (expr);

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      build_template (TREE_TYPE (TYPE_FIELDS (type)),
					      obj_type, NULL_TREE));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
			      convert (obj_type, expr));
      return gnat_build_constructor (type, v);
    }

  /* There are some cases of expressions that we process specially.  */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
	 conversion in gnat_expand_expr.  NULL_EXPR does not represent
	 an actual value, so no conversion is needed.  */
      expr = copy_node (expr);
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
      /* If we are converting a STRING_CST to another constrained array type,
	 just make a new one in the proper type.  */
      if (code == ecode && AGGREGATE_TYPE_P (etype)
	  && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
	       && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}
      break;

    case VECTOR_CST:
      /* If we are converting a VECTOR_CST to a mere type variant, just make
	 a new one in the proper type.  */
      if (code == ecode && gnat_types_compatible_p (type, etype))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}

      /* No break here: control falls through into the CONSTRUCTOR case
	 below when the variant test above does not match.  */

    case CONSTRUCTOR:
      /* If we are converting a CONSTRUCTOR to a mere type variant, or to
	 another padding type around the same type, just make a new one in
	 the proper type.  */
      if (code == ecode
	  && (gnat_types_compatible_p (type, etype)
	      || (code == RECORD_TYPE
		  && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
		  && TREE_TYPE (TYPE_FIELDS (type))
		     == TREE_TYPE (TYPE_FIELDS (etype)))))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
	  return expr;
	}

      /* Likewise for a conversion between original and packable version, or
	 conversion between types of the same size and with the same list of
	 fields, but we have to work harder to preserve type consistency.  */
      if (code == ecode
	  && code == RECORD_TYPE
	  && (TYPE_NAME (type) == TYPE_NAME (etype)
	      || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))

	{
	  vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
	  unsigned HOST_WIDE_INT len = vec_safe_length (e);
	  vec<constructor_elt, va_gc> *v;
	  vec_alloc (v, len);
	  tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
	  unsigned HOST_WIDE_INT idx;
	  tree index, value;

	  /* Whether we need to clear TREE_CONSTANT et al. on the output
	     constructor when we convert in place.  */
	  bool clear_constant = false;

	  FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
	    {
	      /* Skip the missing fields in the CONSTRUCTOR.  */
	      while (efield && field && !SAME_FIELD_P (efield, index))
		{
		  efield = DECL_CHAIN (efield);
		  field = DECL_CHAIN (field);
		}
	      /* The field must be the same.  */
	      if (!(efield && field && SAME_FIELD_P (efield, field)))
		break;
	      constructor_elt elt
		= {field, convert (TREE_TYPE (field), value)};
	      v->quick_push (elt);

	      /* If packing has made this field a bitfield and the input
		 value couldn't be emitted statically any more, we need to
		 clear TREE_CONSTANT on our output.  */
	      if (!clear_constant
		  && TREE_CONSTANT (expr)
		  && !CONSTRUCTOR_BITFIELD_P (efield)
		  && CONSTRUCTOR_BITFIELD_P (field)
		  && !initializer_constant_valid_for_bitfield_p (value))
		clear_constant = true;

	      efield = DECL_CHAIN (efield);
	      field = DECL_CHAIN (field);
	    }

	  /* If we have been able to match and convert all the input fields
	     to their output type, convert in place now.  We'll fallback to a
	     view conversion downstream otherwise.  */
	  if (idx == len)
	    {
	      expr = copy_node (expr);
	      TREE_TYPE (expr) = type;
	      CONSTRUCTOR_ELTS (expr) = v;
	      if (clear_constant)
		TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
	      return expr;
	    }
	}

      /* Likewise for a conversion between array type and vector type with a
	 compatible representative array.  */
      else if (code == VECTOR_TYPE
	       && ecode == ARRAY_TYPE
	       && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
					   etype))
	{
	  vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
	  unsigned HOST_WIDE_INT len = vec_safe_length (e);
	  vec<constructor_elt, va_gc> *v;
	  unsigned HOST_WIDE_INT ix;
	  tree value;

	  /* Build a VECTOR_CST from a *constant* array constructor.  */
	  if (TREE_CONSTANT (expr))
	    {
	      bool constant_p = true;

	      /* Iterate through elements and check if all constructor
		 elements are *_CSTs.  */
	      FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
		if (!CONSTANT_CLASS_P (value))
		  {
		    constant_p = false;
		    break;
		  }

	      if (constant_p)
		return build_vector_from_ctor (type,
					       CONSTRUCTOR_ELTS (expr));
	    }

	  /* Otherwise, build a regular vector constructor.  */
	  vec_alloc (v, len);
	  FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
	    {
	      constructor_elt elt = {NULL_TREE, value};
	      v->quick_push (elt);
	    }
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  CONSTRUCTOR_ELTS (expr) = v;
	  return expr;
	}
      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* First retrieve the underlying array.  */
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      {
	/* GCC 4.x is very sensitive to type consistency overall, and view
	   conversions thus are very frequent.  Even though just "convert"ing
	   the inner operand to the output type is fine in most cases, it
	   might expose unexpected input/output type mismatches in special
	   circumstances so we avoid such recursive calls when we can.  */
	tree op0 = TREE_OPERAND (expr, 0);

	/* If we are converting back to the original type, we can just
	   lift the input conversion.  This is a common occurrence with
	   switches back-and-forth amongst type variants.  */
	if (type == TREE_TYPE (op0))
	  return op0;

	/* Otherwise, if we're converting between two aggregate or vector
	   types, we might be allowed to substitute the VIEW_CONVERT_EXPR
	   target type in place or to just convert the inner expression.  */
	if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
	    || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
	  {
	    /* If we are converting between mere variants, we can just
	       substitute the VIEW_CONVERT_EXPR in place.  */
	    if (gnat_types_compatible_p (type, etype))
	      return build1 (VIEW_CONVERT_EXPR, type, op0);

	    /* Otherwise, we may just bypass the input view conversion unless
	       one of the types is a fat pointer,  which is handled by
	       specialized code below which relies on exact type matching.  */
	    else if (!TYPE_IS_FAT_POINTER_P (type)
		     && !TYPE_IS_FAT_POINTER_P (etype))
	      return convert (type, op0);
	  }

	break;
      }

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  /* If we are converting between two aggregate or vector types that are mere
     variants, just make a VIEW_CONVERT_EXPR.  Likewise when we are converting
     to a vector type from its representative array type.  */
  else if ((code == ecode
	    && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
	    && gnat_types_compatible_p (type, etype))
	   || (code == VECTOR_TYPE
	       && ecode == ARRAY_TYPE
	       && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
					   etype)))
    return build1 (VIEW_CONVERT_EXPR, type, expr);

  /* If we are converting between tagged types, try to upcast properly.
     Walk the chain of parent fields; if no parent of the expected type is
     found, fall through to the switch below.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
    {
      tree child_etype = etype;
      do {
	tree field = TYPE_FIELDS (child_etype);
	if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
	  return build_component_ref (expr, NULL_TREE, field, false);
	child_etype = TREE_TYPE (field);
      } while (TREE_CODE (child_etype) == RECORD_TYPE);
    }

  /* If we are converting from a smaller form of record type back to it, just
     make a VIEW_CONVERT_EXPR.  But first pad the expression to have the same
     size on both sides.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && smaller_form_type_p (etype, type))
    {
      expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
				      false, false, false, true),
		      expr);
      return build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* In all other cases of related types, make a NOP_EXPR.  */
  else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
    return fold_convert (type, expr);

  switch (code)
    {
    case VOID_TYPE:
      return fold_build1 (CONVERT_EXPR, type, expr);

    case INTEGER_TYPE:
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
	  && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
	      || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
	return unchecked_convert (type, expr, false);
      else if (TYPE_BIASED_REPRESENTATION_P (type))
	return fold_convert (type,
			     fold_build2 (MINUS_EXPR, TREE_TYPE (type),
					  convert (TREE_TYPE (type), expr),
					  convert (TREE_TYPE (type),
						   TYPE_MIN_VALUE (type))));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* If we are converting an additive expression to an integer type
	 with lower precision, be wary of the optimization that can be
	 applied by convert_to_integer.  There are 2 problematic cases:
	   - if the first operand was originally of a biased type,
	     because we could be recursively called to convert it
	     to an intermediate type and thus rematerialize the
	     additive operator endlessly,
	   - if the expression contains a placeholder, because an
	     intermediate conversion that changes the sign could
	     be inserted and thus introduce an artificial overflow
	     at compile time when the placeholder is substituted.  */
      if (code == INTEGER_TYPE
	  && ecode == INTEGER_TYPE
	  && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
	  && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
	{
	  tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);

	  if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
	       && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
	      || CONTAINS_PLACEHOLDER_P (expr))
	    return build1 (NOP_EXPR, type, expr);
	}

      return fold (convert_to_integer (type, expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two thin pointers, adjust if needed to account
	 for differing offsets from the base pointer, depending on whether
	 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type.  */
      if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
	{
	  tree etype_pos
	    = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)) != NULL_TREE
	      ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
	      : size_zero_node;
	  tree type_pos
	    = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != NULL_TREE
	      ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
	      : size_zero_node;
	  tree byte_diff = size_diffop (type_pos, etype_pos);

	  expr = build1 (NOP_EXPR, type, expr);
	  if (integer_zerop (byte_diff))
	    return expr;

	  return build_binary_op (POINTER_PLUS_EXPR, type, expr,
				  fold_convert (sizetype, byte_diff));
	}

      /* If converting fat pointer to normal or thin pointer, get the pointer
	 to the array and then convert it.  */
      if (TYPE_IS_FAT_POINTER_P (etype))
	expr
	  = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
	{
	  vec<constructor_elt, va_gc> *v;
	  vec_alloc (v, 1);

	  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
				  convert (TREE_TYPE (TYPE_FIELDS (type)),
					   expr));
	  return gnat_build_constructor (type, v);
	}

      /* ... fall through ... */

    case ARRAY_TYPE:
      /* In these cases, assume the front-end has validated the conversion.
	 If the conversion is valid, it will be a bit-wise conversion, so
	 it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, false);

    case UNION_TYPE:
      /* This is either a conversion between a tagged type and some
	 subtype, which we have to mark as a UNION_TYPE because of
	 overlapping fields or a conversion of an Unchecked_Union.  */
      return unchecked_convert (type, expr, false);

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If the input is a VECTOR_TYPE, convert to the representative
	 array type first.  */
      if (ecode == VECTOR_TYPE)
	{
	  expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
	  etype = TREE_TYPE (expr);
	  ecode = TREE_CODE (etype);
	}

      /* If EXPR is a constrained array, take its address, convert it to a
	 fat pointer, and then dereference it.  Likewise if EXPR is a
	 record containing both a template and a constrained array.
	 Note that a record representing a justified modular type
	 always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
	  || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
	return
	  build_unary_op
	    (INDIRECT_REF, NULL_TREE,
	     convert_to_fat_pointer (TREE_TYPE (type),
				     build_unary_op (ADDR_EXPR,
						     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
	 array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
	return
	  build_unary_op (INDIRECT_REF, NULL_TREE,
			  convert (TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR,
						   NULL_TREE, expr)));
      else
	gcc_unreachable ();

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gcc_unreachable ();
    }
}
/* Create an expression whose value is that of EXPR converted to the common
   index type, which is sizetype.  EXPR is supposed to be in the base type
   of the GNAT index type.  Calling it is equivalent to doing

     convert (sizetype, expr)

   but we try to distribute the type conversion with the knowledge that EXPR
   cannot overflow in its type.  This is a best-effort approach and we fall
   back to the above expression as soon as difficulties are encountered.

   This is necessary to overcome issues that arise when the GNAT base index
   type and the GCC common index type (sizetype) don't have the same size,
   which is quite frequent on 64-bit architectures.  In this case, and if
   the GNAT base index type is signed but the iteration type of the loop has
   been forced to unsigned, the loop scalar evolution engine cannot compute
   a simple evolution for the general induction variables associated with the
   array indices, because it will preserve the wrap-around semantics in the
   unsigned type of their "inner" part.  As a result, many loop optimizations
   are blocked.

   The solution is to use a special (basic) induction variable that is at
   least as large as sizetype, and to express the aforementioned general
   induction variables in terms of this induction variable, eliminating
   the problematic intermediate truncation to the GNAT base index type.
   This is possible as long as the original expression doesn't overflow
   and if the middle-end hasn't introduced artificial overflows in the
   course of the various simplification it can make to the expression.  */

tree
convert_to_index_type (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  tree type = TREE_TYPE (expr);

  /* If the type is unsigned, overflow is allowed so we cannot be sure that
     EXPR doesn't overflow.  Keep it simple if optimization is disabled.  */
  if (TYPE_UNSIGNED (type) || !optimize)
    return convert (sizetype, expr);

  switch (code)
    {
    case VAR_DECL:
      /* The main effect of the function: replace a loop parameter with its
	 associated special induction variable.  The plain conversion after
	 the switch is then applied to the induction variable.  */
      if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
	expr = DECL_INDUCTION_VAR (expr);
      break;

    CASE_CONVERT:
      {
	tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
	/* Bail out as soon as we suspect some sort of type frobbing.  */
	if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
	    || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
	  break;
      }

      /* ... fall through ... */

    case NON_LVALUE_EXPR:
      return fold_build1 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Distribute the conversion over both operands of the additive or
	 multiplicative operator.  */
      return fold_build2 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COMPOUND_EXPR:
      return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COND_EXPR:
      /* The condition keeps its type; only the arms are converted.  */
      return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)),
			  convert_to_index_type (TREE_OPERAND (expr, 2)));

    default:
      break;
    }

  /* Fallback: plain conversion of (the possibly replaced) EXPR.  */
  return convert (sizetype, expr);
}
4601 /* Remove all conversions that are done in EXP. This includes converting
4602 from a padded type or to a justified modular type. If TRUE_ADDRESS
4603 is true, always return the address of the containing object even if
4604 the address is not bit-aligned. */
4606 tree
4607 remove_conversions (tree exp, bool true_address)
4609 switch (TREE_CODE (exp))
4611 case CONSTRUCTOR:
4612 if (true_address
4613 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4614 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4615 return
4616 remove_conversions ((*CONSTRUCTOR_ELTS (exp))[0].value, true);
4617 break;
4619 case COMPONENT_REF:
4620 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4621 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4622 break;
4624 CASE_CONVERT:
4625 case VIEW_CONVERT_EXPR:
4626 case NON_LVALUE_EXPR:
4627 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4629 default:
4630 break;
4633 return exp;
4636 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4637 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4638 likewise return an expression pointing to the underlying array. */
4640 tree
4641 maybe_unconstrained_array (tree exp)
4643 enum tree_code code = TREE_CODE (exp);
4644 tree type = TREE_TYPE (exp);
4646 switch (TREE_CODE (type))
4648 case UNCONSTRAINED_ARRAY_TYPE:
4649 if (code == UNCONSTRAINED_ARRAY_REF)
4651 const bool read_only = TREE_READONLY (exp);
4652 const bool no_trap = TREE_THIS_NOTRAP (exp);
4654 exp = TREE_OPERAND (exp, 0);
4655 type = TREE_TYPE (exp);
4657 if (TREE_CODE (exp) == COND_EXPR)
4659 tree op1
4660 = build_unary_op (INDIRECT_REF, NULL_TREE,
4661 build_component_ref (TREE_OPERAND (exp, 1),
4662 NULL_TREE,
4663 TYPE_FIELDS (type),
4664 false));
4665 tree op2
4666 = build_unary_op (INDIRECT_REF, NULL_TREE,
4667 build_component_ref (TREE_OPERAND (exp, 2),
4668 NULL_TREE,
4669 TYPE_FIELDS (type),
4670 false));
4672 exp = build3 (COND_EXPR,
4673 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
4674 TREE_OPERAND (exp, 0), op1, op2);
4676 else
4678 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
4679 build_component_ref (exp, NULL_TREE,
4680 TYPE_FIELDS (type),
4681 false));
4682 TREE_READONLY (exp) = read_only;
4683 TREE_THIS_NOTRAP (exp) = no_trap;
4687 else if (code == NULL_EXPR)
4688 exp = build1 (NULL_EXPR,
4689 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
4690 TREE_OPERAND (exp, 0));
4691 break;
4693 case RECORD_TYPE:
4694 /* If this is a padded type and it contains a template, convert to the
4695 unpadded type first. */
4696 if (TYPE_PADDING_P (type)
4697 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
4698 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
4700 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4701 type = TREE_TYPE (exp);
4704 if (TYPE_CONTAINS_TEMPLATE_P (type))
4706 exp = build_component_ref (exp, NULL_TREE,
4707 DECL_CHAIN (TYPE_FIELDS (type)),
4708 false);
4709 type = TREE_TYPE (exp);
4711 /* If the array type is padded, convert to the unpadded type. */
4712 if (TYPE_IS_PADDING_P (type))
4713 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4715 break;
4717 default:
4718 break;
4721 return exp;
4724 /* Return true if EXPR is an expression that can be folded as an operand
4725 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4727 static bool
4728 can_fold_for_view_convert_p (tree expr)
4730 tree t1, t2;
4732 /* The folder will fold NOP_EXPRs between integral types with the same
4733 precision (in the middle-end's sense). We cannot allow it if the
4734 types don't have the same precision in the Ada sense as well. */
4735 if (TREE_CODE (expr) != NOP_EXPR)
4736 return true;
4738 t1 = TREE_TYPE (expr);
4739 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4741 /* Defer to the folder for non-integral conversions. */
4742 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4743 return true;
4745 /* Only fold conversions that preserve both precisions. */
4746 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4747 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4748 return true;
4750 return false;
4753 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4754 If NOTRUNC_P is true, truncation operations should be suppressed.
4756 Special care is required with (source or target) integral types whose
4757 precision is not equal to their size, to make sure we fetch or assign
4758 the value bits whose location might depend on the endianness, e.g.
4760 Rmsize : constant := 8;
4761 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4763 type Bit_Array is array (1 .. Rmsize) of Boolean;
4764 pragma Pack (Bit_Array);
4766 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4768 Value : Int := 2#1000_0001#;
4769 Vbits : Bit_Array := To_Bit_Array (Value);
4771 we expect the 8 bits at Vbits'Address to always contain Value, while
4772 their original location depends on the endianness, at Value'Address
4773 on a little-endian architecture but not on a big-endian one. */
4775 tree
4776 unchecked_convert (tree type, tree expr, bool notrunc_p)
4778 tree etype = TREE_TYPE (expr);
4779 enum tree_code ecode = TREE_CODE (etype);
4780 enum tree_code code = TREE_CODE (type);
4781 tree tem;
4782 int c;
4784 /* If the expression is already of the right type, we are done. */
4785 if (etype == type)
4786 return expr;
4788 /* If both types types are integral just do a normal conversion.
4789 Likewise for a conversion to an unconstrained array. */
4790 if (((INTEGRAL_TYPE_P (type)
4791 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
4792 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
4793 && (INTEGRAL_TYPE_P (etype)
4794 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
4795 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
4796 || code == UNCONSTRAINED_ARRAY_TYPE)
4798 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4800 tree ntype = copy_type (etype);
4801 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4802 TYPE_MAIN_VARIANT (ntype) = ntype;
4803 expr = build1 (NOP_EXPR, ntype, expr);
4806 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4808 tree rtype = copy_type (type);
4809 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4810 TYPE_MAIN_VARIANT (rtype) = rtype;
4811 expr = convert (rtype, expr);
4812 expr = build1 (NOP_EXPR, type, expr);
4814 else
4815 expr = convert (type, expr);
4818 /* If we are converting to an integral type whose precision is not equal
4819 to its size, first unchecked convert to a record type that contains an
4820 field of the given precision. Then extract the field. */
4821 else if (INTEGRAL_TYPE_P (type)
4822 && TYPE_RM_SIZE (type)
4823 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4824 GET_MODE_BITSIZE (TYPE_MODE (type))))
4826 tree rec_type = make_node (RECORD_TYPE);
4827 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
4828 tree field_type, field;
4830 if (TYPE_UNSIGNED (type))
4831 field_type = make_unsigned_type (prec);
4832 else
4833 field_type = make_signed_type (prec);
4834 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
4836 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
4837 NULL_TREE, bitsize_zero_node, 1, 0);
4839 finish_record_type (rec_type, field, 1, false);
4841 expr = unchecked_convert (rec_type, expr, notrunc_p);
4842 expr = build_component_ref (expr, NULL_TREE, field, false);
4843 expr = fold_build1 (NOP_EXPR, type, expr);
4846 /* Similarly if we are converting from an integral type whose precision is
4847 not equal to its size, first copy into a field of the given precision
4848 and unchecked convert the record type. */
4849 else if (INTEGRAL_TYPE_P (etype)
4850 && TYPE_RM_SIZE (etype)
4851 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4852 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4854 tree rec_type = make_node (RECORD_TYPE);
4855 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
4856 vec<constructor_elt, va_gc> *v;
4857 vec_alloc (v, 1);
4858 tree field_type, field;
4860 if (TYPE_UNSIGNED (etype))
4861 field_type = make_unsigned_type (prec);
4862 else
4863 field_type = make_signed_type (prec);
4864 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
4866 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
4867 NULL_TREE, bitsize_zero_node, 1, 0);
4869 finish_record_type (rec_type, field, 1, false);
4871 expr = fold_build1 (NOP_EXPR, field_type, expr);
4872 CONSTRUCTOR_APPEND_ELT (v, field, expr);
4873 expr = gnat_build_constructor (rec_type, v);
4874 expr = unchecked_convert (type, expr, notrunc_p);
4877 /* If we are converting from a scalar type to a type with a different size,
4878 we need to pad to have the same size on both sides.
4880 ??? We cannot do it unconditionally because unchecked conversions are
4881 used liberally by the front-end to implement polymorphism, e.g. in:
4883 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
4884 return p___size__4 (p__object!(S191s.all));
4886 so we skip all expressions that are references. */
4887 else if (!REFERENCE_CLASS_P (expr)
4888 && !AGGREGATE_TYPE_P (etype)
4889 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4890 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
4892 if (c < 0)
4894 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4895 false, false, false, true),
4896 expr);
4897 expr = unchecked_convert (type, expr, notrunc_p);
4899 else
4901 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
4902 false, false, false, true);
4903 expr = unchecked_convert (rec_type, expr, notrunc_p);
4904 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
4905 false);
4909 /* We have a special case when we are converting between two unconstrained
4910 array types. In that case, take the address, convert the fat pointer
4911 types, and dereference. */
4912 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
4913 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4914 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4915 build_unary_op (ADDR_EXPR, NULL_TREE,
4916 expr)));
4918 /* Another special case is when we are converting to a vector type from its
4919 representative array type; this a regular conversion. */
4920 else if (code == VECTOR_TYPE
4921 && ecode == ARRAY_TYPE
4922 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4923 etype))
4924 expr = convert (type, expr);
4926 /* And, if the array type is not the representative, we try to build an
4927 intermediate vector type of which the array type is the representative
4928 and to do the unchecked conversion between the vector types, in order
4929 to enable further simplifications in the middle-end. */
4930 else if (code == VECTOR_TYPE
4931 && ecode == ARRAY_TYPE
4932 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
4934 expr = convert (tem, expr);
4935 return unchecked_convert (type, expr, notrunc_p);
4938 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
4939 the alignment of the CONSTRUCTOR to speed up the copy operation. */
4940 else if (TREE_CODE (expr) == CONSTRUCTOR
4941 && code == RECORD_TYPE
4942 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
4944 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
4945 Empty, false, false, false, true),
4946 expr);
4947 return unchecked_convert (type, expr, notrunc_p);
4950 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
4951 else
4953 expr = maybe_unconstrained_array (expr);
4954 etype = TREE_TYPE (expr);
4955 ecode = TREE_CODE (etype);
4956 if (can_fold_for_view_convert_p (expr))
4957 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4958 else
4959 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4962 /* If the result is an integral type whose precision is not equal to its
4963 size, sign- or zero-extend the result. We need not do this if the input
4964 is an integral type of the same precision and signedness or if the output
4965 is a biased type or if both the input and output are unsigned. */
4966 if (!notrunc_p
4967 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4968 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4969 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4970 GET_MODE_BITSIZE (TYPE_MODE (type)))
4971 && !(INTEGRAL_TYPE_P (etype)
4972 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4973 && operand_equal_p (TYPE_RM_SIZE (type),
4974 (TYPE_RM_SIZE (etype) != 0
4975 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4977 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4979 tree base_type
4980 = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
4981 tree shift_expr
4982 = convert (base_type,
4983 size_binop (MINUS_EXPR,
4984 bitsize_int
4985 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4986 TYPE_RM_SIZE (type)));
4987 expr
4988 = convert (type,
4989 build_binary_op (RSHIFT_EXPR, base_type,
4990 build_binary_op (LSHIFT_EXPR, base_type,
4991 convert (base_type, expr),
4992 shift_expr),
4993 shift_expr));
4996 /* An unchecked conversion should never raise Constraint_Error. The code
4997 below assumes that GCC's conversion routines overflow the same way that
4998 the underlying hardware does. This is probably true. In the rare case
4999 when it is false, we can rely on the fact that such conversions are
5000 erroneous anyway. */
5001 if (TREE_CODE (expr) == INTEGER_CST)
5002 TREE_OVERFLOW (expr) = 0;
5004 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
5005 show no longer constant. */
5006 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5007 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5008 OEP_ONLY_CONST))
5009 TREE_CONSTANT (expr) = 0;
5011 return expr;
5014 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5015 the latter being a record type as predicated by Is_Record_Type. */
5017 enum tree_code
5018 tree_code_for_record_type (Entity_Id gnat_type)
5020 Node_Id component_list, component;
5022 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5023 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5024 if (!Is_Unchecked_Union (gnat_type))
5025 return RECORD_TYPE;
5027 gnat_type = Implementation_Base_Type (gnat_type);
5028 component_list
5029 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5031 for (component = First_Non_Pragma (Component_Items (component_list));
5032 Present (component);
5033 component = Next_Non_Pragma (component))
5034 if (Ekind (Defining_Entity (component)) == E_Component)
5035 return RECORD_TYPE;
5037 return UNION_TYPE;
5040 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5041 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5042 according to the presence of an alignment clause on the type or, if it
5043 is an array, on the component type. */
5045 bool
5046 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5048 gnat_type = Underlying_Type (gnat_type);
5050 *align_clause = Present (Alignment_Clause (gnat_type));
5052 if (Is_Array_Type (gnat_type))
5054 gnat_type = Underlying_Type (Component_Type (gnat_type));
5055 if (Present (Alignment_Clause (gnat_type)))
5056 *align_clause = true;
5059 if (!Is_Floating_Point_Type (gnat_type))
5060 return false;
5062 if (UI_To_Int (Esize (gnat_type)) != 64)
5063 return false;
5065 return true;
5068 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5069 size is greater or equal to 64 bits, or an array of such a type. Set
5070 ALIGN_CLAUSE according to the presence of an alignment clause on the
5071 type or, if it is an array, on the component type. */
5073 bool
5074 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5076 gnat_type = Underlying_Type (gnat_type);
5078 *align_clause = Present (Alignment_Clause (gnat_type));
5080 if (Is_Array_Type (gnat_type))
5082 gnat_type = Underlying_Type (Component_Type (gnat_type));
5083 if (Present (Alignment_Clause (gnat_type)))
5084 *align_clause = true;
5087 if (!Is_Scalar_Type (gnat_type))
5088 return false;
5090 if (UI_To_Int (Esize (gnat_type)) < 64)
5091 return false;
5093 return true;
5096 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5097 component of an aggregate type. */
5099 bool
5100 type_for_nonaliased_component_p (tree gnu_type)
5102 /* If the type is passed by reference, we may have pointers to the
5103 component so it cannot be made non-aliased. */
5104 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5105 return false;
5107 /* We used to say that any component of aggregate type is aliased
5108 because the front-end may take 'Reference of it. The front-end
5109 has been enhanced in the meantime so as to use a renaming instead
5110 in most cases, but the back-end can probably take the address of
5111 such a component too so we go for the conservative stance.
5113 For instance, we might need the address of any array type, even
5114 if normally passed by copy, to construct a fat pointer if the
5115 component is used as an actual for an unconstrained formal.
5117 Likewise for record types: even if a specific record subtype is
5118 passed by copy, the parent type might be passed by ref (e.g. if
5119 it's of variable size) and we might take the address of a child
5120 component to pass to a parent formal. We have no way to check
5121 for such conditions here. */
5122 if (AGGREGATE_TYPE_P (gnu_type))
5123 return false;
5125 return true;
5128 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5130 bool
5131 smaller_form_type_p (tree type, tree orig_type)
5133 tree size, osize;
5135 /* We're not interested in variants here. */
5136 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5137 return false;
5139 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5140 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5141 return false;
5143 size = TYPE_SIZE (type);
5144 osize = TYPE_SIZE (orig_type);
5146 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5147 return false;
5149 return tree_int_cst_lt (size, osize) != 0;
5152 /* Perform final processing on global variables. */
5154 static GTY (()) tree dummy_global;
5156 void
5157 gnat_write_global_declarations (void)
5159 unsigned int i;
5160 tree iter;
5162 /* If we have declared types as used at the global level, insert them in
5163 the global hash table. We use a dummy variable for this purpose. */
5164 if (types_used_by_cur_var_decl && !types_used_by_cur_var_decl->is_empty ())
5166 struct varpool_node *node;
5167 char *label;
5169 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
5170 dummy_global
5171 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5172 void_type_node);
5173 DECL_HARD_REGISTER (dummy_global) = 1;
5174 TREE_STATIC (dummy_global) = 1;
5175 node = varpool_node::get_create (dummy_global);
5176 node->definition = 1;
5177 node->force_output = 1;
5179 while (!types_used_by_cur_var_decl->is_empty ())
5181 tree t = types_used_by_cur_var_decl->pop ();
5182 types_used_by_var_decl_insert (t, dummy_global);
5186 /* Output debug information for all global type declarations first. This
5187 ensures that global types whose compilation hasn't been finalized yet,
5188 for example pointers to Taft amendment types, have their compilation
5189 finalized in the right context. */
5190 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5191 if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
5192 debug_hooks->global_decl (iter);
5194 /* Proceed to optimize and emit assembly. */
5195 symtab->finalize_compilation_unit ();
5197 /* After cgraph has had a chance to emit everything that's going to
5198 be emitted, output debug information for the rest of globals. */
5199 if (!seen_error ())
5201 timevar_push (TV_SYMOUT);
5202 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5203 if (TREE_CODE (iter) != TYPE_DECL && !DECL_IGNORED_P (iter))
5204 debug_hooks->global_decl (iter);
5205 timevar_pop (TV_SYMOUT);
5209 /* ************************************************************************
5210 * * GCC builtins support *
5211 * ************************************************************************ */
5213 /* The general scheme is fairly simple:
5215 For each builtin function/type to be declared, gnat_install_builtins calls
5216 internal facilities which eventually get to gnat_push_decl, which in turn
5217 tracks the so declared builtin function decls in the 'builtin_decls' global
5218 datastructure. When an Intrinsic subprogram declaration is processed, we
5219 search this global datastructure to retrieve the associated BUILT_IN DECL
5220 node. */
5222 /* Search the chain of currently available builtin declarations for a node
5223 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5224 found, if any, or NULL_TREE otherwise. */
5225 tree
5226 builtin_decl_for (tree name)
5228 unsigned i;
5229 tree decl;
5231 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5232 if (DECL_NAME (decl) == name)
5233 return decl;
5235 return NULL_TREE;
5238 /* The code below eventually exposes gnat_install_builtins, which declares
5239 the builtin types and functions we might need, either internally or as
5240 user accessible facilities.
5242 ??? This is a first implementation shot, still in rough shape. It is
5243 heavily inspired from the "C" family implementation, with chunks copied
5244 verbatim from there.
5246 Two obvious TODO candidates are
5247 o Use a more efficient name/decl mapping scheme
5248 o Devise a middle-end infrastructure to avoid having to copy
5249 pieces between front-ends. */
5251 /* ----------------------------------------------------------------------- *
5252 * BUILTIN ELEMENTARY TYPES *
5253 * ----------------------------------------------------------------------- */
5255 /* Standard data types to be used in builtin argument declarations. */
5257 enum c_tree_index
5259 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5260 CTI_STRING_TYPE,
5261 CTI_CONST_STRING_TYPE,
5263 CTI_MAX
5266 static tree c_global_trees[CTI_MAX];
5268 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5269 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5270 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5272 /* ??? In addition some attribute handlers, we currently don't support a
5273 (small) number of builtin-types, which in turns inhibits support for a
5274 number of builtin functions. */
5275 #define wint_type_node void_type_node
5276 #define intmax_type_node void_type_node
5277 #define uintmax_type_node void_type_node
5279 /* Build the void_list_node (void_type_node having been created). */
5281 static tree
5282 build_void_list_node (void)
5284 tree t = build_tree_list (NULL_TREE, void_type_node);
5285 return t;
5288 /* Used to help initialize the builtin-types.def table. When a type of
5289 the correct size doesn't exist, use error_mark_node instead of NULL.
5290 The later results in segfaults even when a decl using the type doesn't
5291 get invoked. */
5293 static tree
5294 builtin_type_for_size (int size, bool unsignedp)
5296 tree type = gnat_type_for_size (size, unsignedp);
5297 return type ? type : error_mark_node;
5300 /* Build/push the elementary type decls that builtin functions/types
5301 will need. */
5303 static void
5304 install_builtin_elementary_types (void)
5306 signed_size_type_node = gnat_signed_type (size_type_node);
5307 pid_type_node = integer_type_node;
5308 void_list_node = build_void_list_node ();
5310 string_type_node = build_pointer_type (char_type_node);
5311 const_string_type_node
5312 = build_pointer_type (build_qualified_type
5313 (char_type_node, TYPE_QUAL_CONST));
5316 /* ----------------------------------------------------------------------- *
5317 * BUILTIN FUNCTION TYPES *
5318 * ----------------------------------------------------------------------- */
5320 /* Now, builtin function types per se. */
5322 enum c_builtin_type
5324 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5325 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5326 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5327 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5328 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5329 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5330 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5331 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5332 ARG6) NAME,
5333 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5334 ARG6, ARG7) NAME,
5335 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5336 ARG6, ARG7, ARG8) NAME,
5337 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5338 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5339 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5340 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5341 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5342 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5343 NAME,
5344 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5345 #include "builtin-types.def"
5346 #undef DEF_PRIMITIVE_TYPE
5347 #undef DEF_FUNCTION_TYPE_0
5348 #undef DEF_FUNCTION_TYPE_1
5349 #undef DEF_FUNCTION_TYPE_2
5350 #undef DEF_FUNCTION_TYPE_3
5351 #undef DEF_FUNCTION_TYPE_4
5352 #undef DEF_FUNCTION_TYPE_5
5353 #undef DEF_FUNCTION_TYPE_6
5354 #undef DEF_FUNCTION_TYPE_7
5355 #undef DEF_FUNCTION_TYPE_8
5356 #undef DEF_FUNCTION_TYPE_VAR_0
5357 #undef DEF_FUNCTION_TYPE_VAR_1
5358 #undef DEF_FUNCTION_TYPE_VAR_2
5359 #undef DEF_FUNCTION_TYPE_VAR_3
5360 #undef DEF_FUNCTION_TYPE_VAR_4
5361 #undef DEF_FUNCTION_TYPE_VAR_5
5362 #undef DEF_POINTER_TYPE
5363 BT_LAST
5366 typedef enum c_builtin_type builtin_type;
5368 /* A temporary array used in communication with def_fn_type. */
5369 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5371 /* A helper function for install_builtin_types. Build function type
5372 for DEF with return type RET and N arguments. If VAR is true, then the
5373 function should be variadic after those N arguments.
5375 Takes special care not to ICE if any of the types involved are
5376 error_mark_node, which indicates that said type is not in fact available
5377 (see builtin_type_for_size). In which case the function type as a whole
5378 should be error_mark_node. */
5380 static void
5381 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5383 tree t;
5384 tree *args = XALLOCAVEC (tree, n);
5385 va_list list;
5386 int i;
5388 va_start (list, n);
5389 for (i = 0; i < n; ++i)
5391 builtin_type a = (builtin_type) va_arg (list, int);
5392 t = builtin_types[a];
5393 if (t == error_mark_node)
5394 goto egress;
5395 args[i] = t;
5398 t = builtin_types[ret];
5399 if (t == error_mark_node)
5400 goto egress;
5401 if (var)
5402 t = build_varargs_function_type_array (t, n, args);
5403 else
5404 t = build_function_type_array (t, n, args);
5406 egress:
5407 builtin_types[def] = t;
5408 va_end (list);
5411 /* Build the builtin function types and install them in the builtin_types
5412 array for later use in builtin function decls. */
5414 static void
5415 install_builtin_function_types (void)
5417 tree va_list_ref_type_node;
5418 tree va_list_arg_type_node;
5420 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5422 va_list_arg_type_node = va_list_ref_type_node =
5423 build_pointer_type (TREE_TYPE (va_list_type_node));
5425 else
5427 va_list_arg_type_node = va_list_type_node;
5428 va_list_ref_type_node = build_reference_type (va_list_type_node);
5431 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5432 builtin_types[ENUM] = VALUE;
5433 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5434 def_fn_type (ENUM, RETURN, 0, 0);
5435 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5436 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5437 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5438 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5439 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5440 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5441 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5442 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5443 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5444 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5445 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5446 ARG6) \
5447 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5448 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5449 ARG6, ARG7) \
5450 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5451 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5452 ARG6, ARG7, ARG8) \
5453 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5454 ARG7, ARG8);
5455 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5456 def_fn_type (ENUM, RETURN, 1, 0);
5457 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5458 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5459 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5460 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5461 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5462 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5463 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5464 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5465 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5466 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5467 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5468 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5470 #include "builtin-types.def"
5472 #undef DEF_PRIMITIVE_TYPE
5473 #undef DEF_FUNCTION_TYPE_0
5474 #undef DEF_FUNCTION_TYPE_1
5475 #undef DEF_FUNCTION_TYPE_2
5476 #undef DEF_FUNCTION_TYPE_3
5477 #undef DEF_FUNCTION_TYPE_4
5478 #undef DEF_FUNCTION_TYPE_5
5479 #undef DEF_FUNCTION_TYPE_6
5480 #undef DEF_FUNCTION_TYPE_7
5481 #undef DEF_FUNCTION_TYPE_8
5482 #undef DEF_FUNCTION_TYPE_VAR_0
5483 #undef DEF_FUNCTION_TYPE_VAR_1
5484 #undef DEF_FUNCTION_TYPE_VAR_2
5485 #undef DEF_FUNCTION_TYPE_VAR_3
5486 #undef DEF_FUNCTION_TYPE_VAR_4
5487 #undef DEF_FUNCTION_TYPE_VAR_5
5488 #undef DEF_POINTER_TYPE
5489 builtin_types[(int) BT_LAST] = NULL_TREE;
5492 /* ----------------------------------------------------------------------- *
5493 * BUILTIN ATTRIBUTES *
5494 * ----------------------------------------------------------------------- */
/* Enumeration of the predefined attribute trees, generated from
   builtin-attrs.def: each DEF_ATTR_* entry contributes one enumerator,
   used below to index the built_in_attributes array.  */

enum built_in_attribute
{
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
  ATTR_LAST
};

/* Table of attribute trees, indexed by enum built_in_attribute and filled
   in by install_builtin_attributes.  GTY(()) roots it for the GC.  */
static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
/* Populate built_in_attributes from builtin-attrs.def.  Each DEF_ATTR_*
   macro below builds the tree node for one entry; DEF_ATTR_TREE_LIST
   entries may reference earlier slots, so the .def ordering matters.  */

static void
install_builtin_attributes (void)
{
  /* Fill in the built_in_attributes array.  */
#define DEF_ATTR_NULL_TREE(ENUM)				\
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_STRING(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING)				\
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN)		\
  built_in_attributes[(int) ENUM]				\
    = tree_cons (built_in_attributes[(int) PURPOSE],		\
		 built_in_attributes[(int) VALUE],		\
		 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
}
5539 /* Handle a "const" attribute; arguments as in
5540 struct attribute_spec.handler. */
5542 static tree
5543 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5544 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5545 bool *no_add_attrs)
5547 if (TREE_CODE (*node) == FUNCTION_DECL)
5548 TREE_READONLY (*node) = 1;
5549 else
5550 *no_add_attrs = true;
5552 return NULL_TREE;
5555 /* Handle a "nothrow" attribute; arguments as in
5556 struct attribute_spec.handler. */
5558 static tree
5559 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5560 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5561 bool *no_add_attrs)
5563 if (TREE_CODE (*node) == FUNCTION_DECL)
5564 TREE_NOTHROW (*node) = 1;
5565 else
5566 *no_add_attrs = true;
5568 return NULL_TREE;
5571 /* Handle a "pure" attribute; arguments as in
5572 struct attribute_spec.handler. */
5574 static tree
5575 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5576 int ARG_UNUSED (flags), bool *no_add_attrs)
5578 if (TREE_CODE (*node) == FUNCTION_DECL)
5579 DECL_PURE_P (*node) = 1;
5580 /* ??? TODO: Support types. */
5581 else
5583 warning (OPT_Wattributes, "%qs attribute ignored",
5584 IDENTIFIER_POINTER (name));
5585 *no_add_attrs = true;
5588 return NULL_TREE;
5591 /* Handle a "no vops" attribute; arguments as in
5592 struct attribute_spec.handler. */
5594 static tree
5595 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5596 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5597 bool *ARG_UNUSED (no_add_attrs))
5599 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5600 DECL_IS_NOVOPS (*node) = 1;
5601 return NULL_TREE;
5604 /* Helper for nonnull attribute handling; fetch the operand number
5605 from the attribute argument list. */
5607 static bool
5608 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5610 /* Verify the arg number is a constant. */
5611 if (!tree_fits_uhwi_p (arg_num_expr))
5612 return false;
5614 *valp = TREE_INT_CST_LOW (arg_num_expr);
5615 return true;
5618 /* Handle the "nonnull" attribute. */
5619 static tree
5620 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5621 tree args, int ARG_UNUSED (flags),
5622 bool *no_add_attrs)
5624 tree type = *node;
5625 unsigned HOST_WIDE_INT attr_arg_num;
5627 /* If no arguments are specified, all pointer arguments should be
5628 non-null. Verify a full prototype is given so that the arguments
5629 will have the correct types when we actually check them later. */
5630 if (!args)
5632 if (!prototype_p (type))
5634 error ("nonnull attribute without arguments on a non-prototype");
5635 *no_add_attrs = true;
5637 return NULL_TREE;
5640 /* Argument list specified. Verify that each argument number references
5641 a pointer argument. */
5642 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5644 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5646 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5648 error ("nonnull argument has invalid operand number (argument %lu)",
5649 (unsigned long) attr_arg_num);
5650 *no_add_attrs = true;
5651 return NULL_TREE;
5654 if (prototype_p (type))
5656 function_args_iterator iter;
5657 tree argument;
5659 function_args_iter_init (&iter, type);
5660 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
5662 argument = function_args_iter_cond (&iter);
5663 if (!argument || ck_num == arg_num)
5664 break;
5667 if (!argument
5668 || TREE_CODE (argument) == VOID_TYPE)
5670 error ("nonnull argument with out-of-range operand number "
5671 "(argument %lu, operand %lu)",
5672 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5673 *no_add_attrs = true;
5674 return NULL_TREE;
5677 if (TREE_CODE (argument) != POINTER_TYPE)
5679 error ("nonnull argument references non-pointer operand "
5680 "(argument %lu, operand %lu)",
5681 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5682 *no_add_attrs = true;
5683 return NULL_TREE;
5688 return NULL_TREE;
5691 /* Handle a "sentinel" attribute. */
5693 static tree
5694 handle_sentinel_attribute (tree *node, tree name, tree args,
5695 int ARG_UNUSED (flags), bool *no_add_attrs)
5697 if (!prototype_p (*node))
5699 warning (OPT_Wattributes,
5700 "%qs attribute requires prototypes with named arguments",
5701 IDENTIFIER_POINTER (name));
5702 *no_add_attrs = true;
5704 else
5706 if (!stdarg_p (*node))
5708 warning (OPT_Wattributes,
5709 "%qs attribute only applies to variadic functions",
5710 IDENTIFIER_POINTER (name));
5711 *no_add_attrs = true;
5715 if (args)
5717 tree position = TREE_VALUE (args);
5719 if (TREE_CODE (position) != INTEGER_CST)
5721 warning (0, "requested position is not an integer constant");
5722 *no_add_attrs = true;
5724 else
5726 if (tree_int_cst_lt (position, integer_zero_node))
5728 warning (0, "requested position is less than zero");
5729 *no_add_attrs = true;
5734 return NULL_TREE;
5737 /* Handle a "noreturn" attribute; arguments as in
5738 struct attribute_spec.handler. */
5740 static tree
5741 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5742 int ARG_UNUSED (flags), bool *no_add_attrs)
5744 tree type = TREE_TYPE (*node);
5746 /* See FIXME comment in c_common_attribute_table. */
5747 if (TREE_CODE (*node) == FUNCTION_DECL)
5748 TREE_THIS_VOLATILE (*node) = 1;
5749 else if (TREE_CODE (type) == POINTER_TYPE
5750 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5751 TREE_TYPE (*node)
5752 = build_pointer_type
5753 (build_type_variant (TREE_TYPE (type),
5754 TYPE_READONLY (TREE_TYPE (type)), 1));
5755 else
5757 warning (OPT_Wattributes, "%qs attribute ignored",
5758 IDENTIFIER_POINTER (name));
5759 *no_add_attrs = true;
5762 return NULL_TREE;
5765 /* Handle a "leaf" attribute; arguments as in
5766 struct attribute_spec.handler. */
5768 static tree
5769 handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5770 int ARG_UNUSED (flags), bool *no_add_attrs)
5772 if (TREE_CODE (*node) != FUNCTION_DECL)
5774 warning (OPT_Wattributes, "%qE attribute ignored", name);
5775 *no_add_attrs = true;
5777 if (!TREE_PUBLIC (*node))
5779 warning (OPT_Wattributes, "%qE attribute has no effect", name);
5780 *no_add_attrs = true;
5783 return NULL_TREE;
5786 /* Handle a "always_inline" attribute; arguments as in
5787 struct attribute_spec.handler. */
5789 static tree
5790 handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5791 int ARG_UNUSED (flags), bool *no_add_attrs)
5793 if (TREE_CODE (*node) == FUNCTION_DECL)
5795 /* Set the attribute and mark it for disregarding inline limits. */
5796 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
5798 else
5800 warning (OPT_Wattributes, "%qE attribute ignored", name);
5801 *no_add_attrs = true;
5804 return NULL_TREE;
5807 /* Handle a "malloc" attribute; arguments as in
5808 struct attribute_spec.handler. */
5810 static tree
5811 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5812 int ARG_UNUSED (flags), bool *no_add_attrs)
5814 if (TREE_CODE (*node) == FUNCTION_DECL
5815 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5816 DECL_IS_MALLOC (*node) = 1;
5817 else
5819 warning (OPT_Wattributes, "%qs attribute ignored",
5820 IDENTIFIER_POINTER (name));
5821 *no_add_attrs = true;
5824 return NULL_TREE;
/* Fake handler for attributes we don't properly support.  It neither
   modifies the tree nor sets *NO_ADD_ATTRS, so the attribute is attached
   verbatim but otherwise has no effect.  */

tree
fake_attribute_handler (tree * ARG_UNUSED (node),
			tree ARG_UNUSED (name),
			tree ARG_UNUSED (args),
			int ARG_UNUSED (flags),
			bool * ARG_UNUSED (no_add_attrs))
{
  return NULL_TREE;
}
/* Handle a "type_generic" attribute.  Only ever applied internally, so
   misuse is a compiler bug: assert rather than diagnose.  */

static tree
handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
			       tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			       bool * ARG_UNUSED (no_add_attrs))
{
  /* Ensure we have a function type.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);

  /* Ensure we have a variadic function.  */
  gcc_assert (!prototype_p (*node) || stdarg_p (*node));

  return NULL_TREE;
}
5855 /* Handle a "vector_size" attribute; arguments as in
5856 struct attribute_spec.handler. */
5858 static tree
5859 handle_vector_size_attribute (tree *node, tree name, tree args,
5860 int ARG_UNUSED (flags), bool *no_add_attrs)
5862 tree type = *node;
5863 tree vector_type;
5865 *no_add_attrs = true;
5867 /* We need to provide for vector pointers, vector arrays, and
5868 functions returning vectors. For example:
5870 __attribute__((vector_size(16))) short *foo;
5872 In this case, the mode is SI, but the type being modified is
5873 HI, so we need to look further. */
5874 while (POINTER_TYPE_P (type)
5875 || TREE_CODE (type) == FUNCTION_TYPE
5876 || TREE_CODE (type) == ARRAY_TYPE)
5877 type = TREE_TYPE (type);
5879 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
5880 if (!vector_type)
5881 return NULL_TREE;
5883 /* Build back pointers if needed. */
5884 *node = reconstruct_complex_type (*node, vector_type);
5886 return NULL_TREE;
5889 /* Handle a "vector_type" attribute; arguments as in
5890 struct attribute_spec.handler. */
5892 static tree
5893 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5894 int ARG_UNUSED (flags), bool *no_add_attrs)
5896 tree type = *node;
5897 tree vector_type;
5899 *no_add_attrs = true;
5901 if (TREE_CODE (type) != ARRAY_TYPE)
5903 error ("attribute %qs applies to array types only",
5904 IDENTIFIER_POINTER (name));
5905 return NULL_TREE;
5908 vector_type = build_vector_type_for_array (type, name);
5909 if (!vector_type)
5910 return NULL_TREE;
5912 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
5913 *node = vector_type;
5915 return NULL_TREE;
5918 /* ----------------------------------------------------------------------- *
5919 * BUILTIN FUNCTIONS *
5920 * ----------------------------------------------------------------------- */
/* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
   names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
   if nonansi_p and flag_no_nonansi_builtin.  */

static void
def_builtin_1 (enum built_in_function fncode,
	       const char *name,
	       enum built_in_class fnclass,
	       tree fntype, tree libtype,
	       bool both_p, bool fallback_p,
	       bool nonansi_p ATTRIBUTE_UNUSED,
	       tree fnattrs, bool implicit_p)
{
  tree decl;
  const char *libname;

  /* Preserve an already installed decl.  It most likely was setup in advance
     (e.g. as part of the internal builtins) for specific reasons.  */
  if (builtin_decl_explicit (fncode) != NULL_TREE)
    return;

  /* Deriving LIBNAME below only makes sense when NAME carries the
     "__builtin_" prefix; entries that need it must have it.  */
  gcc_assert ((!both_p && !fallback_p)
	      || !strncmp (name, "__builtin_",
			   strlen ("__builtin_")));

  /* LIBNAME is NAME with the "__builtin_" prefix stripped.  */
  libname = name + strlen ("__builtin_");
  decl = add_builtin_function (name, fntype, fncode, fnclass,
			       (fallback_p ? libname : NULL),
			       fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada.  */
    add_builtin_function (libname, libtype, fncode, fnclass,
			  NULL, fnattrs);

  /* Record the __builtin_ decl as the canonical one for FNCODE.  */
  set_builtin_decl (fncode, decl, implicit_p);
}
/* Referenced by the COND expressions expanded from builtins.def below.
   They stay 0 here, so ISO-C-revision-gated builtins are not installed
   for Ada.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
static int flag_isoc11 = 0;
/* Install what the common builtins.def offers.  Expands each DEF_BUILTIN
   entry into a guarded call to def_builtin_1, looking up the previously
   built type and attribute trees by index.  */

static void
install_builtin_functions (void)
{
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
		    NONANSI_P, ATTRS, IMPLICIT, COND)			\
  if (NAME && COND)							\
    def_builtin_1 (ENUM, NAME, CLASS,					\
		   builtin_types[(int) TYPE],				\
		   builtin_types[(int) LIBTYPE],			\
		   BOTH_P, FALLBACK_P, NONANSI_P,			\
		   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
#undef DEF_BUILTIN
}
/* ----------------------------------------------------------------------- *
 *                        BUILTIN INITIALIZATION                           *
 * ----------------------------------------------------------------------- */
/* Install the builtin functions we might need.  The steps below are
   order-sensitive: types must exist before attributes and functions that
   reference them, and the middle-end/target registrations must precede
   the generic builtins.def pass so their decls take precedence (see
   def_builtin_1's early return on an already installed decl).  */

void
gnat_install_builtins (void)
{
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}
6006 #include "gt-ada-utils.h"
6007 #include "gtype-ada.h"