* gcc-interface/utils.c (type_for_vector_element_p): New predicate.
[official-gcc.git] / gcc / ada / gcc-interface / utils.c
blob8172f5f9900c00cf25fc208d44b607c1be6b3056
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2014, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "tree.h"
31 #include "stringpool.h"
32 #include "stor-layout.h"
33 #include "attribs.h"
34 #include "varasm.h"
35 #include "flags.h"
36 #include "toplev.h"
37 #include "diagnostic-core.h"
38 #include "output.h"
39 #include "ggc.h"
40 #include "debug.h"
41 #include "convert.h"
42 #include "target.h"
43 #include "common/common-target.h"
44 #include "langhooks.h"
45 #include "cgraph.h"
46 #include "diagnostic.h"
47 #include "timevar.h"
48 #include "tree-dump.h"
49 #include "tree-inline.h"
50 #include "tree-iterator.h"
52 #include "ada.h"
53 #include "types.h"
54 #include "atree.h"
55 #include "elists.h"
56 #include "namet.h"
57 #include "nlists.h"
58 #include "stringt.h"
59 #include "uintp.h"
60 #include "fe.h"
61 #include "sinfo.h"
62 #include "einfo.h"
63 #include "ada-tree.h"
64 #include "gigi.h"
/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging a lot of the common-c front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);

/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "const",        0, 0,  true,  false, false, handle_const_attribute,
    false },
  { "nothrow",      0, 0,  true,  false, false, handle_nothrow_attribute,
    false },
  { "pure",         0, 0,  true,  false, false, handle_pure_attribute,
    false },
  { "no vops",      0, 0,  true,  false, false, handle_novops_attribute,
    false },
  { "nonnull",      0, -1, false, true,  true,  handle_nonnull_attribute,
    false },
  { "sentinel",     0, 1,  false, true,  true,  handle_sentinel_attribute,
    false },
  { "noreturn",     0, 0,  true,  false, false, handle_noreturn_attribute,
    false },
  { "leaf",         0, 0,  true,  false, false, handle_leaf_attribute,
    false },
  { "malloc",       0, 0,  true,  false, false, handle_malloc_attribute,
    false },
  { "type generic", 0, 0,  false, true,  true,  handle_type_generic_attribute,
    false },

  { "vector_size",  1, 1,  false, true,  false, handle_vector_size_attribute,
    false },
  { "vector_type",  0, 0,  false, true,  false, handle_vector_type_attribute,
    false },
  /* Accepted but given no specific processing (NULL handler).  */
  { "may_alias",    0, 0,  false, true,  false, NULL, false },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",       3, 3,  false, true,  true,  fake_attribute_handler, false },
  { "format_arg",   1, 1,  false, true,  true,  fake_attribute_handler, false },

  { NULL,           0, 0,  false, false, false, NULL, false }
};
/* Associates a GNAT tree node to a GCC tree node. It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

/* Accessors for the GNAT-entity-to-GCC-tree association table; entities
   are indexed relative to First_Node_Id.  */
#define GET_GNU_TREE(GNAT_ENTITY) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY) \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations.  */
static GTY(()) tree global_context;

/* An array of global declarations.  */
static GTY(()) vec<tree, va_gc> *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) vec<tree, va_gc> *builtin_decls;

/* An array of global renaming pointers.  */
static GTY(()) vec<tree, va_gc> *global_renaming_pointers;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

/* Forward declarations for the padded-type hash table callbacks.  */
static int pad_type_hash_marked_p (const void *p);
static hashval_t pad_type_hash_hash (const void *p);
static int pad_type_hash_eq (const void *p1, const void *p2);

/* A hash table of padded types.  It is modelled on the generic type
   hash table in tree.c, which must thus be used as a reference.  */
struct GTY(()) pad_type_hash {
  unsigned long hash;
  tree type;
};

static GTY ((if_marked ("pad_type_hash_marked_p"),
	     param_is (struct pad_type_hash)))
  htab_t pad_type_hash_table;

/* Forward declarations for local helpers of this file.  */
static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static unsigned int scale_by_factor_of (tree, unsigned int);
static bool potential_alignment_gap (tree, tree, tree);
242 /* Initialize data structures of the utils.c module. */
244 void
245 init_gnat_utils (void)
247 /* Initialize the association of GNAT nodes to GCC trees. */
248 associate_gnat_to_gnu = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
250 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
251 dummy_node_table = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
253 /* Initialize the hash table of padded types. */
254 pad_type_hash_table
255 = htab_create_ggc (512, pad_type_hash_hash, pad_type_hash_eq, 0);
258 /* Destroy data structures of the utils.c module. */
260 void
261 destroy_gnat_utils (void)
263 /* Destroy the association of GNAT nodes to GCC trees. */
264 ggc_free (associate_gnat_to_gnu);
265 associate_gnat_to_gnu = NULL;
267 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
268 ggc_free (dummy_node_table);
269 dummy_node_table = NULL;
271 /* Destroy the hash table of padded types. */
272 htab_delete (pad_type_hash_table);
273 pad_type_hash_table = NULL;
275 /* Invalidate the global renaming pointers. */
276 invalidate_global_renaming_pointers ();
279 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
280 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
281 If NO_CHECK is true, the latter check is suppressed.
283 If GNU_DECL is zero, reset a previous association. */
285 void
286 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
288 /* Check that GNAT_ENTITY is not already defined and that it is being set
289 to something which is a decl. If that is not the case, this usually
290 means GNAT_ENTITY is defined twice, but occasionally is due to some
291 Gigi problem. */
292 gcc_assert (!(gnu_decl
293 && (PRESENT_GNU_TREE (gnat_entity)
294 || (!no_check && !DECL_P (gnu_decl)))));
296 SET_GNU_TREE (gnat_entity, gnu_decl);
299 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
300 that was associated with it. If there is no such tree node, abort.
302 In some cases, such as delayed elaboration or expressions that need to
303 be elaborated only once, GNAT_ENTITY is really not an entity. */
305 tree
306 get_gnu_tree (Entity_Id gnat_entity)
308 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
309 return GET_GNU_TREE (gnat_entity);
312 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
314 bool
315 present_gnu_tree (Entity_Id gnat_entity)
317 return PRESENT_GNU_TREE (gnat_entity);
320 /* Make a dummy type corresponding to GNAT_TYPE. */
322 tree
323 make_dummy_type (Entity_Id gnat_type)
325 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
326 tree gnu_type;
328 /* If there is an equivalent type, get its underlying type. */
329 if (Present (gnat_underlying))
330 gnat_underlying = Gigi_Equivalent_Type (Underlying_Type (gnat_underlying));
332 /* If there was no equivalent type (can only happen when just annotating
333 types) or underlying type, go back to the original type. */
334 if (No (gnat_underlying))
335 gnat_underlying = gnat_type;
337 /* If it there already a dummy type, use that one. Else make one. */
338 if (PRESENT_DUMMY_NODE (gnat_underlying))
339 return GET_DUMMY_NODE (gnat_underlying);
341 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
342 an ENUMERAL_TYPE. */
343 gnu_type = make_node (Is_Record_Type (gnat_underlying)
344 ? tree_code_for_record_type (gnat_underlying)
345 : ENUMERAL_TYPE);
346 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
347 TYPE_DUMMY_P (gnu_type) = 1;
348 TYPE_STUB_DECL (gnu_type)
349 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
350 if (Is_By_Reference_Type (gnat_underlying))
351 TYPE_BY_REFERENCE_P (gnu_type) = 1;
353 SET_DUMMY_NODE (gnat_underlying, gnu_type);
355 return gnu_type;
358 /* Return the dummy type that was made for GNAT_TYPE, if any. */
360 tree
361 get_dummy_type (Entity_Id gnat_type)
363 return GET_DUMMY_NODE (gnat_type);
366 /* Build dummy fat and thin pointer types whose designated type is specified
367 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
369 void
370 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
372 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
373 tree gnu_fat_type, fields, gnu_object_type;
375 gnu_template_type = make_node (RECORD_TYPE);
376 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
377 TYPE_DUMMY_P (gnu_template_type) = 1;
378 gnu_ptr_template = build_pointer_type (gnu_template_type);
380 gnu_array_type = make_node (ENUMERAL_TYPE);
381 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
382 TYPE_DUMMY_P (gnu_array_type) = 1;
383 gnu_ptr_array = build_pointer_type (gnu_array_type);
385 gnu_fat_type = make_node (RECORD_TYPE);
386 /* Build a stub DECL to trigger the special processing for fat pointer types
387 in gnat_pushdecl. */
388 TYPE_NAME (gnu_fat_type)
389 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
390 gnu_fat_type);
391 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
392 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
393 DECL_CHAIN (fields)
394 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
395 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
396 finish_fat_pointer_type (gnu_fat_type, fields);
397 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
398 /* Suppress debug info until after the type is completed. */
399 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
401 gnu_object_type = make_node (RECORD_TYPE);
402 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
403 TYPE_DUMMY_P (gnu_object_type) = 1;
405 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
406 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
409 /* Return true if we are in the global binding level. */
411 bool
412 global_bindings_p (void)
414 return force_global || current_function_decl == NULL_TREE;
417 /* Enter a new binding level. */
419 void
420 gnat_pushlevel (void)
422 struct gnat_binding_level *newlevel = NULL;
424 /* Reuse a struct for this binding level, if there is one. */
425 if (free_binding_level)
427 newlevel = free_binding_level;
428 free_binding_level = free_binding_level->chain;
430 else
431 newlevel = ggc_alloc_gnat_binding_level ();
433 /* Use a free BLOCK, if any; otherwise, allocate one. */
434 if (free_block_chain)
436 newlevel->block = free_block_chain;
437 free_block_chain = BLOCK_CHAIN (free_block_chain);
438 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
440 else
441 newlevel->block = make_node (BLOCK);
443 /* Point the BLOCK we just made to its parent. */
444 if (current_binding_level)
445 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
447 BLOCK_VARS (newlevel->block) = NULL_TREE;
448 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
449 TREE_USED (newlevel->block) = 1;
451 /* Add this level to the front of the chain (stack) of active levels. */
452 newlevel->chain = current_binding_level;
453 newlevel->jmpbuf_decl = NULL_TREE;
454 current_binding_level = newlevel;
457 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
458 and point FNDECL to this BLOCK. */
460 void
461 set_current_block_context (tree fndecl)
463 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
464 DECL_INITIAL (fndecl) = current_binding_level->block;
465 set_block_for_group (current_binding_level->block);
468 /* Set the jmpbuf_decl for the current binding level to DECL. */
470 void
471 set_block_jmpbuf_decl (tree decl)
473 current_binding_level->jmpbuf_decl = decl;
476 /* Get the jmpbuf_decl, if any, for the current binding level. */
478 tree
479 get_block_jmpbuf_decl (void)
481 return current_binding_level->jmpbuf_decl;
/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  /* The variable and sub-block lists were built front-chained (see
     gnat_pushdecl); restore source order before deciding what to keep.  */
  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK don't do anything.  Otherwise, if there
     are no variables free the block and merge its subblocks into those of its
     parent block.  Otherwise, add it to the list of its parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (BLOCK_VARS (block) == NULL_TREE)
    {
      BLOCK_SUBBLOCKS (level->chain->block)
	= block_chainon (BLOCK_SUBBLOCKS (block),
			 BLOCK_SUBBLOCKS (level->chain->block));
      /* Recycle the now-empty BLOCK node for a later gnat_pushlevel.  */
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
522 /* Exit a binding level and discard the associated BLOCK. */
524 void
525 gnat_zaplevel (void)
527 struct gnat_binding_level *level = current_binding_level;
528 tree block = level->block;
530 BLOCK_CHAIN (block) = free_block_chain;
531 free_block_chain = block;
533 /* Free this binding structure. */
534 current_binding_level = level->chain;
535 level->chain = free_binding_level;
536 free_binding_level = level;
539 /* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
541 static void
542 gnat_set_type_context (tree type, tree context)
544 tree decl = TYPE_STUB_DECL (type);
546 TYPE_CONTEXT (type) = context;
548 while (decl && DECL_PARALLEL_TYPE (decl))
550 TYPE_CONTEXT (DECL_PARALLEL_TYPE (decl)) = context;
551 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  /* If DECL is public external or at top level, it has global context.  */
  if ((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || global_bindings_p ())
    {
      /* The translation unit decl is created lazily, on first use.  */
      if (!global_context)
	global_context = build_translation_unit_decl (NULL_TREE);
      DECL_CONTEXT (decl) = global_context;
    }
  else
    {
      DECL_CONTEXT (decl) = current_function_decl;

      /* Functions imported in another function are not really nested.
	 For really nested functions mark them initially as needing
	 a static chain for uses of that flag before unnesting;
	 lower_nested_functions will then recompute it.  */
      if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
	DECL_STATIC_CHAIN (decl) = 1;
    }

  /* Suppress warnings when the GNAT node is absent or has them disabled.  */
  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
	&& TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      if (DECL_EXTERNAL (decl))
	{
	  /* Only built-in functions among externals are remembered, so
	     builtin_decl_for can find them later.  */
	  if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
	    vec_safe_push (builtin_decls, decl);
	}
      else if (global_bindings_p ())
	vec_safe_push (global_decls, decl);
      else
	{
	  /* Front-chain onto the current binding level's BLOCK; the list
	     is restored to source order in gnat_poplevel.  */
	  DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
	  BLOCK_VARS (current_binding_level->block) = decl;
	}
    }

  /* For the declaration of a type, set its name if it either is not already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in the
     equivalent function of c-decl.c makes a copy of the type node here, but
     that may cause us trouble with incomplete types.  We make an exception
     for fat pointer types because the compiler automatically builds them
     for unconstrained array types and the debugger uses them to represent
     both these and pointers to these.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	{
	  /* Array and pointer types aren't "tagged" types so we force the
	     type to be associated with its typedef in the DWARF back-end,
	     in order to make sure that the latter is always preserved.  */
	  if (!DECL_ARTIFICIAL (decl)
	      && (TREE_CODE (t) == ARRAY_TYPE
		  || TREE_CODE (t) == POINTER_TYPE))
	    {
	      tree tt = build_distinct_type_copy (t);
	      if (TREE_CODE (t) == POINTER_TYPE)
		TYPE_NEXT_PTR_TO (t) = tt;
	      TYPE_NAME (tt) = DECL_NAME (decl);
	      gnat_set_type_context (tt, DECL_CONTEXT (decl));
	      TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
	      DECL_ORIGINAL_TYPE (decl) = tt;
	    }
	}
      else if (TYPE_IS_FAT_POINTER_P (t))
	{
	  /* We need a variant for the placeholder machinery to work.  */
	  tree tt = build_variant_type_copy (t);
	  TYPE_NAME (tt) = decl;
	  gnat_set_type_context (tt, DECL_CONTEXT (decl));
	  TREE_USED (tt) = TREE_USED (t);
	  TREE_TYPE (decl) = tt;
	  if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
	    DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
	  else
	    DECL_ORIGINAL_TYPE (decl) = t;
	  DECL_ARTIFICIAL (decl) = 0;
	  /* Clear T so the variant-naming loop below is skipped.  */
	  t = NULL_TREE;
	}
      else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
	;
      else
	t = NULL_TREE;

      /* Propagate the name to all the anonymous variants.  This is needed
	 for the type qualifiers machinery to work properly.  */
      if (t)
	for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
	  if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	    {
	      TYPE_NAME (t) = decl;
	      gnat_set_type_context (t, DECL_CONTEXT (decl));
	    }
    }
}
/* Create a record type that contains a SIZE bytes long field of TYPE with a
   starting bit position so that it is aligned to ALIGN bits, and leaving at
   least ROOM bytes free before the field.  BASE_ALIGN is the alignment the
   record is guaranteed to get.  GNAT_NODE is used for the position of the
   associated TYPE_DECL.  */

tree
make_aligning_type (tree type, unsigned int align, tree size,
		    unsigned int base_align, int room, Node_Id gnat_node)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes.  We use a
     record placeholder to express record'address.  */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

		    <--------- pos ---------->
		{  +------------+-------------+-----------------+
      record  =>{  |############|     ...     | field (type)    |
		{  +------------+-------------+-----------------+
		   |<-- room -->|<- voffset ->|<---- size ----->|
		   o            o
		   |            |
		   record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record.  */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_IDENTIFIER (type);

  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS.  The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address.  */
  voffset_st = size_binop (BIT_AND_EXPR,
			   fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
			   size_int ((align / BITS_PER_UNIT) - 1));

  /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype.  */
  pos = size_binop (MULT_EXPR,
		    convert (bitsizetype,
			     size_binop (PLUS_EXPR, room_st, voffset_st)),
		    bitsize_unit_node);

  /* Craft the GCC record representation.  We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield.  We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression.  */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
			     pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  TYPE_ALIGN (record_type) = base_align;
  TYPE_USER_ALIGN (record_type) = 1;

  /* The worst-case size covers the maximum padding before the field,
     i.e. ALIGN bits plus ROOM bytes, on top of SIZE bytes of data.  */
  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
		  size_binop (MULT_EXPR, convert (bitsizetype, size),
			      bitsize_unit_node),
		  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
		  size_int (room + align / BITS_PER_UNIT));

  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, record_type, true, false, gnat_node);

  return record_type;
}
/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
   as the field type of a packed record if IN_RECORD is true, or as the
   component type of a packed array if IN_RECORD is false.  See if we can
   rewrite it either as a type that has a non-BLKmode, which we can pack
   tighter in the packed record case, or as a smaller type.  If so, return
   the new type.  If not, return the original type.  */

tree
make_packable_type (tree type, bool in_record)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
  unsigned HOST_WIDE_INT new_size;
  tree new_type, old_field, field_list = NULL_TREE;
  unsigned int align;

  /* No point in doing anything if the size is zero.  */
  if (size == 0)
    return type;

  new_type = make_node (TREE_CODE (type));

  /* Copy the name and flags from the old type to that of the new.
     Note that we rely on the pointer equality created here for
     TYPE_NAME to look through conversions in various places.  */
  TYPE_NAME (new_type) = TYPE_NAME (type);
  TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
  TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
  if (TREE_CODE (type) == RECORD_TYPE)
    TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);

  /* If we are in a record and have a small size, set the alignment to
     try for an integral mode.  Otherwise set it to try for a smaller
     type with BLKmode.  */
  if (in_record && size <= MAX_FIXED_MODE_SIZE)
    {
      align = ceil_pow2 (size);
      TYPE_ALIGN (new_type) = align;
      new_size = (size + align - 1) & -align;
    }
  else
    {
      unsigned HOST_WIDE_INT align;

      /* Do not try to shrink the size if the RM size is not constant.  */
      if (TYPE_CONTAINS_TEMPLATE_P (type)
	  || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
	return type;

      /* Round the RM size up to a unit boundary to get the minimal size
	 for a BLKmode record.  Give up if it's already the size.  */
      new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
      new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
      if (new_size == size)
	return type;

      /* NEW_SIZE & -NEW_SIZE isolates its lowest set bit, i.e. the largest
	 power-of-two alignment NEW_SIZE is a multiple of.  */
      align = new_size & -new_size;
      TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
    }

  TYPE_USER_ALIGN (new_type) = 1;

  /* Now copy the fields, keeping the position and size as we don't want
     to change the layout by propagating the packedness downwards.  */
  for (old_field = TYPE_FIELDS (type); old_field;
       old_field = DECL_CHAIN (old_field))
    {
      tree new_field_type = TREE_TYPE (old_field);
      tree new_field, new_size;

      /* Recursively make aggregate field types packable as well.  */
      if (RECORD_OR_UNION_TYPE_P (new_field_type)
	  && !TYPE_FAT_POINTER_P (new_field_type)
	  && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
	new_field_type = make_packable_type (new_field_type, true);

      /* However, for the last field in a not already packed record type
	 that is of an aggregate type, we need to use the RM size in the
	 packable version of the record type, see finish_record_type.  */
      if (!DECL_CHAIN (old_field)
	  && !TYPE_PACKED (type)
	  && RECORD_OR_UNION_TYPE_P (new_field_type)
	  && !TYPE_FAT_POINTER_P (new_field_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
	  && TYPE_ADA_SIZE (new_field_type))
	new_size = TYPE_ADA_SIZE (new_field_type);
      else
	new_size = DECL_SIZE (old_field);

      new_field
	= create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
			     new_size, bit_position (old_field),
			     TYPE_PACKED (type),
			     !DECL_NONADDRESSABLE_P (old_field));

      DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
      SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
      if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
	DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);

      /* Front-chain here; the list is put back in order below.  */
      DECL_CHAIN (new_field) = field_list;
      field_list = new_field;
    }

  finish_record_type (new_type, nreverse (field_list), 2, false);
  relate_alias_sets (new_type, type, ALIAS_SET_COPY);
  if (TYPE_STUB_DECL (type))
    SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
			    DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));

  /* If this is a padding record, we never want to make the size smaller
     than what was specified.  For QUAL_UNION_TYPE, also copy the size.  */
  if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      TYPE_SIZE (new_type) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
      new_size = size;
    }
  else
    {
      TYPE_SIZE (new_type) = bitsize_int (new_size);
      TYPE_SIZE_UNIT (new_type)
	= size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
    }

  if (!TYPE_CONTAINS_TEMPLATE_P (type))
    SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));

  compute_record_mode (new_type);

  /* Try harder to get a packable type if necessary, for example
     in case the record itself contains a BLKmode field.  */
  if (in_record && TYPE_MODE (new_type) == BLKmode)
    SET_TYPE_MODE (new_type,
		   mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));

  /* If neither the mode nor the size has shrunk, return the old type.  */
  if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
    return type;

  return new_type;
}
/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
   If TYPE is the best type, return it.  Otherwise, make a new type.  We
   only support new integral and pointer types.  FOR_BIASED is true if
   we are making a biased type.  */

tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error.  Likewise if it's too large to represent.  */
  if (!size_tree || !tree_fits_uhwi_p (size_tree))
    return type;

  size = tree_to_uhwi (size_tree);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
		  && TYPE_BIASED_REPRESENTATION_P (type));

      /* Integer types with precision 0 are forbidden.  */
      if (size == 0)
	size = 1;

      /* Only do something if the type isn't a packed array type and doesn't
	 already have the proper size and the size isn't too large.  */
      if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
	  || (TYPE_PRECISION (type) == size && biased_p == for_biased)
	  || size > LONG_LONG_TYPE_SIZE)
	break;

      /* The new type keeps a biased representation if either the source
	 type already had one or the caller requested one.  */
      biased_p |= for_biased;
      if (TYPE_UNSIGNED (type) || biased_p)
	new_type = make_unsigned_type (size);
      else
	new_type = make_signed_type (size);
      /* Remember the base type so the new type isn't seen as a subrange.  */
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      /* Propagate the RM bounds, converted to the new base type.  */
      SET_TYPE_RM_MIN_VALUE (new_type,
			     convert (TREE_TYPE (new_type),
				      TYPE_MIN_VALUE (type)));
      SET_TYPE_RM_MAX_VALUE (new_type,
			     convert (TREE_TYPE (new_type),
				      TYPE_MAX_VALUE (type)));
      /* Copy the name to show that it's essentially the same type and
	 not a subrange type.  */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Do something if this is a fat pointer, in which case we
	 may need to return the thin pointer.  */
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
	{
	  enum machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
	  /* Fall back to the canonical pointer mode if SIZE does not map
	     to a mode the target accepts for pointers.  */
	  if (!targetm.valid_pointer_mode (p_mode))
	    p_mode = ptr_mode;
	  return
	    build_pointer_type_for_mode
	      (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
	       p_mode, 0);
	}
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
	 may need to return the fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
	return
	  build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  return type;
}
994 /* See if the data pointed to by the hash table slot is marked. */
996 static int
997 pad_type_hash_marked_p (const void *p)
999 const_tree const type = ((const struct pad_type_hash *) p)->type;
1001 return ggc_marked_p (type);
1004 /* Return the cached hash value. */
1006 static hashval_t
1007 pad_type_hash_hash (const void *p)
1009 return ((const struct pad_type_hash *) p)->hash;
1012 /* Return 1 iff the padded types are equivalent. */
1014 static int
1015 pad_type_hash_eq (const void *p1, const void *p2)
1017 const struct pad_type_hash *const t1 = (const struct pad_type_hash *) p1;
1018 const struct pad_type_hash *const t2 = (const struct pad_type_hash *) p2;
1019 tree type1, type2;
1021 if (t1->hash != t2->hash)
1022 return 0;
1024 type1 = t1->type;
1025 type2 = t2->type;
1027 /* We consider that the padded types are equivalent if they pad the same
1028 type and have the same size, alignment and RM size. Taking the mode
1029 into account is redundant since it is determined by the others. */
1030 return
1031 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1032 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1033 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1034 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2);
/* Ensure that TYPE has SIZE and ALIGN.  Make and return a new padded type
   if needed.  We have already verified that SIZE and TYPE are large enough.
   GNAT_ENTITY is used to name the resulting record and to issue a warning.
   IS_COMPONENT_TYPE is true if this is being done for the component type of
   an array.  IS_USER_TYPE is true if the original type needs to be completed.
   DEFINITION is true if this type is being defined.  SET_RM_SIZE is true if
   the RM size of the resulting type is to be set to SIZE too.  */

tree
maybe_pad_type (tree type, tree size, unsigned int align,
		Entity_Id gnat_entity, bool is_component_type,
		bool is_user_type, bool definition, bool set_rm_size)
{
  tree orig_size = TYPE_SIZE (type);
  unsigned int orig_align = TYPE_ALIGN (type);
  tree record, field;

  /* If TYPE is a padded type, see if it agrees with any size and alignment
     we were given.  If so, return the original type.  Otherwise, strip
     off the padding, since we will either be returning the inner type
     or repadding it.  If no size or alignment is specified, use that of
     the original padded type.  */
  if (TYPE_IS_PADDING_P (type))
    {
      if ((!size
	   || operand_equal_p (round_up (size, orig_align), orig_size, 0))
	  && (align == 0 || align == orig_align))
	return type;

      if (!size)
	size = orig_size;
      if (align == 0)
	align = orig_align;

      /* Strip the padding: the inner type is the single field's type.  */
      type = TREE_TYPE (TYPE_FIELDS (type));
      orig_size = TYPE_SIZE (type);
      orig_align = TYPE_ALIGN (type);
    }

  /* If the size is either not being changed or is being made smaller (which
     is not done here and is only valid for bitfields anyway), show the size
     isn't changing.  Likewise, clear the alignment if it isn't being
     changed.  Then return if we aren't doing anything.  */
  if (size
      && (operand_equal_p (size, orig_size, 0)
	  || (TREE_CODE (orig_size) == INTEGER_CST
	      && tree_int_cst_lt (size, orig_size))))
    size = NULL_TREE;

  if (align == orig_align)
    align = 0;

  if (align == 0 && !size)
    return type;

  /* If requested, complete the original type and give it a name.  */
  if (is_user_type)
    create_type_decl (get_entity_name (gnat_entity), type,
		      !Comes_From_Source (gnat_entity),
		      !(TYPE_NAME (type)
			&& TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
			&& DECL_IGNORED_P (TYPE_NAME (type))),
		      gnat_entity);

  /* We used to modify the record in place in some cases, but that could
     generate incorrect debugging information.  So make a new record
     type and name.  */
  record = make_node (RECORD_TYPE);
  TYPE_PADDING_P (record) = 1;

  if (Present (gnat_entity))
    TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");

  TYPE_ALIGN (record) = align ? align : orig_align;
  TYPE_SIZE (record) = size ? size : orig_size;
  /* The byte size is the bit size divided by BITS_PER_UNIT, rounded up.  */
  TYPE_SIZE_UNIT (record)
    = convert (sizetype,
	       size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
			   bitsize_unit_node));

  /* If we are changing the alignment and the input type is a record with
     BLKmode and a small constant size, try to make a form that has an
     integral mode.  This might allow the padding record to also have an
     integral mode, which will be much more efficient.  There is no point
     in doing so if a size is specified unless it is also a small constant
     size and it is incorrect to do so if we cannot guarantee that the mode
     will be naturally aligned since the field must always be addressable.

     ??? This might not always be a win when done for a stand-alone object:
     since the nominal and the effective type of the object will now have
     different modes, a VIEW_CONVERT_EXPR will be required for converting
     between them and it might be hard to overcome afterwards, including
     at the RTL level when the stand-alone object is accessed as a whole.  */
  if (align != 0
      && RECORD_OR_UNION_TYPE_P (type)
      && TYPE_MODE (type) == BLKmode
      && !TYPE_BY_REFERENCE_P (type)
      && TREE_CODE (orig_size) == INTEGER_CST
      && !TREE_OVERFLOW (orig_size)
      && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
      && (!size
	  || (TREE_CODE (size) == INTEGER_CST
	      && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
    {
      tree packable_type = make_packable_type (type, true);
      if (TYPE_MODE (packable_type) != BLKmode
	  && align >= TYPE_ALIGN (packable_type))
	type = packable_type;
    }

  /* Now create the field with the original size.  */
  field = create_field_decl (get_identifier ("F"), type, record, orig_size,
			     bitsize_zero_node, 0, 1);
  DECL_INTERNAL_P (field) = 1;

  /* Do not emit debug info until after the auxiliary record is built.  */
  finish_record_type (record, field, 1, false);

  /* Set the RM size if requested.  */
  if (set_rm_size)
    {
      SET_TYPE_ADA_SIZE (record, size ? size : orig_size);

      /* If the padded type is complete and has constant size, we canonicalize
	 it by means of the hash table.  This is consistent with the language
	 semantics and ensures that gigi and the middle-end have a common view
	 of these padded types.  */
      if (TREE_CONSTANT (TYPE_SIZE (record)))
	{
	  hashval_t hashcode;
	  struct pad_type_hash in, *h;
	  void **loc;

	  /* The hash combines the padded type with the size, alignment and
	     RM size of the padding record, mirroring pad_type_hash_eq.  */
	  hashcode = iterative_hash_object (TYPE_HASH (type), 0);
	  hashcode = iterative_hash_expr (TYPE_SIZE (record), hashcode);
	  hashcode = iterative_hash_hashval_t (TYPE_ALIGN (record), hashcode);
	  hashcode = iterative_hash_expr (TYPE_ADA_SIZE (record), hashcode);

	  in.hash = hashcode;
	  in.type = record;
	  h = (struct pad_type_hash *)
		htab_find_with_hash (pad_type_hash_table, &in, hashcode);
	  if (h)
	    {
	      /* An equivalent padded type already exists: reuse it and skip
		 the debug-info machinery below.  */
	      record = h->type;
	      goto built;
	    }

	  h = ggc_alloc_pad_type_hash ();
	  h->hash = hashcode;
	  h->type = record;
	  loc = htab_find_slot_with_hash (pad_type_hash_table, h, hashcode,
					  INSERT);
	  *loc = (void *)h;
	}
    }

  /* Unless debugging information isn't being written for the input type,
     write a record that shows what we are a subtype of and also make a
     variable that indicates our size, if still variable.  */
  if (TREE_CODE (orig_size) != INTEGER_CST
      && TYPE_NAME (record)
      && TYPE_NAME (type)
      && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	   && DECL_IGNORED_P (TYPE_NAME (type))))
    {
      tree marker = make_node (RECORD_TYPE);
      tree name = TYPE_IDENTIFIER (record);
      tree orig_name = TYPE_IDENTIFIER (type);

      /* The "XVS" parallel record encodes the subtype relationship for the
	 debugger.  */
      TYPE_NAME (marker) = concat_name (name, "XVS");
      finish_record_type (marker,
			  create_field_decl (orig_name,
					     build_reference_type (type),
					     marker, NULL_TREE, NULL_TREE,
					     0, 0),
			  0, true);

      add_parallel_type (record, marker);

      if (definition && size && TREE_CODE (size) != INTEGER_CST)
	TYPE_SIZE_UNIT (marker)
	  = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
			     TYPE_SIZE_UNIT (record), false, false, false,
			     false, NULL, gnat_entity);
    }

  rest_of_record_type_compilation (record);

built:
  /* If the size was widened explicitly, maybe give a warning.  Take the
     original size as the maximum size of the input if there was an
     unconstrained record involved and round it up to the specified alignment,
     if one was specified.  But don't do it if we are just annotating types
     and the type is tagged, since tagged types aren't fully laid out in this
     mode.  */
  if (!size
      || TREE_CODE (size) == COND_EXPR
      || TREE_CODE (size) == MAX_EXPR
      || No (gnat_entity)
      || (type_annotate_only && Is_Tagged_Type (Etype (gnat_entity))))
    return record;

  if (CONTAINS_PLACEHOLDER_P (orig_size))
    orig_size = max_size (orig_size, true);

  if (align)
    orig_size = round_up (orig_size, align);

  if (!operand_equal_p (size, orig_size, 0)
      && !(TREE_CODE (size) == INTEGER_CST
	   && TREE_CODE (orig_size) == INTEGER_CST
	   && (TREE_OVERFLOW (size)
	       || TREE_OVERFLOW (orig_size)
	       || tree_int_cst_lt (size, orig_size))))
    {
      Node_Id gnat_error_node = Empty;

      /* For packed arrays, report against the original array type.  */
      if (Is_Packed_Array_Type (gnat_entity))
	gnat_entity = Original_Array_Type (gnat_entity);

      if ((Ekind (gnat_entity) == E_Component
	   || Ekind (gnat_entity) == E_Discriminant)
	  && Present (Component_Clause (gnat_entity)))
	gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
      else if (Present (Size_Clause (gnat_entity)))
	gnat_error_node = Expression (Size_Clause (gnat_entity));

      /* Generate message only for entities that come from source, since
	 if we have an entity created by expansion, the message will be
	 generated for some other corresponding source entity.  */
      if (Comes_From_Source (gnat_entity))
	{
	  if (Present (gnat_error_node))
	    post_error_ne_tree ("{^ }bits of & unused?",
				gnat_error_node, gnat_entity,
				size_diffop (size, orig_size));
	  else if (is_component_type)
	    post_error_ne_tree ("component of& padded{ by ^ bits}?",
				gnat_entity, gnat_entity,
				size_diffop (size, orig_size));
	}
    }

  return record;
}
/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
   If this is a multi-dimensional array type, do this recursively.

   OP may be
   - ALIAS_SET_COPY:     the new set is made a copy of the old one.
   - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
   - ALIAS_SET_SUBSET:   the new set is made a subset of the old one.  */

void
relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
{
  /* Remove any padding from GNU_OLD_TYPE.  It doesn't matter in the case
     of a one-dimensional array, since the padding has the same alias set
     as the field type, but if it's a multi-dimensional array, we need to
     see the inner types.  */
  while (TREE_CODE (gnu_old_type) == RECORD_TYPE
	 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
	     || TYPE_PADDING_P (gnu_old_type)))
    gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));

  /* Unconstrained array types are deemed incomplete and would thus be given
     alias set 0.  Retrieve the underlying array type.  */
  if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_old_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
  if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_new_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));

  /* For a multi-dimensional array, relate the inner dimensions first.  */
  if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
      && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
    relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);

  switch (op)
    {
    case ALIAS_SET_COPY:
      /* The alias set shouldn't be copied between array types with different
	 aliasing settings because this can break the aliasing relationship
	 between the array type and its element type.  */
#ifndef ENABLE_CHECKING
      if (flag_strict_aliasing)
#endif
	gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
		      && TREE_CODE (gnu_old_type) == ARRAY_TYPE
		      && TYPE_NONALIASED_COMPONENT (gnu_new_type)
			 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));

      TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
      break;

    case ALIAS_SET_SUBSET:
    case ALIAS_SET_SUPERSET:
      {
	alias_set_type old_set = get_alias_set (gnu_old_type);
	alias_set_type new_set = get_alias_set (gnu_new_type);

	/* Do nothing if the alias sets conflict.  This ensures that we
	   never call record_alias_subset several times for the same pair
	   or at all for alias set 0.  */
	if (!alias_sets_conflict_p (old_set, new_set))
	  {
	    if (op == ALIAS_SET_SUBSET)
	      record_alias_subset (old_set, new_set);
	    else
	      record_alias_subset (new_set, old_set);
	  }
      }
      break;

    default:
      gcc_unreachable ();
    }

  record_component_aliases (gnu_new_type);
}
1361 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1362 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
1364 void
1365 record_builtin_type (const char *name, tree type, bool artificial_p)
1367 tree type_decl = build_decl (input_location,
1368 TYPE_DECL, get_identifier (name), type);
1369 DECL_ARTIFICIAL (type_decl) = artificial_p;
1370 TYPE_ARTIFICIAL (type) = artificial_p;
1371 gnat_pushdecl (type_decl, Empty);
1373 if (debug_hooks->type_decl)
1374 debug_hooks->type_decl (type_decl, false);
/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record type as a fat pointer type.  */

void
finish_fat_pointer_type (tree record_type, tree field_list)
{
  /* Make sure we can put it into a register.  */
  if (STRICT_ALIGNMENT)
    TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);

  /* Show what it really is.  Set before laying out the record so that
     finish_record_type sees the flag (it skips the Ada size computation
     for fat pointers).  */
  TYPE_FAT_POINTER_P (record_type) = 1;

  /* Do not emit debug info for it since the types of its fields may still be
     incomplete at this point.  */
  finish_record_type (record_type, field_list, 0, false);

  /* Force type_contains_placeholder_p to return true on it.  Although the
     PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
     type but the representation of the unconstrained array.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
}
/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record or union type.  If REP_LEVEL is zero, this
   record has no representation clause and so will be entirely laid out here.
   If REP_LEVEL is one, this record has a representation clause and has been
   laid out already; only set the sizes and alignment.  If REP_LEVEL is two,
   this record is derived from a parent record and thus inherits its layout;
   only make a pass on the fields to finalize them.  DEBUG_INFO_P is true if
   we need to write debug information about this type.  */

void
finish_record_type (tree record_type, tree field_list, int rep_level,
		    bool debug_info_p)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_IDENTIFIER (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  /* Remember which layout properties were already set on entry; they
     condition what we (re)compute below.  */
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  bool had_align = TYPE_ALIGN (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));

      if (!had_size_unit)
	TYPE_SIZE_UNIT (record_type) = size_zero_node;

      if (!had_size)
	TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
	 out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
	code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
	 case where there is a rep clause but all fields have errors and
	 no longer have a position.  */
      TYPE_SIZE (record_type) = 0;

      /* Ensure we use the traditional GCC layout for bitfields when we need
	 to pack the record type or have a representation clause.  The other
	 possible layout (Microsoft C compiler), if available, would prevent
	 efficient packing in almost all cases.  */
#ifdef TARGET_MS_BITFIELD_LAYOUT
      if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
	decl_attributes (&record_type,
			 tree_cons (get_identifier ("gcc_struct"),
				    NULL_TREE, NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
#endif

      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      /* Use the RM size of the field's type when it has one; otherwise the
	 Ada size of the field is just its GCC size.  */
      if (RECORD_OR_UNION_TYPE_P (type)
	  && !TYPE_FAT_POINTER_P (type)
	  && !TYPE_CONTAINS_TEMPLATE_P (type)
	  && TYPE_ADA_SIZE (type))
	this_ada_size = TYPE_ADA_SIZE (type);
      else
	this_ada_size = this_size;

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
	{
	  unsigned int align = TYPE_ALIGN (type);

	  /* In the general case, type alignment is required.  */
	  if (value_factor_p (pos, align))
	    {
	      /* The enclosing record type must be sufficiently aligned.
		 Otherwise, if no alignment was specified for it and it
		 has been laid out already, bump its alignment to the
		 desired one if this is compatible with its size.  */
	      if (TYPE_ALIGN (record_type) >= align)
		{
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	      else if (!had_align
		       && rep_level == 0
		       && value_factor_p (TYPE_SIZE (record_type), align))
		{
		  TYPE_ALIGN (record_type) = align;
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	    }

	  /* In the non-strict alignment case, only byte alignment is.  */
	  if (!STRICT_ALIGNMENT
	      && DECL_BIT_FIELD (field)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    DECL_BIT_FIELD (field) = 0;
	}

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
	 field is technically not addressable.  Except that it can actually
	 be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
	  && !(DECL_MODE (field) == BLKmode
	       && value_factor_p (pos, BITS_PER_UNIT)))
	DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
	 a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
	TYPE_ALIGN (record_type)
	  = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

      switch (code)
	{
	case UNION_TYPE:
	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
	  size = size_binop (MAX_EXPR, size, this_size);
	  break;

	case QUAL_UNION_TYPE:
	  ada_size
	    = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			   this_ada_size, ada_size);
	  size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			      this_size, size);
	  break;

	case RECORD_TYPE:
	  /* Since we know here that all fields are sorted in order of
	     increasing bit position, the size of the record is one
	     higher than the ending bit of the last field processed
	     unless we have a rep clause, since in that case we might
	     have a field outside a QUAL_UNION_TYPE that has a higher ending
	     position.  So use a MAX in that case.  Also, if this field is a
	     QUAL_UNION_TYPE, we need to take into account the previous size in
	     the case of empty variants.  */
	  ada_size
	    = merge_sizes (ada_size, pos, this_ada_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  size
	    = merge_sizes (size, pos, this_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Restore the original field order that was reversed above.  */
  if (code == QUAL_UNION_TYPE)
    nreverse (field_list);

  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
	 than what was specified in it, if any.  */
      if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
	size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_FAT_POINTER_P (record_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (record_type))
	SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
	{
	  tree size_unit = had_size_unit
			   ? TYPE_SIZE_UNIT (record_type)
			   : convert (sizetype,
				      size_binop (CEIL_DIV_EXPR, size,
						  bitsize_unit_node));
	  unsigned int align = TYPE_ALIGN (record_type);

	  TYPE_SIZE (record_type) = variable_size (round_up (size, align));
	  TYPE_SIZE_UNIT (record_type)
	    = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

	  compute_record_mode (record_type);
	}
    }

  if (debug_info_p)
    rest_of_record_type_compilation (record_type);
}
1616 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. */
1618 void
1619 add_parallel_type (tree type, tree parallel_type)
1621 tree decl = TYPE_STUB_DECL (type);
1623 while (DECL_PARALLEL_TYPE (decl))
1624 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1626 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1629 /* Return true if TYPE has a parallel type. */
1631 static bool
1632 has_parallel_type (tree type)
1634 tree decl = TYPE_STUB_DECL (type);
1636 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
/* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
   associated with it.  It need not be invoked directly in most cases since
   finish_record_type takes care of doing so, but this can be necessary if
   a parallel type is to be attached to the record type.  */

void
rest_of_record_type_compilation (tree record_type)
{
  bool var_size = false;
  tree field;

  /* If this is a padded type, the bulk of the debug info has already been
     generated for the field's type.  */
  if (TYPE_IS_PADDING_P (record_type))
    return;

  /* If the type already has a parallel type (XVS type), then we're done.  */
  if (has_parallel_type (record_type))
    return;

  for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
	 whether or not the record does.  For example, if we have a union,
	 it may be that all fields, rounded up to the alignment, have the
	 same size, in which case we'll use that size.  But the debug
	 output routines (except Dwarf2) won't be able to output the fields,
	 so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
	  /* If a field has a non-constant qualifier, the record will have
	     variable size too.  */
	  || (TREE_CODE (record_type) == QUAL_UNION_TYPE
	      && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
	{
	  var_size = true;
	  break;
	}
    }

  /* If this record type is of variable size, make a parallel record type that
     will tell the debugger how the former is laid out (see exp_dbug.ads).  */
  if (var_size)
    {
      tree new_record_type
	= make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
		     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
      tree last_pos = bitsize_zero_node;
      tree old_field, prev_old_field = NULL_TREE;

      /* "XVU" marks a variant (qualified union) encoding, "XVE" a plain
	 variable-size record encoding.  */
      new_name
	= concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
		       ? "XVU" : "XVE");
      TYPE_NAME (new_record_type) = new_name;
      TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
      TYPE_STUB_DECL (new_record_type)
	= create_type_stub_decl (new_name, new_record_type);
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
	= DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
	= size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      /* Now scan all the fields, replacing each field with a new field
	 corresponding to the new encoding.  */
      for (old_field = TYPE_FIELDS (record_type); old_field;
	   old_field = DECL_CHAIN (old_field))
	{
	  tree field_type = TREE_TYPE (old_field);
	  tree field_name = DECL_NAME (old_field);
	  tree curpos = bit_position (old_field);
	  tree pos, new_field;
	  bool var = false;
	  unsigned int align = 0;

	  /* We're going to do some pattern matching below so remove as many
	     conversions as possible.  */
	  curpos = remove_conversions (curpos, true);

	  /* See how the position was modified from the last position.

	     There are two basic cases we support: a value was added
	     to the last position or the last position was rounded to
	     a boundary and they something was added.  Check for the
	     first case first.  If not, see if there is any evidence
	     of rounding.  If so, round the last position and retry.

	     If this is a union, the position can be taken as zero.  */
	  if (TREE_CODE (new_record_type) == UNION_TYPE)
	    pos = bitsize_zero_node;
	  else
	    pos = compute_related_constant (curpos, last_pos);

	  /* Position of the form OFFSET * FACTOR: the field is aligned on
	     (a factor of) FACTOR.  */
	  if (!pos
	      && TREE_CODE (curpos) == MULT_EXPR
	      && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
	    {
	      tree offset = TREE_OPERAND (curpos, 0);
	      align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align = scale_by_factor_of (offset, align);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  /* Position of the form OFFSET * FACTOR + ADDEND: the alignment is
	     bounded by both FACTOR and the lowest set bit of ADDEND.  */
	  else if (!pos
		   && TREE_CODE (curpos) == PLUS_EXPR
		   && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
		   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
		   && tree_fits_uhwi_p
		      (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
	    {
	      tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
	      unsigned HOST_WIDE_INT addend
		= tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align
		= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
	      align = scale_by_factor_of (offset, align);
	      align = MIN (align, addend & -addend);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  else if (potential_alignment_gap (prev_old_field, old_field, pos))
	    {
	      align = TYPE_ALIGN (field_type);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }

	  /* If we can't compute a position, set it to zero.

	     ??? We really should abort here, but it's too much work
	     to get this correct for all cases.  */
	  if (!pos)
	    pos = bitsize_zero_node;

	  /* See if this type is variable-sized and make a pointer type
	     and indicate the indirection if so.  Beware that the debug
	     back-end may adjust the position computed above according
	     to the alignment of the field type, i.e. the pointer type
	     in this case, if we don't preventively counter that.  */
	  if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
	    {
	      field_type = build_pointer_type (field_type);
	      if (align != 0 && TYPE_ALIGN (field_type) > align)
		{
		  field_type = copy_node (field_type);
		  TYPE_ALIGN (field_type) = align;
		}
	      var = true;
	    }

	  /* Make a new field name, if necessary.  */
	  if (var || align != 0)
	    {
	      char suffix[16];

	      if (align != 0)
		sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
			 align / BITS_PER_UNIT);
	      else
		strcpy (suffix, "XVL");

	      field_name = concat_name (field_name, suffix);
	    }

	  new_field
	    = create_field_decl (field_name, field_type, new_record_type,
				 DECL_SIZE (old_field), pos, 0, 0);
	  /* Fields are chained in reverse order here and reversed below.  */
	  DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
	  TYPE_FIELDS (new_record_type) = new_field;

	  /* If old_field is a QUAL_UNION_TYPE, take its size as being
	     zero.  The only time it's not the last field of the record
	     is when there are other components at fixed positions after
	     it (meaning there was a rep clause for every field) and we
	     want to be able to encode them.  */
	  last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
				 (TREE_CODE (TREE_TYPE (old_field))
				  == QUAL_UNION_TYPE)
				 ? bitsize_zero_node
				 : DECL_SIZE (old_field));
	  prev_old_field = old_field;
	}

      TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));

      add_parallel_type (record_type, new_record_type);
    }
}
1828 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1829 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1830 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1831 replace a value of zero with the old size. If HAS_REP is true, we take the
1832 MAX of the end position of this field with LAST_SIZE. In all other cases,
1833 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1835 static tree
1836 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1837 bool has_rep)
1839 tree type = TREE_TYPE (last_size);
1840 tree new_size;
1842 if (!special || TREE_CODE (size) != COND_EXPR)
1844 new_size = size_binop (PLUS_EXPR, first_bit, size);
1845 if (has_rep)
1846 new_size = size_binop (MAX_EXPR, last_size, new_size);
1849 else
1850 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1851 integer_zerop (TREE_OPERAND (size, 1))
1852 ? last_size : merge_sizes (last_size, first_bit,
1853 TREE_OPERAND (size, 1),
1854 1, has_rep),
1855 integer_zerop (TREE_OPERAND (size, 2))
1856 ? last_size : merge_sizes (last_size, first_bit,
1857 TREE_OPERAND (size, 2),
1858 1, has_rep));
1860 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1861 when fed through substitute_in_expr) into thinking that a constant
1862 size is not constant. */
1863 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
1864 new_size = TREE_OPERAND (new_size, 0);
1866 return new_size;
1869 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1870 related by the addition of a constant. Return that constant if so. */
1872 static tree
1873 compute_related_constant (tree op0, tree op1)
1875 tree op0_var, op1_var;
1876 tree op0_con = split_plus (op0, &op0_var);
1877 tree op1_con = split_plus (op1, &op1_var);
1878 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1880 if (operand_equal_p (op0_var, op1_var, 0))
1881 return result;
1882 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1883 return result;
1884 else
1885 return 0;
1888 /* Utility function of above to split a tree OP which may be a sum, into a
1889 constant part, which is returned, and a variable part, which is stored
1890 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1891 bitsizetype. */
1893 static tree
1894 split_plus (tree in, tree *pvar)
1896 /* Strip conversions in order to ease the tree traversal and maximize the
1897 potential for constant or plus/minus discovery. We need to be careful
1898 to always return and set *pvar to bitsizetype trees, but it's worth
1899 the effort. */
1900 in = remove_conversions (in, false);
1902 *pvar = convert (bitsizetype, in);
1904 if (TREE_CODE (in) == INTEGER_CST)
1906 *pvar = bitsize_zero_node;
1907 return convert (bitsizetype, in);
1909 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1911 tree lhs_var, rhs_var;
1912 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1913 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1915 if (lhs_var == TREE_OPERAND (in, 0)
1916 && rhs_var == TREE_OPERAND (in, 1))
1917 return bitsize_zero_node;
1919 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1920 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1922 else
1923 return bitsize_zero_node;
1926 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1927 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1928 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1929 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1930 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1931 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1932 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1933 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1934 invisible reference. */
1936 tree
1937 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1938 bool return_unconstrained_p, bool return_by_direct_ref_p,
1939 bool return_by_invisi_ref_p)
1941 /* A list of the data type nodes of the subprogram formal parameters.
1942 This list is generated by traversing the input list of PARM_DECL
1943 nodes. */
1944 vec<tree, va_gc> *param_type_list = NULL;
1945 tree t, type;
1947 for (t = param_decl_list; t; t = DECL_CHAIN (t))
1948 vec_safe_push (param_type_list, TREE_TYPE (t));
1950 type = build_function_type_vec (return_type, param_type_list);
1952 /* TYPE may have been shared since GCC hashes types. If it has a different
1953 CICO_LIST, make a copy. Likewise for the various flags. */
1954 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
1955 return_by_direct_ref_p, return_by_invisi_ref_p))
1957 type = copy_type (type);
1958 TYPE_CI_CO_LIST (type) = cico_list;
1959 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
1960 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
1961 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
1964 return type;
1967 /* Return a copy of TYPE but safe to modify in any way. */
1969 tree
1970 copy_type (tree type)
1972 tree new_type = copy_node (type);
1974 /* Unshare the language-specific data. */
1975 if (TYPE_LANG_SPECIFIC (type))
1977 TYPE_LANG_SPECIFIC (new_type) = NULL;
1978 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
1981 /* And the contents of the language-specific slot if needed. */
1982 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
1983 && TYPE_RM_VALUES (type))
1985 TYPE_RM_VALUES (new_type) = NULL_TREE;
1986 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
1987 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
1988 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
1991 /* copy_node clears this field instead of copying it, because it is
1992 aliased with TREE_CHAIN. */
1993 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
1995 TYPE_POINTER_TO (new_type) = 0;
1996 TYPE_REFERENCE_TO (new_type) = 0;
1997 TYPE_MAIN_VARIANT (new_type) = new_type;
1998 TYPE_NEXT_VARIANT (new_type) = 0;
2000 return new_type;
2003 /* Return a subtype of sizetype with range MIN to MAX and whose
2004 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2005 of the associated TYPE_DECL. */
2007 tree
2008 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2010 /* First build a type for the desired range. */
2011 tree type = build_nonshared_range_type (sizetype, min, max);
2013 /* Then set the index type. */
2014 SET_TYPE_INDEX_TYPE (type, index);
2015 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2017 return type;
2020 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2021 sizetype is used. */
2023 tree
2024 create_range_type (tree type, tree min, tree max)
2026 tree range_type;
2028 if (type == NULL_TREE)
2029 type = sizetype;
2031 /* First build a type with the base range. */
2032 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2033 TYPE_MAX_VALUE (type));
2035 /* Then set the actual range. */
2036 SET_TYPE_RM_MIN_VALUE (range_type, convert (type, min));
2037 SET_TYPE_RM_MAX_VALUE (range_type, convert (type, max));
2039 return range_type;
2042 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2043 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2044 its data type. */
2046 tree
2047 create_type_stub_decl (tree type_name, tree type)
2049 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2050 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2051 emitted in DWARF. */
2052 tree type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
2053 DECL_ARTIFICIAL (type_decl) = 1;
2054 TYPE_ARTIFICIAL (type) = 1;
2055 return type_decl;
2058 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
2059 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
2060 is a declaration that was generated by the compiler. DEBUG_INFO_P is
2061 true if we need to write debug information about this type. GNAT_NODE
2062 is used for the position of the decl. */
2064 tree
2065 create_type_decl (tree type_name, tree type, bool artificial_p,
2066 bool debug_info_p, Node_Id gnat_node)
2068 enum tree_code code = TREE_CODE (type);
2069 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2070 tree type_decl;
2072 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2073 gcc_assert (!TYPE_IS_DUMMY_P (type));
2075 /* If the type hasn't been named yet, we're naming it; preserve an existing
2076 TYPE_STUB_DECL that has been attached to it for some purpose. */
2077 if (!named && TYPE_STUB_DECL (type))
2079 type_decl = TYPE_STUB_DECL (type);
2080 DECL_NAME (type_decl) = type_name;
2082 else
2083 type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
2085 DECL_ARTIFICIAL (type_decl) = artificial_p;
2086 TYPE_ARTIFICIAL (type) = artificial_p;
2088 /* Add this decl to the current binding level. */
2089 gnat_pushdecl (type_decl, gnat_node);
2091 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
2092 This causes the name to be also viewed as a "tag" by the debug
2093 back-end, with the advantage that no DW_TAG_typedef is emitted
2094 for artificial "tagged" types in DWARF. */
2095 if (!named)
2096 TYPE_STUB_DECL (type) = type_decl;
2098 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2099 back-end doesn't support, and for others if we don't need to. */
2100 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2101 DECL_IGNORED_P (type_decl) = 1;
2103 return type_decl;
2106 /* Return a VAR_DECL or CONST_DECL node.
2108 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
2109 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
2110 the GCC tree for an optional initial expression; NULL_TREE if none.
2112 CONST_FLAG is true if this variable is constant, in which case we might
2113 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2115 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2116 definition to be made visible outside of the current compilation unit, for
2117 instance variable definitions in a package specification.
2119 EXTERN_FLAG is true when processing an external variable declaration (as
2120 opposed to a definition: no storage is to be allocated for the variable).
2122 STATIC_FLAG is only relevant when not at top level. In that case
2123 it indicates whether to always allocate storage to the variable.
2125 GNAT_NODE is used for the position of the decl. */
/* CONST_DECL_ALLOWED_P is true if returning a CONST_DECL is permitted at
   all; ATTR_LIST is the list of attributes to be applied to the variable.  */
2127 tree
2128 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
2129 bool const_flag, bool public_flag, bool extern_flag,
2130 bool static_flag, bool const_decl_allowed_p,
2131 struct attrib *attr_list, Node_Id gnat_node)
2133 /* Whether the initializer is a constant initializer. At the global level
2134 or for an external object or an object to be allocated in static memory,
2135 we check that it is a valid constant expression for use in initializing
2136 a static variable; otherwise, we only check that it is constant. */
2137 bool init_const
2138 = (var_init != 0
2139 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
2140 && (global_bindings_p () || extern_flag || static_flag
2141 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
2142 : TREE_CONSTANT (var_init)));
2144 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2145 case the initializer may be used in-lieu of the DECL node (as done in
2146 Identifier_to_gnu). This is useful to prevent the need of elaboration
2147 code when an identifier for which such a decl is made is in turn used as
2148 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
2149 but extra constraints apply to this choice (see below) and are not
2150 relevant to the distinction we wish to make. */
2151 bool constant_p = const_flag && init_const;
2153 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2154 and may be used for scalars in general but not for aggregates. */
2155 tree var_decl
2156 = build_decl (input_location,
2157 (constant_p && const_decl_allowed_p
2158 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
2159 var_name, type);
2161 /* If this is external, throw away any initializations (they will be done
2162 elsewhere) unless this is a constant for which we would like to remain
2163 able to get the initializer. If we are defining a global here, leave a
2164 constant initialization and save any variable elaborations for the
2165 elaboration routine. If we are just annotating types, throw away the
2166 initialization if it isn't a constant. */
2167 if ((extern_flag && !constant_p)
2168 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
2169 var_init = NULL_TREE;
2171 /* At the global level, an initializer requiring code to be generated
2172 produces elaboration statements. Check that such statements are allowed,
2173 that is, not violating a No_Elaboration_Code restriction. */
2174 if (global_bindings_p () && var_init != 0 && !init_const)
2175 Check_Elaboration_Code_Allowed (gnat_node);
2177 DECL_INITIAL (var_decl) = var_init;
2178 TREE_READONLY (var_decl) = const_flag;
2179 DECL_EXTERNAL (var_decl) = extern_flag;
2180 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
2181 TREE_CONSTANT (var_decl) = constant_p;
/* A variable of volatile type is itself volatile, and accessing it counts
   as a side effect.  */
2182 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
2183 = TYPE_VOLATILE (type);
2185 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2186 try to fiddle with DECL_COMMON. However, on platforms that don't
2187 support global BSS sections, uninitialized global variables would
2188 go in DATA instead, thus increasing the size of the executable. */
2189 if (!flag_no_common
2190 && TREE_CODE (var_decl) == VAR_DECL
2191 && TREE_PUBLIC (var_decl)
2192 && !have_global_bss_p ())
2193 DECL_COMMON (var_decl) = 1;
2195 /* At the global binding level, we need to allocate static storage for the
2196 variable if it isn't external. Otherwise, we allocate automatic storage
2197 unless requested not to. */
2198 TREE_STATIC (var_decl)
2199 = !extern_flag && (static_flag || global_bindings_p ());
2201 /* For an external constant whose initializer is not absolute, do not emit
2202 debug info. In DWARF this would mean a global relocation in a read-only
2203 section which runs afoul of the PE-COFF run-time relocation mechanism. */
2204 if (extern_flag
2205 && constant_p
2206 && var_init
2207 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
2208 != null_pointer_node)
2209 DECL_IGNORED_P (var_decl) = 1;
/* An object whose access is a side effect must be addressable.  */
2211 if (TREE_SIDE_EFFECTS (var_decl))
2212 TREE_ADDRESSABLE (var_decl) = 1;
2214 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2215 if (TREE_CODE (var_decl) == VAR_DECL)
2216 process_attributes (&var_decl, &attr_list, true, gnat_node);
2218 /* Add this decl to the current binding level. */
2219 gnat_pushdecl (var_decl, gnat_node)
2221 if (TREE_CODE (var_decl) == VAR_DECL)
2223 if (asm_name)
2224 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
/* Emit the definition right away when at the global level.  */
2226 if (global_bindings_p ())
2227 rest_of_decl_compilation (var_decl, true, 0);
2230 return var_decl;
2233 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2235 static bool
2236 aggregate_type_contains_array_p (tree type)
2238 switch (TREE_CODE (type))
2240 case RECORD_TYPE:
2241 case UNION_TYPE:
2242 case QUAL_UNION_TYPE:
2244 tree field;
2245 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2246 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2247 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2248 return true;
2249 return false;
2252 case ARRAY_TYPE:
2253 return true;
2255 default:
2256 gcc_unreachable ();
2260 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
2261 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
2262 nonzero, it is the specified size of the field. If POS is nonzero, it is
2263 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
2264 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2265 means we are allowed to take the address of the field; if it is negative,
2266 we should not make a bitfield, which is used by make_aligning_type. */
2268 tree
2269 create_field_decl (tree field_name, tree field_type, tree record_type,
2270 tree size, tree pos, int packed, int addressable)
2272 tree field_decl = build_decl (input_location,
2273 FIELD_DECL, field_name, field_type);
2275 DECL_CONTEXT (field_decl) = record_type;
2276 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
2278 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2279 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2280 Likewise for an aggregate without specified position that contains an
2281 array, because in this case slices of variable length of this array
2282 must be handled by GCC and variable-sized objects need to be aligned
2283 to at least a byte boundary. */
2284 if (packed && (TYPE_MODE (field_type) == BLKmode
2285 || (!pos
2286 && AGGREGATE_TYPE_P (field_type)
2287 && aggregate_type_contains_array_p (field_type))))
2288 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
2290 /* If a size is specified, use it. Otherwise, if the record type is packed
2291 compute a size to use, which may differ from the object's natural size.
2292 We always set a size in this case to trigger the checks for bitfield
2293 creation below, which is typically required when no position has been
2294 specified. */
2295 if (size)
2296 size = convert (bitsizetype, size);
2297 else if (packed == 1)
2299 size = rm_size (field_type);
/* A BLKmode field must span a whole number of bytes.  */
2300 if (TYPE_MODE (field_type) == BLKmode)
2301 size = round_up (size, BITS_PER_UNIT);
2304 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2305 specified for two reasons: first if the size differs from the natural
2306 size. Second, if the alignment is insufficient. There are a number of
2307 ways the latter can be true.
2309 We never make a bitfield if the type of the field has a nonconstant size,
2310 because no such entity requiring bitfield operations should reach here.
2312 We do *preventively* make a bitfield when there might be the need for it
2313 but we don't have all the necessary information to decide, as is the case
2314 of a field with no specified position in a packed record.
2316 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2317 in layout_decl or finish_record_type to clear the bit_field indication if
2318 it is in fact not needed. */
2319 if (addressable >= 0
2320 && size
2321 && TREE_CODE (size) == INTEGER_CST
2322 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
2323 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
2324 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
2325 || packed
2326 || (TYPE_ALIGN (record_type) != 0
2327 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
2329 DECL_BIT_FIELD (field_decl) = 1;
2330 DECL_SIZE (field_decl) = size;
2331 if (!packed && !pos)
2333 if (TYPE_ALIGN (record_type) != 0
2334 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
2335 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
2336 else
2337 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
/* With a specified position, the packed status mirrors the bitfield
   decision just made; otherwise it follows the enclosing record.  */
2341 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2343 /* Bump the alignment if need be, either for bitfield/packing purposes or
2344 to satisfy the type requirements if no such consideration applies. When
2345 we get the alignment from the type, indicate if this is from an explicit
2346 user request, which prevents stor-layout from lowering it later on. */
2348 unsigned int bit_align
2349 = (DECL_BIT_FIELD (field_decl) ? 1
2350 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
2352 if (bit_align > DECL_ALIGN (field_decl))
2353 DECL_ALIGN (field_decl) = bit_align;
2354 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
2356 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2357 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
2361 if (pos)
2363 /* We need to pass in the alignment the DECL is known to have.
2364 This is the lowest-order bit set in POS, but no more than
2365 the alignment of the record, if one is specified. Note
2366 that an alignment of 0 is taken as infinite. */
2367 unsigned int known_align;
2369 if (tree_fits_uhwi_p (pos))
2370 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
2371 else
2372 known_align = BITS_PER_UNIT;
2374 if (TYPE_ALIGN (record_type)
2375 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2376 known_align = TYPE_ALIGN (record_type);
2378 layout_decl (field_decl, known_align);
2379 SET_DECL_OFFSET_ALIGN (field_decl,
2380 tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
2381 : BITS_PER_UNIT);
2382 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2383 &DECL_FIELD_BIT_OFFSET (field_decl),
2384 DECL_OFFSET_ALIGN (field_decl), pos);
2387 /* In addition to what our caller says, claim the field is addressable if we
2388 know that its type is not suitable.
2390 The field may also be "technically" nonaddressable, meaning that even if
2391 we attempt to take the field's address we will actually get the address
2392 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2393 value we have at this point is not accurate enough, so we don't account
2394 for this here and let finish_record_type decide. */
2395 if (!addressable && !type_for_nonaliased_component_p (field_type))
2396 addressable = 1;
2398 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2400 return field_decl;
2403 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
2404 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
2405 (either an In parameter or an address of a pass-by-ref parameter). */
2407 tree
2408 create_param_decl (tree param_name, tree param_type, bool readonly)
2410 tree param_decl = build_decl (input_location,
2411 PARM_DECL, param_name, param_type);
2413 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2414 can lead to various ABI violations. */
2415 if (targetm.calls.promote_prototypes (NULL_TREE)
2416 && INTEGRAL_TYPE_P (param_type)
2417 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
2419 /* We have to be careful about biased types here. Make a subtype
2420 of integer_type_node with the proper biasing. */
2421 if (TREE_CODE (param_type) == INTEGER_TYPE
2422 && TYPE_BIASED_REPRESENTATION_P (param_type))
2424 tree subtype
2425 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2426 TREE_TYPE (subtype) = integer_type_node;
2427 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2428 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
2429 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
2430 param_type = subtype;
2432 else
2433 param_type = integer_type_node;
2436 DECL_ARG_TYPE (param_decl) = param_type;
2437 TREE_READONLY (param_decl) = readonly;
2438 return param_decl;
2441 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2442 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2443 changed. GNAT_NODE is used for the position of error messages. */
2445 void
2446 process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2447 Node_Id gnat_node)
2449 struct attrib *attr;
/* Apply each attribute in turn; the handling depends on its kind.  */
2451 for (attr = *attr_list; attr; attr = attr->next)
2452 switch (attr->type)
2454 case ATTR_MACHINE_ATTRIBUTE:
2455 Sloc_to_locus (Sloc (gnat_node), &input_location);
2456 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2457 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
2458 break;
2460 case ATTR_LINK_ALIAS:
/* Only emit the alias for a definition, i.e. a non-external node,
   and give it static storage.  */
2461 if (!DECL_EXTERNAL (*node))
2463 TREE_STATIC (*node) = 1;
2464 assemble_alias (*node, attr->name);
2466 break;
2468 case ATTR_WEAK_EXTERNAL:
2469 if (SUPPORTS_WEAK)
2470 declare_weak (*node);
2471 else
2472 post_error ("?weak declarations not supported on this target",
2473 attr->error_point);
2474 break;
2476 case ATTR_LINK_SECTION:
2477 if (targetm_common.have_named_sections)
2479 DECL_SECTION_NAME (*node)
2480 = build_string (IDENTIFIER_LENGTH (attr->name),
2481 IDENTIFIER_POINTER (attr->name));
/* A decl with an explicit section cannot also be common.  */
2482 DECL_COMMON (*node) = 0;
2484 else
2485 post_error ("?section attributes are not supported for this target",
2486 attr->error_point);
2487 break;
2489 case ATTR_LINK_CONSTRUCTOR:
2490 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2491 TREE_USED (*node) = 1;
2492 break;
2494 case ATTR_LINK_DESTRUCTOR:
2495 DECL_STATIC_DESTRUCTOR (*node) = 1;
2496 TREE_USED (*node) = 1;
2497 break;
2499 case ATTR_THREAD_LOCAL_STORAGE:
2500 DECL_TLS_MODEL (*node) = decl_default_tls_model (*node);
2501 DECL_COMMON (*node) = 0;
2502 break;
/* All attributes have been applied; empty the list so that they are
   not processed a second time.  */
2505 *attr_list = NULL;
2508 /* Record DECL as a global renaming pointer. */
2510 void
2511 record_global_renaming_pointer (tree decl)
2513 gcc_assert (!DECL_LOOP_PARM_P (decl) && DECL_RENAMED_OBJECT (decl));
2514 vec_safe_push (global_renaming_pointers, decl);
2517 /* Invalidate the global renaming pointers. */
2519 void
2520 invalidate_global_renaming_pointers (void)
2522 unsigned int i;
2523 tree iter;
2525 if (global_renaming_pointers == NULL)
2526 return;
2528 FOR_EACH_VEC_ELT (*global_renaming_pointers, i, iter)
2529 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
2531 vec_free (global_renaming_pointers);
2534 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2535 a power of 2. */
2537 bool
2538 value_factor_p (tree value, HOST_WIDE_INT factor)
2540 if (tree_fits_uhwi_p (value))
2541 return tree_to_uhwi (value) % factor == 0;
2543 if (TREE_CODE (value) == MULT_EXPR)
2544 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2545 || value_factor_p (TREE_OPERAND (value, 1), factor));
2547 return false;
2550 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
2552 static unsigned int
2553 scale_by_factor_of (tree expr, unsigned int value)
2555 expr = remove_conversions (expr, true);
2557 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
2558 corresponding to the number of trailing zeros of the mask. */
2559 if (TREE_CODE (expr) == BIT_AND_EXPR
2560 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
2562 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
2563 unsigned int i = 0;
2565 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
2567 mask >>= 1;
2568 value *= 2;
2569 i++;
2573 return value;
2576 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
2577 unless we can prove these 2 fields are laid out in such a way that no gap
2578 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
2579 is the distance in bits between the end of PREV_FIELD and the starting
2580 position of CURR_FIELD. It is ignored if null. */
2582 static bool
2583 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
2585 /* If this is the first field of the record, there cannot be any gap */
2586 if (!prev_field)
2587 return false;
2589 /* If the previous field is a union type, then return false: The only
2590 time when such a field is not the last field of the record is when
2591 there are other components at fixed positions after it (meaning there
2592 was a rep clause for every field), in which case we don't want the
2593 alignment constraint to override them. */
2594 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
2595 return false;
2597 /* If the distance between the end of prev_field and the beginning of
2598 curr_field is constant, then there is a gap if the value of this
2599 constant is not null. */
2600 if (offset && tree_fits_uhwi_p (offset))
2601 return !integer_zerop (offset);
2603 /* If the size and position of the previous field are constant,
2604 then check the sum of this size and position. There will be a gap
2605 iff it is not multiple of the current field alignment. */
2606 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
2607 && tree_fits_uhwi_p (bit_position (prev_field)))
2608 return ((tree_to_uhwi (bit_position (prev_field))
2609 + tree_to_uhwi (DECL_SIZE (prev_field)))
2610 % DECL_ALIGN (curr_field) != 0);
2612 /* If both the position and size of the previous field are multiples
2613 of the current field alignment, there cannot be any gap. */
2614 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
2615 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
2616 return false;
2618 /* Fallback, return that there may be a potential gap */
2619 return true;
2622 /* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
2623 of the decl. */
2625 tree
2626 create_label_decl (tree label_name, Node_Id gnat_node)
2628 tree label_decl
2629 = build_decl (input_location, LABEL_DECL, label_name, void_type_node);
2631 DECL_MODE (label_decl) = VOIDmode;
2633 /* Add this decl to the current binding level. */
2634 gnat_pushdecl (label_decl, gnat_node);
2636 return label_decl;
/* Return a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the DECL_CHAIN field).

   INLINE_STATUS, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
   used to set the appropriate fields in the FUNCTION_DECL.  GNAT_NODE is
   used for the position of the decl.  */

tree
create_subprog_decl (tree subprog_name, tree asm_name, tree subprog_type,
		     tree param_decl_list, enum inline_status_t inline_status,
		     bool public_flag, bool extern_flag, bool artificial_flag,
		     struct attrib *attr_list, Node_Id gnat_node)
{
  tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
				  subprog_type);
  tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
				 TREE_TYPE (subprog_type));
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;

  /* If this is a non-inline function nested inside an inlined external
     function, we cannot honor both requests without cloning the nested
     function in the current unit since it is private to the other unit.
     We could inline the nested function as well but it's probably better
     to err on the side of too little inlining.  */
  if (inline_status != is_enabled
      && !public_flag
      && current_function_decl
      && DECL_DECLARED_INLINE_P (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))
    DECL_DECLARED_INLINE_P (current_function_decl) = 0;

  DECL_ARTIFICIAL (subprog_decl) = artificial_flag;
  DECL_EXTERNAL (subprog_decl) = extern_flag;

  /* Translate the tri-state inlining request into decl flags.  */
  switch (inline_status)
    {
    case is_suppressed:
      /* Inlining is forbidden: make the inliner skip this decl.  */
      DECL_UNINLINABLE (subprog_decl) = 1;
      break;

    case is_disabled:
      break;

    case is_enabled:
      DECL_DECLARED_INLINE_P (subprog_decl) = 1;
      /* Do not warn when a compiler-generated subprogram is not inlined.  */
      DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_flag;
      break;

    default:
      gcc_unreachable ();
    }

  TREE_PUBLIC (subprog_decl) = public_flag;
  /* Propagate the readonly/volatile qualifiers of SUBPROG_TYPE to the
     decl so the middle-end sees them on the function itself.  */
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);

  /* The RESULT_DECL is compiler-generated and not user-visible; it is
     passed by reference when the function type is addressable.  */
  DECL_ARTIFICIAL (result_decl) = 1;
  DECL_IGNORED_P (result_decl) = 1;
  DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
  DECL_RESULT (subprog_decl) = result_decl;

  if (asm_name)
    {
      SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

      /* The expand_main_function circuitry expects "main_identifier_node" to
	 designate the DECL_NAME of the 'main' entry point, in turn expected
	 to be declared as the "main" function literally by default.  Ada
	 program entry points are typically declared with a different name
	 within the binder generated file, exported as 'main' to satisfy the
	 system expectations.  Force main_identifier_node in this case.  */
      if (asm_name == main_identifier_node)
	DECL_NAME (subprog_decl) = main_identifier_node;
    }

  process_attributes (&subprog_decl, &attr_list, true, gnat_node);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
}
2728 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
2729 body. This routine needs to be invoked before processing the declarations
2730 appearing in the subprogram. */
2732 void
2733 begin_subprog_body (tree subprog_decl)
2735 tree param_decl;
2737 announce_function (subprog_decl);
2739 /* This function is being defined. */
2740 TREE_STATIC (subprog_decl) = 1;
2742 current_function_decl = subprog_decl;
2744 /* Enter a new binding level and show that all the parameters belong to
2745 this function. */
2746 gnat_pushlevel ();
2748 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
2749 param_decl = DECL_CHAIN (param_decl))
2750 DECL_CONTEXT (param_decl) = subprog_decl;
2752 make_decl_rtl (subprog_decl);
/* Finish translating the current subprogram and set its BODY.  */

void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Attach the BLOCK for this level to the function and pop the level.  */
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* Mark the RESULT_DECL as being in this subprogram. */
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  /* The body should be a BIND_EXPR whose BLOCK is the top-level one.  */
  if (TREE_CODE (body) == BIND_EXPR)
    {
      /* In that case prefer the BIND_EXPR's block as DECL_INITIAL,
	 overriding the binding-level block set above.  */
      BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
      DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
    }

  DECL_SAVED_TREE (fndecl) = body;

  /* Restore the enclosing function as the current one; NULL_TREE for a
     non-nested subprogram.  */
  current_function_decl = decl_function_context (fndecl);
}
/* Wrap up compilation of SUBPROG_DECL, a subprogram body.  */

void
rest_of_subprog_body_compilation (tree subprog_decl)
{
  /* We cannot track the location of errors past this point.  */
  error_gnat_node = Empty;

  /* If we're only annotating types, don't actually compile this function.  */
  if (type_annotate_only)
    return;

  /* Dump functions before gimplification.  */
  dump_function (TDI_original, subprog_decl);

  /* Top-level functions are handed to the cgraph for compilation; nested
     ones are only registered so they land on the parent's nested list.  */
  if (!decl_function_context (subprog_decl))
    cgraph_finalize_function (subprog_decl, false);
  else
    /* Register this function with cgraph just far enough to get it
       added to our parent's nested function list.  */
    (void) cgraph_get_create_node (subprog_decl);
}
/* Register DECL, a builtin function decl, at the global binding level
   (Empty is passed since there is no GNAT source node for it) and return
   it.  NOTE(review): presumably this is the lang hook used by the
   middle-end to declare builtins — confirm against langhooks usage.  */

tree
gnat_builtin_function (tree decl)
{
  gnat_pushdecl (decl, Empty);
  return decl;
}
2812 /* Return an integer type with the number of bits of precision given by
2813 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2814 it is a signed type. */
2816 tree
2817 gnat_type_for_size (unsigned precision, int unsignedp)
2819 tree t;
2820 char type_name[20];
2822 if (precision <= 2 * MAX_BITS_PER_WORD
2823 && signed_and_unsigned_types[precision][unsignedp])
2824 return signed_and_unsigned_types[precision][unsignedp];
2826 if (unsignedp)
2827 t = make_unsigned_type (precision);
2828 else
2829 t = make_signed_type (precision);
2831 if (precision <= 2 * MAX_BITS_PER_WORD)
2832 signed_and_unsigned_types[precision][unsignedp] = t;
2834 if (!TYPE_NAME (t))
2836 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
2837 TYPE_NAME (t) = get_identifier (type_name);
2840 return t;
2843 /* Likewise for floating-point types. */
2845 static tree
2846 float_type_for_precision (int precision, enum machine_mode mode)
2848 tree t;
2849 char type_name[20];
2851 if (float_types[(int) mode])
2852 return float_types[(int) mode];
2854 float_types[(int) mode] = t = make_node (REAL_TYPE);
2855 TYPE_PRECISION (t) = precision;
2856 layout_type (t);
2858 gcc_assert (TYPE_MODE (t) == mode);
2859 if (!TYPE_NAME (t))
2861 sprintf (type_name, "FLOAT_%d", precision);
2862 TYPE_NAME (t) = get_identifier (type_name);
2865 return t;
2868 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2869 an unsigned type; otherwise a signed type is returned. */
2871 tree
2872 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2874 if (mode == BLKmode)
2875 return NULL_TREE;
2877 if (mode == VOIDmode)
2878 return void_type_node;
2880 if (COMPLEX_MODE_P (mode))
2881 return NULL_TREE;
2883 if (SCALAR_FLOAT_MODE_P (mode))
2884 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2886 if (SCALAR_INT_MODE_P (mode))
2887 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2889 if (VECTOR_MODE_P (mode))
2891 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2892 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2893 if (inner_type)
2894 return build_vector_type_for_mode (inner_type, mode);
2897 return NULL_TREE;
2900 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2902 tree
2903 gnat_unsigned_type (tree type_node)
2905 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2907 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2909 type = copy_node (type);
2910 TREE_TYPE (type) = type_node;
2912 else if (TREE_TYPE (type_node)
2913 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2914 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2916 type = copy_node (type);
2917 TREE_TYPE (type) = TREE_TYPE (type_node);
2920 return type;
2923 /* Return the signed version of a TYPE_NODE, a scalar type. */
2925 tree
2926 gnat_signed_type (tree type_node)
2928 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2930 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2932 type = copy_node (type);
2933 TREE_TYPE (type) = type_node;
2935 else if (TREE_TYPE (type_node)
2936 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2937 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2939 type = copy_node (type);
2940 TREE_TYPE (type) = TREE_TYPE (type_node);
2943 return type;
/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
   transparently converted to each other.  */

int
gnat_types_compatible_p (tree t1, tree t2)
{
  enum tree_code code;

  /* This is the default criterion.  */
  if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
    return 1;

  /* We only check structural equivalence here.  */
  if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
    return 0;

  /* Vector types are also compatible if they have the same number of subparts
     and the same form of (scalar) element type.  */
  if (code == VECTOR_TYPE
      && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
      && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
      && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
    return 1;

  /* Array types are also compatible if they are constrained and have the same
     domain(s) and the same component type.  Identical domains, or equal
     integer bounds on both domains, both count; component types may in turn
     be compatible arrays (multi-dimensional case, checked recursively).  */
  if (code == ARRAY_TYPE
      && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
	  || (TYPE_DOMAIN (t1)
	      && TYPE_DOMAIN (t2)
	      && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
	      && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
      && (TREE_TYPE (t1) == TREE_TYPE (t2)
	  || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
	      && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
    return 1;

  return 0;
}
2988 /* Return true if EXPR is a useless type conversion. */
2990 bool
2991 gnat_useless_type_conversion (tree expr)
2993 if (CONVERT_EXPR_P (expr)
2994 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
2995 || TREE_CODE (expr) == NON_LVALUE_EXPR)
2996 return gnat_types_compatible_p (TREE_TYPE (expr),
2997 TREE_TYPE (TREE_OPERAND (expr, 0)));
2999 return false;
3002 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3004 bool
3005 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3006 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3008 return TYPE_CI_CO_LIST (t) == cico_list
3009 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3010 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3011 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      /* Decls and constants cannot contain discriminant references.  */
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
	{
	  tree t, *argarray;
	  int n, i;

	  /* Try to inline the call first, then recurse on the result;
	     otherwise rebuild the call with extremized arguments.  */
	  t = maybe_inline_call_in_expr (exp);
	  if (t)
	    return max_size (t, max_p);

	  n = call_expr_nargs (exp);
	  gcc_assert (n > 0);
	  argarray = XALLOCAVEC (tree, n);
	  for (i = 0; i < n; i++)
	    argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
	  return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
	}
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (!CONTAINS_PLACEHOLDER_P (exp))
	return exp;

      /* Replace the discriminant reference by the extreme value of the
	 discriminant's (sub)type, taken from operand 1 of the reference.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);

    case tcc_comparison:
      /* A comparison yields at most 1 and at least 0.  */
      return max_p ? size_one_node : size_zero_node;

    case tcc_unary:
      if (code == NON_LVALUE_EXPR)
	return max_size (TREE_OPERAND (exp, 0), max_p);

      /* Negation flips the direction of the extremum.  */
      return fold_build1 (code, type,
			  max_size (TREE_OPERAND (exp, 0),
				    code == NEGATE_EXPR ? !max_p : max_p));

    case tcc_binary:
      {
	/* Subtraction flips the direction for the right-hand side.  */
	tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	tree rhs = max_size (TREE_OPERAND (exp, 1),
			     code == MINUS_EXPR ? !max_p : max_p);

	/* Special-case wanting the maximum value of a MIN_EXPR.
	   In that case, if one side overflows, return the other.  */
	if (max_p && code == MIN_EXPR)
	  {
	    if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
	      return lhs;

	    if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
	      return rhs;
	  }

	/* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	   overflowing and the RHS a variable.  */
	if ((code == MINUS_EXPR || code == PLUS_EXPR)
	    && TREE_CODE (lhs) == INTEGER_CST
	    && TREE_OVERFLOW (lhs)
	    && !TREE_CONSTANT (rhs))
	  return lhs;

	return size_binop (code, lhs, rhs);
      }

    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == SAVE_EXPR)
	    return exp;

	  return fold_build1 (code, type,
			      max_size (TREE_OPERAND (exp, 0), max_p));

	case 2:
	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  return fold_build2 (code, type,
			      max_size (TREE_OPERAND (exp, 0), max_p),
			      max_size (TREE_OPERAND (exp, 1), max_p));

	case 3:
	  /* For a conditional, take the extremum of both arms since the
	     condition cannot be evaluated here.  */
	  if (code == COND_EXPR)
	    return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p));

	default:
	  break;
	}

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  vec<constructor_elt, va_gc> *template_elts = NULL;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip padding and justified-modular wrappers to reach the actual
     array type.  */
  while (TREE_CODE (array_type) == RECORD_TYPE
	 && (TYPE_PADDING_P (array_type)
	     || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.

     Each iteration advances either the bound list (when available) or the
     array type, and skips two template fields (a low/high bound pair).  */

  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = DECL_CHAIN (DECL_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gcc_unreachable ();

      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      /* Append the (low bound, high bound) pair for this dimension.  */
      CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
      CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
    }

  return gnat_build_constructor (template_type, template_elts);
}
3197 /* Return true if TYPE is suitable for the element type of a vector. */
3199 static bool
3200 type_for_vector_element_p (tree type)
3202 enum machine_mode mode;
3204 if (!INTEGRAL_TYPE_P (type)
3205 && !SCALAR_FLOAT_TYPE_P (type)
3206 && !FIXED_POINT_TYPE_P (type))
3207 return false;
3209 mode = TYPE_MODE (type);
3210 if (GET_MODE_CLASS (mode) != MODE_INT
3211 && !SCALAR_FLOAT_MODE_P (mode)
3212 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3213 return false;
3215 return true;
/* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
   this is not possible.  If ATTRIBUTE is non-zero, we are processing the
   attribute declaration and want to issue error messages on failure.  */

static tree
build_vector_type_for_size (tree inner_type, tree size, tree attribute)
{
  unsigned HOST_WIDE_INT size_int, inner_size_int;
  int nunits;

  /* Silently punt on variable sizes.  We can't make vector types for them,
     need to ignore them on front-end generated subtypes of unconstrained
     base types, and this attribute is for binding implementors, not end
     users, so we should never get there from legitimate explicit uses.  */
  if (!tree_fits_uhwi_p (size))
    return NULL_TREE;
  size_int = tree_to_uhwi (size);

  /* The element type must be a scalar suitable for vectors.  */
  if (!type_for_vector_element_p (inner_type))
    {
      if (attribute)
	error ("invalid element type for attribute %qs",
	       IDENTIFIER_POINTER (attribute));
      return NULL_TREE;
    }
  inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));

  /* The vector size must be an exact multiple of the element size.  */
  if (size_int % inner_size_int)
    {
      if (attribute)
	error ("vector size not an integral multiple of component size");
      return NULL_TREE;
    }

  if (size_int == 0)
    {
      if (attribute)
	error ("zero vector size");
      return NULL_TREE;
    }

  /* The number of elements must be a power of two.  */
  nunits = size_int / inner_size_int;
  if (nunits & (nunits - 1))
    {
      if (attribute)
	error ("number of components of vector not a power of two");
      return NULL_TREE;
    }

  return build_vector_type (inner_type, nunits);
}
3270 /* Return a vector type whose representative array type is ARRAY_TYPE, or
3271 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3272 processing the attribute and want to issue error messages on failure. */
3274 static tree
3275 build_vector_type_for_array (tree array_type, tree attribute)
3277 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3278 TYPE_SIZE_UNIT (array_type),
3279 attribute);
3280 if (!vector_type)
3281 return NULL_TREE;
3283 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3284 return vector_type;
3287 /* Helper routine to make a descriptor field. FIELD_LIST is the list of decls
3288 being built; the new decl is chained on to the front of the list. */
3290 static tree
3291 make_descriptor_field (const char *name, tree type, tree rec_type,
3292 tree initial, tree field_list)
3294 tree field
3295 = create_field_decl (get_identifier (name), type, rec_type, NULL_TREE,
3296 NULL_TREE, 0, 0);
3298 DECL_INITIAL (field) = initial;
3299 DECL_CHAIN (field) = field_list;
3300 return field;
/* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
   descriptor type, and the GCC type of an object.  Each FIELD_DECL in the
   type contains in its DECL_INITIAL the expression to use when a constructor
   is made for the type.  GNAT_ENTITY is an entity used to print out an error
   message if the mechanism cannot be applied to an object of that type and
   also for the name.  */

tree
build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
{
  tree record_type = make_node (RECORD_TYPE);
  tree pointer32_type, pointer64_type;
  tree field_list = NULL_TREE;
  int klass, ndim, i, dtype = 0;
  tree inner_type, tem;
  tree *idx_arr;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = XALLOCAVEC (tree, ndim);

  /* Collect the index types; Fortran convention stores dimensions in the
     reverse order, except for the NCA descriptor variants.  */
  if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  NOTE(review): the numeric codes below
     presumably are the VMS DSC$K_DTYPE_* values — confirm against the
     OpenVMS descriptor specification.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TYPE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TYPE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TYPE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TYPE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TYPE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
    case By_Short_Descriptor_A:
      klass = 4;
      break;
    case By_Descriptor_NCA:
    case By_Short_Descriptor_NCA:
      klass = 10;
      break;
    case By_Descriptor_SB:
    case By_Short_Descriptor_SB:
      klass = 15;
      break;
    case By_Descriptor:
    case By_Short_Descriptor:
    case By_Descriptor_S:
    case By_Short_Descriptor_S:
    default:
      klass = 1;
      break;
    }

  /* Make the type for a descriptor for VMS.  The first four fields are the
     same for all types.  */
  field_list
    = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type,
			     size_in_bytes ((mech == By_Descriptor_A
					     || mech == By_Short_Descriptor_A)
					    ? inner_type : type),
			     field_list);
  field_list
    = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type,
			     size_int (dtype), field_list);
  field_list
    = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type,
			     size_int (klass), field_list);

  pointer32_type = build_pointer_type_for_mode (type, SImode, false);
  pointer64_type = build_pointer_type_for_mode (type, DImode, false);

  /* Ensure that only 32-bit pointers are passed in 32-bit descriptors.  Note
     that we cannot build a template call to the CE routine as it would get a
     wrong source location; instead we use a second placeholder for it.  */
  tem = build_unary_op (ADDR_EXPR, pointer64_type,
			build0 (PLACEHOLDER_EXPR, type));
  tem = build3 (COND_EXPR, pointer32_type,
		Pmode != SImode
		? build_binary_op (GE_EXPR, boolean_type_node, tem,
				   build_int_cstu (pointer64_type, 0x80000000))
		: boolean_false_node,
		build0 (PLACEHOLDER_EXPR, void_type_node),
		convert (pointer32_type, tem));

  field_list
    = make_descriptor_field ("POINTER", pointer32_type, record_type, tem,
			     field_list);

  /* The remaining fields depend on the descriptor class.  */
  switch (mech)
    {
    case By_Descriptor:
    case By_Short_Descriptor:
    case By_Descriptor_S:
    case By_Short_Descriptor_S:
      break;

    case By_Descriptor_SB:
    case By_Short_Descriptor_SB:
      /* String-with-bounds descriptor: low and high bounds only.  */
      field_list
	= make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
				 record_type,
				 (TREE_CODE (type) == ARRAY_TYPE
				  ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
				  : size_zero_node),
				 field_list);
      field_list
	= make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
				 record_type,
				 (TREE_CODE (type) == ARRAY_TYPE
				  ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
				  : size_zero_node),
				 field_list);
      break;

    case By_Descriptor_A:
    case By_Short_Descriptor_A:
    case By_Descriptor_NCA:
    case By_Short_Descriptor_NCA:
      field_list
	= make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
				 record_type, size_zero_node, field_list);

      field_list
	= make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
				 record_type, size_zero_node, field_list);

      field_list
	= make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
				 record_type,
				 size_int ((mech == By_Descriptor_NCA
					    || mech == By_Short_Descriptor_NCA)
					   ? 0
					   /* Set FL_COLUMN, FL_COEFF, and
					      FL_BOUNDS.  */
					   : (TREE_CODE (type) == ARRAY_TYPE
					      && TYPE_CONVENTION_FORTRAN_P
						 (type)
					      ? 224 : 192)),
				 field_list);

      field_list
	= make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
				 record_type, size_int (ndim), field_list);

      field_list
	= make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
				 record_type, size_in_bytes (type),
				 field_list);

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build0 (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
		      convert (TYPE_DOMAIN (inner_type), size_zero_node),
		      NULL_TREE, NULL_TREE);

      field_list
	= make_descriptor_field ("A0", pointer32_type, record_type,
				 build1 (ADDR_EXPR, pointer32_type, tem),
				 field_list);

      /* Next come the addressing coefficients.  */
      tem = size_one_node;
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  /* Length of dimension I: MAX - MIN + 1, scaled by the product of
	     the lengths of the previous dimensions.  */
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  fname[0] = ((mech == By_Descriptor_NCA ||
		       mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (32, 1),
				     record_type, idx_length, field_list);

	  /* For NCA descriptors the coefficient accumulates the lengths.  */
	  if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (32, 1),
				     record_type, TYPE_MIN_VALUE (idx_arr[i]),
				     field_list);

	  fname[0] = 'U';
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (32, 1),
				     record_type, TYPE_MAX_VALUE (idx_arr[i]),
				     field_list);
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  /* Name the record <entity>___DESC and lay it out.  Fields were chained
     in reverse, hence the nreverse.  */
  TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
  finish_record_type (record_type, nreverse (field_list), 0, false);
  return record_type;
}
3604 /* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
3605 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3606 type contains in its DECL_INITIAL the expression to use when a constructor
3607 is made for the type. GNAT_ENTITY is an entity used to print out an error
3608 message if the mechanism cannot be applied to an object of that type and
3609 also for the name. */
3611 tree
3612 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
3614 tree record_type = make_node (RECORD_TYPE);
3615 tree pointer64_type;
3616 tree field_list = NULL_TREE;
3617 int klass, ndim, i, dtype = 0;
3618 tree inner_type, tem;
3619 tree *idx_arr;
3621 /* If TYPE is an unconstrained array, use the underlying array type. */
3622 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3623 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
3625 /* If this is an array, compute the number of dimensions in the array,
3626 get the index types, and point to the inner type. */
3627 if (TREE_CODE (type) != ARRAY_TYPE)
3628 ndim = 0;
3629 else
3630 for (ndim = 1, inner_type = type;
3631 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
3632 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
3633 ndim++, inner_type = TREE_TYPE (inner_type))
3636 idx_arr = XALLOCAVEC (tree, ndim);
3638 if (mech != By_Descriptor_NCA
3639 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
3640 for (i = ndim - 1, inner_type = type;
3641 i >= 0;
3642 i--, inner_type = TREE_TYPE (inner_type))
3643 idx_arr[i] = TYPE_DOMAIN (inner_type);
3644 else
3645 for (i = 0, inner_type = type;
3646 i < ndim;
3647 i++, inner_type = TREE_TYPE (inner_type))
3648 idx_arr[i] = TYPE_DOMAIN (inner_type);
3650 /* Now get the DTYPE value. */
3651 switch (TREE_CODE (type))
3653 case INTEGER_TYPE:
3654 case ENUMERAL_TYPE:
3655 case BOOLEAN_TYPE:
3656 if (TYPE_VAX_FLOATING_POINT_P (type))
3657 switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
3659 case 6:
3660 dtype = 10;
3661 break;
3662 case 9:
3663 dtype = 11;
3664 break;
3665 case 15:
3666 dtype = 27;
3667 break;
3669 else
3670 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
3672 case 8:
3673 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
3674 break;
3675 case 16:
3676 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
3677 break;
3678 case 32:
3679 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
3680 break;
3681 case 64:
3682 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
3683 break;
3684 case 128:
3685 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
3686 break;
3688 break;
3690 case REAL_TYPE:
3691 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
3692 break;
3694 case COMPLEX_TYPE:
3695 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
3696 && TYPE_VAX_FLOATING_POINT_P (type))
3697 switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
3699 case 6:
3700 dtype = 12;
3701 break;
3702 case 9:
3703 dtype = 13;
3704 break;
3705 case 15:
3706 dtype = 29;
3708 else
3709 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
3710 break;
3712 case ARRAY_TYPE:
3713 dtype = 14;
3714 break;
3716 default:
3717 break;
3720 /* Get the CLASS value. */
3721 switch (mech)
3723 case By_Descriptor_A:
3724 klass = 4;
3725 break;
3726 case By_Descriptor_NCA:
3727 klass = 10;
3728 break;
3729 case By_Descriptor_SB:
3730 klass = 15;
3731 break;
3732 case By_Descriptor:
3733 case By_Descriptor_S:
3734 default:
3735 klass = 1;
3736 break;
3739 /* Make the type for a 64-bit descriptor for VMS. The first six fields
3740 are the same for all types. */
3741 field_list
3742 = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
3743 record_type, size_int (1), field_list);
3744 field_list
3745 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
3746 record_type, size_int (dtype), field_list);
3747 field_list
3748 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
3749 record_type, size_int (klass), field_list);
3750 field_list
3751 = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
3752 record_type, size_int (-1), field_list);
3753 field_list
3754 = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
3755 record_type,
3756 size_in_bytes (mech == By_Descriptor_A
3757 ? inner_type : type),
3758 field_list);
3760 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
3762 field_list
3763 = make_descriptor_field ("POINTER", pointer64_type, record_type,
3764 build_unary_op (ADDR_EXPR, pointer64_type,
3765 build0 (PLACEHOLDER_EXPR, type)),
3766 field_list);
3768 switch (mech)
3770 case By_Descriptor:
3771 case By_Descriptor_S:
3772 break;
3774 case By_Descriptor_SB:
3775 field_list
3776 = make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
3777 record_type,
3778 (TREE_CODE (type) == ARRAY_TYPE
3779 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
3780 : size_zero_node),
3781 field_list);
3782 field_list
3783 = make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
3784 record_type,
3785 (TREE_CODE (type) == ARRAY_TYPE
3786 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
3787 : size_zero_node),
3788 field_list);
3789 break;
3791 case By_Descriptor_A:
3792 case By_Descriptor_NCA:
3793 field_list
3794 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
3795 record_type, size_zero_node, field_list);
3797 field_list
3798 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
3799 record_type, size_zero_node, field_list);
3801 dtype = (mech == By_Descriptor_NCA
3803 /* Set FL_COLUMN, FL_COEFF, and
3804 FL_BOUNDS. */
3805 : (TREE_CODE (type) == ARRAY_TYPE
3806 && TYPE_CONVENTION_FORTRAN_P (type)
3807 ? 224 : 192));
3808 field_list
3809 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
3810 record_type, size_int (dtype),
3811 field_list);
3813 field_list
3814 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
3815 record_type, size_int (ndim), field_list);
3817 field_list
3818 = make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
3819 record_type, size_int (0), field_list);
3820 field_list
3821 = make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
3822 record_type, size_in_bytes (type),
3823 field_list);
3825 /* Now build a pointer to the 0,0,0... element. */
3826 tem = build0 (PLACEHOLDER_EXPR, type);
3827 for (i = 0, inner_type = type; i < ndim;
3828 i++, inner_type = TREE_TYPE (inner_type))
3829 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3830 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3831 NULL_TREE, NULL_TREE);
3833 field_list
3834 = make_descriptor_field ("A0", pointer64_type, record_type,
3835 build1 (ADDR_EXPR, pointer64_type, tem),
3836 field_list);
3838 /* Next come the addressing coefficients. */
3839 tem = size_one_node;
3840 for (i = 0; i < ndim; i++)
3842 char fname[3];
3843 tree idx_length
3844 = size_binop (MULT_EXPR, tem,
3845 size_binop (PLUS_EXPR,
3846 size_binop (MINUS_EXPR,
3847 TYPE_MAX_VALUE (idx_arr[i]),
3848 TYPE_MIN_VALUE (idx_arr[i])),
3849 size_int (1)));
3851 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3852 fname[1] = '0' + i, fname[2] = 0;
3853 field_list
3854 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
3855 record_type, idx_length, field_list);
3857 if (mech == By_Descriptor_NCA)
3858 tem = idx_length;
3861 /* Finally here are the bounds. */
3862 for (i = 0; i < ndim; i++)
3864 char fname[3];
3866 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3867 field_list
3868 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
3869 record_type,
3870 TYPE_MIN_VALUE (idx_arr[i]), field_list);
3872 fname[0] = 'U';
3873 field_list
3874 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
3875 record_type,
3876 TYPE_MAX_VALUE (idx_arr[i]), field_list);
3878 break;
3880 default:
3881 post_error ("unsupported descriptor type for &", gnat_entity);
3884 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC64");
3885 finish_record_type (record_type, nreverse (field_list), 0, false);
3886 return record_type;
3889 /* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
3890 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
3892 tree
3893 fill_vms_descriptor (tree gnu_type, tree gnu_expr, Node_Id gnat_actual)
3895 vec<constructor_elt, va_gc> *v = NULL;
3896 tree field;
     /* Strip any unconstrained-array wrapper, protect the expression against
        multiple evaluation and make it addressable, since the descriptor
        fields will reference it.  */
3898 gnu_expr = maybe_unconstrained_array (gnu_expr);
3899 gnu_expr = gnat_protect_expr (gnu_expr);
3900 gnat_mark_addressable (gnu_expr);
3902 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
3903 routine in case we have a 32-bit descriptor. */
3904 gnu_expr = build2 (COMPOUND_EXPR, void_type_node,
3905 build_call_raise (CE_Range_Check_Failed, gnat_actual,
3906 N_Raise_Constraint_Error),
3907 gnu_expr);
     /* Each field's DECL_INITIAL is a template expression containing a
        PLACEHOLDER_EXPR (see make_descriptor_field); substitute GNU_EXPR for
        the placeholder to obtain the actual field value.  */
3909 for (field = TYPE_FIELDS (gnu_type); field; field = DECL_CHAIN (field))
3911 tree value
3912 = convert (TREE_TYPE (field),
3913 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field),
3914 gnu_expr));
3915 CONSTRUCTOR_APPEND_ELT (v, field, value);
3918 return gnat_build_constructor (gnu_type, v);
3921 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3922 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3923 which the VMS descriptor is passed. */
3925 static tree
3926 convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3928 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3929 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3930 /* The CLASS field is the 3rd field in the descriptor. */
3931 tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
3932 /* The POINTER field is the 6th field in the descriptor. */
3933 tree pointer = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass)));
3935 /* Retrieve the value of the POINTER field. */
3936 tree gnu_expr64
3937 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3939 if (POINTER_TYPE_P (gnu_type))
3940 return convert (gnu_type, gnu_expr64);
3942 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
3944 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3945 tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
3946 tree template_type = TREE_TYPE (p_bounds_type);
3947 tree min_field = TYPE_FIELDS (template_type);
3948 tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
3949 tree template_tree, template_addr, aflags, dimct, t, u;
3950 /* See the head comment of build_vms_descriptor. */
3951 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3952 tree lfield, ufield;
3953 vec<constructor_elt, va_gc> *v;
3955 /* Convert POINTER to the pointer-to-array type. */
3956 gnu_expr64 = convert (p_array_type, gnu_expr64);
     /* Class codes used below: 1 = S, 4 = A, 10 = NCA, 15 = SB; these match
        the KLASS values assigned in build_vms_descriptor.  */
3958 switch (iklass)
3960 case 1: /* Class S */
3961 case 15: /* Class SB */
3962 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3963 vec_alloc (v, 2);
3964 t = DECL_CHAIN (DECL_CHAIN (klass));
3965 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3966 CONSTRUCTOR_APPEND_ELT (v, min_field,
3967 convert (TREE_TYPE (min_field),
3968 integer_one_node));
3969 CONSTRUCTOR_APPEND_ELT (v, max_field,
3970 convert (TREE_TYPE (max_field), t));
3971 template_tree = gnat_build_constructor (template_type, v);
3972 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3974 /* For class S, we are done. */
3975 if (iklass == 1)
3976 break;
3978 /* Test that we really have a SB descriptor, like DEC Ada. */
3979 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3980 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3981 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
3982 /* If so, there is already a template in the descriptor and
3983 it is located right after the POINTER field. The fields are
3984 64bits so they must be repacked. */
3985 t = DECL_CHAIN (pointer);
3986 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3987 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3989 t = DECL_CHAIN (t);
3990 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3991 ufield = convert
3992 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
3994 /* Build the template in the form of a constructor. */
3995 vec_alloc (v, 2);
3996 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
3997 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
3998 ufield);
3999 template_tree = gnat_build_constructor (template_type, v);
4001 /* Otherwise use the {1, LENGTH} template we build above. */
4002 template_addr = build3 (COND_EXPR, p_bounds_type, u,
4003 build_unary_op (ADDR_EXPR, p_bounds_type,
4004 template_tree),
4005 template_addr);
4006 break;
4008 case 4: /* Class A */
4009 /* The AFLAGS field is the 3rd field after the pointer in the
4010 descriptor. */
4011 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
4012 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4013 /* The DIMCT field is the next field in the descriptor after
4014 aflags. */
4015 t = DECL_CHAIN (t);
4016 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4017 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
4018 or FL_COEFF or FL_BOUNDS not set. */
4019 u = build_int_cst (TREE_TYPE (aflags), 192);
4020 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
4021 build_binary_op (NE_EXPR, boolean_type_node,
4022 dimct,
4023 convert (TREE_TYPE (dimct),
4024 size_one_node)),
4025 build_binary_op (NE_EXPR, boolean_type_node,
4026 build2 (BIT_AND_EXPR,
4027 TREE_TYPE (aflags),
4028 aflags, u),
4029 u));
4030 /* There is already a template in the descriptor and it is located
4031 in block 3. The fields are 64bits so they must be repacked. */
4032 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
4033 (t)))));
4034 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4035 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
4037 t = DECL_CHAIN (t);
4038 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4039 ufield = convert
4040 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
4042 /* Build the template in the form of a constructor. */
4043 vec_alloc (v, 2);
4044 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
4045 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
4046 ufield);
4047 template_tree = gnat_build_constructor (template_type, v);
     /* The template is only valid if the check above did not trigger;
        otherwise raise Constraint_Error at run time.  */
4048 template_tree = build3 (COND_EXPR, template_type, u,
4049 build_call_raise (CE_Length_Check_Failed, Empty,
4050 N_Raise_Constraint_Error),
4051 template_tree);
4052 template_addr
4053 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
4054 break;
4056 case 10: /* Class NCA */
4057 default:
4058 post_error ("unsupported descriptor type for &", gnat_subprog);
4059 template_addr = integer_zero_node;
4060 break;
4063 /* Build the fat pointer in the form of a constructor. */
4064 vec_alloc (v, 2);
4065 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr64);
4066 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
4067 template_addr);
4068 return gnat_build_constructor (gnu_type, v);
4071 else
4072 gcc_unreachable ();
4075 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
4076 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
4077 which the VMS descriptor is passed. */
4079 static tree
4080 convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
4082 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
4083 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
4084 /* The CLASS field is the 3rd field in the descriptor. */
4085 tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
4086 /* The POINTER field is the 4th field in the descriptor. */
4087 tree pointer = DECL_CHAIN (klass);
4089 /* Retrieve the value of the POINTER field. */
4090 tree gnu_expr32
4091 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
4093 if (POINTER_TYPE_P (gnu_type))
4094 return convert (gnu_type, gnu_expr32);
4096 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
4098 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
4099 tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
4100 tree template_type = TREE_TYPE (p_bounds_type);
4101 tree min_field = TYPE_FIELDS (template_type);
4102 tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
4103 tree template_tree, template_addr, aflags, dimct, t, u;
4104 /* See the head comment of build_vms_descriptor. */
4105 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
4106 vec<constructor_elt, va_gc> *v;
4108 /* Convert POINTER to the pointer-to-array type. */
4109 gnu_expr32 = convert (p_array_type, gnu_expr32);
     /* Class codes used below: 1 = S, 4 = A, 10 = NCA, 15 = SB; these match
        the KLASS values assigned in build_vms_descriptor.  */
4111 switch (iklass)
4113 case 1: /* Class S */
4114 case 15: /* Class SB */
4115 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
4116 vec_alloc (v, 2);
4117 t = TYPE_FIELDS (desc_type);
4118 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4119 CONSTRUCTOR_APPEND_ELT (v, min_field,
4120 convert (TREE_TYPE (min_field),
4121 integer_one_node));
4122 CONSTRUCTOR_APPEND_ELT (v, max_field,
4123 convert (TREE_TYPE (max_field), t));
4124 template_tree = gnat_build_constructor (template_type, v);
4125 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
4127 /* For class S, we are done. */
4128 if (iklass == 1)
4129 break;
4131 /* Test that we really have a SB descriptor, like DEC Ada. */
4132 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
4133 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
4134 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
4135 /* If so, there is already a template in the descriptor and
4136 it is located right after the POINTER field. */
4137 t = DECL_CHAIN (pointer);
4138 template_tree
4139 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4140 /* Otherwise use the {1, LENGTH} template we build above. */
4141 template_addr = build3 (COND_EXPR, p_bounds_type, u,
4142 build_unary_op (ADDR_EXPR, p_bounds_type,
4143 template_tree),
4144 template_addr);
4145 break;
4147 case 4: /* Class A */
4148 /* The AFLAGS field is the 7th field in the descriptor. */
4149 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
4150 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4151 /* The DIMCT field is the 8th field in the descriptor. */
4152 t = DECL_CHAIN (t);
4153 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4154 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
4155 or FL_COEFF or FL_BOUNDS not set. */
4156 u = build_int_cst (TREE_TYPE (aflags), 192);
4157 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
4158 build_binary_op (NE_EXPR, boolean_type_node,
4159 dimct,
4160 convert (TREE_TYPE (dimct),
4161 size_one_node)),
4162 build_binary_op (NE_EXPR, boolean_type_node,
4163 build2 (BIT_AND_EXPR,
4164 TREE_TYPE (aflags),
4165 aflags, u),
4166 u));
4167 /* There is already a template in the descriptor and it is
4168 located at the start of block 3 (12th field). */
4169 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t))));
4170 template_tree
4171 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
     /* The template is only valid if the check above did not trigger;
        otherwise raise Constraint_Error at run time.  */
4172 template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
4173 build_call_raise (CE_Length_Check_Failed, Empty,
4174 N_Raise_Constraint_Error),
4175 template_tree);
4176 template_addr
4177 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
4178 break;
4180 case 10: /* Class NCA */
4181 default:
4182 post_error ("unsupported descriptor type for &", gnat_subprog);
4183 template_addr = integer_zero_node;
4184 break;
4187 /* Build the fat pointer in the form of a constructor. */
4188 vec_alloc (v, 2);
4189 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr32);
4190 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
4191 template_addr);
4193 return gnat_build_constructor (gnu_type, v);
4196 else
4197 gcc_unreachable ();
4200 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
4201 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
4202 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
4203 descriptor is passed. */
4205 tree
4206 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
4207 Entity_Id gnat_subprog)
4209 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
4210 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
4211 tree mbo = TYPE_FIELDS (desc_type);
4212 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
4213 tree mbmo = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo)));
4214 tree is64bit, gnu_expr32, gnu_expr64;
4216 /* If the field name is not MBO, it must be 32-bit and no alternate.
4217 Otherwise primary must be 64-bit and alternate 32-bit. */
4218 if (strcmp (mbostr, "MBO") != 0)
4220 tree ret = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
4221 return ret;
4224 /* Build the test for 64-bit descriptor. */
     /* A 64-bit descriptor was built with MBO == 1 and MBMO == -1 (see
        build_vms_descriptor), so test both fields.  */
4225 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
4226 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
4227 is64bit
4228 = build_binary_op (TRUTH_ANDIF_EXPR, boolean_type_node,
4229 build_binary_op (EQ_EXPR, boolean_type_node,
4230 convert (integer_type_node, mbo),
4231 integer_one_node),
4232 build_binary_op (EQ_EXPR, boolean_type_node,
4233 convert (integer_type_node, mbmo),
4234 integer_minus_one_node));
4236 /* Build the 2 possible end results. */
4237 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
4238 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
4239 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
     /* Select between the 64-bit and 32-bit conversions at run time.  */
4240 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
4243 /* Build a type to be used to represent an aliased object whose nominal type
4244 is an unconstrained array. This consists of a RECORD_TYPE containing a
4245 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
4246 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
4247 an arbitrary unconstrained object. Use NAME as the name of the record.
4248 DEBUG_INFO_P is true if we need to write debug information for the type. */
4250 tree
4251 build_unc_object_type (tree template_type, tree object_type, tree name,
4252 bool debug_info_p)
4254 tree type = make_node (RECORD_TYPE);
4255 tree template_field
4256 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
4257 NULL_TREE, NULL_TREE, 0, 1);
4258 tree array_field
4259 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
4260 NULL_TREE, NULL_TREE, 0, 1);
     /* Lay the record out as the BOUNDS template followed by the ARRAY data
        and mark it as containing a template.  */
4262 TYPE_NAME (type) = name;
4263 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
4264 DECL_CHAIN (template_field) = array_field;
4265 finish_record_type (type, template_field, 0, true);
4267 /* Declare it now since it will never be declared otherwise. This is
4268 necessary to ensure that its subtrees are properly marked. */
4269 create_type_decl (name, type, true, debug_info_p, Empty);
4271 return type;
4274 /* Same, taking a thin or fat pointer type instead of a template type. */
4276 tree
4277 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
4278 tree name, bool debug_info_p)
4280 tree template_type;
4282 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
     /* For a fat pointer, the template type is designated by the second
        field; for a thin pointer, it is the first field of the record the
        pointer designates.  */
4284 template_type
4285 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
4286 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
4287 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
4289 return
4290 build_unc_object_type (template_type, object_type, name, debug_info_p);
4293 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
4294 In the normal case this is just two adjustments, but we have more to
4295 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
4297 void
4298 update_pointer_to (tree old_type, tree new_type)
4300 tree ptr = TYPE_POINTER_TO (old_type);
4301 tree ref = TYPE_REFERENCE_TO (old_type);
4302 tree t;
4304 /* If this is the main variant, process all the other variants first. */
4305 if (TYPE_MAIN_VARIANT (old_type) == old_type)
4306 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
4307 update_pointer_to (t, new_type)
4309 /* If no pointers and no references, we are done. */
4310 if (!ptr && !ref)
4311 return;
4313 /* Merge the old type qualifiers in the new type.
4315 Each old variant has qualifiers for specific reasons, and the new
4316 designated type as well. Each set of qualifiers represents useful
4317 information grabbed at some point, and merging the two simply unifies
4318 these inputs into the final type description.
4320 Consider for instance a volatile type frozen after an access to constant
4321 type designating it; after the designated type's freeze, we get here with
4322 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
4323 when the access type was processed. We will make a volatile and readonly
4324 designated type, because that's what it really is.
4326 We might also get here for a non-dummy OLD_TYPE variant with different
4327 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
4328 to private record type elaboration (see the comments around the call to
4329 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
4330 the qualifiers in those cases too, to avoid accidentally discarding the
4331 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
4332 new_type
4333 = build_qualified_type (new_type,
4334 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
4336 /* If old type and new type are identical, there is nothing to do. */
4337 if (old_type == new_type)
4338 return;
4340 /* Otherwise, first handle the simple case. */
4341 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
4343 tree new_ptr, new_ref;
4345 /* If pointer or reference already points to new type, nothing to do.
4346 This can happen as update_pointer_to can be invoked multiple times
4347 on the same couple of types because of the type variants. */
4348 if ((ptr && TREE_TYPE (ptr) == new_type)
4349 || (ref && TREE_TYPE (ref) == new_type))
4350 return;
4352 /* Chain PTR and its variants at the end. */
4353 new_ptr = TYPE_POINTER_TO (new_type);
4354 if (new_ptr)
4356 while (TYPE_NEXT_PTR_TO (new_ptr))
4357 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
4358 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
4360 else
4361 TYPE_POINTER_TO (new_type) = ptr;
4363 /* Now adjust them. */
4364 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
4365 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
4367 TREE_TYPE (t) = new_type;
4368 if (TYPE_NULL_BOUNDS (t))
4369 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
4372 /* Chain REF and its variants at the end. */
4373 new_ref = TYPE_REFERENCE_TO (new_type);
4374 if (new_ref)
4376 while (TYPE_NEXT_REF_TO (new_ref))
4377 new_ref = TYPE_NEXT_REF_TO (new_ref);
4378 TYPE_NEXT_REF_TO (new_ref) = ref;
4380 else
4381 TYPE_REFERENCE_TO (new_type) = ref;
4383 /* Now adjust them. */
4384 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
4385 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
4386 TREE_TYPE (t) = new_type;
     /* The old type no longer owns any pointer or reference types.  */
4388 TYPE_POINTER_TO (old_type) = NULL_TREE;
4389 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
4392 /* Now deal with the unconstrained array case. In this case the pointer
4393 is actually a record where both fields are pointers to dummy nodes.
4394 Turn them into pointers to the correct types using update_pointer_to.
4395 Likewise for the pointer to the object record (thin pointer). */
4396 else
4398 tree new_ptr = TYPE_POINTER_TO (new_type);
4400 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
4402 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
4403 since update_pointer_to can be invoked multiple times on the same
4404 couple of types because of the type variants. */
4405 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
4406 return;
4408 update_pointer_to
4409 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
4410 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
4412 update_pointer_to
4413 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
4414 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
4416 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
4417 TYPE_OBJECT_RECORD_TYPE (new_type));
4419 TYPE_POINTER_TO (old_type) = NULL_TREE;
4423 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
4424 unconstrained one. This involves making or finding a template. */
4426 static tree
4427 convert_to_fat_pointer (tree type, tree expr)
4429 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
4430 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
4431 tree etype = TREE_TYPE (expr);
4432 tree template_addr;
4433 vec<constructor_elt, va_gc> *v;
4434 vec_alloc (v, 2);
4436 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4437 array (compare_fat_pointers ensures that this is the full discriminant)
4438 and a valid pointer to the bounds. This latter property is necessary
4439 since the compiler can hoist the load of the bounds done through it. */
4440 if (integer_zerop (expr))
4442 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4443 tree null_bounds, t;
     /* Reuse the cached null-bounds object for this pointer type if one
        was already created.  */
4445 if (TYPE_NULL_BOUNDS (ptr_template_type))
4446 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
4447 else
4449 /* The template type can still be dummy at this point so we build an
4450 empty constructor. The middle-end will fill it in with zeros. */
4451 t = build_constructor (template_type,
4452 NULL);
4453 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
4454 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
4455 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
4458 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4459 fold_convert (p_array_type, null_pointer_node));
4460 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
4461 t = build_constructor (type, v);
4462 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4463 TREE_CONSTANT (t) = 0;
4464 TREE_STATIC (t) = 1;
4466 return t;
4469 /* If EXPR is a thin pointer, make template and data from the record. */
4470 if (TYPE_IS_THIN_POINTER_P (etype))
4472 tree field = TYPE_FIELDS (TREE_TYPE (etype));
4474 expr = gnat_protect_expr (expr);
4476 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4477 the thin pointer value has been shifted so we shift it back to get
4478 the template address. */
4479 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
4481 template_addr
4482 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
4483 fold_build1 (NEGATE_EXPR, sizetype,
4484 byte_position
4485 (DECL_CHAIN (field))));
4486 template_addr
4487 = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
4488 template_addr);
4491 /* Otherwise we explicitly take the address of the fields. */
4492 else
4494 expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
4495 template_addr
4496 = build_unary_op (ADDR_EXPR, NULL_TREE,
4497 build_component_ref (expr, NULL_TREE, field,
4498 false));
4499 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
4500 build_component_ref (expr, NULL_TREE,
4501 DECL_CHAIN (field),
4502 false));
4506 /* Otherwise, build the constructor for the template. */
4507 else
4508 template_addr
4509 = build_unary_op (ADDR_EXPR, NULL_TREE,
4510 build_template (template_type, TREE_TYPE (etype),
4511 expr));
4513 /* The final result is a constructor for the fat pointer.
4515 If EXPR is an argument of a foreign convention subprogram, the type it
4516 points to is directly the component type. In this case, the expression
4517 type may not match the corresponding FIELD_DECL type at this point, so we
4518 call "convert" here to fix that up if necessary. This type consistency is
4519 required, for instance because it ensures that possible later folding of
4520 COMPONENT_REFs against this constructor always yields something of the
4521 same type as the initial reference.
4523 Note that the call to "build_template" above is still fine because it
4524 will only refer to the provided TEMPLATE_TYPE in this case. */
4525 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
4526 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
4527 return gnat_build_constructor (type, v);
4530 /* Create an expression whose value is that of EXPR,
4531 converted to type TYPE. The TREE_TYPE of the value
4532 is always TYPE. This function implements all reasonable
4533 conversions; callers should filter out those that are
4534 not permitted by the language being compiled. */
4536 tree
4537 convert (tree type, tree expr)
4539 tree etype = TREE_TYPE (expr);
4540 enum tree_code ecode = TREE_CODE (etype);
4541 enum tree_code code = TREE_CODE (type);
4543 /* If the expression is already of the right type, we are done. */
4544 if (etype == type)
4545 return expr;
4547 /* If both input and output have padding and are of variable size, do this
4548 as an unchecked conversion. Likewise if one is a mere variant of the
4549 other, so we avoid a pointless unpad/repad sequence. */
4550 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
4551 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4552 && (!TREE_CONSTANT (TYPE_SIZE (type))
4553 || !TREE_CONSTANT (TYPE_SIZE (etype))
4554 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4555 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4556 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4559 /* If the output type has padding, convert to the inner type and make a
4560 constructor to build the record, unless a variable size is involved. */
4561 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
4563 vec<constructor_elt, va_gc> *v;
4565 /* If we previously converted from another type and our type is
4566 of variable size, remove the conversion to avoid the need for
4567 variable-sized temporaries. Likewise for a conversion between
4568 original and packable version. */
4569 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4570 && (!TREE_CONSTANT (TYPE_SIZE (type))
4571 || (ecode == RECORD_TYPE
4572 && TYPE_NAME (etype)
4573 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4574 expr = TREE_OPERAND (expr, 0);
4576 /* If we are just removing the padding from expr, convert the original
4577 object if we have variable size in order to avoid the need for some
4578 variable-sized temporaries. Likewise if the padding is a variant
4579 of the other, so we avoid a pointless unpad/repad sequence. */
4580 if (TREE_CODE (expr) == COMPONENT_REF
4581 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4582 && (!TREE_CONSTANT (TYPE_SIZE (type))
4583 || TYPE_MAIN_VARIANT (type)
4584 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
4585 || (ecode == RECORD_TYPE
4586 && TYPE_NAME (etype)
4587 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4588 return convert (type, TREE_OPERAND (expr, 0));
4590 /* If the inner type is of self-referential size and the expression type
4591 is a record, do this as an unchecked conversion. But first pad the
4592 expression if possible to have the same size on both sides. */
4593 if (ecode == RECORD_TYPE
4594 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
4596 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
4597 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4598 false, false, false, true),
4599 expr);
4600 return unchecked_convert (type, expr, false);
4603 /* If we are converting between array types with variable size, do the
4604 final conversion as an unchecked conversion, again to avoid the need
4605 for some variable-sized temporaries. If valid, this conversion is
4606 very likely purely technical and without real effects. */
4607 if (ecode == ARRAY_TYPE
4608 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4609 && !TREE_CONSTANT (TYPE_SIZE (etype))
4610 && !TREE_CONSTANT (TYPE_SIZE (type)))
4611 return unchecked_convert (type,
4612 convert (TREE_TYPE (TYPE_FIELDS (type)),
4613 expr),
4614 false);
4616 vec_alloc (v, 1);
4617 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4618 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4619 return gnat_build_constructor (type, v);
4622 /* If the input type has padding, remove it and convert to the output type.
4623 The conditions ordering is arranged to ensure that the output type is not
4624 a padding type here, as it is not clear whether the conversion would
4625 always be correct if this was to happen. */
4626 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4628 tree unpadded;
4630 /* If we have just converted to this padded type, just get the
4631 inner expression. */
4632 if (TREE_CODE (expr) == CONSTRUCTOR
4633 && !vec_safe_is_empty (CONSTRUCTOR_ELTS (expr))
4634 && (*CONSTRUCTOR_ELTS (expr))[0].index == TYPE_FIELDS (etype))
4635 unpadded = (*CONSTRUCTOR_ELTS (expr))[0].value;
4637 /* Otherwise, build an explicit component reference. */
4638 else
4639 unpadded
4640 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4642 return convert (type, unpadded);
4645 /* If the input is a biased type, adjust first. */
4646 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4647 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4648 fold_convert (TREE_TYPE (etype),
4649 expr),
4650 TYPE_MIN_VALUE (etype)));
4652 /* If the input is a justified modular type, we need to extract the actual
4653 object before converting it to any other type with the exceptions of an
4654 unconstrained array or of a mere type variant. It is useful to avoid the
4655 extraction and conversion in the type variant case because it could end
4656 up replacing a VAR_DECL expr by a constructor and we might be about the
4657 take the address of the result. */
4658 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4659 && code != UNCONSTRAINED_ARRAY_TYPE
4660 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4661 return convert (type, build_component_ref (expr, NULL_TREE,
4662 TYPE_FIELDS (etype), false));
4664 /* If converting to a type that contains a template, convert to the data
4665 type and then build the template. */
4666 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4668 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4669 vec<constructor_elt, va_gc> *v;
4670 vec_alloc (v, 2);
4672 /* If the source already has a template, get a reference to the
4673 associated array only, as we are going to rebuild a template
4674 for the target type anyway. */
4675 expr = maybe_unconstrained_array (expr);
4677 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4678 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4679 obj_type, NULL_TREE));
4680 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4681 convert (obj_type, expr));
4682 return gnat_build_constructor (type, v);
4685 /* There are some cases of expressions that we process specially. */
4686 switch (TREE_CODE (expr))
4688 case ERROR_MARK:
4689 return expr;
4691 case NULL_EXPR:
4692 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4693 conversion in gnat_expand_expr. NULL_EXPR does not represent
4694 and actual value, so no conversion is needed. */
4695 expr = copy_node (expr);
4696 TREE_TYPE (expr) = type;
4697 return expr;
4699 case STRING_CST:
4700 /* If we are converting a STRING_CST to another constrained array type,
4701 just make a new one in the proper type. */
4702 if (code == ecode && AGGREGATE_TYPE_P (etype)
4703 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4704 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4706 expr = copy_node (expr);
4707 TREE_TYPE (expr) = type;
4708 return expr;
4710 break;
4712 case VECTOR_CST:
4713 /* If we are converting a VECTOR_CST to a mere type variant, just make
4714 a new one in the proper type. */
4715 if (code == ecode && gnat_types_compatible_p (type, etype))
4717 expr = copy_node (expr);
4718 TREE_TYPE (expr) = type;
4719 return expr;
4722 case CONSTRUCTOR:
4723 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4724 another padding type around the same type, just make a new one in
4725 the proper type. */
4726 if (code == ecode
4727 && (gnat_types_compatible_p (type, etype)
4728 || (code == RECORD_TYPE
4729 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4730 && TREE_TYPE (TYPE_FIELDS (type))
4731 == TREE_TYPE (TYPE_FIELDS (etype)))))
4733 expr = copy_node (expr);
4734 TREE_TYPE (expr) = type;
4735 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4736 return expr;
4739 /* Likewise for a conversion between original and packable version, or
4740 conversion between types of the same size and with the same list of
4741 fields, but we have to work harder to preserve type consistency. */
4742 if (code == ecode
4743 && code == RECORD_TYPE
4744 && (TYPE_NAME (type) == TYPE_NAME (etype)
4745 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4748 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4749 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4750 vec<constructor_elt, va_gc> *v;
4751 vec_alloc (v, len);
4752 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4753 unsigned HOST_WIDE_INT idx;
4754 tree index, value;
4756 /* Whether we need to clear TREE_CONSTANT et al. on the output
4757 constructor when we convert in place. */
4758 bool clear_constant = false;
4760 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4762 /* Skip the missing fields in the CONSTRUCTOR. */
4763 while (efield && field && !SAME_FIELD_P (efield, index))
4765 efield = DECL_CHAIN (efield);
4766 field = DECL_CHAIN (field);
4768 /* The field must be the same. */
4769 if (!(efield && field && SAME_FIELD_P (efield, field)))
4770 break;
4771 constructor_elt elt
4772 = {field, convert (TREE_TYPE (field), value)};
4773 v->quick_push (elt);
4775 /* If packing has made this field a bitfield and the input
4776 value couldn't be emitted statically any more, we need to
4777 clear TREE_CONSTANT on our output. */
4778 if (!clear_constant
4779 && TREE_CONSTANT (expr)
4780 && !CONSTRUCTOR_BITFIELD_P (efield)
4781 && CONSTRUCTOR_BITFIELD_P (field)
4782 && !initializer_constant_valid_for_bitfield_p (value))
4783 clear_constant = true;
4785 efield = DECL_CHAIN (efield);
4786 field = DECL_CHAIN (field);
4789 /* If we have been able to match and convert all the input fields
4790 to their output type, convert in place now. We'll fallback to a
4791 view conversion downstream otherwise. */
4792 if (idx == len)
4794 expr = copy_node (expr);
4795 TREE_TYPE (expr) = type;
4796 CONSTRUCTOR_ELTS (expr) = v;
4797 if (clear_constant)
4798 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4799 return expr;
4803 /* Likewise for a conversion between array type and vector type with a
4804 compatible representative array. */
4805 else if (code == VECTOR_TYPE
4806 && ecode == ARRAY_TYPE
4807 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4808 etype))
4810 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4811 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4812 vec<constructor_elt, va_gc> *v;
4813 unsigned HOST_WIDE_INT ix;
4814 tree value;
4816 /* Build a VECTOR_CST from a *constant* array constructor. */
4817 if (TREE_CONSTANT (expr))
4819 bool constant_p = true;
4821 /* Iterate through elements and check if all constructor
4822 elements are *_CSTs. */
4823 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4824 if (!CONSTANT_CLASS_P (value))
4826 constant_p = false;
4827 break;
4830 if (constant_p)
4831 return build_vector_from_ctor (type,
4832 CONSTRUCTOR_ELTS (expr));
4835 /* Otherwise, build a regular vector constructor. */
4836 vec_alloc (v, len);
4837 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4839 constructor_elt elt = {NULL_TREE, value};
4840 v->quick_push (elt);
4842 expr = copy_node (expr);
4843 TREE_TYPE (expr) = type;
4844 CONSTRUCTOR_ELTS (expr) = v;
4845 return expr;
4847 break;
4849 case UNCONSTRAINED_ARRAY_REF:
4850 /* First retrieve the underlying array. */
4851 expr = maybe_unconstrained_array (expr);
4852 etype = TREE_TYPE (expr);
4853 ecode = TREE_CODE (etype);
4854 break;
4856 case VIEW_CONVERT_EXPR:
4858 /* GCC 4.x is very sensitive to type consistency overall, and view
4859 conversions thus are very frequent. Even though just "convert"ing
4860 the inner operand to the output type is fine in most cases, it
4861 might expose unexpected input/output type mismatches in special
4862 circumstances so we avoid such recursive calls when we can. */
4863 tree op0 = TREE_OPERAND (expr, 0);
4865 /* If we are converting back to the original type, we can just
4866 lift the input conversion. This is a common occurrence with
4867 switches back-and-forth amongst type variants. */
4868 if (type == TREE_TYPE (op0))
4869 return op0;
4871 /* Otherwise, if we're converting between two aggregate or vector
4872 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4873 target type in place or to just convert the inner expression. */
4874 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4875 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4877 /* If we are converting between mere variants, we can just
4878 substitute the VIEW_CONVERT_EXPR in place. */
4879 if (gnat_types_compatible_p (type, etype))
4880 return build1 (VIEW_CONVERT_EXPR, type, op0);
4882 /* Otherwise, we may just bypass the input view conversion unless
4883 one of the types is a fat pointer, which is handled by
4884 specialized code below which relies on exact type matching. */
4885 else if (!TYPE_IS_FAT_POINTER_P (type)
4886 && !TYPE_IS_FAT_POINTER_P (etype))
4887 return convert (type, op0);
4890 break;
4893 default:
4894 break;
4897 /* Check for converting to a pointer to an unconstrained array. */
4898 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4899 return convert_to_fat_pointer (type, expr);
4901 /* If we are converting between two aggregate or vector types that are mere
4902 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4903 to a vector type from its representative array type. */
4904 else if ((code == ecode
4905 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4906 && gnat_types_compatible_p (type, etype))
4907 || (code == VECTOR_TYPE
4908 && ecode == ARRAY_TYPE
4909 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4910 etype)))
4911 return build1 (VIEW_CONVERT_EXPR, type, expr);
4913 /* If we are converting between tagged types, try to upcast properly. */
4914 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4915 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4917 tree child_etype = etype;
4918 do {
4919 tree field = TYPE_FIELDS (child_etype);
4920 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4921 return build_component_ref (expr, NULL_TREE, field, false);
4922 child_etype = TREE_TYPE (field);
4923 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4926 /* If we are converting from a smaller form of record type back to it, just
4927 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4928 size on both sides. */
4929 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4930 && smaller_form_type_p (etype, type))
4932 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4933 false, false, false, true),
4934 expr);
4935 return build1 (VIEW_CONVERT_EXPR, type, expr);
4938 /* In all other cases of related types, make a NOP_EXPR. */
4939 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4940 return fold_convert (type, expr);
4942 switch (code)
4944 case VOID_TYPE:
4945 return fold_build1 (CONVERT_EXPR, type, expr);
4947 case INTEGER_TYPE:
4948 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4949 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4950 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4951 return unchecked_convert (type, expr, false);
4952 else if (TYPE_BIASED_REPRESENTATION_P (type))
4953 return fold_convert (type,
4954 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4955 convert (TREE_TYPE (type), expr),
4956 TYPE_MIN_VALUE (type)));
4958 /* ... fall through ... */
4960 case ENUMERAL_TYPE:
4961 case BOOLEAN_TYPE:
4962 /* If we are converting an additive expression to an integer type
4963 with lower precision, be wary of the optimization that can be
4964 applied by convert_to_integer. There are 2 problematic cases:
4965 - if the first operand was originally of a biased type,
4966 because we could be recursively called to convert it
4967 to an intermediate type and thus rematerialize the
4968 additive operator endlessly,
4969 - if the expression contains a placeholder, because an
4970 intermediate conversion that changes the sign could
4971 be inserted and thus introduce an artificial overflow
4972 at compile time when the placeholder is substituted. */
4973 if (code == INTEGER_TYPE
4974 && ecode == INTEGER_TYPE
4975 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4976 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4978 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4980 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4981 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4982 || CONTAINS_PLACEHOLDER_P (expr))
4983 return build1 (NOP_EXPR, type, expr);
4986 return fold (convert_to_integer (type, expr));
4988 case POINTER_TYPE:
4989 case REFERENCE_TYPE:
4990 /* If converting between two thin pointers, adjust if needed to account
4991 for differing offsets from the base pointer, depending on whether
4992 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4993 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4995 tree etype_pos
4996 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)) != NULL_TREE
4997 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4998 : size_zero_node;
4999 tree type_pos
5000 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != NULL_TREE
5001 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
5002 : size_zero_node;
5003 tree byte_diff = size_diffop (type_pos, etype_pos);
5005 expr = build1 (NOP_EXPR, type, expr);
5006 if (integer_zerop (byte_diff))
5007 return expr;
5009 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
5010 fold_convert (sizetype, byte_diff));
5013 /* If converting fat pointer to normal or thin pointer, get the pointer
5014 to the array and then convert it. */
5015 if (TYPE_IS_FAT_POINTER_P (etype))
5016 expr
5017 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
5019 return fold (convert_to_pointer (type, expr));
5021 case REAL_TYPE:
5022 return fold (convert_to_real (type, expr));
5024 case RECORD_TYPE:
5025 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
5027 vec<constructor_elt, va_gc> *v;
5028 vec_alloc (v, 1);
5030 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
5031 convert (TREE_TYPE (TYPE_FIELDS (type)),
5032 expr));
5033 return gnat_build_constructor (type, v);
5036 /* ... fall through ... */
5038 case ARRAY_TYPE:
5039 /* In these cases, assume the front-end has validated the conversion.
5040 If the conversion is valid, it will be a bit-wise conversion, so
5041 it can be viewed as an unchecked conversion. */
5042 return unchecked_convert (type, expr, false);
5044 case UNION_TYPE:
5045 /* This is a either a conversion between a tagged type and some
5046 subtype, which we have to mark as a UNION_TYPE because of
5047 overlapping fields or a conversion of an Unchecked_Union. */
5048 return unchecked_convert (type, expr, false);
5050 case UNCONSTRAINED_ARRAY_TYPE:
5051 /* If the input is a VECTOR_TYPE, convert to the representative
5052 array type first. */
5053 if (ecode == VECTOR_TYPE)
5055 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
5056 etype = TREE_TYPE (expr);
5057 ecode = TREE_CODE (etype);
5060 /* If EXPR is a constrained array, take its address, convert it to a
5061 fat pointer, and then dereference it. Likewise if EXPR is a
5062 record containing both a template and a constrained array.
5063 Note that a record representing a justified modular type
5064 always represents a packed constrained array. */
5065 if (ecode == ARRAY_TYPE
5066 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
5067 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
5068 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
5069 return
5070 build_unary_op
5071 (INDIRECT_REF, NULL_TREE,
5072 convert_to_fat_pointer (TREE_TYPE (type),
5073 build_unary_op (ADDR_EXPR,
5074 NULL_TREE, expr)));
5076 /* Do something very similar for converting one unconstrained
5077 array to another. */
5078 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
5079 return
5080 build_unary_op (INDIRECT_REF, NULL_TREE,
5081 convert (TREE_TYPE (type),
5082 build_unary_op (ADDR_EXPR,
5083 NULL_TREE, expr)));
5084 else
5085 gcc_unreachable ();
5087 case COMPLEX_TYPE:
5088 return fold (convert_to_complex (type, expr));
5090 default:
5091 gcc_unreachable ();
/* Create an expression whose value is that of EXPR converted to the common
   index type, which is sizetype.  EXPR is supposed to be in the base type
   of the GNAT index type.  Calling it is equivalent to doing

     convert (sizetype, expr)

   but we try to distribute the type conversion with the knowledge that EXPR
   cannot overflow in its type.  This is a best-effort approach and we fall
   back to the above expression as soon as difficulties are encountered.

   This is necessary to overcome issues that arise when the GNAT base index
   type and the GCC common index type (sizetype) don't have the same size,
   which is quite frequent on 64-bit architectures.  In this case, and if
   the GNAT base index type is signed but the iteration type of the loop has
   been forced to unsigned, the loop scalar evolution engine cannot compute
   a simple evolution for the general induction variables associated with the
   array indices, because it will preserve the wrap-around semantics in the
   unsigned type of their "inner" part.  As a result, many loop optimizations
   are blocked.

   The solution is to use a special (basic) induction variable that is at
   least as large as sizetype, and to express the aforementioned general
   induction variables in terms of this induction variable, eliminating
   the problematic intermediate truncation to the GNAT base index type.
   This is possible as long as the original expression doesn't overflow
   and if the middle-end hasn't introduced artificial overflows in the
   course of the various simplification it can make to the expression.  */

tree
convert_to_index_type (tree expr)
{
  const enum tree_code code = TREE_CODE (expr);
  tree type = TREE_TYPE (expr);

  /* If the type is unsigned, overflow is allowed so we cannot be sure that
     EXPR doesn't overflow.  Keep it simple if optimization is disabled.  */
  if (TYPE_UNSIGNED (type) || !optimize)
    return convert (sizetype, expr);

  switch (code)
    {
    case VAR_DECL:
      /* The main effect of the function: replace a loop parameter with its
	 associated special induction variable.  */
      if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
	expr = DECL_INDUCTION_VAR (expr);
      break;

    CASE_CONVERT:
      {
	tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
	/* Bail out as soon as we suspect some sort of type frobbing.  */
	if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
	    || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
	  break;
      }

      /* ... fall through ... */

    case NON_LVALUE_EXPR:
      /* Distribute the conversion into the single operand.  */
      return fold_build1 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Distribute the conversion into both operands of the arithmetic.  */
      return fold_build2 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COMPOUND_EXPR:
      /* Only the value operand needs the conversion.  */
      return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COND_EXPR:
      /* Convert both arms; the condition keeps its own type.  */
      return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)),
			  convert_to_index_type (TREE_OPERAND (expr, 2)));

    default:
      break;
    }

  /* Fall back to the plain conversion for everything else.  */
  return convert (sizetype, expr);
}
/* Remove all conversions that are done in EXP.  This includes converting
   from a padded type or to a justified modular type.  If TRUE_ADDRESS
   is true, always return the address of the containing object even if
   the address is not bit-aligned.  */

tree
remove_conversions (tree exp, bool true_address)
{
  /* Peel conversions iteratively: each round either rewrites EXP into a
     subexpression and loops, or decides that nothing more can be removed.  */
  bool done = false;

  while (!done)
    switch (TREE_CODE (exp))
      {
      case CONSTRUCTOR:
	/* A justified modular constructor wraps the real object as its
	   single element; extract it when the caller wants the address of
	   the containing object.  The original recursion passed true for
	   TRUE_ADDRESS here, which is already its value on this path.  */
	if (true_address
	    && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
	    && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
	  exp = (*CONSTRUCTOR_ELTS (exp))[0].value;
	else
	  done = true;
	break;

      case COMPONENT_REF:
	/* Strip the field reference used to unpad a padded object.  */
	if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  exp = TREE_OPERAND (exp, 0);
	else
	  done = true;
	break;

      CASE_CONVERT:
      case VIEW_CONVERT_EXPR:
      case NON_LVALUE_EXPR:
	/* Plain conversions are always stripped.  */
	exp = TREE_OPERAND (exp, 0);
	break;

      default:
	done = true;
	break;
      }

  return exp;
}
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If it has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.  */

tree
maybe_unconstrained_array (tree exp)
{
  const enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE (type))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  /* Preserve these flags across the rewrite of EXP.  */
	  const bool read_only = TREE_READONLY (exp);
	  const bool no_trap = TREE_THIS_NOTRAP (exp);

	  /* Look through the reference at the fat pointer.  */
	  exp = TREE_OPERAND (exp, 0);
	  type = TREE_TYPE (exp);

	  if (TREE_CODE (exp) == COND_EXPR)
	    {
	      /* Dereference the array pointer field in each arm and rebuild
		 the conditional on top of the two dereferences.  */
	      tree true_arm
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 1),
						       NULL_TREE,
						       TYPE_FIELDS (type),
						       false));
	      tree false_arm
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 2),
						       NULL_TREE,
						       TYPE_FIELDS (type),
						       false));

	      exp = build3 (COND_EXPR,
			    TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
			    TREE_OPERAND (exp, 0), true_arm, false_arm);
	    }
	  else
	    {
	      exp = build_unary_op (INDIRECT_REF, NULL_TREE,
				    build_component_ref (exp, NULL_TREE,
							 TYPE_FIELDS (type),
							 false));
	      TREE_READONLY (exp) = read_only;
	      TREE_THIS_NOTRAP (exp) = no_trap;
	    }
	}

      /* For a NULL_EXPR, just retype it to the underlying array type.  */
      else if (code == NULL_EXPR)
	exp = build1 (NULL_EXPR,
		      TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
		      TREE_OPERAND (exp, 0));
      break;

    case RECORD_TYPE:
      /* If this is a padded type and it contains a template, convert to the
	 unpadded type first.  */
      if (TYPE_PADDING_P (type)
	  && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
	  && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
	{
	  exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	  type = TREE_TYPE (exp);
	}

      if (TYPE_CONTAINS_TEMPLATE_P (type))
	{
	  /* The array is the second field, after the template.  */
	  exp = build_component_ref (exp, NULL_TREE,
				     DECL_CHAIN (TYPE_FIELDS (type)),
				     false);
	  type = TREE_TYPE (exp);

	  /* If the array type is padded, convert to the unpadded type.  */
	  if (TYPE_IS_PADDING_P (type))
	    exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	}
      break;

    default:
      break;
    }

  return exp;
}
/* Return true if EXPR is an expression that can be folded as an operand
   of a VIEW_CONVERT_EXPR.  See ada-tree.h for a complete rationale.  */

static bool
can_fold_for_view_convert_p (tree expr)
{
  /* The folder will fold NOP_EXPRs between integral types with the same
     precision (in the middle-end's sense).  We cannot allow it if the
     types don't have the same precision in the Ada sense as well.  */
  if (TREE_CODE (expr) != NOP_EXPR)
    return true;

  tree to_type = TREE_TYPE (expr);
  tree from_type = TREE_TYPE (TREE_OPERAND (expr, 0));

  /* Defer to the folder for non-integral conversions.  */
  if (!INTEGRAL_TYPE_P (to_type) || !INTEGRAL_TYPE_P (from_type))
    return true;

  /* Only fold conversions that preserve both the GCC precision and the
     Ada RM size.  */
  return (TYPE_PRECISION (to_type) == TYPE_PRECISION (from_type)
	  && operand_equal_p (rm_size (to_type), rm_size (from_type), 0));
}
/* Return an expression that does an unchecked conversion of EXPR to TYPE.
   If NOTRUNC_P is true, truncation operations should be suppressed.

   Special care is required with (source or target) integral types whose
   precision is not equal to their size, to make sure we fetch or assign
   the value bits whose location might depend on the endianness, e.g.

     Rmsize : constant := 8;
     subtype Int is Integer range 0 .. 2 ** Rmsize - 1;

     type Bit_Array is array (1 .. Rmsize) of Boolean;
     pragma Pack (Bit_Array);

     function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);

     Value : Int := 2#1000_0001#;
     Vbits : Bit_Array := To_Bit_Array (Value);

   we expect the 8 bits at Vbits'Address to always contain Value, while
   their original location depends on the endianness, at Value'Address
   on a little-endian architecture but not on a big-endian one.  */

tree
unchecked_convert (tree type, tree expr, bool notrunc_p)
{
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);
  tree tem;
  int c;

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if ((((INTEGRAL_TYPE_P (type)
	 && !(code == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (type)))
	|| (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
	|| (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
	    && !(ecode == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (etype)))
	   || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
	   || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
      || code == UNCONSTRAINED_ARRAY_TYPE)
    {
      /* First strip the bias of a biased source, using a copy of the type
	 with the bias cleared, so that the conversion below starts from
	 the actual value.  */
      if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
	{
	  tree ntype = copy_type (etype);
	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (NOP_EXPR, ntype, expr);
	}

      /* Likewise convert through an unbiased copy of a biased target,
	 then reinterpret the bits in the biased type.  */
      if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  tree rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	  expr = convert (rtype, expr);
	  expr = build1 (NOP_EXPR, type, expr);
	}
      else
	expr = convert (type, expr);
    }

  /* If we are converting to an integral type whose precision is not equal
     to its size, first unchecked convert to a record type that contains a
     field of the given precision.  Then extract the field.  */
  else if (INTEGRAL_TYPE_P (type)
	   && TYPE_RM_SIZE (type)
	   && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
      tree field_type, field;

      if (TYPE_UNSIGNED (type))
	field_type = make_unsigned_type (prec);
      else
	field_type = make_signed_type (prec);
      SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
				 NULL_TREE, bitsize_zero_node, 1, 0);

      finish_record_type (rec_type, field, 1, false);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, NULL_TREE, field, false);
      expr = fold_build1 (NOP_EXPR, type, expr);
    }

  /* Similarly if we are converting from an integral type whose precision is
     not equal to its size, first copy into a field of the given precision
     and unchecked convert the record type.  */
  else if (INTEGRAL_TYPE_P (etype)
	   && TYPE_RM_SIZE (etype)
	   && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
				     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, 1);
      tree field_type, field;

      if (TYPE_UNSIGNED (etype))
	field_type = make_unsigned_type (prec);
      else
	field_type = make_signed_type (prec);
      SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
				 NULL_TREE, bitsize_zero_node, 1, 0);

      finish_record_type (rec_type, field, 1, false);

      expr = fold_build1 (NOP_EXPR, field_type, expr);
      CONSTRUCTOR_APPEND_ELT (v, field, expr);
      expr = gnat_build_constructor (rec_type, v);
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting from a scalar type to a type with a different size,
     we need to pad to have the same size on both sides.

     ??? We cannot do it unconditionally because unchecked conversions are
     used liberally by the front-end to implement polymorphism, e.g. in:

       S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
       return p___size__4 (p__object!(S191s.all));

     so we skip all expressions that are references.  */
  else if (!REFERENCE_CLASS_P (expr)
	   && !AGGREGATE_TYPE_P (etype)
	   && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	   && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
    {
      if (c < 0)
	{
	  /* Source is smaller: pad it up to the target size first.  */
	  expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
					  false, false, false, true),
			  expr);
	  expr = unchecked_convert (type, expr, notrunc_p);
	}
      else
	{
	  /* Source is larger: convert to a padded version of the target
	     and extract the field holding the value.  */
	  tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
					  false, false, false, true);
	  expr = unchecked_convert (rec_type, expr, notrunc_p);
	  expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
				      false);
	}
    }

  /* We have a special case when we are converting between two unconstrained
     array types.  In that case, take the address, convert the fat pointer
     types, and dereference.  */
  else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));

  /* Another special case is when we are converting to a vector type from its
     representative array type; this a regular conversion.  */
  else if (code == VECTOR_TYPE
	   && ecode == ARRAY_TYPE
	   && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
				       etype))
    expr = convert (type, expr);

  /* And, if the array type is not the representative, we try to build an
     intermediate vector type of which the array type is the representative
     and to do the unchecked conversion between the vector types, in order
     to enable further simplifications in the middle-end.  */
  else if (code == VECTOR_TYPE
	   && ecode == ARRAY_TYPE
	   && (tem = build_vector_type_for_array (etype, NULL_TREE)))
    {
      expr = convert (tem, expr);
      return unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
     the alignment of the CONSTRUCTOR to speed up the copy operation.  */
  else if (TREE_CODE (expr) == CONSTRUCTOR
	   && code == RECORD_TYPE
	   && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
    {
      expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
				      Empty, false, false, false, true),
		      expr);
      return unchecked_convert (type, expr, notrunc_p);
    }

  /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression.  */
  else
    {
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      if (can_fold_for_view_convert_p (expr))
	expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
      else
	expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose precision is not equal to its
     size, sign- or zero-extend the result.  We need not do this if the input
     is an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned.  */
  if (!notrunc_p
      && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
      && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				GET_MODE_BITSIZE (TYPE_MODE (type)))
      && !(INTEGRAL_TYPE_P (etype)
	   && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
	   && operand_equal_p (TYPE_RM_SIZE (type),
			       (TYPE_RM_SIZE (etype) != 0
				? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
			       0))
      && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
    {
      /* Shift the value up then back down so that the unused high-order
	 bits are filled with the sign (or zero) extension of the value.  */
      tree base_type
	= gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
      tree shift_expr
	= convert (base_type,
		   size_binop (MINUS_EXPR,
			       bitsize_int
			       (GET_MODE_BITSIZE (TYPE_MODE (type))),
			       TYPE_RM_SIZE (type)));
      expr
	= convert (type,
		   build_binary_op (RSHIFT_EXPR, base_type,
				    build_binary_op (LSHIFT_EXPR, base_type,
						     convert (base_type, expr),
						     shift_expr),
				    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
			   OEP_ONLY_CONST))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
/* Return the appropriate GCC tree code for the specified GNAT_TYPE,
   the latter being a record type as predicated by Is_Record_Type.  */

enum tree_code
tree_code_for_record_type (Entity_Id gnat_type)
{
  Node_Id component_list, component;

  /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
     fields are all in the variant part.  Otherwise, return RECORD_TYPE.  */
  if (!Is_Unchecked_Union (gnat_type))
    return RECORD_TYPE;

  /* The component list hangs off the type definition of the base type.  */
  gnat_type = Implementation_Base_Type (gnat_type);
  component_list
    = Component_List (Type_Definition (Declaration_Node (gnat_type)));

  /* Any component entity found outside the variant part (discriminants have
     a different Ekind) makes this a genuine record.  */
  for (component = First_Non_Pragma (Component_Items (component_list));
       Present (component);
       component = Next_Non_Pragma (component))
    if (Ekind (Defining_Entity (component)) == E_Component)
      return RECORD_TYPE;

  return UNION_TYPE;
}
5622 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5623 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5624 according to the presence of an alignment clause on the type or, if it
5625 is an array, on the component type. */
5627 bool
5628 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5630 gnat_type = Underlying_Type (gnat_type);
5632 *align_clause = Present (Alignment_Clause (gnat_type));
5634 if (Is_Array_Type (gnat_type))
5636 gnat_type = Underlying_Type (Component_Type (gnat_type));
5637 if (Present (Alignment_Clause (gnat_type)))
5638 *align_clause = true;
5641 if (!Is_Floating_Point_Type (gnat_type))
5642 return false;
5644 if (UI_To_Int (Esize (gnat_type)) != 64)
5645 return false;
5647 return true;
5650 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5651 size is greater or equal to 64 bits, or an array of such a type. Set
5652 ALIGN_CLAUSE according to the presence of an alignment clause on the
5653 type or, if it is an array, on the component type. */
5655 bool
5656 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5658 gnat_type = Underlying_Type (gnat_type);
5660 *align_clause = Present (Alignment_Clause (gnat_type));
5662 if (Is_Array_Type (gnat_type))
5664 gnat_type = Underlying_Type (Component_Type (gnat_type));
5665 if (Present (Alignment_Clause (gnat_type)))
5666 *align_clause = true;
5669 if (!Is_Scalar_Type (gnat_type))
5670 return false;
5672 if (UI_To_Int (Esize (gnat_type)) < 64)
5673 return false;
5675 return true;
5678 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5679 component of an aggregate type. */
5681 bool
5682 type_for_nonaliased_component_p (tree gnu_type)
5684 /* If the type is passed by reference, we may have pointers to the
5685 component so it cannot be made non-aliased. */
5686 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5687 return false;
5689 /* We used to say that any component of aggregate type is aliased
5690 because the front-end may take 'Reference of it. The front-end
5691 has been enhanced in the meantime so as to use a renaming instead
5692 in most cases, but the back-end can probably take the address of
5693 such a component too so we go for the conservative stance.
5695 For instance, we might need the address of any array type, even
5696 if normally passed by copy, to construct a fat pointer if the
5697 component is used as an actual for an unconstrained formal.
5699 Likewise for record types: even if a specific record subtype is
5700 passed by copy, the parent type might be passed by ref (e.g. if
5701 it's of variable size) and we might take the address of a child
5702 component to pass to a parent formal. We have no way to check
5703 for such conditions here. */
5704 if (AGGREGATE_TYPE_P (gnu_type))
5705 return false;
5707 return true;
5710 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5712 bool
5713 smaller_form_type_p (tree type, tree orig_type)
5715 tree size, osize;
5717 /* We're not interested in variants here. */
5718 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5719 return false;
5721 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5722 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5723 return false;
5725 size = TYPE_SIZE (type);
5726 osize = TYPE_SIZE (orig_type);
5728 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5729 return false;
5731 return tree_int_cst_lt (size, osize) != 0;
/* Perform final processing on global declarations.  */

/* Dummy VAR_DECL used solely as the key under which globally used types are
   registered in the types_used_by_vars hash table.  */
static GTY (()) tree dummy_global;

void
gnat_write_global_declarations (void)
{
  unsigned int i;
  tree iter;

  /* If we have declared types as used at the global level, insert them in
     the global hash table.  We use a dummy variable for this purpose.  */
  if (types_used_by_cur_var_decl && !types_used_by_cur_var_decl->is_empty ())
    {
      struct varpool_node *node;
      char *label;

      /* Build a private, static, force-output variable so the registered
	 types survive until debug info is emitted.  */
      ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
      dummy_global
	= build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
		      void_type_node);
      TREE_STATIC (dummy_global) = 1;
      TREE_ASM_WRITTEN (dummy_global) = 1;
      node = varpool_node_for_decl (dummy_global);
      node->force_output = 1;

      /* Drain the worklist, attaching every pending type to the dummy.  */
      while (!types_used_by_cur_var_decl->is_empty ())
	{
	  tree t = types_used_by_cur_var_decl->pop ();
	  types_used_by_var_decl_insert (t, dummy_global);
	}
    }

  /* Output debug information for all global type declarations first.  This
     ensures that global types whose compilation hasn't been finalized yet,
     for example pointers to Taft amendment types, have their compilation
     finalized in the right context.  */
  FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
    if (TREE_CODE (iter) == TYPE_DECL)
      debug_hooks->global_decl (iter);

  /* Proceed to optimize and emit assembly.  */
  finalize_compilation_unit ();

  /* After cgraph has had a chance to emit everything that's going to
     be emitted, output debug information for the rest of globals.  */
  if (!seen_error ())
    {
      timevar_push (TV_SYMOUT);
      FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
	if (TREE_CODE (iter) != TYPE_DECL)
	  debug_hooks->global_decl (iter);
      timevar_pop (TV_SYMOUT);
    }
}
5790 /* ************************************************************************
5791 * * GCC builtins support *
5792 * ************************************************************************ */
5794 /* The general scheme is fairly simple:
5796 For each builtin function/type to be declared, gnat_install_builtins calls
5797 internal facilities which eventually get to gnat_push_decl, which in turn
5798 tracks the so declared builtin function decls in the 'builtin_decls' global
5799 datastructure. When an Intrinsic subprogram declaration is processed, we
5800 search this global datastructure to retrieve the associated BUILT_IN DECL
5801 node. */
5803 /* Search the chain of currently available builtin declarations for a node
5804 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5805 found, if any, or NULL_TREE otherwise. */
5806 tree
5807 builtin_decl_for (tree name)
5809 unsigned i;
5810 tree decl;
5812 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5813 if (DECL_NAME (decl) == name)
5814 return decl;
5816 return NULL_TREE;
5819 /* The code below eventually exposes gnat_install_builtins, which declares
5820 the builtin types and functions we might need, either internally or as
5821 user accessible facilities.
5823 ??? This is a first implementation shot, still in rough shape. It is
5824 heavily inspired from the "C" family implementation, with chunks copied
5825 verbatim from there.
5827 Two obvious TODO candidates are
5828 o Use a more efficient name/decl mapping scheme
5829 o Devise a middle-end infrastructure to avoid having to copy
5830 pieces between front-ends. */
5832 /* ----------------------------------------------------------------------- *
5833 * BUILTIN ELEMENTARY TYPES *
5834 * ----------------------------------------------------------------------- */
/* Standard data types to be used in builtin argument declarations.  */

enum c_tree_index
{
    CTI_SIGNED_SIZE_TYPE, /* For format checking only.  */
    CTI_STRING_TYPE,
    CTI_CONST_STRING_TYPE,

    CTI_MAX
};

/* Trees indexed by c_tree_index, filled in by
   install_builtin_elementary_types.  */
static tree c_global_trees[CTI_MAX];

#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
#define string_type_node c_global_trees[CTI_STRING_TYPE]
#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]

/* ??? In addition to some attribute handlers, we currently don't support a
   (small) number of builtin-types, which in turn inhibits support for a
   number of builtin functions.  These are mapped to void as placeholders.  */
#define wint_type_node    void_type_node
#define intmax_type_node  void_type_node
#define uintmax_type_node void_type_node
5860 /* Build the void_list_node (void_type_node having been created). */
5862 static tree
5863 build_void_list_node (void)
5865 tree t = build_tree_list (NULL_TREE, void_type_node);
5866 return t;
5869 /* Used to help initialize the builtin-types.def table. When a type of
5870 the correct size doesn't exist, use error_mark_node instead of NULL.
5871 The later results in segfaults even when a decl using the type doesn't
5872 get invoked. */
5874 static tree
5875 builtin_type_for_size (int size, bool unsignedp)
5877 tree type = gnat_type_for_size (size, unsignedp);
5878 return type ? type : error_mark_node;
/* Build/push the elementary type decls that builtin functions/types
   will need.  */

static void
install_builtin_elementary_types (void)
{
  /* The signed counterpart of size_t, used for format checking only.  */
  signed_size_type_node = gnat_signed_type (size_type_node);
  /* pid_t is represented as a plain int here.  */
  pid_type_node = integer_type_node;
  void_list_node = build_void_list_node ();

  /* "char *" and "const char *", as used by the string builtins.  */
  string_type_node = build_pointer_type (char_type_node);
  const_string_type_node
    = build_pointer_type (build_qualified_type
			  (char_type_node, TYPE_QUAL_CONST));
}
5897 /* ----------------------------------------------------------------------- *
5898 * BUILTIN FUNCTION TYPES *
5899 * ----------------------------------------------------------------------- */
5901 /* Now, builtin function types per se. */
5903 enum c_builtin_type
5905 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5906 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5907 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5908 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5909 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5910 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5911 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5912 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
5913 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
5914 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7, ARG8) NAME,
5915 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5916 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5917 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5918 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5919 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5920 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
5921 NAME,
5922 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5923 #include "builtin-types.def"
5924 #undef DEF_PRIMITIVE_TYPE
5925 #undef DEF_FUNCTION_TYPE_0
5926 #undef DEF_FUNCTION_TYPE_1
5927 #undef DEF_FUNCTION_TYPE_2
5928 #undef DEF_FUNCTION_TYPE_3
5929 #undef DEF_FUNCTION_TYPE_4
5930 #undef DEF_FUNCTION_TYPE_5
5931 #undef DEF_FUNCTION_TYPE_6
5932 #undef DEF_FUNCTION_TYPE_7
5933 #undef DEF_FUNCTION_TYPE_8
5934 #undef DEF_FUNCTION_TYPE_VAR_0
5935 #undef DEF_FUNCTION_TYPE_VAR_1
5936 #undef DEF_FUNCTION_TYPE_VAR_2
5937 #undef DEF_FUNCTION_TYPE_VAR_3
5938 #undef DEF_FUNCTION_TYPE_VAR_4
5939 #undef DEF_FUNCTION_TYPE_VAR_5
5940 #undef DEF_POINTER_TYPE
5941 BT_LAST
5944 typedef enum c_builtin_type builtin_type;
5946 /* A temporary array used in communication with def_fn_type. */
5947 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
/* A helper function for install_builtin_types.  Build function type
   for DEF with return type RET and N arguments.  If VAR is true, then the
   function should be variadic after those N arguments.

   Takes special care not to ICE if any of the types involved are
   error_mark_node, which indicates that said type is not in fact available
   (see builtin_type_for_size).  In which case the function type as a whole
   should be error_mark_node.  */

static void
def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
{
  tree t;
  tree *args = XALLOCAVEC (tree, n);
  va_list list;
  int i;

  /* Collect the N argument types; bail out to the egress label as soon as
     one of them is unavailable, so that T (== error_mark_node at that
     point) is what gets stored for DEF.  */
  va_start (list, n);
  for (i = 0; i < n; ++i)
    {
      builtin_type a = (builtin_type) va_arg (list, int);
      t = builtin_types[a];
      if (t == error_mark_node)
	goto egress;
      args[i] = t;
    }

  /* Likewise for the return type, then build the function type proper.  */
  t = builtin_types[ret];
  if (t == error_mark_node)
    goto egress;
  if (var)
    t = build_varargs_function_type_array (t, n, args);
  else
    t = build_function_type_array (t, n, args);

 egress:
  builtin_types[def] = t;
  va_end (list);
}
5989 /* Build the builtin function types and install them in the builtin_types
5990 array for later use in builtin function decls. */
5992 static void
5993 install_builtin_function_types (void)
5995 tree va_list_ref_type_node;
5996 tree va_list_arg_type_node;
5998 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
6000 va_list_arg_type_node = va_list_ref_type_node =
6001 build_pointer_type (TREE_TYPE (va_list_type_node));
6003 else
6005 va_list_arg_type_node = va_list_type_node;
6006 va_list_ref_type_node = build_reference_type (va_list_type_node);
6009 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
6010 builtin_types[ENUM] = VALUE;
6011 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
6012 def_fn_type (ENUM, RETURN, 0, 0);
6013 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
6014 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
6015 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
6016 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
6017 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
6018 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
6019 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
6020 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
6021 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
6022 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
6023 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6024 ARG6) \
6025 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
6026 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6027 ARG6, ARG7) \
6028 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
6029 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6030 ARG6, ARG7, ARG8) \
6031 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
6032 ARG7, ARG8);
6033 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
6034 def_fn_type (ENUM, RETURN, 1, 0);
6035 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
6036 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
6037 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
6038 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
6039 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
6040 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
6041 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
6042 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
6043 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
6044 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
6045 #define DEF_POINTER_TYPE(ENUM, TYPE) \
6046 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
6048 #include "builtin-types.def"
6050 #undef DEF_PRIMITIVE_TYPE
6051 #undef DEF_FUNCTION_TYPE_1
6052 #undef DEF_FUNCTION_TYPE_2
6053 #undef DEF_FUNCTION_TYPE_3
6054 #undef DEF_FUNCTION_TYPE_4
6055 #undef DEF_FUNCTION_TYPE_5
6056 #undef DEF_FUNCTION_TYPE_6
6057 #undef DEF_FUNCTION_TYPE_VAR_0
6058 #undef DEF_FUNCTION_TYPE_VAR_1
6059 #undef DEF_FUNCTION_TYPE_VAR_2
6060 #undef DEF_FUNCTION_TYPE_VAR_3
6061 #undef DEF_FUNCTION_TYPE_VAR_4
6062 #undef DEF_FUNCTION_TYPE_VAR_5
6063 #undef DEF_POINTER_TYPE
6064 builtin_types[(int) BT_LAST] = NULL_TREE;
6067 /* ----------------------------------------------------------------------- *
6068 * BUILTIN ATTRIBUTES *
6069 * ----------------------------------------------------------------------- */
/* Enumerate every builtin attribute defined in builtin-attrs.def; each
   DEF_ATTR_* macro expands to just the enumerator name.  ATTR_LAST caps
   the list and sizes the built_in_attributes array below.  */
enum built_in_attribute
{
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
  ATTR_LAST
};

/* Trees indexed by built_in_attribute, filled in by
   install_builtin_attributes.  */
static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
/* Build the trees for the attributes enumerated above, by re-expanding
   builtin-attrs.def with macros that construct the corresponding tree
   nodes.  DEF_ATTR_TREE_LIST entries may reference previously built
   entries, which the .def file orders accordingly.  */
static void
install_builtin_attributes (void)
{
  /* Fill in the built_in_attributes array.  */
#define DEF_ATTR_NULL_TREE(ENUM)				\
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_STRING(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING)				\
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN)	\
  built_in_attributes[(int) ENUM]			\
    = tree_cons (built_in_attributes[(int) PURPOSE],	\
		 built_in_attributes[(int) VALUE],	\
		 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
}
6114 /* Handle a "const" attribute; arguments as in
6115 struct attribute_spec.handler. */
6117 static tree
6118 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
6119 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6120 bool *no_add_attrs)
6122 if (TREE_CODE (*node) == FUNCTION_DECL)
6123 TREE_READONLY (*node) = 1;
6124 else
6125 *no_add_attrs = true;
6127 return NULL_TREE;
6130 /* Handle a "nothrow" attribute; arguments as in
6131 struct attribute_spec.handler. */
6133 static tree
6134 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
6135 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6136 bool *no_add_attrs)
6138 if (TREE_CODE (*node) == FUNCTION_DECL)
6139 TREE_NOTHROW (*node) = 1;
6140 else
6141 *no_add_attrs = true;
6143 return NULL_TREE;
6146 /* Handle a "pure" attribute; arguments as in
6147 struct attribute_spec.handler. */
6149 static tree
6150 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6151 int ARG_UNUSED (flags), bool *no_add_attrs)
6153 if (TREE_CODE (*node) == FUNCTION_DECL)
6154 DECL_PURE_P (*node) = 1;
6155 /* ??? TODO: Support types. */
6156 else
6158 warning (OPT_Wattributes, "%qs attribute ignored",
6159 IDENTIFIER_POINTER (name));
6160 *no_add_attrs = true;
6163 return NULL_TREE;
6166 /* Handle a "no vops" attribute; arguments as in
6167 struct attribute_spec.handler. */
6169 static tree
6170 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
6171 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6172 bool *ARG_UNUSED (no_add_attrs))
6174 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
6175 DECL_IS_NOVOPS (*node) = 1;
6176 return NULL_TREE;
/* Helper for nonnull attribute handling; fetch the operand number
   from the attribute argument list.  Return true and store the number
   in *VALP on success, false on malformed input.  */

static bool
get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
{
  /* Verify the arg number is a constant that fits in a HOST_WIDE_INT,
     i.e. the high part of the double-int representation is zero.  */
  if (TREE_CODE (arg_num_expr) != INTEGER_CST
      || TREE_INT_CST_HIGH (arg_num_expr) != 0)
    return false;

  *valp = TREE_INT_CST_LOW (arg_num_expr);
  return true;
}
6194 /* Handle the "nonnull" attribute. */
6195 static tree
6196 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
6197 tree args, int ARG_UNUSED (flags),
6198 bool *no_add_attrs)
6200 tree type = *node;
6201 unsigned HOST_WIDE_INT attr_arg_num;
6203 /* If no arguments are specified, all pointer arguments should be
6204 non-null. Verify a full prototype is given so that the arguments
6205 will have the correct types when we actually check them later. */
6206 if (!args)
6208 if (!prototype_p (type))
6210 error ("nonnull attribute without arguments on a non-prototype");
6211 *no_add_attrs = true;
6213 return NULL_TREE;
6216 /* Argument list specified. Verify that each argument number references
6217 a pointer argument. */
6218 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
6220 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
6222 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
6224 error ("nonnull argument has invalid operand number (argument %lu)",
6225 (unsigned long) attr_arg_num);
6226 *no_add_attrs = true;
6227 return NULL_TREE;
6230 if (prototype_p (type))
6232 function_args_iterator iter;
6233 tree argument;
6235 function_args_iter_init (&iter, type);
6236 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
6238 argument = function_args_iter_cond (&iter);
6239 if (!argument || ck_num == arg_num)
6240 break;
6243 if (!argument
6244 || TREE_CODE (argument) == VOID_TYPE)
6246 error ("nonnull argument with out-of-range operand number "
6247 "(argument %lu, operand %lu)",
6248 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6249 *no_add_attrs = true;
6250 return NULL_TREE;
6253 if (TREE_CODE (argument) != POINTER_TYPE)
6255 error ("nonnull argument references non-pointer operand "
6256 "(argument %lu, operand %lu)",
6257 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6258 *no_add_attrs = true;
6259 return NULL_TREE;
6264 return NULL_TREE;
6267 /* Handle a "sentinel" attribute. */
6269 static tree
6270 handle_sentinel_attribute (tree *node, tree name, tree args,
6271 int ARG_UNUSED (flags), bool *no_add_attrs)
6273 if (!prototype_p (*node))
6275 warning (OPT_Wattributes,
6276 "%qs attribute requires prototypes with named arguments",
6277 IDENTIFIER_POINTER (name));
6278 *no_add_attrs = true;
6280 else
6282 if (!stdarg_p (*node))
6284 warning (OPT_Wattributes,
6285 "%qs attribute only applies to variadic functions",
6286 IDENTIFIER_POINTER (name));
6287 *no_add_attrs = true;
6291 if (args)
6293 tree position = TREE_VALUE (args);
6295 if (TREE_CODE (position) != INTEGER_CST)
6297 warning (0, "requested position is not an integer constant");
6298 *no_add_attrs = true;
6300 else
6302 if (tree_int_cst_lt (position, integer_zero_node))
6304 warning (0, "requested position is less than zero");
6305 *no_add_attrs = true;
6310 return NULL_TREE;
6313 /* Handle a "noreturn" attribute; arguments as in
6314 struct attribute_spec.handler. */
6316 static tree
6317 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6318 int ARG_UNUSED (flags), bool *no_add_attrs)
6320 tree type = TREE_TYPE (*node);
6322 /* See FIXME comment in c_common_attribute_table. */
6323 if (TREE_CODE (*node) == FUNCTION_DECL)
6324 TREE_THIS_VOLATILE (*node) = 1;
6325 else if (TREE_CODE (type) == POINTER_TYPE
6326 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
6327 TREE_TYPE (*node)
6328 = build_pointer_type
6329 (build_type_variant (TREE_TYPE (type),
6330 TYPE_READONLY (TREE_TYPE (type)), 1));
6331 else
6333 warning (OPT_Wattributes, "%qs attribute ignored",
6334 IDENTIFIER_POINTER (name));
6335 *no_add_attrs = true;
6338 return NULL_TREE;
6341 /* Handle a "leaf" attribute; arguments as in
6342 struct attribute_spec.handler. */
6344 static tree
6345 handle_leaf_attribute (tree *node, tree name,
6346 tree ARG_UNUSED (args),
6347 int ARG_UNUSED (flags), bool *no_add_attrs)
6349 if (TREE_CODE (*node) != FUNCTION_DECL)
6351 warning (OPT_Wattributes, "%qE attribute ignored", name);
6352 *no_add_attrs = true;
6354 if (!TREE_PUBLIC (*node))
6356 warning (OPT_Wattributes, "%qE attribute has no effect", name);
6357 *no_add_attrs = true;
6360 return NULL_TREE;
6363 /* Handle a "malloc" attribute; arguments as in
6364 struct attribute_spec.handler. */
6366 static tree
6367 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6368 int ARG_UNUSED (flags), bool *no_add_attrs)
6370 if (TREE_CODE (*node) == FUNCTION_DECL
6371 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6372 DECL_IS_MALLOC (*node) = 1;
6373 else
6375 warning (OPT_Wattributes, "%qs attribute ignored",
6376 IDENTIFIER_POINTER (name));
6377 *no_add_attrs = true;
6380 return NULL_TREE;
/* Fake handler for attributes we don't properly support, accepting them
   silently without acting on them.  */

tree
fake_attribute_handler (tree * ARG_UNUSED (node),
			tree ARG_UNUSED (name),
			tree ARG_UNUSED (args),
			int ARG_UNUSED (flags),
			bool * ARG_UNUSED (no_add_attrs))
{
  return NULL_TREE;
}
6395 /* Handle a "type_generic" attribute. */
6397 static tree
6398 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6399 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6400 bool * ARG_UNUSED (no_add_attrs))
6402 /* Ensure we have a function type. */
6403 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
6405 /* Ensure we have a variadic function. */
6406 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
6408 return NULL_TREE;
6411 /* Handle a "vector_size" attribute; arguments as in
6412 struct attribute_spec.handler. */
6414 static tree
6415 handle_vector_size_attribute (tree *node, tree name, tree args,
6416 int ARG_UNUSED (flags), bool *no_add_attrs)
6418 tree type = *node;
6419 tree vector_type;
6421 *no_add_attrs = true;
6423 /* We need to provide for vector pointers, vector arrays, and
6424 functions returning vectors. For example:
6426 __attribute__((vector_size(16))) short *foo;
6428 In this case, the mode is SI, but the type being modified is
6429 HI, so we need to look further. */
6430 while (POINTER_TYPE_P (type)
6431 || TREE_CODE (type) == FUNCTION_TYPE
6432 || TREE_CODE (type) == ARRAY_TYPE)
6433 type = TREE_TYPE (type);
6435 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
6436 if (!vector_type)
6437 return NULL_TREE;
6439 /* Build back pointers if needed. */
6440 *node = reconstruct_complex_type (*node, vector_type);
6442 return NULL_TREE;
6445 /* Handle a "vector_type" attribute; arguments as in
6446 struct attribute_spec.handler. */
6448 static tree
6449 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6450 int ARG_UNUSED (flags), bool *no_add_attrs)
6452 tree type = *node;
6453 tree vector_type;
6455 *no_add_attrs = true;
6457 if (TREE_CODE (type) != ARRAY_TYPE)
6459 error ("attribute %qs applies to array types only",
6460 IDENTIFIER_POINTER (name));
6461 return NULL_TREE;
6464 vector_type = build_vector_type_for_array (type, name);
6465 if (!vector_type)
6466 return NULL_TREE;
6468 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
6469 *node = vector_type;
6471 return NULL_TREE;
6474 /* ----------------------------------------------------------------------- *
6475 * BUILTIN FUNCTIONS *
6476 * ----------------------------------------------------------------------- */
/* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
   names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
   if nonansi_p and flag_no_nonansi_builtin.  */

static void
def_builtin_1 (enum built_in_function fncode,
	       const char *name,
	       enum built_in_class fnclass,
	       tree fntype, tree libtype,
	       bool both_p, bool fallback_p,
	       bool nonansi_p ATTRIBUTE_UNUSED,
	       tree fnattrs, bool implicit_p)
{
  tree decl;
  const char *libname;

  /* Preserve an already installed decl.  It most likely was setup in advance
     (e.g. as part of the internal builtins) for specific reasons.  */
  if (builtin_decl_explicit (fncode) != NULL_TREE)
    return;

  /* The "__builtin_" prefix is mandatory whenever a library-name variant
     is to be installed or used as a fallback.  */
  gcc_assert ((!both_p && !fallback_p)
	      || !strncmp (name, "__builtin_",
			   strlen ("__builtin_")));

  /* The plain library name is the name with the prefix stripped.  */
  libname = name + strlen ("__builtin_");
  decl = add_builtin_function (name, fntype, fncode, fnclass,
			       (fallback_p ? libname : NULL),
			       fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada.  */
    add_builtin_function (libname, libtype, fncode, fnclass,
			  NULL, fnattrs);

  set_builtin_decl (fncode, decl, implicit_p);
}
/* ISO C dialect flags, referenced by the conditions in builtins.def;
   Ada never enables these dialects, so they stay zero.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
/* Install what the common builtins.def offers.  Each DEF_BUILTIN entry is
   turned into a def_builtin_1 call, guarded by the entry's NAME and COND
   fields, using the type and attribute trees built previously.  */

static void
install_builtin_functions (void)
{
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
		    NONANSI_P, ATTRS, IMPLICIT, COND)			\
  if (NAME && COND)							\
    def_builtin_1 (ENUM, NAME, CLASS,					\
		   builtin_types[(int) TYPE],				\
		   builtin_types[(int) LIBTYPE],			\
		   BOTH_P, FALLBACK_P, NONANSI_P,			\
		   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
#undef DEF_BUILTIN
}
6536 /* ----------------------------------------------------------------------- *
6537 * BUILTIN FUNCTIONS *
6538 * ----------------------------------------------------------------------- */
/* Install the builtin functions we might need.  */

void
gnat_install_builtins (void)
{
  /* Prerequisites first: elementary types, then the function types and
     attribute trees that builtins.def entries reference.  */
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}
6561 #include "gt-ada-utils.h"
6562 #include "gtype-ada.h"