gcc/
[official-gcc.git] / gcc / ada / gcc-interface / utils.c
blob014fe361b76d333500ee17ab505e0f6c3386971e
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2014, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "tree.h"
31 #include "stringpool.h"
32 #include "stor-layout.h"
33 #include "attribs.h"
34 #include "varasm.h"
35 #include "flags.h"
36 #include "toplev.h"
37 #include "diagnostic-core.h"
38 #include "output.h"
39 #include "ggc.h"
40 #include "debug.h"
41 #include "convert.h"
42 #include "target.h"
43 #include "common/common-target.h"
44 #include "langhooks.h"
45 #include "cgraph.h"
46 #include "diagnostic.h"
47 #include "timevar.h"
48 #include "tree-dump.h"
49 #include "tree-inline.h"
50 #include "tree-iterator.h"
52 #include "ada.h"
53 #include "types.h"
54 #include "atree.h"
55 #include "elists.h"
56 #include "namet.h"
57 #include "nlists.h"
58 #include "stringt.h"
59 #include "uintp.h"
60 #include "fe.h"
61 #include "sinfo.h"
62 #include "einfo.h"
63 #include "ada-tree.h"
64 #include "gigi.h"
66 /* If nonzero, pretend we are allocating at global level. */
67 int force_global;
69 /* The default alignment of "double" floating-point types, i.e. floating
70 point types whose size is equal to 64 bits, or 0 if this alignment is
71 not specifically capped. */
72 int double_float_alignment;
74 /* The default alignment of "double" or larger scalar types, i.e. scalar
75 types whose size is greater or equal to 64 bits, or 0 if this alignment
76 is not specifically capped. */
77 int double_scalar_alignment;
79 /* Tree nodes for the various types and decls we create. */
80 tree gnat_std_decls[(int) ADT_LAST];
82 /* Functions to call for each of the possible raise reasons. */
83 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
85 /* Likewise, but with extra info for each of the possible raise reasons. */
86 tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
88 /* Forward declarations for handlers of attributes. */
89 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
94 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
97 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
98 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
99 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
100 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
102 /* Fake handler for attributes we don't properly support, typically because
103 they'd require dragging a lot of the common-c front-end circuitry. */
104 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
106 /* Table of machine-independent internal attributes for Ada. We support
107 this minimal set of attributes to accommodate the needs of builtins. */
108 const struct attribute_spec gnat_internal_attribute_table[] =
110 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
111 affects_type_identity } */
112 { "const", 0, 0, true, false, false, handle_const_attribute,
113 false },
114 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute,
115 false },
116 { "pure", 0, 0, true, false, false, handle_pure_attribute,
117 false },
118 { "no vops", 0, 0, true, false, false, handle_novops_attribute,
119 false },
120 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute,
121 false },
122 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute,
123 false },
124 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute,
125 false },
126 { "leaf", 0, 0, true, false, false, handle_leaf_attribute,
127 false },
128 { "malloc", 0, 0, true, false, false, handle_malloc_attribute,
129 false },
130 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute,
131 false },
133 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute,
134 false },
135 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute,
136 false },
137 { "may_alias", 0, 0, false, true, false, NULL, false },
139 /* ??? format and format_arg are heavy and not supported, which actually
140 prevents support for stdio builtins, which we however declare as part
141 of the common builtins.def contents. */
142 { "format", 3, 3, false, true, true, fake_attribute_handler, false },
143 { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },
145 { NULL, 0, 0, false, false, false, NULL, false }
148 /* Associates a GNAT tree node to a GCC tree node. It is used in
149 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
150 of `save_gnu_tree' for more info. */
151 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
153 #define GET_GNU_TREE(GNAT_ENTITY) \
154 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
156 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
157 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
159 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
160 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
162 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
163 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
165 #define GET_DUMMY_NODE(GNAT_ENTITY) \
166 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
168 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
169 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
171 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
172 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
174 /* This variable keeps a table for types for each precision so that we only
175 allocate each of them once. Signed and unsigned types are kept separate.
177 Note that these types are only used when fold-const requests something
178 special. Perhaps we should NOT share these types; we'll see how it
179 goes later. */
180 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
182 /* Likewise for float types, but record these by mode. */
183 static GTY(()) tree float_types[NUM_MACHINE_MODES];
185 /* For each binding contour we allocate a binding_level structure to indicate
186 the binding depth. */
188 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
189 /* The binding level containing this one (the enclosing binding level). */
190 struct gnat_binding_level *chain;
191 /* The BLOCK node for this level. */
192 tree block;
193 /* If nonzero, the setjmp buffer that needs to be updated for any
194 variable-sized definition within this context. */
195 tree jmpbuf_decl;
198 /* The binding level currently in effect. */
199 static GTY(()) struct gnat_binding_level *current_binding_level;
201 /* A chain of gnat_binding_level structures awaiting reuse. */
202 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
204 /* The context to be used for global declarations. */
205 static GTY(()) tree global_context;
207 /* An array of global declarations. */
208 static GTY(()) vec<tree, va_gc> *global_decls;
210 /* An array of builtin function declarations. */
211 static GTY(()) vec<tree, va_gc> *builtin_decls;
213 /* An array of global renaming pointers. */
214 static GTY(()) vec<tree, va_gc> *global_renaming_pointers;
216 /* A chain of unused BLOCK nodes. */
217 static GTY((deletable)) tree free_block_chain;
219 static int pad_type_hash_marked_p (const void *p);
220 static hashval_t pad_type_hash_hash (const void *p);
221 static int pad_type_hash_eq (const void *p1, const void *p2);
223 /* A hash table of padded types. It is modelled on the generic type
224 hash table in tree.c, which must thus be used as a reference. */
225 struct GTY(()) pad_type_hash {
226 unsigned long hash;
227 tree type;
230 static GTY ((if_marked ("pad_type_hash_marked_p"),
231 param_is (struct pad_type_hash)))
232 htab_t pad_type_hash_table;
234 static tree merge_sizes (tree, tree, tree, bool, bool);
235 static tree compute_related_constant (tree, tree);
236 static tree split_plus (tree, tree *);
237 static tree float_type_for_precision (int, enum machine_mode);
238 static tree convert_to_fat_pointer (tree, tree);
239 static unsigned int scale_by_factor_of (tree, unsigned int);
240 static bool potential_alignment_gap (tree, tree, tree);
242 /* Initialize data structures of the utils.c module. */
244 void
245 init_gnat_utils (void)
247 /* Initialize the association of GNAT nodes to GCC trees. */
248 associate_gnat_to_gnu = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
250 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
251 dummy_node_table = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
253 /* Initialize the hash table of padded types. */
254 pad_type_hash_table = htab_create_ggc (512, pad_type_hash_hash,
255 pad_type_hash_eq, 0);
258 /* Destroy data structures of the utils.c module. */
260 void
261 destroy_gnat_utils (void)
263 /* Destroy the association of GNAT nodes to GCC trees. */
264 ggc_free (associate_gnat_to_gnu);
265 associate_gnat_to_gnu = NULL;
267 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
268 ggc_free (dummy_node_table);
269 dummy_node_table = NULL;
271 /* Destroy the hash table of padded types. */
272 htab_delete (pad_type_hash_table);
273 pad_type_hash_table = NULL;
275 /* Invalidate the global renaming pointers. */
276 invalidate_global_renaming_pointers ();
279 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
280 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
281 If NO_CHECK is true, the latter check is suppressed.
283 If GNU_DECL is zero, reset a previous association. */
285 void
286 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
288 /* Check that GNAT_ENTITY is not already defined and that it is being set
289 to something which is a decl. If that is not the case, this usually
290 means GNAT_ENTITY is defined twice, but occasionally is due to some
291 Gigi problem. */
292 gcc_assert (!(gnu_decl
293 && (PRESENT_GNU_TREE (gnat_entity)
294 || (!no_check && !DECL_P (gnu_decl)))));
296 SET_GNU_TREE (gnat_entity, gnu_decl);
299 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
300 that was associated with it. If there is no such tree node, abort.
302 In some cases, such as delayed elaboration or expressions that need to
303 be elaborated only once, GNAT_ENTITY is really not an entity. */
305 tree
306 get_gnu_tree (Entity_Id gnat_entity)
308 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
309 return GET_GNU_TREE (gnat_entity);
312 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
314 bool
315 present_gnu_tree (Entity_Id gnat_entity)
317 return PRESENT_GNU_TREE (gnat_entity);
320 /* Make a dummy type corresponding to GNAT_TYPE. */
322 tree
323 make_dummy_type (Entity_Id gnat_type)
325 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
326 tree gnu_type;
328 /* If there is an equivalent type, get its underlying type. */
329 if (Present (gnat_underlying))
330 gnat_underlying = Gigi_Equivalent_Type (Underlying_Type (gnat_underlying));
332 /* If there was no equivalent type (can only happen when just annotating
333 types) or underlying type, go back to the original type. */
334 if (No (gnat_underlying))
335 gnat_underlying = gnat_type;
337 /* If it there already a dummy type, use that one. Else make one. */
338 if (PRESENT_DUMMY_NODE (gnat_underlying))
339 return GET_DUMMY_NODE (gnat_underlying);
341 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
342 an ENUMERAL_TYPE. */
343 gnu_type = make_node (Is_Record_Type (gnat_underlying)
344 ? tree_code_for_record_type (gnat_underlying)
345 : ENUMERAL_TYPE);
346 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
347 TYPE_DUMMY_P (gnu_type) = 1;
348 TYPE_STUB_DECL (gnu_type)
349 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
350 if (Is_By_Reference_Type (gnat_underlying))
351 TYPE_BY_REFERENCE_P (gnu_type) = 1;
353 SET_DUMMY_NODE (gnat_underlying, gnu_type);
355 return gnu_type;
358 /* Return the dummy type that was made for GNAT_TYPE, if any. */
360 tree
361 get_dummy_type (Entity_Id gnat_type)
363 return GET_DUMMY_NODE (gnat_type);
366 /* Build dummy fat and thin pointer types whose designated type is specified
367 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
369 void
370 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
372 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
373 tree gnu_fat_type, fields, gnu_object_type;
375 gnu_template_type = make_node (RECORD_TYPE);
376 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
377 TYPE_DUMMY_P (gnu_template_type) = 1;
378 gnu_ptr_template = build_pointer_type (gnu_template_type);
380 gnu_array_type = make_node (ENUMERAL_TYPE);
381 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
382 TYPE_DUMMY_P (gnu_array_type) = 1;
383 gnu_ptr_array = build_pointer_type (gnu_array_type);
385 gnu_fat_type = make_node (RECORD_TYPE);
386 /* Build a stub DECL to trigger the special processing for fat pointer types
387 in gnat_pushdecl. */
388 TYPE_NAME (gnu_fat_type)
389 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
390 gnu_fat_type);
391 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
392 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
393 DECL_CHAIN (fields)
394 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
395 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
396 finish_fat_pointer_type (gnu_fat_type, fields);
397 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
398 /* Suppress debug info until after the type is completed. */
399 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
401 gnu_object_type = make_node (RECORD_TYPE);
402 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
403 TYPE_DUMMY_P (gnu_object_type) = 1;
405 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
406 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
409 /* Return true if we are in the global binding level. */
411 bool
412 global_bindings_p (void)
414 return force_global || current_function_decl == NULL_TREE;
417 /* Enter a new binding level. */
419 void
420 gnat_pushlevel (void)
422 struct gnat_binding_level *newlevel = NULL;
424 /* Reuse a struct for this binding level, if there is one. */
425 if (free_binding_level)
427 newlevel = free_binding_level;
428 free_binding_level = free_binding_level->chain;
430 else
431 newlevel = ggc_alloc_gnat_binding_level ();
433 /* Use a free BLOCK, if any; otherwise, allocate one. */
434 if (free_block_chain)
436 newlevel->block = free_block_chain;
437 free_block_chain = BLOCK_CHAIN (free_block_chain);
438 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
440 else
441 newlevel->block = make_node (BLOCK);
443 /* Point the BLOCK we just made to its parent. */
444 if (current_binding_level)
445 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
447 BLOCK_VARS (newlevel->block) = NULL_TREE;
448 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
449 TREE_USED (newlevel->block) = 1;
451 /* Add this level to the front of the chain (stack) of active levels. */
452 newlevel->chain = current_binding_level;
453 newlevel->jmpbuf_decl = NULL_TREE;
454 current_binding_level = newlevel;
457 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
458 and point FNDECL to this BLOCK. */
460 void
461 set_current_block_context (tree fndecl)
463 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
464 DECL_INITIAL (fndecl) = current_binding_level->block;
465 set_block_for_group (current_binding_level->block);
468 /* Set the jmpbuf_decl for the current binding level to DECL. */
470 void
471 set_block_jmpbuf_decl (tree decl)
473 current_binding_level->jmpbuf_decl = decl;
476 /* Get the jmpbuf_decl, if any, for the current binding level. */
478 tree
479 get_block_jmpbuf_decl (void)
481 return current_binding_level->jmpbuf_decl;
484 /* Exit a binding level. Set any BLOCK into the current code group. */
486 void
487 gnat_poplevel (void)
489 struct gnat_binding_level *level = current_binding_level;
490 tree block = level->block;
492 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
493 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
495 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
496 are no variables free the block and merge its subblocks into those of its
497 parent block. Otherwise, add it to the list of its parent. */
498 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
500 else if (BLOCK_VARS (block) == NULL_TREE)
502 BLOCK_SUBBLOCKS (level->chain->block)
503 = block_chainon (BLOCK_SUBBLOCKS (block),
504 BLOCK_SUBBLOCKS (level->chain->block));
505 BLOCK_CHAIN (block) = free_block_chain;
506 free_block_chain = block;
508 else
510 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
511 BLOCK_SUBBLOCKS (level->chain->block) = block;
512 TREE_USED (block) = 1;
513 set_block_for_group (block);
516 /* Free this binding structure. */
517 current_binding_level = level->chain;
518 level->chain = free_binding_level;
519 free_binding_level = level;
522 /* Exit a binding level and discard the associated BLOCK. */
524 void
525 gnat_zaplevel (void)
527 struct gnat_binding_level *level = current_binding_level;
528 tree block = level->block;
530 BLOCK_CHAIN (block) = free_block_chain;
531 free_block_chain = block;
533 /* Free this binding structure. */
534 current_binding_level = level->chain;
535 level->chain = free_binding_level;
536 free_binding_level = level;
539 /* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
541 static void
542 gnat_set_type_context (tree type, tree context)
544 tree decl = TYPE_STUB_DECL (type);
546 TYPE_CONTEXT (type) = context;
548 while (decl && DECL_PARALLEL_TYPE (decl))
550 TYPE_CONTEXT (DECL_PARALLEL_TYPE (decl)) = context;
551 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
555 /* Record DECL as belonging to the current lexical scope and use GNAT_NODE
556 for location information and flag propagation. */
558 void
559 gnat_pushdecl (tree decl, Node_Id gnat_node)
561 /* If DECL is public external or at top level, it has global context. */
562 if ((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || global_bindings_p ())
564 if (!global_context)
565 global_context = build_translation_unit_decl (NULL_TREE);
566 DECL_CONTEXT (decl) = global_context;
568 else
570 DECL_CONTEXT (decl) = current_function_decl;
572 /* Functions imported in another function are not really nested.
573 For really nested functions mark them initially as needing
574 a static chain for uses of that flag before unnesting;
575 lower_nested_functions will then recompute it. */
576 if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
577 DECL_STATIC_CHAIN (decl) = 1;
580 TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));
582 /* Set the location of DECL and emit a declaration for it. */
583 if (Present (gnat_node))
584 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
586 add_decl_expr (decl, gnat_node);
588 /* Put the declaration on the list. The list of declarations is in reverse
589 order. The list will be reversed later. Put global declarations in the
590 globals list and local ones in the current block. But skip TYPE_DECLs
591 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
592 with the debugger and aren't needed anyway. */
593 if (!(TREE_CODE (decl) == TYPE_DECL
594 && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
596 if (DECL_EXTERNAL (decl))
598 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
599 vec_safe_push (builtin_decls, decl);
601 else if (global_bindings_p ())
602 vec_safe_push (global_decls, decl);
603 else
605 DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
606 BLOCK_VARS (current_binding_level->block) = decl;
610 /* For the declaration of a type, set its name if it either is not already
611 set or if the previous type name was not derived from a source name.
612 We'd rather have the type named with a real name and all the pointer
613 types to the same object have the same POINTER_TYPE node. Code in the
614 equivalent function of c-decl.c makes a copy of the type node here, but
615 that may cause us trouble with incomplete types. We make an exception
616 for fat pointer types because the compiler automatically builds them
617 for unconstrained array types and the debugger uses them to represent
618 both these and pointers to these. */
619 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
621 tree t = TREE_TYPE (decl);
623 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
625 /* Array and pointer types aren't "tagged" types so we force the
626 type to be associated with its typedef in the DWARF back-end,
627 in order to make sure that the latter is always preserved. */
628 if (!DECL_ARTIFICIAL (decl)
629 && (TREE_CODE (t) == ARRAY_TYPE
630 || TREE_CODE (t) == POINTER_TYPE))
632 tree tt = build_distinct_type_copy (t);
633 if (TREE_CODE (t) == POINTER_TYPE)
634 TYPE_NEXT_PTR_TO (t) = tt;
635 TYPE_NAME (tt) = DECL_NAME (decl);
636 gnat_set_type_context (tt, DECL_CONTEXT (decl));
637 TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
638 DECL_ORIGINAL_TYPE (decl) = tt;
641 else if (TYPE_IS_FAT_POINTER_P (t))
643 /* We need a variant for the placeholder machinery to work. */
644 tree tt = build_variant_type_copy (t);
645 TYPE_NAME (tt) = decl;
646 gnat_set_type_context (tt, DECL_CONTEXT (decl));
647 TREE_USED (tt) = TREE_USED (t);
648 TREE_TYPE (decl) = tt;
649 if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
650 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
651 else
652 DECL_ORIGINAL_TYPE (decl) = t;
653 DECL_ARTIFICIAL (decl) = 0;
654 t = NULL_TREE;
656 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
658 else
659 t = NULL_TREE;
661 /* Propagate the name to all the anonymous variants. This is needed
662 for the type qualifiers machinery to work properly. */
663 if (t)
664 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
665 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
667 TYPE_NAME (t) = decl;
668 gnat_set_type_context (t, DECL_CONTEXT (decl));
673 /* Create a record type that contains a SIZE bytes long field of TYPE with a
674 starting bit position so that it is aligned to ALIGN bits, and leaving at
675 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
676 record is guaranteed to get. GNAT_NODE is used for the position of the
677 associated TYPE_DECL. */
679 tree
680 make_aligning_type (tree type, unsigned int align, tree size,
681 unsigned int base_align, int room, Node_Id gnat_node)
683 /* We will be crafting a record type with one field at a position set to be
684 the next multiple of ALIGN past record'address + room bytes. We use a
685 record placeholder to express record'address. */
686 tree record_type = make_node (RECORD_TYPE);
687 tree record = build0 (PLACEHOLDER_EXPR, record_type);
689 tree record_addr_st
690 = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));
692 /* The diagram below summarizes the shape of what we manipulate:
694 <--------- pos ---------->
695 { +------------+-------------+-----------------+
696 record =>{ |############| ... | field (type) |
697 { +------------+-------------+-----------------+
698 |<-- room -->|<- voffset ->|<---- size ----->|
701 record_addr vblock_addr
703 Every length is in sizetype bytes there, except "pos" which has to be
704 set as a bit position in the GCC tree for the record. */
705 tree room_st = size_int (room);
706 tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
707 tree voffset_st, pos, field;
709 tree name = TYPE_NAME (type);
711 if (TREE_CODE (name) == TYPE_DECL)
712 name = DECL_NAME (name);
713 name = concat_name (name, "ALIGN");
714 TYPE_NAME (record_type) = name;
716 /* Compute VOFFSET and then POS. The next byte position multiple of some
717 alignment after some address is obtained by "and"ing the alignment minus
718 1 with the two's complement of the address. */
719 voffset_st = size_binop (BIT_AND_EXPR,
720 fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
721 size_int ((align / BITS_PER_UNIT) - 1));
723 /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype. */
724 pos = size_binop (MULT_EXPR,
725 convert (bitsizetype,
726 size_binop (PLUS_EXPR, room_st, voffset_st)),
727 bitsize_unit_node);
729 /* Craft the GCC record representation. We exceptionally do everything
730 manually here because 1) our generic circuitry is not quite ready to
731 handle the complex position/size expressions we are setting up, 2) we
732 have a strong simplifying factor at hand: we know the maximum possible
733 value of voffset, and 3) we have to set/reset at least the sizes in
734 accordance with this maximum value anyway, as we need them to convey
735 what should be "alloc"ated for this type.
737 Use -1 as the 'addressable' indication for the field to prevent the
738 creation of a bitfield. We don't need one, it would have damaging
739 consequences on the alignment computation, and create_field_decl would
740 make one without this special argument, for instance because of the
741 complex position expression. */
742 field = create_field_decl (get_identifier ("F"), type, record_type, size,
743 pos, 1, -1);
744 TYPE_FIELDS (record_type) = field;
746 TYPE_ALIGN (record_type) = base_align;
747 TYPE_USER_ALIGN (record_type) = 1;
749 TYPE_SIZE (record_type)
750 = size_binop (PLUS_EXPR,
751 size_binop (MULT_EXPR, convert (bitsizetype, size),
752 bitsize_unit_node),
753 bitsize_int (align + room * BITS_PER_UNIT));
754 TYPE_SIZE_UNIT (record_type)
755 = size_binop (PLUS_EXPR, size,
756 size_int (room + align / BITS_PER_UNIT));
758 SET_TYPE_MODE (record_type, BLKmode);
759 relate_alias_sets (record_type, type, ALIAS_SET_COPY);
761 /* Declare it now since it will never be declared otherwise. This is
762 necessary to ensure that its subtrees are properly marked. */
763 create_type_decl (name, record_type, true, false, gnat_node);
765 return record_type;
768 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
769 as the field type of a packed record if IN_RECORD is true, or as the
770 component type of a packed array if IN_RECORD is false. See if we can
771 rewrite it either as a type that has a non-BLKmode, which we can pack
772 tighter in the packed record case, or as a smaller type. If so, return
773 the new type. If not, return the original type. */
775 tree
776 make_packable_type (tree type, bool in_record)
778 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
779 unsigned HOST_WIDE_INT new_size;
780 tree new_type, old_field, field_list = NULL_TREE;
781 unsigned int align;
783 /* No point in doing anything if the size is zero. */
784 if (size == 0)
785 return type;
787 new_type = make_node (TREE_CODE (type));
789 /* Copy the name and flags from the old type to that of the new.
790 Note that we rely on the pointer equality created here for
791 TYPE_NAME to look through conversions in various places. */
792 TYPE_NAME (new_type) = TYPE_NAME (type);
793 TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
794 TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
795 if (TREE_CODE (type) == RECORD_TYPE)
796 TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
798 /* If we are in a record and have a small size, set the alignment to
799 try for an integral mode. Otherwise set it to try for a smaller
800 type with BLKmode. */
801 if (in_record && size <= MAX_FIXED_MODE_SIZE)
803 align = ceil_pow2 (size);
804 TYPE_ALIGN (new_type) = align;
805 new_size = (size + align - 1) & -align;
807 else
809 unsigned HOST_WIDE_INT align;
811 /* Do not try to shrink the size if the RM size is not constant. */
812 if (TYPE_CONTAINS_TEMPLATE_P (type)
813 || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
814 return type;
816 /* Round the RM size up to a unit boundary to get the minimal size
817 for a BLKmode record. Give up if it's already the size. */
818 new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
819 new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
820 if (new_size == size)
821 return type;
823 align = new_size & -new_size;
824 TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
827 TYPE_USER_ALIGN (new_type) = 1;
829 /* Now copy the fields, keeping the position and size as we don't want
830 to change the layout by propagating the packedness downwards. */
831 for (old_field = TYPE_FIELDS (type); old_field;
832 old_field = DECL_CHAIN (old_field))
834 tree new_field_type = TREE_TYPE (old_field);
835 tree new_field, new_size;
837 if (RECORD_OR_UNION_TYPE_P (new_field_type)
838 && !TYPE_FAT_POINTER_P (new_field_type)
839 && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
840 new_field_type = make_packable_type (new_field_type, true);
842 /* However, for the last field in a not already packed record type
843 that is of an aggregate type, we need to use the RM size in the
844 packable version of the record type, see finish_record_type. */
845 if (!DECL_CHAIN (old_field)
846 && !TYPE_PACKED (type)
847 && RECORD_OR_UNION_TYPE_P (new_field_type)
848 && !TYPE_FAT_POINTER_P (new_field_type)
849 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
850 && TYPE_ADA_SIZE (new_field_type))
851 new_size = TYPE_ADA_SIZE (new_field_type);
852 else
853 new_size = DECL_SIZE (old_field);
855 new_field
856 = create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
857 new_size, bit_position (old_field),
858 TYPE_PACKED (type),
859 !DECL_NONADDRESSABLE_P (old_field));
861 DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
862 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
863 if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
864 DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);
866 DECL_CHAIN (new_field) = field_list;
867 field_list = new_field;
870 finish_record_type (new_type, nreverse (field_list), 2, false);
871 relate_alias_sets (new_type, type, ALIAS_SET_COPY);
872 if (TYPE_STUB_DECL (type))
873 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
874 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
876 /* If this is a padding record, we never want to make the size smaller
877 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
878 if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
880 TYPE_SIZE (new_type) = TYPE_SIZE (type);
881 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
882 new_size = size;
884 else
886 TYPE_SIZE (new_type) = bitsize_int (new_size);
887 TYPE_SIZE_UNIT (new_type)
888 = size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
891 if (!TYPE_CONTAINS_TEMPLATE_P (type))
892 SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
894 compute_record_mode (new_type);
896 /* Try harder to get a packable type if necessary, for example
897 in case the record itself contains a BLKmode field. */
898 if (in_record && TYPE_MODE (new_type) == BLKmode)
899 SET_TYPE_MODE (new_type,
900 mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));
902 /* If neither the mode nor the size has shrunk, return the old type. */
903 if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
904 return type;
906 return new_type;
909 /* Given a type TYPE, return a new type whose size is appropriate for SIZE.
910 If TYPE is the best type, return it. Otherwise, make a new type. We
911 only support new integral and pointer types. FOR_BIASED is true if
912 we are making a biased type. */
914 tree
915 make_type_from_size (tree type, tree size_tree, bool for_biased)
917 unsigned HOST_WIDE_INT size;
918 bool biased_p;
919 tree new_type;
921 /* If size indicates an error, just return TYPE to avoid propagating
922 the error. Likewise if it's too large to represent. */
923 if (!size_tree || !tree_fits_uhwi_p (size_tree))
924 return type;
926 size = tree_to_uhwi (size_tree);
928 switch (TREE_CODE (type))
930 case INTEGER_TYPE:
931 case ENUMERAL_TYPE:
932 case BOOLEAN_TYPE:
933 biased_p = (TREE_CODE (type) == INTEGER_TYPE
934 && TYPE_BIASED_REPRESENTATION_P (type));
936 /* Integer types with precision 0 are forbidden. */
937 if (size == 0)
938 size = 1;
940 /* Only do something if the type isn't a packed array type and doesn't
941 already have the proper size and the size isn't too large. */
942 if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
943 || (TYPE_PRECISION (type) == size && biased_p == for_biased)
944 || size > LONG_LONG_TYPE_SIZE)
945 break;
947 biased_p |= for_biased;
948 if (TYPE_UNSIGNED (type) || biased_p)
949 new_type = make_unsigned_type (size);
950 else
951 new_type = make_signed_type (size);
952 TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
953 SET_TYPE_RM_MIN_VALUE (new_type,
954 convert (TREE_TYPE (new_type),
955 TYPE_MIN_VALUE (type)));
956 SET_TYPE_RM_MAX_VALUE (new_type,
957 convert (TREE_TYPE (new_type),
958 TYPE_MAX_VALUE (type)));
959 /* Copy the name to show that it's essentially the same type and
960 not a subrange type. */
961 TYPE_NAME (new_type) = TYPE_NAME (type);
962 TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
963 SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
964 return new_type;
966 case RECORD_TYPE:
967 /* Do something if this is a fat pointer, in which case we
968 may need to return the thin pointer. */
969 if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
971 enum machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
972 if (!targetm.valid_pointer_mode (p_mode))
973 p_mode = ptr_mode;
974 return
975 build_pointer_type_for_mode
976 (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
977 p_mode, 0);
979 break;
981 case POINTER_TYPE:
982 /* Only do something if this is a thin pointer, in which case we
983 may need to return the fat pointer. */
984 if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
985 return
986 build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
987 break;
989 default:
990 break;
993 return type;
996 /* See if the data pointed to by the hash table slot is marked. */
998 static int
999 pad_type_hash_marked_p (const void *p)
1001 const_tree const type = ((const struct pad_type_hash *) p)->type;
1003 return ggc_marked_p (type);
1006 /* Return the cached hash value. */
1008 static hashval_t
1009 pad_type_hash_hash (const void *p)
1011 return ((const struct pad_type_hash *) p)->hash;
1014 /* Return 1 iff the padded types are equivalent. */
1016 static int
1017 pad_type_hash_eq (const void *p1, const void *p2)
1019 const struct pad_type_hash *const t1 = (const struct pad_type_hash *) p1;
1020 const struct pad_type_hash *const t2 = (const struct pad_type_hash *) p2;
1021 tree type1, type2;
1023 if (t1->hash != t2->hash)
1024 return 0;
1026 type1 = t1->type;
1027 type2 = t2->type;
1029 /* We consider that the padded types are equivalent if they pad the same
1030 type and have the same size, alignment and RM size. Taking the mode
1031 into account is redundant since it is determined by the others. */
1032 return
1033 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1034 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1035 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1036 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2);
1039 /* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
1040 if needed. We have already verified that SIZE and TYPE are large enough.
1041 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1042 IS_COMPONENT_TYPE is true if this is being done for the component type of
1043 an array. IS_USER_TYPE is true if the original type needs to be completed.
1044 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
1045 the RM size of the resulting type is to be set to SIZE too. */
1047 tree
1048 maybe_pad_type (tree type, tree size, unsigned int align,
1049 Entity_Id gnat_entity, bool is_component_type,
1050 bool is_user_type, bool definition, bool set_rm_size)
1052 tree orig_size = TYPE_SIZE (type);
1053 unsigned int orig_align = TYPE_ALIGN (type);
1054 tree record, field;
1056 /* If TYPE is a padded type, see if it agrees with any size and alignment
1057 we were given. If so, return the original type. Otherwise, strip
1058 off the padding, since we will either be returning the inner type
1059 or repadding it. If no size or alignment is specified, use that of
1060 the original padded type. */
1061 if (TYPE_IS_PADDING_P (type))
1063 if ((!size
1064 || operand_equal_p (round_up (size, orig_align), orig_size, 0))
1065 && (align == 0 || align == orig_align))
1066 return type;
1068 if (!size)
1069 size = orig_size;
1070 if (align == 0)
1071 align = orig_align;
1073 type = TREE_TYPE (TYPE_FIELDS (type));
1074 orig_size = TYPE_SIZE (type);
1075 orig_align = TYPE_ALIGN (type);
1078 /* If the size is either not being changed or is being made smaller (which
1079 is not done here and is only valid for bitfields anyway), show the size
1080 isn't changing. Likewise, clear the alignment if it isn't being
1081 changed. Then return if we aren't doing anything. */
1082 if (size
1083 && (operand_equal_p (size, orig_size, 0)
1084 || (TREE_CODE (orig_size) == INTEGER_CST
1085 && tree_int_cst_lt (size, orig_size))))
1086 size = NULL_TREE;
1088 if (align == orig_align)
1089 align = 0;
1091 if (align == 0 && !size)
1092 return type;
1094 /* If requested, complete the original type and give it a name. */
1095 if (is_user_type)
1096 create_type_decl (get_entity_name (gnat_entity), type,
1097 !Comes_From_Source (gnat_entity),
1098 !(TYPE_NAME (type)
1099 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1100 && DECL_IGNORED_P (TYPE_NAME (type))),
1101 gnat_entity);
1103 /* We used to modify the record in place in some cases, but that could
1104 generate incorrect debugging information. So make a new record
1105 type and name. */
1106 record = make_node (RECORD_TYPE);
1107 TYPE_PADDING_P (record) = 1;
1109 if (Present (gnat_entity))
1110 TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");
1112 TYPE_ALIGN (record) = align ? align : orig_align;
1113 TYPE_SIZE (record) = size ? size : orig_size;
1114 TYPE_SIZE_UNIT (record)
1115 = convert (sizetype,
1116 size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
1117 bitsize_unit_node));
1119 /* If we are changing the alignment and the input type is a record with
1120 BLKmode and a small constant size, try to make a form that has an
1121 integral mode. This might allow the padding record to also have an
1122 integral mode, which will be much more efficient. There is no point
1123 in doing so if a size is specified unless it is also a small constant
1124 size and it is incorrect to do so if we cannot guarantee that the mode
1125 will be naturally aligned since the field must always be addressable.
1127 ??? This might not always be a win when done for a stand-alone object:
1128 since the nominal and the effective type of the object will now have
1129 different modes, a VIEW_CONVERT_EXPR will be required for converting
1130 between them and it might be hard to overcome afterwards, including
1131 at the RTL level when the stand-alone object is accessed as a whole. */
1132 if (align != 0
1133 && RECORD_OR_UNION_TYPE_P (type)
1134 && TYPE_MODE (type) == BLKmode
1135 && !TYPE_BY_REFERENCE_P (type)
1136 && TREE_CODE (orig_size) == INTEGER_CST
1137 && !TREE_OVERFLOW (orig_size)
1138 && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
1139 && (!size
1140 || (TREE_CODE (size) == INTEGER_CST
1141 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
1143 tree packable_type = make_packable_type (type, true);
1144 if (TYPE_MODE (packable_type) != BLKmode
1145 && align >= TYPE_ALIGN (packable_type))
1146 type = packable_type;
1149 /* Now create the field with the original size. */
1150 field = create_field_decl (get_identifier ("F"), type, record, orig_size,
1151 bitsize_zero_node, 0, 1);
1152 DECL_INTERNAL_P (field) = 1;
1154 /* Do not emit debug info until after the auxiliary record is built. */
1155 finish_record_type (record, field, 1, false);
1157 /* Set the RM size if requested. */
1158 if (set_rm_size)
1160 SET_TYPE_ADA_SIZE (record, size ? size : orig_size);
1162 /* If the padded type is complete and has constant size, we canonicalize
1163 it by means of the hash table. This is consistent with the language
1164 semantics and ensures that gigi and the middle-end have a common view
1165 of these padded types. */
1166 if (TREE_CONSTANT (TYPE_SIZE (record)))
1168 hashval_t hashcode;
1169 struct pad_type_hash in, *h;
1170 void **loc;
1172 hashcode = iterative_hash_object (TYPE_HASH (type), 0);
1173 hashcode = iterative_hash_expr (TYPE_SIZE (record), hashcode);
1174 hashcode = iterative_hash_hashval_t (TYPE_ALIGN (record), hashcode);
1175 hashcode = iterative_hash_expr (TYPE_ADA_SIZE (record), hashcode);
1177 in.hash = hashcode;
1178 in.type = record;
1179 h = (struct pad_type_hash *)
1180 htab_find_with_hash (pad_type_hash_table, &in, hashcode);
1181 if (h)
1183 record = h->type;
1184 goto built;
1187 h = ggc_alloc_pad_type_hash ();
1188 h->hash = hashcode;
1189 h->type = record;
1190 loc = htab_find_slot_with_hash (pad_type_hash_table, h, hashcode,
1191 INSERT);
1192 *loc = (void *)h;
1196 /* Unless debugging information isn't being written for the input type,
1197 write a record that shows what we are a subtype of and also make a
1198 variable that indicates our size, if still variable. */
1199 if (TREE_CODE (orig_size) != INTEGER_CST
1200 && TYPE_NAME (record)
1201 && TYPE_NAME (type)
1202 && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1203 && DECL_IGNORED_P (TYPE_NAME (type))))
1205 tree marker = make_node (RECORD_TYPE);
1206 tree name = TYPE_NAME (record);
1207 tree orig_name = TYPE_NAME (type);
1209 if (TREE_CODE (name) == TYPE_DECL)
1210 name = DECL_NAME (name);
1212 if (TREE_CODE (orig_name) == TYPE_DECL)
1213 orig_name = DECL_NAME (orig_name);
1215 TYPE_NAME (marker) = concat_name (name, "XVS");
1216 finish_record_type (marker,
1217 create_field_decl (orig_name,
1218 build_reference_type (type),
1219 marker, NULL_TREE, NULL_TREE,
1220 0, 0),
1221 0, true);
1223 add_parallel_type (record, marker);
1225 if (definition && size && TREE_CODE (size) != INTEGER_CST)
1226 TYPE_SIZE_UNIT (marker)
1227 = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
1228 TYPE_SIZE_UNIT (record), false, false, false,
1229 false, NULL, gnat_entity);
1232 rest_of_record_type_compilation (record);
1234 built:
1235 /* If the size was widened explicitly, maybe give a warning. Take the
1236 original size as the maximum size of the input if there was an
1237 unconstrained record involved and round it up to the specified alignment,
1238 if one was specified. But don't do it if we are just annotating types
1239 and the type is tagged, since tagged types aren't fully laid out in this
1240 mode. */
1241 if (!size
1242 || TREE_CODE (size) == COND_EXPR
1243 || TREE_CODE (size) == MAX_EXPR
1244 || No (gnat_entity)
1245 || (type_annotate_only && Is_Tagged_Type (Etype (gnat_entity))))
1246 return record;
1248 if (CONTAINS_PLACEHOLDER_P (orig_size))
1249 orig_size = max_size (orig_size, true);
1251 if (align)
1252 orig_size = round_up (orig_size, align);
1254 if (!operand_equal_p (size, orig_size, 0)
1255 && !(TREE_CODE (size) == INTEGER_CST
1256 && TREE_CODE (orig_size) == INTEGER_CST
1257 && (TREE_OVERFLOW (size)
1258 || TREE_OVERFLOW (orig_size)
1259 || tree_int_cst_lt (size, orig_size))))
1261 Node_Id gnat_error_node = Empty;
1263 if (Is_Packed_Array_Type (gnat_entity))
1264 gnat_entity = Original_Array_Type (gnat_entity);
1266 if ((Ekind (gnat_entity) == E_Component
1267 || Ekind (gnat_entity) == E_Discriminant)
1268 && Present (Component_Clause (gnat_entity)))
1269 gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
1270 else if (Present (Size_Clause (gnat_entity)))
1271 gnat_error_node = Expression (Size_Clause (gnat_entity));
1273 /* Generate message only for entities that come from source, since
1274 if we have an entity created by expansion, the message will be
1275 generated for some other corresponding source entity. */
1276 if (Comes_From_Source (gnat_entity))
1278 if (Present (gnat_error_node))
1279 post_error_ne_tree ("{^ }bits of & unused?",
1280 gnat_error_node, gnat_entity,
1281 size_diffop (size, orig_size));
1282 else if (is_component_type)
1283 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1284 gnat_entity, gnat_entity,
1285 size_diffop (size, orig_size));
1289 return record;
1292 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1293 If this is a multi-dimensional array type, do this recursively.
1295 OP may be
1296 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1297 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1298 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1300 void
1301 relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1303 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1304 of a one-dimensional array, since the padding has the same alias set
1305 as the field type, but if it's a multi-dimensional array, we need to
1306 see the inner types. */
1307 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1308 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1309 || TYPE_PADDING_P (gnu_old_type)))
1310 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1312 /* Unconstrained array types are deemed incomplete and would thus be given
1313 alias set 0. Retrieve the underlying array type. */
1314 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1315 gnu_old_type
1316 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1317 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1318 gnu_new_type
1319 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
1321 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1322 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1323 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1324 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1326 switch (op)
1328 case ALIAS_SET_COPY:
1329 /* The alias set shouldn't be copied between array types with different
1330 aliasing settings because this can break the aliasing relationship
1331 between the array type and its element type. */
1332 #ifndef ENABLE_CHECKING
1333 if (flag_strict_aliasing)
1334 #endif
1335 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1336 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1337 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1338 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1340 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1341 break;
1343 case ALIAS_SET_SUBSET:
1344 case ALIAS_SET_SUPERSET:
1346 alias_set_type old_set = get_alias_set (gnu_old_type);
1347 alias_set_type new_set = get_alias_set (gnu_new_type);
1349 /* Do nothing if the alias sets conflict. This ensures that we
1350 never call record_alias_subset several times for the same pair
1351 or at all for alias set 0. */
1352 if (!alias_sets_conflict_p (old_set, new_set))
1354 if (op == ALIAS_SET_SUBSET)
1355 record_alias_subset (old_set, new_set);
1356 else
1357 record_alias_subset (new_set, old_set);
1360 break;
1362 default:
1363 gcc_unreachable ();
1366 record_component_aliases (gnu_new_type);
1369 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1370 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
1372 void
1373 record_builtin_type (const char *name, tree type, bool artificial_p)
1375 tree type_decl = build_decl (input_location,
1376 TYPE_DECL, get_identifier (name), type);
1377 DECL_ARTIFICIAL (type_decl) = artificial_p;
1378 TYPE_ARTIFICIAL (type) = artificial_p;
1379 gnat_pushdecl (type_decl, Empty);
1381 if (debug_hooks->type_decl)
1382 debug_hooks->type_decl (type_decl, false);
1385 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1386 finish constructing the record type as a fat pointer type. */
1388 void
1389 finish_fat_pointer_type (tree record_type, tree field_list)
1391 /* Make sure we can put it into a register. */
1392 if (STRICT_ALIGNMENT)
1393 TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);
1395 /* Show what it really is. */
1396 TYPE_FAT_POINTER_P (record_type) = 1;
1398 /* Do not emit debug info for it since the types of its fields may still be
1399 incomplete at this point. */
1400 finish_record_type (record_type, field_list, 0, false);
1402 /* Force type_contains_placeholder_p to return true on it. Although the
1403 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1404 type but the representation of the unconstrained array. */
1405 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1408 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1409 finish constructing the record or union type. If REP_LEVEL is zero, this
1410 record has no representation clause and so will be entirely laid out here.
1411 If REP_LEVEL is one, this record has a representation clause and has been
1412 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1413 this record is derived from a parent record and thus inherits its layout;
1414 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1415 we need to write debug information about this type. */
1417 void
1418 finish_record_type (tree record_type, tree field_list, int rep_level,
1419 bool debug_info_p)
1421 enum tree_code code = TREE_CODE (record_type);
1422 tree name = TYPE_NAME (record_type);
1423 tree ada_size = bitsize_zero_node;
1424 tree size = bitsize_zero_node;
1425 bool had_size = TYPE_SIZE (record_type) != 0;
1426 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1427 bool had_align = TYPE_ALIGN (record_type) != 0;
1428 tree field;
1430 TYPE_FIELDS (record_type) = field_list;
1432 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1433 generate debug info and have a parallel type. */
1434 if (name && TREE_CODE (name) == TYPE_DECL)
1435 name = DECL_NAME (name);
1436 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
1438 /* Globally initialize the record first. If this is a rep'ed record,
1439 that just means some initializations; otherwise, layout the record. */
1440 if (rep_level > 0)
1442 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
1444 if (!had_size_unit)
1445 TYPE_SIZE_UNIT (record_type) = size_zero_node;
1447 if (!had_size)
1448 TYPE_SIZE (record_type) = bitsize_zero_node;
1450 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1451 out just like a UNION_TYPE, since the size will be fixed. */
1452 else if (code == QUAL_UNION_TYPE)
1453 code = UNION_TYPE;
1455 else
1457 /* Ensure there isn't a size already set. There can be in an error
1458 case where there is a rep clause but all fields have errors and
1459 no longer have a position. */
1460 TYPE_SIZE (record_type) = 0;
1462 /* Ensure we use the traditional GCC layout for bitfields when we need
1463 to pack the record type or have a representation clause. The other
1464 possible layout (Microsoft C compiler), if available, would prevent
1465 efficient packing in almost all cases. */
1466 #ifdef TARGET_MS_BITFIELD_LAYOUT
1467 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1468 decl_attributes (&record_type,
1469 tree_cons (get_identifier ("gcc_struct"),
1470 NULL_TREE, NULL_TREE),
1471 ATTR_FLAG_TYPE_IN_PLACE);
1472 #endif
1474 layout_type (record_type);
1477 /* At this point, the position and size of each field is known. It was
1478 either set before entry by a rep clause, or by laying out the type above.
1480 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1481 to compute the Ada size; the GCC size and alignment (for rep'ed records
1482 that are not padding types); and the mode (for rep'ed records). We also
1483 clear the DECL_BIT_FIELD indication for the cases we know have not been
1484 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1486 if (code == QUAL_UNION_TYPE)
1487 field_list = nreverse (field_list);
1489 for (field = field_list; field; field = DECL_CHAIN (field))
1491 tree type = TREE_TYPE (field);
1492 tree pos = bit_position (field);
1493 tree this_size = DECL_SIZE (field);
1494 tree this_ada_size;
1496 if (RECORD_OR_UNION_TYPE_P (type)
1497 && !TYPE_FAT_POINTER_P (type)
1498 && !TYPE_CONTAINS_TEMPLATE_P (type)
1499 && TYPE_ADA_SIZE (type))
1500 this_ada_size = TYPE_ADA_SIZE (type);
1501 else
1502 this_ada_size = this_size;
1504 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1505 if (DECL_BIT_FIELD (field)
1506 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1508 unsigned int align = TYPE_ALIGN (type);
1510 /* In the general case, type alignment is required. */
1511 if (value_factor_p (pos, align))
1513 /* The enclosing record type must be sufficiently aligned.
1514 Otherwise, if no alignment was specified for it and it
1515 has been laid out already, bump its alignment to the
1516 desired one if this is compatible with its size. */
1517 if (TYPE_ALIGN (record_type) >= align)
1519 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1520 DECL_BIT_FIELD (field) = 0;
1522 else if (!had_align
1523 && rep_level == 0
1524 && value_factor_p (TYPE_SIZE (record_type), align))
1526 TYPE_ALIGN (record_type) = align;
1527 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1528 DECL_BIT_FIELD (field) = 0;
1532 /* In the non-strict alignment case, only byte alignment is. */
1533 if (!STRICT_ALIGNMENT
1534 && DECL_BIT_FIELD (field)
1535 && value_factor_p (pos, BITS_PER_UNIT))
1536 DECL_BIT_FIELD (field) = 0;
1539 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1540 field is technically not addressable. Except that it can actually
1541 be addressed if it is BLKmode and happens to be properly aligned. */
1542 if (DECL_BIT_FIELD (field)
1543 && !(DECL_MODE (field) == BLKmode
1544 && value_factor_p (pos, BITS_PER_UNIT)))
1545 DECL_NONADDRESSABLE_P (field) = 1;
1547 /* A type must be as aligned as its most aligned field that is not
1548 a bit-field. But this is already enforced by layout_type. */
1549 if (rep_level > 0 && !DECL_BIT_FIELD (field))
1550 TYPE_ALIGN (record_type)
1551 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
1553 switch (code)
1555 case UNION_TYPE:
1556 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1557 size = size_binop (MAX_EXPR, size, this_size);
1558 break;
1560 case QUAL_UNION_TYPE:
1561 ada_size
1562 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1563 this_ada_size, ada_size);
1564 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1565 this_size, size);
1566 break;
1568 case RECORD_TYPE:
1569 /* Since we know here that all fields are sorted in order of
1570 increasing bit position, the size of the record is one
1571 higher than the ending bit of the last field processed
1572 unless we have a rep clause, since in that case we might
1573 have a field outside a QUAL_UNION_TYPE that has a higher ending
1574 position. So use a MAX in that case. Also, if this field is a
1575 QUAL_UNION_TYPE, we need to take into account the previous size in
1576 the case of empty variants. */
1577 ada_size
1578 = merge_sizes (ada_size, pos, this_ada_size,
1579 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1580 size
1581 = merge_sizes (size, pos, this_size,
1582 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1583 break;
1585 default:
1586 gcc_unreachable ();
1590 if (code == QUAL_UNION_TYPE)
1591 nreverse (field_list);
1593 if (rep_level < 2)
1595 /* If this is a padding record, we never want to make the size smaller
1596 than what was specified in it, if any. */
1597 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
1598 size = TYPE_SIZE (record_type);
1600 /* Now set any of the values we've just computed that apply. */
1601 if (!TYPE_FAT_POINTER_P (record_type)
1602 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1603 SET_TYPE_ADA_SIZE (record_type, ada_size);
1605 if (rep_level > 0)
1607 tree size_unit = had_size_unit
1608 ? TYPE_SIZE_UNIT (record_type)
1609 : convert (sizetype,
1610 size_binop (CEIL_DIV_EXPR, size,
1611 bitsize_unit_node));
1612 unsigned int align = TYPE_ALIGN (record_type);
1614 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1615 TYPE_SIZE_UNIT (record_type)
1616 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1618 compute_record_mode (record_type);
1622 if (debug_info_p)
1623 rest_of_record_type_compilation (record_type);
1626 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. */
1628 void
1629 add_parallel_type (tree type, tree parallel_type)
1631 tree decl = TYPE_STUB_DECL (type);
1633 while (DECL_PARALLEL_TYPE (decl))
1634 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1636 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1639 /* Return true if TYPE has a parallel type. */
1641 static bool
1642 has_parallel_type (tree type)
1644 tree decl = TYPE_STUB_DECL (type);
1646 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1649 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1650 associated with it. It need not be invoked directly in most cases since
1651 finish_record_type takes care of doing so, but this can be necessary if
1652 a parallel type is to be attached to the record type. */
1654 void
1655 rest_of_record_type_compilation (tree record_type)
1657 bool var_size = false;
1658 tree field;
1660 /* If this is a padded type, the bulk of the debug info has already been
1661 generated for the field's type. */
1662 if (TYPE_IS_PADDING_P (record_type))
1663 return;
1665 /* If the type already has a parallel type (XVS type), then we're done. */
1666 if (has_parallel_type (record_type))
1667 return;
1669 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
1671 /* We need to make an XVE/XVU record if any field has variable size,
1672 whether or not the record does. For example, if we have a union,
1673 it may be that all fields, rounded up to the alignment, have the
1674 same size, in which case we'll use that size. But the debug
1675 output routines (except Dwarf2) won't be able to output the fields,
1676 so we need to make the special record. */
1677 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1678 /* If a field has a non-constant qualifier, the record will have
1679 variable size too. */
1680 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
1681 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1683 var_size = true;
1684 break;
1688 /* If this record type is of variable size, make a parallel record type that
1689 will tell the debugger how the former is laid out (see exp_dbug.ads). */
1690 if (var_size)
1692 tree new_record_type
1693 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1694 ? UNION_TYPE : TREE_CODE (record_type));
1695 tree orig_name = TYPE_NAME (record_type), new_name;
1696 tree last_pos = bitsize_zero_node;
1697 tree old_field, prev_old_field = NULL_TREE;
1699 if (TREE_CODE (orig_name) == TYPE_DECL)
1700 orig_name = DECL_NAME (orig_name);
1702 new_name
1703 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
1704 ? "XVU" : "XVE");
1705 TYPE_NAME (new_record_type) = new_name;
1706 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1707 TYPE_STUB_DECL (new_record_type)
1708 = create_type_stub_decl (new_name, new_record_type);
1709 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1710 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1711 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1712 TYPE_SIZE_UNIT (new_record_type)
1713 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1715 /* Now scan all the fields, replacing each field with a new field
1716 corresponding to the new encoding. */
1717 for (old_field = TYPE_FIELDS (record_type); old_field;
1718 old_field = DECL_CHAIN (old_field))
1720 tree field_type = TREE_TYPE (old_field);
1721 tree field_name = DECL_NAME (old_field);
1722 tree curpos = bit_position (old_field);
1723 tree pos, new_field;
1724 bool var = false;
1725 unsigned int align = 0;
1727 /* We're going to do some pattern matching below so remove as many
1728 conversions as possible. */
1729 curpos = remove_conversions (curpos, true);
1731 /* See how the position was modified from the last position.
1733 There are two basic cases we support: a value was added
1734 to the last position or the last position was rounded to
1735 a boundary and they something was added. Check for the
1736 first case first. If not, see if there is any evidence
1737 of rounding. If so, round the last position and retry.
1739 If this is a union, the position can be taken as zero. */
1740 if (TREE_CODE (new_record_type) == UNION_TYPE)
1741 pos = bitsize_zero_node;
1742 else
1743 pos = compute_related_constant (curpos, last_pos);
1745 if (!pos
1746 && TREE_CODE (curpos) == MULT_EXPR
1747 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
1749 tree offset = TREE_OPERAND (curpos, 0);
1750 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
1751 align = scale_by_factor_of (offset, align);
1752 last_pos = round_up (last_pos, align);
1753 pos = compute_related_constant (curpos, last_pos);
1755 else if (!pos
1756 && TREE_CODE (curpos) == PLUS_EXPR
1757 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
1758 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1759 && tree_fits_uhwi_p
1760 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
1762 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
1763 unsigned HOST_WIDE_INT addend
1764 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
1765 align
1766 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
1767 align = scale_by_factor_of (offset, align);
1768 align = MIN (align, addend & -addend);
1769 last_pos = round_up (last_pos, align);
1770 pos = compute_related_constant (curpos, last_pos);
1772 else if (potential_alignment_gap (prev_old_field, old_field, pos))
1774 align = TYPE_ALIGN (field_type);
1775 last_pos = round_up (last_pos, align);
1776 pos = compute_related_constant (curpos, last_pos);
1779 /* If we can't compute a position, set it to zero.
1781 ??? We really should abort here, but it's too much work
1782 to get this correct for all cases. */
1783 if (!pos)
1784 pos = bitsize_zero_node;
1786 /* See if this type is variable-sized and make a pointer type
1787 and indicate the indirection if so. Beware that the debug
1788 back-end may adjust the position computed above according
1789 to the alignment of the field type, i.e. the pointer type
1790 in this case, if we don't preventively counter that. */
1791 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1793 field_type = build_pointer_type (field_type);
1794 if (align != 0 && TYPE_ALIGN (field_type) > align)
1796 field_type = copy_node (field_type);
1797 TYPE_ALIGN (field_type) = align;
1799 var = true;
1802 /* Make a new field name, if necessary. */
1803 if (var || align != 0)
1805 char suffix[16];
1807 if (align != 0)
1808 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1809 align / BITS_PER_UNIT);
1810 else
1811 strcpy (suffix, "XVL");
1813 field_name = concat_name (field_name, suffix);
1816 new_field
1817 = create_field_decl (field_name, field_type, new_record_type,
1818 DECL_SIZE (old_field), pos, 0, 0);
1819 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1820 TYPE_FIELDS (new_record_type) = new_field;
1822 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1823 zero. The only time it's not the last field of the record
1824 is when there are other components at fixed positions after
1825 it (meaning there was a rep clause for every field) and we
1826 want to be able to encode them. */
1827 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1828 (TREE_CODE (TREE_TYPE (old_field))
1829 == QUAL_UNION_TYPE)
1830 ? bitsize_zero_node
1831 : DECL_SIZE (old_field));
1832 prev_old_field = old_field;
1835 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
1837 add_parallel_type (record_type, new_record_type);
1841 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1842 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1843 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1844 replace a value of zero with the old size. If HAS_REP is true, we take the
1845 MAX of the end position of this field with LAST_SIZE. In all other cases,
1846 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1848 static tree
1849 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1850 bool has_rep)
1852 tree type = TREE_TYPE (last_size);
1853 tree new_size;
1855 if (!special || TREE_CODE (size) != COND_EXPR)
1857 new_size = size_binop (PLUS_EXPR, first_bit, size);
1858 if (has_rep)
1859 new_size = size_binop (MAX_EXPR, last_size, new_size);
1862 else
1863 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1864 integer_zerop (TREE_OPERAND (size, 1))
1865 ? last_size : merge_sizes (last_size, first_bit,
1866 TREE_OPERAND (size, 1),
1867 1, has_rep),
1868 integer_zerop (TREE_OPERAND (size, 2))
1869 ? last_size : merge_sizes (last_size, first_bit,
1870 TREE_OPERAND (size, 2),
1871 1, has_rep));
1873 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1874 when fed through substitute_in_expr) into thinking that a constant
1875 size is not constant. */
1876 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
1877 new_size = TREE_OPERAND (new_size, 0);
1879 return new_size;
1882 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1883 related by the addition of a constant. Return that constant if so. */
1885 static tree
1886 compute_related_constant (tree op0, tree op1)
1888 tree op0_var, op1_var;
1889 tree op0_con = split_plus (op0, &op0_var);
1890 tree op1_con = split_plus (op1, &op1_var);
1891 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1893 if (operand_equal_p (op0_var, op1_var, 0))
1894 return result;
1895 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1896 return result;
1897 else
1898 return 0;
1901 /* Utility function of above to split a tree OP which may be a sum, into a
1902 constant part, which is returned, and a variable part, which is stored
1903 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1904 bitsizetype. */
1906 static tree
1907 split_plus (tree in, tree *pvar)
1909 /* Strip conversions in order to ease the tree traversal and maximize the
1910 potential for constant or plus/minus discovery. We need to be careful
1911 to always return and set *pvar to bitsizetype trees, but it's worth
1912 the effort. */
1913 in = remove_conversions (in, false);
1915 *pvar = convert (bitsizetype, in);
1917 if (TREE_CODE (in) == INTEGER_CST)
1919 *pvar = bitsize_zero_node;
1920 return convert (bitsizetype, in);
1922 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1924 tree lhs_var, rhs_var;
1925 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1926 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1928 if (lhs_var == TREE_OPERAND (in, 0)
1929 && rhs_var == TREE_OPERAND (in, 1))
1930 return bitsize_zero_node;
1932 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1933 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1935 else
1936 return bitsize_zero_node;
1939 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1940 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1941 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1942 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1943 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1944 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1945 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1946 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1947 invisible reference. */
1949 tree
1950 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1951 bool return_unconstrained_p, bool return_by_direct_ref_p,
1952 bool return_by_invisi_ref_p)
1954 /* A list of the data type nodes of the subprogram formal parameters.
1955 This list is generated by traversing the input list of PARM_DECL
1956 nodes. */
1957 vec<tree, va_gc> *param_type_list = NULL;
1958 tree t, type;
1960 for (t = param_decl_list; t; t = DECL_CHAIN (t))
1961 vec_safe_push (param_type_list, TREE_TYPE (t));
1963 type = build_function_type_vec (return_type, param_type_list);
1965 /* TYPE may have been shared since GCC hashes types. If it has a different
1966 CICO_LIST, make a copy. Likewise for the various flags. */
1967 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
1968 return_by_direct_ref_p, return_by_invisi_ref_p))
1970 type = copy_type (type);
1971 TYPE_CI_CO_LIST (type) = cico_list;
1972 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
1973 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
1974 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
1977 return type;
1980 /* Return a copy of TYPE but safe to modify in any way. */
1982 tree
1983 copy_type (tree type)
1985 tree new_type = copy_node (type);
1987 /* Unshare the language-specific data. */
1988 if (TYPE_LANG_SPECIFIC (type))
1990 TYPE_LANG_SPECIFIC (new_type) = NULL;
1991 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
1994 /* And the contents of the language-specific slot if needed. */
1995 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
1996 && TYPE_RM_VALUES (type))
1998 TYPE_RM_VALUES (new_type) = NULL_TREE;
1999 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2000 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2001 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2004 /* copy_node clears this field instead of copying it, because it is
2005 aliased with TREE_CHAIN. */
2006 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
2008 TYPE_POINTER_TO (new_type) = 0;
2009 TYPE_REFERENCE_TO (new_type) = 0;
2010 TYPE_MAIN_VARIANT (new_type) = new_type;
2011 TYPE_NEXT_VARIANT (new_type) = 0;
2013 return new_type;
2016 /* Return a subtype of sizetype with range MIN to MAX and whose
2017 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2018 of the associated TYPE_DECL. */
2020 tree
2021 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2023 /* First build a type for the desired range. */
2024 tree type = build_nonshared_range_type (sizetype, min, max);
2026 /* Then set the index type. */
2027 SET_TYPE_INDEX_TYPE (type, index);
2028 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2030 return type;
2033 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2034 sizetype is used. */
2036 tree
2037 create_range_type (tree type, tree min, tree max)
2039 tree range_type;
2041 if (type == NULL_TREE)
2042 type = sizetype;
2044 /* First build a type with the base range. */
2045 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2046 TYPE_MAX_VALUE (type));
2048 /* Then set the actual range. */
2049 SET_TYPE_RM_MIN_VALUE (range_type, convert (type, min));
2050 SET_TYPE_RM_MAX_VALUE (range_type, convert (type, max));
2052 return range_type;
2055 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2056 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2057 its data type. */
2059 tree
2060 create_type_stub_decl (tree type_name, tree type)
2062 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2063 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2064 emitted in DWARF. */
2065 tree type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
2066 DECL_ARTIFICIAL (type_decl) = 1;
2067 TYPE_ARTIFICIAL (type) = 1;
2068 return type_decl;
2071 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
2072 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
2073 is a declaration that was generated by the compiler. DEBUG_INFO_P is
2074 true if we need to write debug information about this type. GNAT_NODE
2075 is used for the position of the decl. */
2077 tree
2078 create_type_decl (tree type_name, tree type, bool artificial_p,
2079 bool debug_info_p, Node_Id gnat_node)
2081 enum tree_code code = TREE_CODE (type);
2082 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2083 tree type_decl;
2085 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2086 gcc_assert (!TYPE_IS_DUMMY_P (type));
2088 /* If the type hasn't been named yet, we're naming it; preserve an existing
2089 TYPE_STUB_DECL that has been attached to it for some purpose. */
2090 if (!named && TYPE_STUB_DECL (type))
2092 type_decl = TYPE_STUB_DECL (type);
2093 DECL_NAME (type_decl) = type_name;
2095 else
2096 type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
2098 DECL_ARTIFICIAL (type_decl) = artificial_p;
2099 TYPE_ARTIFICIAL (type) = artificial_p;
2101 /* Add this decl to the current binding level. */
2102 gnat_pushdecl (type_decl, gnat_node);
2104 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
2105 This causes the name to be also viewed as a "tag" by the debug
2106 back-end, with the advantage that no DW_TAG_typedef is emitted
2107 for artificial "tagged" types in DWARF. */
2108 if (!named)
2109 TYPE_STUB_DECL (type) = type_decl;
2111 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2112 back-end doesn't support, and for others if we don't need to. */
2113 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2114 DECL_IGNORED_P (type_decl) = 1;
2116 return type_decl;
2119 /* Return a VAR_DECL or CONST_DECL node.
2121 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
2122 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
2123 the GCC tree for an optional initial expression; NULL_TREE if none.
2125 CONST_FLAG is true if this variable is constant, in which case we might
2126 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2128 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2129 definition to be made visible outside of the current compilation unit, for
2130 instance variable definitions in a package specification.
2132 EXTERN_FLAG is true when processing an external variable declaration (as
2133 opposed to a definition: no storage is to be allocated for the variable).
2135 STATIC_FLAG is only relevant when not at top level. In that case
2136 it indicates whether to always allocate storage to the variable.
2138 GNAT_NODE is used for the position of the decl. */
2140 tree
2141 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
2142 bool const_flag, bool public_flag, bool extern_flag,
2143 bool static_flag, bool const_decl_allowed_p,
2144 struct attrib *attr_list, Node_Id gnat_node)
2146 /* Whether the initializer is a constant initializer. At the global level
2147 or for an external object or an object to be allocated in static memory,
2148 we check that it is a valid constant expression for use in initializing
2149 a static variable; otherwise, we only check that it is constant. */
2150 bool init_const
2151 = (var_init != 0
2152 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
2153 && (global_bindings_p () || extern_flag || static_flag
2154 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
2155 : TREE_CONSTANT (var_init)));
2157 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2158 case the initializer may be used in-lieu of the DECL node (as done in
2159 Identifier_to_gnu). This is useful to prevent the need of elaboration
2160 code when an identifier for which such a decl is made is in turn used as
2161 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
2162 but extra constraints apply to this choice (see below) and are not
2163 relevant to the distinction we wish to make. */
2164 bool constant_p = const_flag && init_const;
2166 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2167 and may be used for scalars in general but not for aggregates. */
2168 tree var_decl
2169 = build_decl (input_location,
2170 (constant_p && const_decl_allowed_p
2171 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
2172 var_name, type);
2174 /* If this is external, throw away any initializations (they will be done
2175 elsewhere) unless this is a constant for which we would like to remain
2176 able to get the initializer. If we are defining a global here, leave a
2177 constant initialization and save any variable elaborations for the
2178 elaboration routine. If we are just annotating types, throw away the
2179 initialization if it isn't a constant. */
2180 if ((extern_flag && !constant_p)
2181 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
2182 var_init = NULL_TREE;
2184 /* At the global level, an initializer requiring code to be generated
2185 produces elaboration statements. Check that such statements are allowed,
2186 that is, not violating a No_Elaboration_Code restriction. */
2187 if (global_bindings_p () && var_init != 0 && !init_const)
2188 Check_Elaboration_Code_Allowed (gnat_node);
2190 DECL_INITIAL (var_decl) = var_init;
2191 TREE_READONLY (var_decl) = const_flag;
2192 DECL_EXTERNAL (var_decl) = extern_flag;
2193 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
2194 TREE_CONSTANT (var_decl) = constant_p;
2195 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
2196 = TYPE_VOLATILE (type);
2198 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2199 try to fiddle with DECL_COMMON. However, on platforms that don't
2200 support global BSS sections, uninitialized global variables would
2201 go in DATA instead, thus increasing the size of the executable. */
2202 if (!flag_no_common
2203 && TREE_CODE (var_decl) == VAR_DECL
2204 && TREE_PUBLIC (var_decl)
2205 && !have_global_bss_p ())
2206 DECL_COMMON (var_decl) = 1;
2208 /* At the global binding level, we need to allocate static storage for the
2209 variable if it isn't external. Otherwise, we allocate automatic storage
2210 unless requested not to. */
2211 TREE_STATIC (var_decl)
2212 = !extern_flag && (static_flag || global_bindings_p ());
2214 /* For an external constant whose initializer is not absolute, do not emit
2215 debug info. In DWARF this would mean a global relocation in a read-only
2216 section which runs afoul of the PE-COFF run-time relocation mechanism. */
2217 if (extern_flag
2218 && constant_p
2219 && var_init
2220 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
2221 != null_pointer_node)
2222 DECL_IGNORED_P (var_decl) = 1;
2224 if (TREE_SIDE_EFFECTS (var_decl))
2225 TREE_ADDRESSABLE (var_decl) = 1;
2227 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2228 if (TREE_CODE (var_decl) == VAR_DECL)
2229 process_attributes (&var_decl, &attr_list, true, gnat_node);
2231 /* Add this decl to the current binding level. */
2232 gnat_pushdecl (var_decl, gnat_node);
2234 if (TREE_CODE (var_decl) == VAR_DECL)
2236 if (asm_name)
2237 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
2239 if (global_bindings_p ())
2240 rest_of_decl_compilation (var_decl, true, 0);
2243 return var_decl;
2246 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2248 static bool
2249 aggregate_type_contains_array_p (tree type)
2251 switch (TREE_CODE (type))
2253 case RECORD_TYPE:
2254 case UNION_TYPE:
2255 case QUAL_UNION_TYPE:
2257 tree field;
2258 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2259 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2260 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2261 return true;
2262 return false;
2265 case ARRAY_TYPE:
2266 return true;
2268 default:
2269 gcc_unreachable ();
2273 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
2274 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
2275 nonzero, it is the specified size of the field. If POS is nonzero, it is
2276 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
2277 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2278 means we are allowed to take the address of the field; if it is negative,
2279 we should not make a bitfield, which is used by make_aligning_type. */
2281 tree
2282 create_field_decl (tree field_name, tree field_type, tree record_type,
2283 tree size, tree pos, int packed, int addressable)
2285 tree field_decl = build_decl (input_location,
2286 FIELD_DECL, field_name, field_type);
2288 DECL_CONTEXT (field_decl) = record_type;
2289 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
2291 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2292 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2293 Likewise for an aggregate without specified position that contains an
2294 array, because in this case slices of variable length of this array
2295 must be handled by GCC and variable-sized objects need to be aligned
2296 to at least a byte boundary. */
2297 if (packed && (TYPE_MODE (field_type) == BLKmode
2298 || (!pos
2299 && AGGREGATE_TYPE_P (field_type)
2300 && aggregate_type_contains_array_p (field_type))))
2301 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
2303 /* If a size is specified, use it. Otherwise, if the record type is packed
2304 compute a size to use, which may differ from the object's natural size.
2305 We always set a size in this case to trigger the checks for bitfield
2306 creation below, which is typically required when no position has been
2307 specified. */
2308 if (size)
2309 size = convert (bitsizetype, size);
2310 else if (packed == 1)
2312 size = rm_size (field_type);
2313 if (TYPE_MODE (field_type) == BLKmode)
2314 size = round_up (size, BITS_PER_UNIT);
2317 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2318 specified for two reasons: first if the size differs from the natural
2319 size. Second, if the alignment is insufficient. There are a number of
2320 ways the latter can be true.
2322 We never make a bitfield if the type of the field has a nonconstant size,
2323 because no such entity requiring bitfield operations should reach here.
2325 We do *preventively* make a bitfield when there might be the need for it
2326 but we don't have all the necessary information to decide, as is the case
2327 of a field with no specified position in a packed record.
2329 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2330 in layout_decl or finish_record_type to clear the bit_field indication if
2331 it is in fact not needed. */
2332 if (addressable >= 0
2333 && size
2334 && TREE_CODE (size) == INTEGER_CST
2335 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
2336 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
2337 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
2338 || packed
2339 || (TYPE_ALIGN (record_type) != 0
2340 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
2342 DECL_BIT_FIELD (field_decl) = 1;
2343 DECL_SIZE (field_decl) = size;
2344 if (!packed && !pos)
2346 if (TYPE_ALIGN (record_type) != 0
2347 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
2348 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
2349 else
2350 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2354 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2356 /* Bump the alignment if need be, either for bitfield/packing purposes or
2357 to satisfy the type requirements if no such consideration applies. When
2358 we get the alignment from the type, indicate if this is from an explicit
2359 user request, which prevents stor-layout from lowering it later on. */
2361 unsigned int bit_align
2362 = (DECL_BIT_FIELD (field_decl) ? 1
2363 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
2365 if (bit_align > DECL_ALIGN (field_decl))
2366 DECL_ALIGN (field_decl) = bit_align;
2367 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
2369 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2370 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
2374 if (pos)
2376 /* We need to pass in the alignment the DECL is known to have.
2377 This is the lowest-order bit set in POS, but no more than
2378 the alignment of the record, if one is specified. Note
2379 that an alignment of 0 is taken as infinite. */
2380 unsigned int known_align;
2382 if (tree_fits_uhwi_p (pos))
2383 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
2384 else
2385 known_align = BITS_PER_UNIT;
2387 if (TYPE_ALIGN (record_type)
2388 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2389 known_align = TYPE_ALIGN (record_type);
2391 layout_decl (field_decl, known_align);
2392 SET_DECL_OFFSET_ALIGN (field_decl,
2393 tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
2394 : BITS_PER_UNIT);
2395 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2396 &DECL_FIELD_BIT_OFFSET (field_decl),
2397 DECL_OFFSET_ALIGN (field_decl), pos);
2400 /* In addition to what our caller says, claim the field is addressable if we
2401 know that its type is not suitable.
2403 The field may also be "technically" nonaddressable, meaning that even if
2404 we attempt to take the field's address we will actually get the address
2405 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2406 value we have at this point is not accurate enough, so we don't account
2407 for this here and let finish_record_type decide. */
2408 if (!addressable && !type_for_nonaliased_component_p (field_type))
2409 addressable = 1;
2411 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2413 return field_decl;
2416 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
2417 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
2418 (either an In parameter or an address of a pass-by-ref parameter). */
2420 tree
2421 create_param_decl (tree param_name, tree param_type, bool readonly)
2423 tree param_decl = build_decl (input_location,
2424 PARM_DECL, param_name, param_type);
2426 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2427 can lead to various ABI violations. */
2428 if (targetm.calls.promote_prototypes (NULL_TREE)
2429 && INTEGRAL_TYPE_P (param_type)
2430 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
2432 /* We have to be careful about biased types here. Make a subtype
2433 of integer_type_node with the proper biasing. */
2434 if (TREE_CODE (param_type) == INTEGER_TYPE
2435 && TYPE_BIASED_REPRESENTATION_P (param_type))
2437 tree subtype
2438 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2439 TREE_TYPE (subtype) = integer_type_node;
2440 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2441 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
2442 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
2443 param_type = subtype;
2445 else
2446 param_type = integer_type_node;
2449 DECL_ARG_TYPE (param_decl) = param_type;
2450 TREE_READONLY (param_decl) = readonly;
2451 return param_decl;
2454 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2455 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2456 changed. GNAT_NODE is used for the position of error messages. */
2458 void
2459 process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2460 Node_Id gnat_node)
2462 struct attrib *attr;
2464 for (attr = *attr_list; attr; attr = attr->next)
2465 switch (attr->type)
2467 case ATTR_MACHINE_ATTRIBUTE:
2468 Sloc_to_locus (Sloc (gnat_node), &input_location);
2469 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2470 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
2471 break;
2473 case ATTR_LINK_ALIAS:
2474 if (!DECL_EXTERNAL (*node))
2476 TREE_STATIC (*node) = 1;
2477 assemble_alias (*node, attr->name);
2479 break;
2481 case ATTR_WEAK_EXTERNAL:
2482 if (SUPPORTS_WEAK)
2483 declare_weak (*node);
2484 else
2485 post_error ("?weak declarations not supported on this target",
2486 attr->error_point);
2487 break;
2489 case ATTR_LINK_SECTION:
2490 if (targetm_common.have_named_sections)
2492 DECL_SECTION_NAME (*node)
2493 = build_string (IDENTIFIER_LENGTH (attr->name),
2494 IDENTIFIER_POINTER (attr->name));
2495 DECL_COMMON (*node) = 0;
2497 else
2498 post_error ("?section attributes are not supported for this target",
2499 attr->error_point);
2500 break;
2502 case ATTR_LINK_CONSTRUCTOR:
2503 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2504 TREE_USED (*node) = 1;
2505 break;
2507 case ATTR_LINK_DESTRUCTOR:
2508 DECL_STATIC_DESTRUCTOR (*node) = 1;
2509 TREE_USED (*node) = 1;
2510 break;
2512 case ATTR_THREAD_LOCAL_STORAGE:
2513 DECL_TLS_MODEL (*node) = decl_default_tls_model (*node);
2514 DECL_COMMON (*node) = 0;
2515 break;
2518 *attr_list = NULL;
2521 /* Record DECL as a global renaming pointer. */
2523 void
2524 record_global_renaming_pointer (tree decl)
2526 gcc_assert (!DECL_LOOP_PARM_P (decl) && DECL_RENAMED_OBJECT (decl));
2527 vec_safe_push (global_renaming_pointers, decl);
2530 /* Invalidate the global renaming pointers. */
2532 void
2533 invalidate_global_renaming_pointers (void)
2535 unsigned int i;
2536 tree iter;
2538 if (global_renaming_pointers == NULL)
2539 return;
2541 FOR_EACH_VEC_ELT (*global_renaming_pointers, i, iter)
2542 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
2544 vec_free (global_renaming_pointers);
2547 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2548 a power of 2. */
2550 bool
2551 value_factor_p (tree value, HOST_WIDE_INT factor)
2553 if (tree_fits_uhwi_p (value))
2554 return tree_to_uhwi (value) % factor == 0;
2556 if (TREE_CODE (value) == MULT_EXPR)
2557 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2558 || value_factor_p (TREE_OPERAND (value, 1), factor));
2560 return false;
2563 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
2565 static unsigned int
2566 scale_by_factor_of (tree expr, unsigned int value)
2568 expr = remove_conversions (expr, true);
2570 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
2571 corresponding to the number of trailing zeros of the mask. */
2572 if (TREE_CODE (expr) == BIT_AND_EXPR
2573 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
2575 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
2576 unsigned int i = 0;
2578 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
2580 mask >>= 1;
2581 value *= 2;
2582 i++;
2586 return value;
2589 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
2590 unless we can prove these 2 fields are laid out in such a way that no gap
2591 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
2592 is the distance in bits between the end of PREV_FIELD and the starting
2593 position of CURR_FIELD. It is ignored if null. */
2595 static bool
2596 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
2598 /* If this is the first field of the record, there cannot be any gap */
2599 if (!prev_field)
2600 return false;
2602 /* If the previous field is a union type, then return false: The only
2603 time when such a field is not the last field of the record is when
2604 there are other components at fixed positions after it (meaning there
2605 was a rep clause for every field), in which case we don't want the
2606 alignment constraint to override them. */
2607 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
2608 return false;
2610 /* If the distance between the end of prev_field and the beginning of
2611 curr_field is constant, then there is a gap if the value of this
2612 constant is not null. */
2613 if (offset && tree_fits_uhwi_p (offset))
2614 return !integer_zerop (offset);
2616 /* If the size and position of the previous field are constant,
2617 then check the sum of this size and position. There will be a gap
2618 iff it is not multiple of the current field alignment. */
2619 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
2620 && tree_fits_uhwi_p (bit_position (prev_field)))
2621 return ((tree_to_uhwi (bit_position (prev_field))
2622 + tree_to_uhwi (DECL_SIZE (prev_field)))
2623 % DECL_ALIGN (curr_field) != 0);
2625 /* If both the position and size of the previous field are multiples
2626 of the current field alignment, there cannot be any gap. */
2627 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
2628 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
2629 return false;
2631 /* Fallback, return that there may be a potential gap */
2632 return true;
2635 /* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
2636 of the decl. */
2638 tree
2639 create_label_decl (tree label_name, Node_Id gnat_node)
2641 tree label_decl
2642 = build_decl (input_location, LABEL_DECL, label_name, void_type_node);
2644 DECL_MODE (label_decl) = VOIDmode;
2646 /* Add this decl to the current binding level. */
2647 gnat_pushdecl (label_decl, gnat_node);
2649 return label_decl;
2652 /* Return a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
2653 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
2654 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
2655 PARM_DECL nodes chained through the DECL_CHAIN field).
2657 INLINE_STATUS, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
2658 used to set the appropriate fields in the FUNCTION_DECL. GNAT_NODE is
2659 used for the position of the decl. */
2661 tree
2662 create_subprog_decl (tree subprog_name, tree asm_name, tree subprog_type,
2663 tree param_decl_list, enum inline_status_t inline_status,
2664 bool public_flag, bool extern_flag, bool artificial_flag,
2665 struct attrib *attr_list, Node_Id gnat_node)
2667 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
2668 subprog_type);
2669 tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
2670 TREE_TYPE (subprog_type));
2671 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
2673 /* If this is a non-inline function nested inside an inlined external
2674 function, we cannot honor both requests without cloning the nested
2675 function in the current unit since it is private to the other unit.
2676 We could inline the nested function as well but it's probably better
2677 to err on the side of too little inlining. */
2678 if (inline_status != is_enabled
2679 && !public_flag
2680 && current_function_decl
2681 && DECL_DECLARED_INLINE_P (current_function_decl)
2682 && DECL_EXTERNAL (current_function_decl))
2683 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
2685 DECL_ARTIFICIAL (subprog_decl) = artificial_flag;
2686 DECL_EXTERNAL (subprog_decl) = extern_flag;
2688 switch (inline_status)
2690 case is_suppressed:
2691 DECL_UNINLINABLE (subprog_decl) = 1;
2692 break;
2694 case is_disabled:
2695 break;
2697 case is_enabled:
2698 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
2699 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_flag;
2700 break;
2702 default:
2703 gcc_unreachable ();
2706 TREE_PUBLIC (subprog_decl) = public_flag;
2707 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
2708 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
2709 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
2711 DECL_ARTIFICIAL (result_decl) = 1;
2712 DECL_IGNORED_P (result_decl) = 1;
2713 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
2714 DECL_RESULT (subprog_decl) = result_decl;
2716 if (asm_name)
2718 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
2720 /* The expand_main_function circuitry expects "main_identifier_node" to
2721 designate the DECL_NAME of the 'main' entry point, in turn expected
2722 to be declared as the "main" function literally by default. Ada
2723 program entry points are typically declared with a different name
2724 within the binder generated file, exported as 'main' to satisfy the
2725 system expectations. Force main_identifier_node in this case. */
2726 if (asm_name == main_identifier_node)
2727 DECL_NAME (subprog_decl) = main_identifier_node;
2730 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
2732 /* Add this decl to the current binding level. */
2733 gnat_pushdecl (subprog_decl, gnat_node);
2735 /* Output the assembler code and/or RTL for the declaration. */
2736 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
2738 return subprog_decl;
2741 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
2742 body. This routine needs to be invoked before processing the declarations
2743 appearing in the subprogram. */
2745 void
2746 begin_subprog_body (tree subprog_decl)
2748 tree param_decl;
2750 announce_function (subprog_decl);
2752 /* This function is being defined. */
2753 TREE_STATIC (subprog_decl) = 1;
2755 current_function_decl = subprog_decl;
2757 /* Enter a new binding level and show that all the parameters belong to
2758 this function. */
2759 gnat_pushlevel ();
2761 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
2762 param_decl = DECL_CHAIN (param_decl))
2763 DECL_CONTEXT (param_decl) = subprog_decl;
2765 make_decl_rtl (subprog_decl);
2768 /* Finish translating the current subprogram and set its BODY. */
2770 void
2771 end_subprog_body (tree body)
2773 tree fndecl = current_function_decl;
2775 /* Attach the BLOCK for this level to the function and pop the level. */
2776 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2777 DECL_INITIAL (fndecl) = current_binding_level->block;
2778 gnat_poplevel ();
2780 /* Mark the RESULT_DECL as being in this subprogram. */
2781 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2783 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
2784 if (TREE_CODE (body) == BIND_EXPR)
2786 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
2787 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
2790 DECL_SAVED_TREE (fndecl) = body;
2792 current_function_decl = decl_function_context (fndecl);
2795 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
2797 void
2798 rest_of_subprog_body_compilation (tree subprog_decl)
2800 /* We cannot track the location of errors past this point. */
2801 error_gnat_node = Empty;
2803 /* If we're only annotating types, don't actually compile this function. */
2804 if (type_annotate_only)
2805 return;
2807 /* Dump functions before gimplification. */
2808 dump_function (TDI_original, subprog_decl);
2810 if (!decl_function_context (subprog_decl))
2811 cgraph_finalize_function (subprog_decl, false);
2812 else
2813 /* Register this function with cgraph just far enough to get it
2814 added to our parent's nested function list. */
2815 (void) cgraph_get_create_node (subprog_decl);
2818 tree
2819 gnat_builtin_function (tree decl)
2821 gnat_pushdecl (decl, Empty);
2822 return decl;
2825 /* Return an integer type with the number of bits of precision given by
2826 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2827 it is a signed type. */
2829 tree
2830 gnat_type_for_size (unsigned precision, int unsignedp)
2832 tree t;
2833 char type_name[20];
2835 if (precision <= 2 * MAX_BITS_PER_WORD
2836 && signed_and_unsigned_types[precision][unsignedp])
2837 return signed_and_unsigned_types[precision][unsignedp];
2839 if (unsignedp)
2840 t = make_unsigned_type (precision);
2841 else
2842 t = make_signed_type (precision);
2844 if (precision <= 2 * MAX_BITS_PER_WORD)
2845 signed_and_unsigned_types[precision][unsignedp] = t;
2847 if (!TYPE_NAME (t))
2849 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
2850 TYPE_NAME (t) = get_identifier (type_name);
2853 return t;
2856 /* Likewise for floating-point types. */
2858 static tree
2859 float_type_for_precision (int precision, enum machine_mode mode)
2861 tree t;
2862 char type_name[20];
2864 if (float_types[(int) mode])
2865 return float_types[(int) mode];
2867 float_types[(int) mode] = t = make_node (REAL_TYPE);
2868 TYPE_PRECISION (t) = precision;
2869 layout_type (t);
2871 gcc_assert (TYPE_MODE (t) == mode);
2872 if (!TYPE_NAME (t))
2874 sprintf (type_name, "FLOAT_%d", precision);
2875 TYPE_NAME (t) = get_identifier (type_name);
2878 return t;
2881 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2882 an unsigned type; otherwise a signed type is returned. */
2884 tree
2885 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2887 if (mode == BLKmode)
2888 return NULL_TREE;
2890 if (mode == VOIDmode)
2891 return void_type_node;
2893 if (COMPLEX_MODE_P (mode))
2894 return NULL_TREE;
2896 if (SCALAR_FLOAT_MODE_P (mode))
2897 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2899 if (SCALAR_INT_MODE_P (mode))
2900 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2902 if (VECTOR_MODE_P (mode))
2904 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2905 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2906 if (inner_type)
2907 return build_vector_type_for_mode (inner_type, mode);
2910 return NULL_TREE;
2913 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2915 tree
2916 gnat_unsigned_type (tree type_node)
2918 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2920 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2922 type = copy_node (type);
2923 TREE_TYPE (type) = type_node;
2925 else if (TREE_TYPE (type_node)
2926 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2927 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2929 type = copy_node (type);
2930 TREE_TYPE (type) = TREE_TYPE (type_node);
2933 return type;
2936 /* Return the signed version of a TYPE_NODE, a scalar type. */
2938 tree
2939 gnat_signed_type (tree type_node)
2941 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2943 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2945 type = copy_node (type);
2946 TREE_TYPE (type) = type_node;
2948 else if (TREE_TYPE (type_node)
2949 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2950 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2952 type = copy_node (type);
2953 TREE_TYPE (type) = TREE_TYPE (type_node);
2956 return type;
2959 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2960 transparently converted to each other. */
2963 gnat_types_compatible_p (tree t1, tree t2)
2965 enum tree_code code;
2967 /* This is the default criterion. */
2968 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2969 return 1;
2971 /* We only check structural equivalence here. */
2972 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2973 return 0;
2975 /* Vector types are also compatible if they have the same number of subparts
2976 and the same form of (scalar) element type. */
2977 if (code == VECTOR_TYPE
2978 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
2979 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
2980 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
2981 return 1;
2983 /* Array types are also compatible if they are constrained and have the same
2984 domain(s) and the same component type. */
2985 if (code == ARRAY_TYPE
2986 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2987 || (TYPE_DOMAIN (t1)
2988 && TYPE_DOMAIN (t2)
2989 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2990 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2991 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2992 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
2993 && (TREE_TYPE (t1) == TREE_TYPE (t2)
2994 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
2995 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
2996 return 1;
2998 return 0;
3001 /* Return true if EXPR is a useless type conversion. */
3003 bool
3004 gnat_useless_type_conversion (tree expr)
3006 if (CONVERT_EXPR_P (expr)
3007 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3008 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3009 return gnat_types_compatible_p (TREE_TYPE (expr),
3010 TREE_TYPE (TREE_OPERAND (expr, 0)));
3012 return false;
3015 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3017 bool
3018 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3019 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3021 return TYPE_CI_CO_LIST (t) == cico_list
3022 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3023 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3024 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3027 /* EXP is an expression for the size of an object. If this size contains
3028 discriminant references, replace them with the maximum (if MAX_P) or
3029 minimum (if !MAX_P) possible value of the discriminant. */
3031 tree
3032 max_size (tree exp, bool max_p)
3034 enum tree_code code = TREE_CODE (exp);
3035 tree type = TREE_TYPE (exp);
3037 switch (TREE_CODE_CLASS (code))
3039 case tcc_declaration:
3040 case tcc_constant:
3041 return exp;
3043 case tcc_vl_exp:
3044 if (code == CALL_EXPR)
3046 tree t, *argarray;
3047 int n, i;
3049 t = maybe_inline_call_in_expr (exp);
3050 if (t)
3051 return max_size (t, max_p);
3053 n = call_expr_nargs (exp);
3054 gcc_assert (n > 0);
3055 argarray = XALLOCAVEC (tree, n);
3056 for (i = 0; i < n; i++)
3057 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3058 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3060 break;
3062 case tcc_reference:
3063 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3064 modify. Otherwise, we treat it like a variable. */
3065 if (!CONTAINS_PLACEHOLDER_P (exp))
3066 return exp;
3068 type = TREE_TYPE (TREE_OPERAND (exp, 1));
3069 return
3070 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
3072 case tcc_comparison:
3073 return max_p ? size_one_node : size_zero_node;
3075 case tcc_unary:
3076 if (code == NON_LVALUE_EXPR)
3077 return max_size (TREE_OPERAND (exp, 0), max_p);
3079 return fold_build1 (code, type,
3080 max_size (TREE_OPERAND (exp, 0),
3081 code == NEGATE_EXPR ? !max_p : max_p));
3083 case tcc_binary:
3085 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
3086 tree rhs = max_size (TREE_OPERAND (exp, 1),
3087 code == MINUS_EXPR ? !max_p : max_p);
3089 /* Special-case wanting the maximum value of a MIN_EXPR.
3090 In that case, if one side overflows, return the other. */
3091 if (max_p && code == MIN_EXPR)
3093 if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
3094 return lhs;
3096 if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
3097 return rhs;
3100 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3101 overflowing and the RHS a variable. */
3102 if ((code == MINUS_EXPR || code == PLUS_EXPR)
3103 && TREE_CODE (lhs) == INTEGER_CST
3104 && TREE_OVERFLOW (lhs)
3105 && !TREE_CONSTANT (rhs))
3106 return lhs;
3108 return size_binop (code, lhs, rhs);
3111 case tcc_expression:
3112 switch (TREE_CODE_LENGTH (code))
3114 case 1:
3115 if (code == SAVE_EXPR)
3116 return exp;
3118 return fold_build1 (code, type,
3119 max_size (TREE_OPERAND (exp, 0), max_p));
3121 case 2:
3122 if (code == COMPOUND_EXPR)
3123 return max_size (TREE_OPERAND (exp, 1), max_p);
3125 return fold_build2 (code, type,
3126 max_size (TREE_OPERAND (exp, 0), max_p),
3127 max_size (TREE_OPERAND (exp, 1), max_p));
3129 case 3:
3130 if (code == COND_EXPR)
3131 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
3132 max_size (TREE_OPERAND (exp, 1), max_p),
3133 max_size (TREE_OPERAND (exp, 2), max_p));
3135 default:
3136 break;
3139 /* Other tree classes cannot happen. */
3140 default:
3141 break;
3144 gcc_unreachable ();
3147 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3148 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3149 Return a constructor for the template. */
3151 tree
3152 build_template (tree template_type, tree array_type, tree expr)
3154 vec<constructor_elt, va_gc> *template_elts = NULL;
3155 tree bound_list = NULL_TREE;
3156 tree field;
3158 while (TREE_CODE (array_type) == RECORD_TYPE
3159 && (TYPE_PADDING_P (array_type)
3160 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3161 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3163 if (TREE_CODE (array_type) == ARRAY_TYPE
3164 || (TREE_CODE (array_type) == INTEGER_TYPE
3165 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3166 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3168 /* First make the list for a CONSTRUCTOR for the template. Go down the
3169 field list of the template instead of the type chain because this
3170 array might be an Ada array of arrays and we can't tell where the
3171 nested arrays stop being the underlying object. */
3173 for (field = TYPE_FIELDS (template_type); field;
3174 (bound_list
3175 ? (bound_list = TREE_CHAIN (bound_list))
3176 : (array_type = TREE_TYPE (array_type))),
3177 field = DECL_CHAIN (DECL_CHAIN (field)))
3179 tree bounds, min, max;
3181 /* If we have a bound list, get the bounds from there. Likewise
3182 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3183 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3184 This will give us a maximum range. */
3185 if (bound_list)
3186 bounds = TREE_VALUE (bound_list);
3187 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3188 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3189 else if (expr && TREE_CODE (expr) == PARM_DECL
3190 && DECL_BY_COMPONENT_PTR_P (expr))
3191 bounds = TREE_TYPE (field);
3192 else
3193 gcc_unreachable ();
3195 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
3196 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
3198 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3199 substitute it from OBJECT. */
3200 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3201 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3203 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
3204 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
3207 return gnat_build_constructor (template_type, template_elts);
3210 /* Helper routine to make a descriptor field. FIELD_LIST is the list of decls
3211 being built; the new decl is chained on to the front of the list. */
3213 static tree
3214 make_descriptor_field (const char *name, tree type, tree rec_type,
3215 tree initial, tree field_list)
3217 tree field
3218 = create_field_decl (get_identifier (name), type, rec_type, NULL_TREE,
3219 NULL_TREE, 0, 0);
3221 DECL_INITIAL (field) = initial;
3222 DECL_CHAIN (field) = field_list;
3223 return field;
3226 /* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
3227 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3228 type contains in its DECL_INITIAL the expression to use when a constructor
3229 is made for the type. GNAT_ENTITY is an entity used to print out an error
3230 message if the mechanism cannot be applied to an object of that type and
3231 also for the name. */
3233 tree
3234 build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
3236 tree record_type = make_node (RECORD_TYPE);
3237 tree pointer32_type, pointer64_type;
3238 tree field_list = NULL_TREE;
3239 int klass, ndim, i, dtype = 0;
3240 tree inner_type, tem;
3241 tree *idx_arr;
3243 /* If TYPE is an unconstrained array, use the underlying array type. */
3244 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3245 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
3247 /* If this is an array, compute the number of dimensions in the array,
3248 get the index types, and point to the inner type. */
3249 if (TREE_CODE (type) != ARRAY_TYPE)
3250 ndim = 0;
3251 else
3252 for (ndim = 1, inner_type = type;
3253 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
3254 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
3255 ndim++, inner_type = TREE_TYPE (inner_type))
3258 idx_arr = XALLOCAVEC (tree, ndim);
3260 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
3261 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
3262 for (i = ndim - 1, inner_type = type;
3263 i >= 0;
3264 i--, inner_type = TREE_TYPE (inner_type))
3265 idx_arr[i] = TYPE_DOMAIN (inner_type);
3266 else
3267 for (i = 0, inner_type = type;
3268 i < ndim;
3269 i++, inner_type = TREE_TYPE (inner_type))
3270 idx_arr[i] = TYPE_DOMAIN (inner_type);
3272 /* Now get the DTYPE value. */
3273 switch (TREE_CODE (type))
3275 case INTEGER_TYPE:
3276 case ENUMERAL_TYPE:
3277 case BOOLEAN_TYPE:
3278 if (TYPE_VAX_FLOATING_POINT_P (type))
3279 switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
3281 case 6:
3282 dtype = 10;
3283 break;
3284 case 9:
3285 dtype = 11;
3286 break;
3287 case 15:
3288 dtype = 27;
3289 break;
3291 else
3292 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
3294 case 8:
3295 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
3296 break;
3297 case 16:
3298 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
3299 break;
3300 case 32:
3301 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
3302 break;
3303 case 64:
3304 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
3305 break;
3306 case 128:
3307 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
3308 break;
3310 break;
3312 case REAL_TYPE:
3313 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
3314 break;
3316 case COMPLEX_TYPE:
3317 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
3318 && TYPE_VAX_FLOATING_POINT_P (type))
3319 switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
3321 case 6:
3322 dtype = 12;
3323 break;
3324 case 9:
3325 dtype = 13;
3326 break;
3327 case 15:
3328 dtype = 29;
3330 else
3331 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
3332 break;
3334 case ARRAY_TYPE:
3335 dtype = 14;
3336 break;
3338 default:
3339 break;
3342 /* Get the CLASS value. */
3343 switch (mech)
3345 case By_Descriptor_A:
3346 case By_Short_Descriptor_A:
3347 klass = 4;
3348 break;
3349 case By_Descriptor_NCA:
3350 case By_Short_Descriptor_NCA:
3351 klass = 10;
3352 break;
3353 case By_Descriptor_SB:
3354 case By_Short_Descriptor_SB:
3355 klass = 15;
3356 break;
3357 case By_Descriptor:
3358 case By_Short_Descriptor:
3359 case By_Descriptor_S:
3360 case By_Short_Descriptor_S:
3361 default:
3362 klass = 1;
3363 break;
3366 /* Make the type for a descriptor for VMS. The first four fields are the
3367 same for all types. */
3368 field_list
3369 = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type,
3370 size_in_bytes ((mech == By_Descriptor_A
3371 || mech == By_Short_Descriptor_A)
3372 ? inner_type : type),
3373 field_list);
3374 field_list
3375 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type,
3376 size_int (dtype), field_list);
3377 field_list
3378 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type,
3379 size_int (klass), field_list);
3381 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
3382 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
3384 /* Ensure that only 32-bit pointers are passed in 32-bit descriptors. Note
3385 that we cannot build a template call to the CE routine as it would get a
3386 wrong source location; instead we use a second placeholder for it. */
3387 tem = build_unary_op (ADDR_EXPR, pointer64_type,
3388 build0 (PLACEHOLDER_EXPR, type));
3389 tem = build3 (COND_EXPR, pointer32_type,
3390 Pmode != SImode
3391 ? build_binary_op (GE_EXPR, boolean_type_node, tem,
3392 build_int_cstu (pointer64_type, 0x80000000))
3393 : boolean_false_node,
3394 build0 (PLACEHOLDER_EXPR, void_type_node),
3395 convert (pointer32_type, tem));
3397 field_list
3398 = make_descriptor_field ("POINTER", pointer32_type, record_type, tem,
3399 field_list);
3401 switch (mech)
3403 case By_Descriptor:
3404 case By_Short_Descriptor:
3405 case By_Descriptor_S:
3406 case By_Short_Descriptor_S:
3407 break;
3409 case By_Descriptor_SB:
3410 case By_Short_Descriptor_SB:
3411 field_list
3412 = make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
3413 record_type,
3414 (TREE_CODE (type) == ARRAY_TYPE
3415 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
3416 : size_zero_node),
3417 field_list);
3418 field_list
3419 = make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
3420 record_type,
3421 (TREE_CODE (type) == ARRAY_TYPE
3422 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
3423 : size_zero_node),
3424 field_list);
3425 break;
3427 case By_Descriptor_A:
3428 case By_Short_Descriptor_A:
3429 case By_Descriptor_NCA:
3430 case By_Short_Descriptor_NCA:
3431 field_list
3432 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
3433 record_type, size_zero_node, field_list);
3435 field_list
3436 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
3437 record_type, size_zero_node, field_list);
3439 field_list
3440 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
3441 record_type,
3442 size_int ((mech == By_Descriptor_NCA
3443 || mech == By_Short_Descriptor_NCA)
3445 /* Set FL_COLUMN, FL_COEFF, and
3446 FL_BOUNDS. */
3447 : (TREE_CODE (type) == ARRAY_TYPE
3448 && TYPE_CONVENTION_FORTRAN_P
3449 (type)
3450 ? 224 : 192)),
3451 field_list);
3453 field_list
3454 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
3455 record_type, size_int (ndim), field_list);
3457 field_list
3458 = make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
3459 record_type, size_in_bytes (type),
3460 field_list);
3462 /* Now build a pointer to the 0,0,0... element. */
3463 tem = build0 (PLACEHOLDER_EXPR, type);
3464 for (i = 0, inner_type = type; i < ndim;
3465 i++, inner_type = TREE_TYPE (inner_type))
3466 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3467 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3468 NULL_TREE, NULL_TREE);
3470 field_list
3471 = make_descriptor_field ("A0", pointer32_type, record_type,
3472 build1 (ADDR_EXPR, pointer32_type, tem),
3473 field_list);
3475 /* Next come the addressing coefficients. */
3476 tem = size_one_node;
3477 for (i = 0; i < ndim; i++)
3479 char fname[3];
3480 tree idx_length
3481 = size_binop (MULT_EXPR, tem,
3482 size_binop (PLUS_EXPR,
3483 size_binop (MINUS_EXPR,
3484 TYPE_MAX_VALUE (idx_arr[i]),
3485 TYPE_MIN_VALUE (idx_arr[i])),
3486 size_int (1)));
3488 fname[0] = ((mech == By_Descriptor_NCA ||
3489 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
3490 fname[1] = '0' + i, fname[2] = 0;
3491 field_list
3492 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
3493 record_type, idx_length, field_list);
3495 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
3496 tem = idx_length;
3499 /* Finally here are the bounds. */
3500 for (i = 0; i < ndim; i++)
3502 char fname[3];
3504 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3505 field_list
3506 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
3507 record_type, TYPE_MIN_VALUE (idx_arr[i]),
3508 field_list);
3510 fname[0] = 'U';
3511 field_list
3512 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
3513 record_type, TYPE_MAX_VALUE (idx_arr[i]),
3514 field_list);
3516 break;
3518 default:
3519 post_error ("unsupported descriptor type for &", gnat_entity);
3522 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
3523 finish_record_type (record_type, nreverse (field_list), 0, false);
3524 return record_type;
3527 /* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
3528 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3529 type contains in its DECL_INITIAL the expression to use when a constructor
3530 is made for the type. GNAT_ENTITY is an entity used to print out an error
3531 message if the mechanism cannot be applied to an object of that type and
3532 also for the name. */
3534 tree
3535 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
3537 tree record_type = make_node (RECORD_TYPE);
3538 tree pointer64_type;
3539 tree field_list = NULL_TREE;
3540 int klass, ndim, i, dtype = 0;
3541 tree inner_type, tem;
3542 tree *idx_arr;
3544 /* If TYPE is an unconstrained array, use the underlying array type. */
3545 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3546 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
3548 /* If this is an array, compute the number of dimensions in the array,
3549 get the index types, and point to the inner type. */
3550 if (TREE_CODE (type) != ARRAY_TYPE)
3551 ndim = 0;
3552 else
3553 for (ndim = 1, inner_type = type;
3554 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
3555 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
3556 ndim++, inner_type = TREE_TYPE (inner_type))
3559 idx_arr = XALLOCAVEC (tree, ndim);
3561 if (mech != By_Descriptor_NCA
3562 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
3563 for (i = ndim - 1, inner_type = type;
3564 i >= 0;
3565 i--, inner_type = TREE_TYPE (inner_type))
3566 idx_arr[i] = TYPE_DOMAIN (inner_type);
3567 else
3568 for (i = 0, inner_type = type;
3569 i < ndim;
3570 i++, inner_type = TREE_TYPE (inner_type))
3571 idx_arr[i] = TYPE_DOMAIN (inner_type);
3573 /* Now get the DTYPE value. */
3574 switch (TREE_CODE (type))
3576 case INTEGER_TYPE:
3577 case ENUMERAL_TYPE:
3578 case BOOLEAN_TYPE:
3579 if (TYPE_VAX_FLOATING_POINT_P (type))
3580 switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
3582 case 6:
3583 dtype = 10;
3584 break;
3585 case 9:
3586 dtype = 11;
3587 break;
3588 case 15:
3589 dtype = 27;
3590 break;
3592 else
3593 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
3595 case 8:
3596 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
3597 break;
3598 case 16:
3599 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
3600 break;
3601 case 32:
3602 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
3603 break;
3604 case 64:
3605 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
3606 break;
3607 case 128:
3608 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
3609 break;
3611 break;
3613 case REAL_TYPE:
3614 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
3615 break;
3617 case COMPLEX_TYPE:
3618 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
3619 && TYPE_VAX_FLOATING_POINT_P (type))
3620 switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
3622 case 6:
3623 dtype = 12;
3624 break;
3625 case 9:
3626 dtype = 13;
3627 break;
3628 case 15:
3629 dtype = 29;
3631 else
3632 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
3633 break;
3635 case ARRAY_TYPE:
3636 dtype = 14;
3637 break;
3639 default:
3640 break;
3643 /* Get the CLASS value. */
3644 switch (mech)
3646 case By_Descriptor_A:
3647 klass = 4;
3648 break;
3649 case By_Descriptor_NCA:
3650 klass = 10;
3651 break;
3652 case By_Descriptor_SB:
3653 klass = 15;
3654 break;
3655 case By_Descriptor:
3656 case By_Descriptor_S:
3657 default:
3658 klass = 1;
3659 break;
3662 /* Make the type for a 64-bit descriptor for VMS. The first six fields
3663 are the same for all types. */
3664 field_list
3665 = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
3666 record_type, size_int (1), field_list);
3667 field_list
3668 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
3669 record_type, size_int (dtype), field_list);
3670 field_list
3671 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
3672 record_type, size_int (klass), field_list);
3673 field_list
3674 = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
3675 record_type, size_int (-1), field_list);
3676 field_list
3677 = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
3678 record_type,
3679 size_in_bytes (mech == By_Descriptor_A
3680 ? inner_type : type),
3681 field_list);
3683 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
3685 field_list
3686 = make_descriptor_field ("POINTER", pointer64_type, record_type,
3687 build_unary_op (ADDR_EXPR, pointer64_type,
3688 build0 (PLACEHOLDER_EXPR, type)),
3689 field_list);
3691 switch (mech)
3693 case By_Descriptor:
3694 case By_Descriptor_S:
3695 break;
3697 case By_Descriptor_SB:
3698 field_list
3699 = make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
3700 record_type,
3701 (TREE_CODE (type) == ARRAY_TYPE
3702 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
3703 : size_zero_node),
3704 field_list);
3705 field_list
3706 = make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
3707 record_type,
3708 (TREE_CODE (type) == ARRAY_TYPE
3709 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
3710 : size_zero_node),
3711 field_list);
3712 break;
3714 case By_Descriptor_A:
3715 case By_Descriptor_NCA:
3716 field_list
3717 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
3718 record_type, size_zero_node, field_list);
3720 field_list
3721 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
3722 record_type, size_zero_node, field_list);
3724 dtype = (mech == By_Descriptor_NCA
3726 /* Set FL_COLUMN, FL_COEFF, and
3727 FL_BOUNDS. */
3728 : (TREE_CODE (type) == ARRAY_TYPE
3729 && TYPE_CONVENTION_FORTRAN_P (type)
3730 ? 224 : 192));
3731 field_list
3732 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
3733 record_type, size_int (dtype),
3734 field_list);
3736 field_list
3737 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
3738 record_type, size_int (ndim), field_list);
3740 field_list
3741 = make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
3742 record_type, size_int (0), field_list);
3743 field_list
3744 = make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
3745 record_type, size_in_bytes (type),
3746 field_list);
3748 /* Now build a pointer to the 0,0,0... element. */
3749 tem = build0 (PLACEHOLDER_EXPR, type);
3750 for (i = 0, inner_type = type; i < ndim;
3751 i++, inner_type = TREE_TYPE (inner_type))
3752 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3753 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3754 NULL_TREE, NULL_TREE);
3756 field_list
3757 = make_descriptor_field ("A0", pointer64_type, record_type,
3758 build1 (ADDR_EXPR, pointer64_type, tem),
3759 field_list);
3761 /* Next come the addressing coefficients. */
3762 tem = size_one_node;
3763 for (i = 0; i < ndim; i++)
3765 char fname[3];
3766 tree idx_length
3767 = size_binop (MULT_EXPR, tem,
3768 size_binop (PLUS_EXPR,
3769 size_binop (MINUS_EXPR,
3770 TYPE_MAX_VALUE (idx_arr[i]),
3771 TYPE_MIN_VALUE (idx_arr[i])),
3772 size_int (1)));
3774 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3775 fname[1] = '0' + i, fname[2] = 0;
3776 field_list
3777 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
3778 record_type, idx_length, field_list);
3780 if (mech == By_Descriptor_NCA)
3781 tem = idx_length;
3784 /* Finally here are the bounds. */
3785 for (i = 0; i < ndim; i++)
3787 char fname[3];
3789 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3790 field_list
3791 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
3792 record_type,
3793 TYPE_MIN_VALUE (idx_arr[i]), field_list);
3795 fname[0] = 'U';
3796 field_list
3797 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
3798 record_type,
3799 TYPE_MAX_VALUE (idx_arr[i]), field_list);
3801 break;
3803 default:
3804 post_error ("unsupported descriptor type for &", gnat_entity);
3807 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC64");
3808 finish_record_type (record_type, nreverse (field_list), 0, false);
3809 return record_type;
3812 /* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
3813 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
3815 tree
3816 fill_vms_descriptor (tree gnu_type, tree gnu_expr, Node_Id gnat_actual)
3818 vec<constructor_elt, va_gc> *v = NULL;
3819 tree field;
3821 gnu_expr = maybe_unconstrained_array (gnu_expr);
3822 gnu_expr = gnat_protect_expr (gnu_expr);
3823 gnat_mark_addressable (gnu_expr);
3825 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
3826 routine in case we have a 32-bit descriptor. */
3827 gnu_expr = build2 (COMPOUND_EXPR, void_type_node,
3828 build_call_raise (CE_Range_Check_Failed, gnat_actual,
3829 N_Raise_Constraint_Error),
3830 gnu_expr);
3832 for (field = TYPE_FIELDS (gnu_type); field; field = DECL_CHAIN (field))
3834 tree value
3835 = convert (TREE_TYPE (field),
3836 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field),
3837 gnu_expr));
3838 CONSTRUCTOR_APPEND_ELT (v, field, value);
3841 return gnat_build_constructor (gnu_type, v);
3844 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3845 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3846 which the VMS descriptor is passed. */
3848 static tree
3849 convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3851 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3852 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3853 /* The CLASS field is the 3rd field in the descriptor. */
3854 tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
3855 /* The POINTER field is the 6th field in the descriptor. */
3856 tree pointer = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass)));
3858 /* Retrieve the value of the POINTER field. */
3859 tree gnu_expr64
3860 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3862 if (POINTER_TYPE_P (gnu_type))
3863 return convert (gnu_type, gnu_expr64);
3865 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
3867 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3868 tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
3869 tree template_type = TREE_TYPE (p_bounds_type);
3870 tree min_field = TYPE_FIELDS (template_type);
3871 tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
3872 tree template_tree, template_addr, aflags, dimct, t, u;
3873 /* See the head comment of build_vms_descriptor. */
3874 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3875 tree lfield, ufield;
3876 vec<constructor_elt, va_gc> *v;
3878 /* Convert POINTER to the pointer-to-array type. */
3879 gnu_expr64 = convert (p_array_type, gnu_expr64);
3881 switch (iklass)
3883 case 1: /* Class S */
3884 case 15: /* Class SB */
3885 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3886 vec_alloc (v, 2);
3887 t = DECL_CHAIN (DECL_CHAIN (klass));
3888 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3889 CONSTRUCTOR_APPEND_ELT (v, min_field,
3890 convert (TREE_TYPE (min_field),
3891 integer_one_node));
3892 CONSTRUCTOR_APPEND_ELT (v, max_field,
3893 convert (TREE_TYPE (max_field), t));
3894 template_tree = gnat_build_constructor (template_type, v);
3895 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3897 /* For class S, we are done. */
3898 if (iklass == 1)
3899 break;
3901 /* Test that we really have a SB descriptor, like DEC Ada. */
3902 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3903 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3904 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
3905 /* If so, there is already a template in the descriptor and
3906 it is located right after the POINTER field. The fields are
3907 64bits so they must be repacked. */
3908 t = DECL_CHAIN (pointer);
3909 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3910 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3912 t = DECL_CHAIN (t);
3913 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3914 ufield = convert
3915 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
3917 /* Build the template in the form of a constructor. */
3918 vec_alloc (v, 2);
3919 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
3920 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
3921 ufield);
3922 template_tree = gnat_build_constructor (template_type, v);
3924 /* Otherwise use the {1, LENGTH} template we build above. */
3925 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3926 build_unary_op (ADDR_EXPR, p_bounds_type,
3927 template_tree),
3928 template_addr);
3929 break;
3931 case 4: /* Class A */
3932 /* The AFLAGS field is the 3rd field after the pointer in the
3933 descriptor. */
3934 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
3935 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3936 /* The DIMCT field is the next field in the descriptor after
3937 aflags. */
3938 t = DECL_CHAIN (t);
3939 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3940 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3941 or FL_COEFF or FL_BOUNDS not set. */
3942 u = build_int_cst (TREE_TYPE (aflags), 192);
3943 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
3944 build_binary_op (NE_EXPR, boolean_type_node,
3945 dimct,
3946 convert (TREE_TYPE (dimct),
3947 size_one_node)),
3948 build_binary_op (NE_EXPR, boolean_type_node,
3949 build2 (BIT_AND_EXPR,
3950 TREE_TYPE (aflags),
3951 aflags, u),
3952 u));
3953 /* There is already a template in the descriptor and it is located
3954 in block 3. The fields are 64bits so they must be repacked. */
3955 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
3956 (t)))));
3957 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3958 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3960 t = DECL_CHAIN (t);
3961 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3962 ufield = convert
3963 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
3965 /* Build the template in the form of a constructor. */
3966 vec_alloc (v, 2);
3967 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
3968 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
3969 ufield);
3970 template_tree = gnat_build_constructor (template_type, v);
3971 template_tree = build3 (COND_EXPR, template_type, u,
3972 build_call_raise (CE_Length_Check_Failed, Empty,
3973 N_Raise_Constraint_Error),
3974 template_tree);
3975 template_addr
3976 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3977 break;
3979 case 10: /* Class NCA */
3980 default:
3981 post_error ("unsupported descriptor type for &", gnat_subprog);
3982 template_addr = integer_zero_node;
3983 break;
3986 /* Build the fat pointer in the form of a constructor. */
3987 vec_alloc (v, 2);
3988 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr64);
3989 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
3990 template_addr);
3991 return gnat_build_constructor (gnu_type, v);
3994 else
3995 gcc_unreachable ();
3998 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3999 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
4000 which the VMS descriptor is passed. */
4002 static tree
4003 convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
4005 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
4006 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
4007 /* The CLASS field is the 3rd field in the descriptor. */
4008 tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
4009 /* The POINTER field is the 4th field in the descriptor. */
4010 tree pointer = DECL_CHAIN (klass);
4012 /* Retrieve the value of the POINTER field. */
4013 tree gnu_expr32
4014 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
4016 if (POINTER_TYPE_P (gnu_type))
4017 return convert (gnu_type, gnu_expr32);
4019 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
4021 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
4022 tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
4023 tree template_type = TREE_TYPE (p_bounds_type);
4024 tree min_field = TYPE_FIELDS (template_type);
4025 tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
4026 tree template_tree, template_addr, aflags, dimct, t, u;
4027 /* See the head comment of build_vms_descriptor. */
4028 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
4029 vec<constructor_elt, va_gc> *v;
4031 /* Convert POINTER to the pointer-to-array type. */
4032 gnu_expr32 = convert (p_array_type, gnu_expr32);
4034 switch (iklass)
4036 case 1: /* Class S */
4037 case 15: /* Class SB */
4038 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
4039 vec_alloc (v, 2);
4040 t = TYPE_FIELDS (desc_type);
4041 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4042 CONSTRUCTOR_APPEND_ELT (v, min_field,
4043 convert (TREE_TYPE (min_field),
4044 integer_one_node));
4045 CONSTRUCTOR_APPEND_ELT (v, max_field,
4046 convert (TREE_TYPE (max_field), t));
4047 template_tree = gnat_build_constructor (template_type, v);
4048 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
4050 /* For class S, we are done. */
4051 if (iklass == 1)
4052 break;
4054 /* Test that we really have a SB descriptor, like DEC Ada. */
4055 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
4056 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
4057 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
4058 /* If so, there is already a template in the descriptor and
4059 it is located right after the POINTER field. */
4060 t = DECL_CHAIN (pointer);
4061 template_tree
4062 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4063 /* Otherwise use the {1, LENGTH} template we build above. */
4064 template_addr = build3 (COND_EXPR, p_bounds_type, u,
4065 build_unary_op (ADDR_EXPR, p_bounds_type,
4066 template_tree),
4067 template_addr);
4068 break;
4070 case 4: /* Class A */
4071 /* The AFLAGS field is the 7th field in the descriptor. */
4072 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
4073 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4074 /* The DIMCT field is the 8th field in the descriptor. */
4075 t = DECL_CHAIN (t);
4076 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4077 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
4078 or FL_COEFF or FL_BOUNDS not set. */
4079 u = build_int_cst (TREE_TYPE (aflags), 192);
4080 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
4081 build_binary_op (NE_EXPR, boolean_type_node,
4082 dimct,
4083 convert (TREE_TYPE (dimct),
4084 size_one_node)),
4085 build_binary_op (NE_EXPR, boolean_type_node,
4086 build2 (BIT_AND_EXPR,
4087 TREE_TYPE (aflags),
4088 aflags, u),
4089 u));
4090 /* There is already a template in the descriptor and it is
4091 located at the start of block 3 (12th field). */
4092 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t))));
4093 template_tree
4094 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4095 template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
4096 build_call_raise (CE_Length_Check_Failed, Empty,
4097 N_Raise_Constraint_Error),
4098 template_tree);
4099 template_addr
4100 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
4101 break;
4103 case 10: /* Class NCA */
4104 default:
4105 post_error ("unsupported descriptor type for &", gnat_subprog);
4106 template_addr = integer_zero_node;
4107 break;
4110 /* Build the fat pointer in the form of a constructor. */
4111 vec_alloc (v, 2);
4112 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr32);
4113 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
4114 template_addr);
4116 return gnat_build_constructor (gnu_type, v);
4119 else
4120 gcc_unreachable ();
4123 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
4124 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
4125 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
4126 descriptor is passed. */
4128 tree
4129 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
4130 Entity_Id gnat_subprog)
4132 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
4133 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
4134 tree mbo = TYPE_FIELDS (desc_type);
4135 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
4136 tree mbmo = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo)));
4137 tree is64bit, gnu_expr32, gnu_expr64;
4139 /* If the field name is not MBO, it must be 32-bit and no alternate.
4140 Otherwise primary must be 64-bit and alternate 32-bit. */
4141 if (strcmp (mbostr, "MBO") != 0)
4143 tree ret = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
4144 return ret;
4147 /* Build the test for 64-bit descriptor. */
4148 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
4149 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
4150 is64bit
4151 = build_binary_op (TRUTH_ANDIF_EXPR, boolean_type_node,
4152 build_binary_op (EQ_EXPR, boolean_type_node,
4153 convert (integer_type_node, mbo),
4154 integer_one_node),
4155 build_binary_op (EQ_EXPR, boolean_type_node,
4156 convert (integer_type_node, mbmo),
4157 integer_minus_one_node));
4159 /* Build the 2 possible end results. */
4160 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
4161 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
4162 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
4163 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
4166 /* Build a type to be used to represent an aliased object whose nominal type
4167 is an unconstrained array. This consists of a RECORD_TYPE containing a
4168 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
4169 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
4170 an arbitrary unconstrained object. Use NAME as the name of the record.
4171 DEBUG_INFO_P is true if we need to write debug information for the type. */
4173 tree
4174 build_unc_object_type (tree template_type, tree object_type, tree name,
4175 bool debug_info_p)
4177 tree type = make_node (RECORD_TYPE);
4178 tree template_field
4179 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
4180 NULL_TREE, NULL_TREE, 0, 1);
4181 tree array_field
4182 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
4183 NULL_TREE, NULL_TREE, 0, 1);
4185 TYPE_NAME (type) = name;
4186 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
4187 DECL_CHAIN (template_field) = array_field;
4188 finish_record_type (type, template_field, 0, true);
4190 /* Declare it now since it will never be declared otherwise. This is
4191 necessary to ensure that its subtrees are properly marked. */
4192 create_type_decl (name, type, true, debug_info_p, Empty);
4194 return type;
4197 /* Same, taking a thin or fat pointer type instead of a template type. */
4199 tree
4200 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
4201 tree name, bool debug_info_p)
4203 tree template_type;
4205 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
4207 template_type
4208 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
4209 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
4210 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
4212 return
4213 build_unc_object_type (template_type, object_type, name, debug_info_p);
4216 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
4217 In the normal case this is just two adjustments, but we have more to
4218 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
4220 void
4221 update_pointer_to (tree old_type, tree new_type)
4223 tree ptr = TYPE_POINTER_TO (old_type);
4224 tree ref = TYPE_REFERENCE_TO (old_type);
4225 tree t;
4227 /* If this is the main variant, process all the other variants first. */
4228 if (TYPE_MAIN_VARIANT (old_type) == old_type)
4229 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
4230 update_pointer_to (t, new_type);
4232 /* If no pointers and no references, we are done. */
4233 if (!ptr && !ref)
4234 return;
4236 /* Merge the old type qualifiers in the new type.
4238 Each old variant has qualifiers for specific reasons, and the new
4239 designated type as well. Each set of qualifiers represents useful
4240 information grabbed at some point, and merging the two simply unifies
4241 these inputs into the final type description.
4243 Consider for instance a volatile type frozen after an access to constant
4244 type designating it; after the designated type's freeze, we get here with
4245 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
4246 when the access type was processed. We will make a volatile and readonly
4247 designated type, because that's what it really is.
4249 We might also get here for a non-dummy OLD_TYPE variant with different
4250 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
4251 to private record type elaboration (see the comments around the call to
4252 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
4253 the qualifiers in those cases too, to avoid accidentally discarding the
4254 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
4255 new_type
4256 = build_qualified_type (new_type,
4257 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
4259 /* If old type and new type are identical, there is nothing to do. */
4260 if (old_type == new_type)
4261 return;
4263 /* Otherwise, first handle the simple case. */
4264 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
4266 tree new_ptr, new_ref;
4268 /* If pointer or reference already points to new type, nothing to do.
4269 This can happen as update_pointer_to can be invoked multiple times
4270 on the same couple of types because of the type variants. */
4271 if ((ptr && TREE_TYPE (ptr) == new_type)
4272 || (ref && TREE_TYPE (ref) == new_type))
4273 return;
4275 /* Chain PTR and its variants at the end. */
4276 new_ptr = TYPE_POINTER_TO (new_type);
4277 if (new_ptr)
4279 while (TYPE_NEXT_PTR_TO (new_ptr))
4280 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
4281 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
4283 else
4284 TYPE_POINTER_TO (new_type) = ptr;
4286 /* Now adjust them. */
4287 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
4288 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
4290 TREE_TYPE (t) = new_type;
4291 if (TYPE_NULL_BOUNDS (t))
4292 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
4295 /* Chain REF and its variants at the end. */
4296 new_ref = TYPE_REFERENCE_TO (new_type);
4297 if (new_ref)
4299 while (TYPE_NEXT_REF_TO (new_ref))
4300 new_ref = TYPE_NEXT_REF_TO (new_ref);
4301 TYPE_NEXT_REF_TO (new_ref) = ref;
4303 else
4304 TYPE_REFERENCE_TO (new_type) = ref;
4306 /* Now adjust them. */
4307 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
4308 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
4309 TREE_TYPE (t) = new_type;
4311 TYPE_POINTER_TO (old_type) = NULL_TREE;
4312 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
4315 /* Now deal with the unconstrained array case. In this case the pointer
4316 is actually a record where both fields are pointers to dummy nodes.
4317 Turn them into pointers to the correct types using update_pointer_to.
4318 Likewise for the pointer to the object record (thin pointer). */
4319 else
4321 tree new_ptr = TYPE_POINTER_TO (new_type);
4323 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
4325 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
4326 since update_pointer_to can be invoked multiple times on the same
4327 couple of types because of the type variants. */
4328 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
4329 return;
4331 update_pointer_to
4332 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
4333 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
4335 update_pointer_to
4336 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
4337 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
4339 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
4340 TYPE_OBJECT_RECORD_TYPE (new_type));
4342 TYPE_POINTER_TO (old_type) = NULL_TREE;
4346 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
4347 unconstrained one. This involves making or finding a template. */
4349 static tree
4350 convert_to_fat_pointer (tree type, tree expr)
4352 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
4353 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
4354 tree etype = TREE_TYPE (expr);
4355 tree template_addr;
4356 vec<constructor_elt, va_gc> *v;
4357 vec_alloc (v, 2);
4359 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4360 array (compare_fat_pointers ensures that this is the full discriminant)
4361 and a valid pointer to the bounds. This latter property is necessary
4362 since the compiler can hoist the load of the bounds done through it. */
4363 if (integer_zerop (expr))
4365 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4366 tree null_bounds, t;
4368 if (TYPE_NULL_BOUNDS (ptr_template_type))
4369 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
4370 else
4372 /* The template type can still be dummy at this point so we build an
4373 empty constructor. The middle-end will fill it in with zeros. */
4374 t = build_constructor (template_type,
4375 NULL);
4376 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
4377 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
4378 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
4381 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4382 fold_convert (p_array_type, null_pointer_node));
4383 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
4384 t = build_constructor (type, v);
4385 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4386 TREE_CONSTANT (t) = 0;
4387 TREE_STATIC (t) = 1;
4389 return t;
4392 /* If EXPR is a thin pointer, make template and data from the record. */
4393 if (TYPE_IS_THIN_POINTER_P (etype))
4395 tree field = TYPE_FIELDS (TREE_TYPE (etype));
4397 expr = gnat_protect_expr (expr);
4399 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4400 the thin pointer value has been shifted so we shift it back to get
4401 the template address. */
4402 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
4404 template_addr
4405 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
4406 fold_build1 (NEGATE_EXPR, sizetype,
4407 byte_position
4408 (DECL_CHAIN (field))));
4409 template_addr
4410 = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
4411 template_addr);
4414 /* Otherwise we explicitly take the address of the fields. */
4415 else
4417 expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
4418 template_addr
4419 = build_unary_op (ADDR_EXPR, NULL_TREE,
4420 build_component_ref (expr, NULL_TREE, field,
4421 false));
4422 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
4423 build_component_ref (expr, NULL_TREE,
4424 DECL_CHAIN (field),
4425 false));
4429 /* Otherwise, build the constructor for the template. */
4430 else
4431 template_addr
4432 = build_unary_op (ADDR_EXPR, NULL_TREE,
4433 build_template (template_type, TREE_TYPE (etype),
4434 expr));
4436 /* The final result is a constructor for the fat pointer.
4438 If EXPR is an argument of a foreign convention subprogram, the type it
4439 points to is directly the component type. In this case, the expression
4440 type may not match the corresponding FIELD_DECL type at this point, so we
4441 call "convert" here to fix that up if necessary. This type consistency is
4442 required, for instance because it ensures that possible later folding of
4443 COMPONENT_REFs against this constructor always yields something of the
4444 same type as the initial reference.
4446 Note that the call to "build_template" above is still fine because it
4447 will only refer to the provided TEMPLATE_TYPE in this case. */
4448 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
4449 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
4450 return gnat_build_constructor (type, v);
4453 /* Create an expression whose value is that of EXPR,
4454 converted to type TYPE. The TREE_TYPE of the value
4455 is always TYPE. This function implements all reasonable
4456 conversions; callers should filter out those that are
4457 not permitted by the language being compiled. */
4459 tree
4460 convert (tree type, tree expr)
4462 tree etype = TREE_TYPE (expr);
4463 enum tree_code ecode = TREE_CODE (etype);
4464 enum tree_code code = TREE_CODE (type);
4466 /* If the expression is already of the right type, we are done. */
4467 if (etype == type)
4468 return expr;
4470 /* If both input and output have padding and are of variable size, do this
4471 as an unchecked conversion. Likewise if one is a mere variant of the
4472 other, so we avoid a pointless unpad/repad sequence. */
4473 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
4474 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4475 && (!TREE_CONSTANT (TYPE_SIZE (type))
4476 || !TREE_CONSTANT (TYPE_SIZE (etype))
4477 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4478 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4479 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4482 /* If the output type has padding, convert to the inner type and make a
4483 constructor to build the record, unless a variable size is involved. */
4484 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
4486 vec<constructor_elt, va_gc> *v;
4488 /* If we previously converted from another type and our type is
4489 of variable size, remove the conversion to avoid the need for
4490 variable-sized temporaries. Likewise for a conversion between
4491 original and packable version. */
4492 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4493 && (!TREE_CONSTANT (TYPE_SIZE (type))
4494 || (ecode == RECORD_TYPE
4495 && TYPE_NAME (etype)
4496 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4497 expr = TREE_OPERAND (expr, 0);
4499 /* If we are just removing the padding from expr, convert the original
4500 object if we have variable size in order to avoid the need for some
4501 variable-sized temporaries. Likewise if the padding is a variant
4502 of the other, so we avoid a pointless unpad/repad sequence. */
4503 if (TREE_CODE (expr) == COMPONENT_REF
4504 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4505 && (!TREE_CONSTANT (TYPE_SIZE (type))
4506 || TYPE_MAIN_VARIANT (type)
4507 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
4508 || (ecode == RECORD_TYPE
4509 && TYPE_NAME (etype)
4510 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4511 return convert (type, TREE_OPERAND (expr, 0));
4513 /* If the inner type is of self-referential size and the expression type
4514 is a record, do this as an unchecked conversion. But first pad the
4515 expression if possible to have the same size on both sides. */
4516 if (ecode == RECORD_TYPE
4517 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
4519 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
4520 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4521 false, false, false, true),
4522 expr);
4523 return unchecked_convert (type, expr, false);
4526 /* If we are converting between array types with variable size, do the
4527 final conversion as an unchecked conversion, again to avoid the need
4528 for some variable-sized temporaries. If valid, this conversion is
4529 very likely purely technical and without real effects. */
4530 if (ecode == ARRAY_TYPE
4531 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4532 && !TREE_CONSTANT (TYPE_SIZE (etype))
4533 && !TREE_CONSTANT (TYPE_SIZE (type)))
4534 return unchecked_convert (type,
4535 convert (TREE_TYPE (TYPE_FIELDS (type)),
4536 expr),
4537 false);
4539 vec_alloc (v, 1);
4540 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4541 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4542 return gnat_build_constructor (type, v);
4545 /* If the input type has padding, remove it and convert to the output type.
4546 The conditions ordering is arranged to ensure that the output type is not
4547 a padding type here, as it is not clear whether the conversion would
4548 always be correct if this was to happen. */
4549 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4551 tree unpadded;
4553 /* If we have just converted to this padded type, just get the
4554 inner expression. */
4555 if (TREE_CODE (expr) == CONSTRUCTOR
4556 && !vec_safe_is_empty (CONSTRUCTOR_ELTS (expr))
4557 && (*CONSTRUCTOR_ELTS (expr))[0].index == TYPE_FIELDS (etype))
4558 unpadded = (*CONSTRUCTOR_ELTS (expr))[0].value;
4560 /* Otherwise, build an explicit component reference. */
4561 else
4562 unpadded
4563 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4565 return convert (type, unpadded);
4568 /* If the input is a biased type, adjust first. */
4569 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4570 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4571 fold_convert (TREE_TYPE (etype),
4572 expr),
4573 TYPE_MIN_VALUE (etype)));
4575 /* If the input is a justified modular type, we need to extract the actual
4576 object before converting it to any other type with the exceptions of an
4577 unconstrained array or of a mere type variant. It is useful to avoid the
4578 extraction and conversion in the type variant case because it could end
4579 up replacing a VAR_DECL expr by a constructor and we might be about the
4580 take the address of the result. */
4581 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4582 && code != UNCONSTRAINED_ARRAY_TYPE
4583 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4584 return convert (type, build_component_ref (expr, NULL_TREE,
4585 TYPE_FIELDS (etype), false));
4587 /* If converting to a type that contains a template, convert to the data
4588 type and then build the template. */
4589 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4591 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4592 vec<constructor_elt, va_gc> *v;
4593 vec_alloc (v, 2);
4595 /* If the source already has a template, get a reference to the
4596 associated array only, as we are going to rebuild a template
4597 for the target type anyway. */
4598 expr = maybe_unconstrained_array (expr);
4600 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4601 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4602 obj_type, NULL_TREE));
4603 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4604 convert (obj_type, expr));
4605 return gnat_build_constructor (type, v);
4608 /* There are some cases of expressions that we process specially. */
4609 switch (TREE_CODE (expr))
4611 case ERROR_MARK:
4612 return expr;
4614 case NULL_EXPR:
4615 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4616 conversion in gnat_expand_expr. NULL_EXPR does not represent
4617 and actual value, so no conversion is needed. */
4618 expr = copy_node (expr);
4619 TREE_TYPE (expr) = type;
4620 return expr;
4622 case STRING_CST:
4623 /* If we are converting a STRING_CST to another constrained array type,
4624 just make a new one in the proper type. */
4625 if (code == ecode && AGGREGATE_TYPE_P (etype)
4626 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4627 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4629 expr = copy_node (expr);
4630 TREE_TYPE (expr) = type;
4631 return expr;
4633 break;
4635 case VECTOR_CST:
4636 /* If we are converting a VECTOR_CST to a mere type variant, just make
4637 a new one in the proper type. */
4638 if (code == ecode && gnat_types_compatible_p (type, etype))
4640 expr = copy_node (expr);
4641 TREE_TYPE (expr) = type;
4642 return expr;
4645 case CONSTRUCTOR:
4646 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4647 another padding type around the same type, just make a new one in
4648 the proper type. */
4649 if (code == ecode
4650 && (gnat_types_compatible_p (type, etype)
4651 || (code == RECORD_TYPE
4652 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4653 && TREE_TYPE (TYPE_FIELDS (type))
4654 == TREE_TYPE (TYPE_FIELDS (etype)))))
4656 expr = copy_node (expr);
4657 TREE_TYPE (expr) = type;
4658 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4659 return expr;
4662 /* Likewise for a conversion between original and packable version, or
4663 conversion between types of the same size and with the same list of
4664 fields, but we have to work harder to preserve type consistency. */
4665 if (code == ecode
4666 && code == RECORD_TYPE
4667 && (TYPE_NAME (type) == TYPE_NAME (etype)
4668 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4671 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4672 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4673 vec<constructor_elt, va_gc> *v;
4674 vec_alloc (v, len);
4675 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4676 unsigned HOST_WIDE_INT idx;
4677 tree index, value;
4679 /* Whether we need to clear TREE_CONSTANT et al. on the output
4680 constructor when we convert in place. */
4681 bool clear_constant = false;
4683 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4685 /* Skip the missing fields in the CONSTRUCTOR. */
4686 while (efield && field && !SAME_FIELD_P (efield, index))
4688 efield = DECL_CHAIN (efield);
4689 field = DECL_CHAIN (field);
4691 /* The field must be the same. */
4692 if (!(efield && field && SAME_FIELD_P (efield, field)))
4693 break;
4694 constructor_elt elt
4695 = {field, convert (TREE_TYPE (field), value)};
4696 v->quick_push (elt);
4698 /* If packing has made this field a bitfield and the input
4699 value couldn't be emitted statically any more, we need to
4700 clear TREE_CONSTANT on our output. */
4701 if (!clear_constant
4702 && TREE_CONSTANT (expr)
4703 && !CONSTRUCTOR_BITFIELD_P (efield)
4704 && CONSTRUCTOR_BITFIELD_P (field)
4705 && !initializer_constant_valid_for_bitfield_p (value))
4706 clear_constant = true;
4708 efield = DECL_CHAIN (efield);
4709 field = DECL_CHAIN (field);
4712 /* If we have been able to match and convert all the input fields
4713 to their output type, convert in place now. We'll fallback to a
4714 view conversion downstream otherwise. */
4715 if (idx == len)
4717 expr = copy_node (expr);
4718 TREE_TYPE (expr) = type;
4719 CONSTRUCTOR_ELTS (expr) = v;
4720 if (clear_constant)
4721 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4722 return expr;
4726 /* Likewise for a conversion between array type and vector type with a
4727 compatible representative array. */
4728 else if (code == VECTOR_TYPE
4729 && ecode == ARRAY_TYPE
4730 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4731 etype))
4733 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4734 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4735 vec<constructor_elt, va_gc> *v;
4736 unsigned HOST_WIDE_INT ix;
4737 tree value;
4739 /* Build a VECTOR_CST from a *constant* array constructor. */
4740 if (TREE_CONSTANT (expr))
4742 bool constant_p = true;
4744 /* Iterate through elements and check if all constructor
4745 elements are *_CSTs. */
4746 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4747 if (!CONSTANT_CLASS_P (value))
4749 constant_p = false;
4750 break;
4753 if (constant_p)
4754 return build_vector_from_ctor (type,
4755 CONSTRUCTOR_ELTS (expr));
4758 /* Otherwise, build a regular vector constructor. */
4759 vec_alloc (v, len);
4760 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4762 constructor_elt elt = {NULL_TREE, value};
4763 v->quick_push (elt);
4765 expr = copy_node (expr);
4766 TREE_TYPE (expr) = type;
4767 CONSTRUCTOR_ELTS (expr) = v;
4768 return expr;
4770 break;
4772 case UNCONSTRAINED_ARRAY_REF:
4773 /* First retrieve the underlying array. */
4774 expr = maybe_unconstrained_array (expr);
4775 etype = TREE_TYPE (expr);
4776 ecode = TREE_CODE (etype);
4777 break;
4779 case VIEW_CONVERT_EXPR:
4781 /* GCC 4.x is very sensitive to type consistency overall, and view
4782 conversions thus are very frequent. Even though just "convert"ing
4783 the inner operand to the output type is fine in most cases, it
4784 might expose unexpected input/output type mismatches in special
4785 circumstances so we avoid such recursive calls when we can. */
4786 tree op0 = TREE_OPERAND (expr, 0);
4788 /* If we are converting back to the original type, we can just
4789 lift the input conversion. This is a common occurrence with
4790 switches back-and-forth amongst type variants. */
4791 if (type == TREE_TYPE (op0))
4792 return op0;
4794 /* Otherwise, if we're converting between two aggregate or vector
4795 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4796 target type in place or to just convert the inner expression. */
4797 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4798 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4800 /* If we are converting between mere variants, we can just
4801 substitute the VIEW_CONVERT_EXPR in place. */
4802 if (gnat_types_compatible_p (type, etype))
4803 return build1 (VIEW_CONVERT_EXPR, type, op0);
4805 /* Otherwise, we may just bypass the input view conversion unless
4806 one of the types is a fat pointer, which is handled by
4807 specialized code below which relies on exact type matching. */
4808 else if (!TYPE_IS_FAT_POINTER_P (type)
4809 && !TYPE_IS_FAT_POINTER_P (etype))
4810 return convert (type, op0);
4813 break;
4816 default:
4817 break;
4820 /* Check for converting to a pointer to an unconstrained array. */
4821 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4822 return convert_to_fat_pointer (type, expr);
4824 /* If we are converting between two aggregate or vector types that are mere
4825 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4826 to a vector type from its representative array type. */
4827 else if ((code == ecode
4828 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4829 && gnat_types_compatible_p (type, etype))
4830 || (code == VECTOR_TYPE
4831 && ecode == ARRAY_TYPE
4832 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4833 etype)))
4834 return build1 (VIEW_CONVERT_EXPR, type, expr);
4836 /* If we are converting between tagged types, try to upcast properly. */
4837 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4838 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4840 tree child_etype = etype;
4841 do {
4842 tree field = TYPE_FIELDS (child_etype);
4843 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4844 return build_component_ref (expr, NULL_TREE, field, false);
4845 child_etype = TREE_TYPE (field);
4846 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4849 /* If we are converting from a smaller form of record type back to it, just
4850 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4851 size on both sides. */
4852 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4853 && smaller_form_type_p (etype, type))
4855 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4856 false, false, false, true),
4857 expr);
4858 return build1 (VIEW_CONVERT_EXPR, type, expr);
4861 /* In all other cases of related types, make a NOP_EXPR. */
4862 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4863 return fold_convert (type, expr);
4865 switch (code)
4867 case VOID_TYPE:
4868 return fold_build1 (CONVERT_EXPR, type, expr);
4870 case INTEGER_TYPE:
4871 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4872 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4873 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4874 return unchecked_convert (type, expr, false);
4875 else if (TYPE_BIASED_REPRESENTATION_P (type))
4876 return fold_convert (type,
4877 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4878 convert (TREE_TYPE (type), expr),
4879 TYPE_MIN_VALUE (type)));
4881 /* ... fall through ... */
4883 case ENUMERAL_TYPE:
4884 case BOOLEAN_TYPE:
4885 /* If we are converting an additive expression to an integer type
4886 with lower precision, be wary of the optimization that can be
4887 applied by convert_to_integer. There are 2 problematic cases:
4888 - if the first operand was originally of a biased type,
4889 because we could be recursively called to convert it
4890 to an intermediate type and thus rematerialize the
4891 additive operator endlessly,
4892 - if the expression contains a placeholder, because an
4893 intermediate conversion that changes the sign could
4894 be inserted and thus introduce an artificial overflow
4895 at compile time when the placeholder is substituted. */
4896 if (code == INTEGER_TYPE
4897 && ecode == INTEGER_TYPE
4898 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4899 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4901 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4903 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4904 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4905 || CONTAINS_PLACEHOLDER_P (expr))
4906 return build1 (NOP_EXPR, type, expr);
4909 return fold (convert_to_integer (type, expr));
4911 case POINTER_TYPE:
4912 case REFERENCE_TYPE:
4913 /* If converting between two thin pointers, adjust if needed to account
4914 for differing offsets from the base pointer, depending on whether
4915 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4916 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4918 tree etype_pos
4919 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)) != NULL_TREE
4920 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4921 : size_zero_node;
4922 tree type_pos
4923 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != NULL_TREE
4924 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4925 : size_zero_node;
4926 tree byte_diff = size_diffop (type_pos, etype_pos);
4928 expr = build1 (NOP_EXPR, type, expr);
4929 if (integer_zerop (byte_diff))
4930 return expr;
4932 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4933 fold_convert (sizetype, byte_diff));
4936 /* If converting fat pointer to normal or thin pointer, get the pointer
4937 to the array and then convert it. */
4938 if (TYPE_IS_FAT_POINTER_P (etype))
4939 expr
4940 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4942 return fold (convert_to_pointer (type, expr));
4944 case REAL_TYPE:
4945 return fold (convert_to_real (type, expr));
4947 case RECORD_TYPE:
4948 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4950 vec<constructor_elt, va_gc> *v;
4951 vec_alloc (v, 1);
4953 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4954 convert (TREE_TYPE (TYPE_FIELDS (type)),
4955 expr));
4956 return gnat_build_constructor (type, v);
4959 /* ... fall through ... */
4961 case ARRAY_TYPE:
4962 /* In these cases, assume the front-end has validated the conversion.
4963 If the conversion is valid, it will be a bit-wise conversion, so
4964 it can be viewed as an unchecked conversion. */
4965 return unchecked_convert (type, expr, false);
4967 case UNION_TYPE:
4968 /* This is a either a conversion between a tagged type and some
4969 subtype, which we have to mark as a UNION_TYPE because of
4970 overlapping fields or a conversion of an Unchecked_Union. */
4971 return unchecked_convert (type, expr, false);
4973 case UNCONSTRAINED_ARRAY_TYPE:
4974 /* If the input is a VECTOR_TYPE, convert to the representative
4975 array type first. */
4976 if (ecode == VECTOR_TYPE)
4978 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4979 etype = TREE_TYPE (expr);
4980 ecode = TREE_CODE (etype);
4983 /* If EXPR is a constrained array, take its address, convert it to a
4984 fat pointer, and then dereference it. Likewise if EXPR is a
4985 record containing both a template and a constrained array.
4986 Note that a record representing a justified modular type
4987 always represents a packed constrained array. */
4988 if (ecode == ARRAY_TYPE
4989 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4990 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4991 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4992 return
4993 build_unary_op
4994 (INDIRECT_REF, NULL_TREE,
4995 convert_to_fat_pointer (TREE_TYPE (type),
4996 build_unary_op (ADDR_EXPR,
4997 NULL_TREE, expr)));
4999 /* Do something very similar for converting one unconstrained
5000 array to another. */
5001 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
5002 return
5003 build_unary_op (INDIRECT_REF, NULL_TREE,
5004 convert (TREE_TYPE (type),
5005 build_unary_op (ADDR_EXPR,
5006 NULL_TREE, expr)));
5007 else
5008 gcc_unreachable ();
5010 case COMPLEX_TYPE:
5011 return fold (convert_to_complex (type, expr));
5013 default:
5014 gcc_unreachable ();
5018 /* Create an expression whose value is that of EXPR converted to the common
5019 index type, which is sizetype. EXPR is supposed to be in the base type
5020 of the GNAT index type. Calling it is equivalent to doing
5022 convert (sizetype, expr)
5024 but we try to distribute the type conversion with the knowledge that EXPR
5025 cannot overflow in its type. This is a best-effort approach and we fall
5026 back to the above expression as soon as difficulties are encountered.
5028 This is necessary to overcome issues that arise when the GNAT base index
5029 type and the GCC common index type (sizetype) don't have the same size,
5030 which is quite frequent on 64-bit architectures. In this case, and if
5031 the GNAT base index type is signed but the iteration type of the loop has
5032 been forced to unsigned, the loop scalar evolution engine cannot compute
5033 a simple evolution for the general induction variables associated with the
5034 array indices, because it will preserve the wrap-around semantics in the
5035 unsigned type of their "inner" part. As a result, many loop optimizations
5036 are blocked.
5038 The solution is to use a special (basic) induction variable that is at
5039 least as large as sizetype, and to express the aforementioned general
5040 induction variables in terms of this induction variable, eliminating
5041 the problematic intermediate truncation to the GNAT base index type.
5042 This is possible as long as the original expression doesn't overflow
5043 and if the middle-end hasn't introduced artificial overflows in the
5044 course of the various simplification it can make to the expression. */
5046 tree
5047 convert_to_index_type (tree expr)
5049 enum tree_code code = TREE_CODE (expr);
5050 tree type = TREE_TYPE (expr);
5052 /* If the type is unsigned, overflow is allowed so we cannot be sure that
5053 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
5054 if (TYPE_UNSIGNED (type) || !optimize)
5055 return convert (sizetype, expr);
5057 switch (code)
5059 case VAR_DECL:
5060 /* The main effect of the function: replace a loop parameter with its
5061 associated special induction variable. */
5062 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
5063 expr = DECL_INDUCTION_VAR (expr);
5064 break;
5066 CASE_CONVERT:
5068 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
5069 /* Bail out as soon as we suspect some sort of type frobbing. */
5070 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
5071 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
5072 break;
5075 /* ... fall through ... */
5077 case NON_LVALUE_EXPR:
5078 return fold_build1 (code, sizetype,
5079 convert_to_index_type (TREE_OPERAND (expr, 0)));
5081 case PLUS_EXPR:
5082 case MINUS_EXPR:
5083 case MULT_EXPR:
5084 return fold_build2 (code, sizetype,
5085 convert_to_index_type (TREE_OPERAND (expr, 0)),
5086 convert_to_index_type (TREE_OPERAND (expr, 1)));
5088 case COMPOUND_EXPR:
5089 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
5090 convert_to_index_type (TREE_OPERAND (expr, 1)));
5092 case COND_EXPR:
5093 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
5094 convert_to_index_type (TREE_OPERAND (expr, 1)),
5095 convert_to_index_type (TREE_OPERAND (expr, 2)));
5097 default:
5098 break;
5101 return convert (sizetype, expr);
5104 /* Remove all conversions that are done in EXP. This includes converting
5105 from a padded type or to a justified modular type. If TRUE_ADDRESS
5106 is true, always return the address of the containing object even if
5107 the address is not bit-aligned. */
5109 tree
5110 remove_conversions (tree exp, bool true_address)
5112 switch (TREE_CODE (exp))
5114 case CONSTRUCTOR:
5115 if (true_address
5116 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
5117 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
5118 return
5119 remove_conversions ((*CONSTRUCTOR_ELTS (exp))[0].value, true);
5120 break;
5122 case COMPONENT_REF:
5123 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
5124 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5125 break;
5127 CASE_CONVERT:
5128 case VIEW_CONVERT_EXPR:
5129 case NON_LVALUE_EXPR:
5130 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5132 default:
5133 break;
5136 return exp;
5139 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
5140 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
5141 likewise return an expression pointing to the underlying array. */
5143 tree
5144 maybe_unconstrained_array (tree exp)
5146 enum tree_code code = TREE_CODE (exp);
5147 tree type = TREE_TYPE (exp);
5149 switch (TREE_CODE (type))
5151 case UNCONSTRAINED_ARRAY_TYPE:
5152 if (code == UNCONSTRAINED_ARRAY_REF)
5154 const bool read_only = TREE_READONLY (exp);
5155 const bool no_trap = TREE_THIS_NOTRAP (exp);
5157 exp = TREE_OPERAND (exp, 0);
5158 type = TREE_TYPE (exp);
5160 if (TREE_CODE (exp) == COND_EXPR)
5162 tree op1
5163 = build_unary_op (INDIRECT_REF, NULL_TREE,
5164 build_component_ref (TREE_OPERAND (exp, 1),
5165 NULL_TREE,
5166 TYPE_FIELDS (type),
5167 false));
5168 tree op2
5169 = build_unary_op (INDIRECT_REF, NULL_TREE,
5170 build_component_ref (TREE_OPERAND (exp, 2),
5171 NULL_TREE,
5172 TYPE_FIELDS (type),
5173 false));
5175 exp = build3 (COND_EXPR,
5176 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
5177 TREE_OPERAND (exp, 0), op1, op2);
5179 else
5181 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
5182 build_component_ref (exp, NULL_TREE,
5183 TYPE_FIELDS (type),
5184 false));
5185 TREE_READONLY (exp) = read_only;
5186 TREE_THIS_NOTRAP (exp) = no_trap;
5190 else if (code == NULL_EXPR)
5191 exp = build1 (NULL_EXPR,
5192 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
5193 TREE_OPERAND (exp, 0));
5194 break;
5196 case RECORD_TYPE:
5197 /* If this is a padded type and it contains a template, convert to the
5198 unpadded type first. */
5199 if (TYPE_PADDING_P (type)
5200 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
5201 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
5203 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5204 type = TREE_TYPE (exp);
5207 if (TYPE_CONTAINS_TEMPLATE_P (type))
5209 exp = build_component_ref (exp, NULL_TREE,
5210 DECL_CHAIN (TYPE_FIELDS (type)),
5211 false);
5212 type = TREE_TYPE (exp);
5214 /* If the array type is padded, convert to the unpadded type. */
5215 if (TYPE_IS_PADDING_P (type))
5216 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5218 break;
5220 default:
5221 break;
5224 return exp;
5227 /* Return true if EXPR is an expression that can be folded as an operand
5228 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
5230 static bool
5231 can_fold_for_view_convert_p (tree expr)
5233 tree t1, t2;
5235 /* The folder will fold NOP_EXPRs between integral types with the same
5236 precision (in the middle-end's sense). We cannot allow it if the
5237 types don't have the same precision in the Ada sense as well. */
5238 if (TREE_CODE (expr) != NOP_EXPR)
5239 return true;
5241 t1 = TREE_TYPE (expr);
5242 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
5244 /* Defer to the folder for non-integral conversions. */
5245 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
5246 return true;
5248 /* Only fold conversions that preserve both precisions. */
5249 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
5250 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
5251 return true;
5253 return false;
5256 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
5257 If NOTRUNC_P is true, truncation operations should be suppressed.
5259 Special care is required with (source or target) integral types whose
5260 precision is not equal to their size, to make sure we fetch or assign
5261 the value bits whose location might depend on the endianness, e.g.
5263 Rmsize : constant := 8;
5264 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5266 type Bit_Array is array (1 .. Rmsize) of Boolean;
5267 pragma Pack (Bit_Array);
5269 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5271 Value : Int := 2#1000_0001#;
5272 Vbits : Bit_Array := To_Bit_Array (Value);
5274 we expect the 8 bits at Vbits'Address to always contain Value, while
5275 their original location depends on the endianness, at Value'Address
5276 on a little-endian architecture but not on a big-endian one. */
5278 tree
5279 unchecked_convert (tree type, tree expr, bool notrunc_p)
5281 tree etype = TREE_TYPE (expr);
5282 enum tree_code ecode = TREE_CODE (etype);
5283 enum tree_code code = TREE_CODE (type);
5284 int c;
5286 /* If the expression is already of the right type, we are done. */
5287 if (etype == type)
5288 return expr;
5290 /* If both types types are integral just do a normal conversion.
5291 Likewise for a conversion to an unconstrained array. */
5292 if ((((INTEGRAL_TYPE_P (type)
5293 && !(code == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (type)))
5294 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
5295 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
5296 && ((INTEGRAL_TYPE_P (etype)
5297 && !(ecode == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (etype)))
5298 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
5299 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
5300 || code == UNCONSTRAINED_ARRAY_TYPE)
5302 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
5304 tree ntype = copy_type (etype);
5305 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
5306 TYPE_MAIN_VARIANT (ntype) = ntype;
5307 expr = build1 (NOP_EXPR, ntype, expr);
5310 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
5312 tree rtype = copy_type (type);
5313 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
5314 TYPE_MAIN_VARIANT (rtype) = rtype;
5315 expr = convert (rtype, expr);
5316 expr = build1 (NOP_EXPR, type, expr);
5318 else
5319 expr = convert (type, expr);
5322 /* If we are converting to an integral type whose precision is not equal
5323 to its size, first unchecked convert to a record type that contains an
5324 field of the given precision. Then extract the field. */
5325 else if (INTEGRAL_TYPE_P (type)
5326 && TYPE_RM_SIZE (type)
5327 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
5328 GET_MODE_BITSIZE (TYPE_MODE (type))))
5330 tree rec_type = make_node (RECORD_TYPE);
5331 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
5332 tree field_type, field;
5334 if (TYPE_UNSIGNED (type))
5335 field_type = make_unsigned_type (prec);
5336 else
5337 field_type = make_signed_type (prec);
5338 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
5340 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5341 NULL_TREE, bitsize_zero_node, 1, 0);
5343 finish_record_type (rec_type, field, 1, false);
5345 expr = unchecked_convert (rec_type, expr, notrunc_p);
5346 expr = build_component_ref (expr, NULL_TREE, field, false);
5347 expr = fold_build1 (NOP_EXPR, type, expr);
5350 /* Similarly if we are converting from an integral type whose precision is
5351 not equal to its size, first copy into a field of the given precision
5352 and unchecked convert the record type. */
5353 else if (INTEGRAL_TYPE_P (etype)
5354 && TYPE_RM_SIZE (etype)
5355 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
5356 GET_MODE_BITSIZE (TYPE_MODE (etype))))
5358 tree rec_type = make_node (RECORD_TYPE);
5359 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
5360 vec<constructor_elt, va_gc> *v;
5361 vec_alloc (v, 1);
5362 tree field_type, field;
5364 if (TYPE_UNSIGNED (etype))
5365 field_type = make_unsigned_type (prec);
5366 else
5367 field_type = make_signed_type (prec);
5368 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
5370 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5371 NULL_TREE, bitsize_zero_node, 1, 0);
5373 finish_record_type (rec_type, field, 1, false);
5375 expr = fold_build1 (NOP_EXPR, field_type, expr);
5376 CONSTRUCTOR_APPEND_ELT (v, field, expr);
5377 expr = gnat_build_constructor (rec_type, v);
5378 expr = unchecked_convert (type, expr, notrunc_p);
5381 /* If we are converting from a scalar type to a type with a different size,
5382 we need to pad to have the same size on both sides.
5384 ??? We cannot do it unconditionally because unchecked conversions are
5385 used liberally by the front-end to implement polymorphism, e.g. in:
5387 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5388 return p___size__4 (p__object!(S191s.all));
5390 so we skip all expressions that are references. */
5391 else if (!REFERENCE_CLASS_P (expr)
5392 && !AGGREGATE_TYPE_P (etype)
5393 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5394 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
5396 if (c < 0)
5398 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5399 false, false, false, true),
5400 expr);
5401 expr = unchecked_convert (type, expr, notrunc_p);
5403 else
5405 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5406 false, false, false, true);
5407 expr = unchecked_convert (rec_type, expr, notrunc_p);
5408 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
5409 false);
5413 /* We have a special case when we are converting between two unconstrained
5414 array types. In that case, take the address, convert the fat pointer
5415 types, and dereference. */
5416 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
5417 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5418 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5419 build_unary_op (ADDR_EXPR, NULL_TREE,
5420 expr)));
5422 /* Another special case is when we are converting to a vector type from its
5423 representative array type; this a regular conversion. */
5424 else if (code == VECTOR_TYPE
5425 && ecode == ARRAY_TYPE
5426 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5427 etype))
5428 expr = convert (type, expr);
5430 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
5431 the alignment of the CONSTRUCTOR to speed up the copy operation. */
5432 else if (TREE_CODE (expr) == CONSTRUCTOR
5433 && code == RECORD_TYPE
5434 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
5436 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
5437 Empty, false, false, false, true),
5438 expr);
5439 return unchecked_convert (type, expr, notrunc_p);
5442 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
5443 else
5445 expr = maybe_unconstrained_array (expr);
5446 etype = TREE_TYPE (expr);
5447 ecode = TREE_CODE (etype);
5448 if (can_fold_for_view_convert_p (expr))
5449 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5450 else
5451 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
5454 /* If the result is an integral type whose precision is not equal to its
5455 size, sign- or zero-extend the result. We need not do this if the input
5456 is an integral type of the same precision and signedness or if the output
5457 is a biased type or if both the input and output are unsigned. */
5458 if (!notrunc_p
5459 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
5460 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
5461 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
5462 GET_MODE_BITSIZE (TYPE_MODE (type)))
5463 && !(INTEGRAL_TYPE_P (etype)
5464 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
5465 && operand_equal_p (TYPE_RM_SIZE (type),
5466 (TYPE_RM_SIZE (etype) != 0
5467 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
5469 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
5471 tree base_type
5472 = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
5473 tree shift_expr
5474 = convert (base_type,
5475 size_binop (MINUS_EXPR,
5476 bitsize_int
5477 (GET_MODE_BITSIZE (TYPE_MODE (type))),
5478 TYPE_RM_SIZE (type)));
5479 expr
5480 = convert (type,
5481 build_binary_op (RSHIFT_EXPR, base_type,
5482 build_binary_op (LSHIFT_EXPR, base_type,
5483 convert (base_type, expr),
5484 shift_expr),
5485 shift_expr));
5488 /* An unchecked conversion should never raise Constraint_Error. The code
5489 below assumes that GCC's conversion routines overflow the same way that
5490 the underlying hardware does. This is probably true. In the rare case
5491 when it is false, we can rely on the fact that such conversions are
5492 erroneous anyway. */
5493 if (TREE_CODE (expr) == INTEGER_CST)
5494 TREE_OVERFLOW (expr) = 0;
5496 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
5497 show no longer constant. */
5498 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5499 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5500 OEP_ONLY_CONST))
5501 TREE_CONSTANT (expr) = 0;
5503 return expr;
5506 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5507 the latter being a record type as predicated by Is_Record_Type. */
5509 enum tree_code
5510 tree_code_for_record_type (Entity_Id gnat_type)
5512 Node_Id component_list, component;
5514 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5515 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5516 if (!Is_Unchecked_Union (gnat_type))
5517 return RECORD_TYPE;
5519 gnat_type = Implementation_Base_Type (gnat_type);
5520 component_list
5521 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5523 for (component = First_Non_Pragma (Component_Items (component_list));
5524 Present (component);
5525 component = Next_Non_Pragma (component))
5526 if (Ekind (Defining_Entity (component)) == E_Component)
5527 return RECORD_TYPE;
5529 return UNION_TYPE;
5532 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5533 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5534 according to the presence of an alignment clause on the type or, if it
5535 is an array, on the component type. */
5537 bool
5538 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5540 gnat_type = Underlying_Type (gnat_type);
5542 *align_clause = Present (Alignment_Clause (gnat_type));
5544 if (Is_Array_Type (gnat_type))
5546 gnat_type = Underlying_Type (Component_Type (gnat_type));
5547 if (Present (Alignment_Clause (gnat_type)))
5548 *align_clause = true;
5551 if (!Is_Floating_Point_Type (gnat_type))
5552 return false;
5554 if (UI_To_Int (Esize (gnat_type)) != 64)
5555 return false;
5557 return true;
5560 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5561 size is greater or equal to 64 bits, or an array of such a type. Set
5562 ALIGN_CLAUSE according to the presence of an alignment clause on the
5563 type or, if it is an array, on the component type. */
5565 bool
5566 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5568 gnat_type = Underlying_Type (gnat_type);
5570 *align_clause = Present (Alignment_Clause (gnat_type));
5572 if (Is_Array_Type (gnat_type))
5574 gnat_type = Underlying_Type (Component_Type (gnat_type));
5575 if (Present (Alignment_Clause (gnat_type)))
5576 *align_clause = true;
5579 if (!Is_Scalar_Type (gnat_type))
5580 return false;
5582 if (UI_To_Int (Esize (gnat_type)) < 64)
5583 return false;
5585 return true;
5588 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5589 component of an aggregate type. */
5591 bool
5592 type_for_nonaliased_component_p (tree gnu_type)
5594 /* If the type is passed by reference, we may have pointers to the
5595 component so it cannot be made non-aliased. */
5596 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5597 return false;
5599 /* We used to say that any component of aggregate type is aliased
5600 because the front-end may take 'Reference of it. The front-end
5601 has been enhanced in the meantime so as to use a renaming instead
5602 in most cases, but the back-end can probably take the address of
5603 such a component too so we go for the conservative stance.
5605 For instance, we might need the address of any array type, even
5606 if normally passed by copy, to construct a fat pointer if the
5607 component is used as an actual for an unconstrained formal.
5609 Likewise for record types: even if a specific record subtype is
5610 passed by copy, the parent type might be passed by ref (e.g. if
5611 it's of variable size) and we might take the address of a child
5612 component to pass to a parent formal. We have no way to check
5613 for such conditions here. */
5614 if (AGGREGATE_TYPE_P (gnu_type))
5615 return false;
5617 return true;
5620 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5622 bool
5623 smaller_form_type_p (tree type, tree orig_type)
5625 tree size, osize;
5627 /* We're not interested in variants here. */
5628 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5629 return false;
5631 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5632 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5633 return false;
5635 size = TYPE_SIZE (type);
5636 osize = TYPE_SIZE (orig_type);
5638 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5639 return false;
5641 return tree_int_cst_lt (size, osize) != 0;
5644 /* Perform final processing on global variables. */
5646 static GTY (()) tree dummy_global;
5648 void
5649 gnat_write_global_declarations (void)
5651 unsigned int i;
5652 tree iter;
5654 /* If we have declared types as used at the global level, insert them in
5655 the global hash table. We use a dummy variable for this purpose. */
5656 if (types_used_by_cur_var_decl && !types_used_by_cur_var_decl->is_empty ())
5658 struct varpool_node *node;
5659 char *label;
5661 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
5662 dummy_global
5663 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5664 void_type_node);
5665 TREE_STATIC (dummy_global) = 1;
5666 TREE_ASM_WRITTEN (dummy_global) = 1;
5667 node = varpool_node_for_decl (dummy_global);
5668 node->force_output = 1;
5670 while (!types_used_by_cur_var_decl->is_empty ())
5672 tree t = types_used_by_cur_var_decl->pop ();
5673 types_used_by_var_decl_insert (t, dummy_global);
5677 /* Output debug information for all global type declarations first. This
5678 ensures that global types whose compilation hasn't been finalized yet,
5679 for example pointers to Taft amendment types, have their compilation
5680 finalized in the right context. */
5681 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5682 if (TREE_CODE (iter) == TYPE_DECL)
5683 debug_hooks->global_decl (iter);
5685 /* Proceed to optimize and emit assembly. */
5686 finalize_compilation_unit ();
5688 /* After cgraph has had a chance to emit everything that's going to
5689 be emitted, output debug information for the rest of globals. */
5690 if (!seen_error ())
5692 timevar_push (TV_SYMOUT);
5693 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5694 if (TREE_CODE (iter) != TYPE_DECL)
5695 debug_hooks->global_decl (iter);
5696 timevar_pop (TV_SYMOUT);
5700 /* ************************************************************************
5701 * * GCC builtins support *
5702 * ************************************************************************ */
5704 /* The general scheme is fairly simple:
5706 For each builtin function/type to be declared, gnat_install_builtins calls
5707 internal facilities which eventually get to gnat_push_decl, which in turn
5708 tracks the so declared builtin function decls in the 'builtin_decls' global
5709 datastructure. When an Intrinsic subprogram declaration is processed, we
5710 search this global datastructure to retrieve the associated BUILT_IN DECL
5711 node. */
5713 /* Search the chain of currently available builtin declarations for a node
5714 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5715 found, if any, or NULL_TREE otherwise. */
5716 tree
5717 builtin_decl_for (tree name)
5719 unsigned i;
5720 tree decl;
5722 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5723 if (DECL_NAME (decl) == name)
5724 return decl;
5726 return NULL_TREE;
5729 /* The code below eventually exposes gnat_install_builtins, which declares
5730 the builtin types and functions we might need, either internally or as
5731 user accessible facilities.
5733 ??? This is a first implementation shot, still in rough shape. It is
5734 heavily inspired from the "C" family implementation, with chunks copied
5735 verbatim from there.
5737 Two obvious TODO candidates are
5738 o Use a more efficient name/decl mapping scheme
5739 o Devise a middle-end infrastructure to avoid having to copy
5740 pieces between front-ends. */
5742 /* ----------------------------------------------------------------------- *
5743 * BUILTIN ELEMENTARY TYPES *
5744 * ----------------------------------------------------------------------- */
5746 /* Standard data types to be used in builtin argument declarations. */
5748 enum c_tree_index
5750 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5751 CTI_STRING_TYPE,
5752 CTI_CONST_STRING_TYPE,
5754 CTI_MAX
5757 static tree c_global_trees[CTI_MAX];
5759 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5760 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5761 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5763 /* ??? In addition some attribute handlers, we currently don't support a
5764 (small) number of builtin-types, which in turns inhibits support for a
5765 number of builtin functions. */
5766 #define wint_type_node void_type_node
5767 #define intmax_type_node void_type_node
5768 #define uintmax_type_node void_type_node
5770 /* Build the void_list_node (void_type_node having been created). */
5772 static tree
5773 build_void_list_node (void)
5775 tree t = build_tree_list (NULL_TREE, void_type_node);
5776 return t;
5779 /* Used to help initialize the builtin-types.def table. When a type of
5780 the correct size doesn't exist, use error_mark_node instead of NULL.
5781 The later results in segfaults even when a decl using the type doesn't
5782 get invoked. */
5784 static tree
5785 builtin_type_for_size (int size, bool unsignedp)
5787 tree type = gnat_type_for_size (size, unsignedp);
5788 return type ? type : error_mark_node;
5791 /* Build/push the elementary type decls that builtin functions/types
5792 will need. */
5794 static void
5795 install_builtin_elementary_types (void)
5797 signed_size_type_node = gnat_signed_type (size_type_node);
5798 pid_type_node = integer_type_node;
5799 void_list_node = build_void_list_node ();
5801 string_type_node = build_pointer_type (char_type_node);
5802 const_string_type_node
5803 = build_pointer_type (build_qualified_type
5804 (char_type_node, TYPE_QUAL_CONST));
5807 /* ----------------------------------------------------------------------- *
5808 * BUILTIN FUNCTION TYPES *
5809 * ----------------------------------------------------------------------- */
5811 /* Now, builtin function types per se. */
5813 enum c_builtin_type
5815 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5816 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5817 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5818 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5819 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5820 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5821 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5822 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
5823 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
5824 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7, ARG8) NAME,
5825 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5826 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5827 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5828 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5829 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5830 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
5831 NAME,
5832 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5833 #include "builtin-types.def"
5834 #undef DEF_PRIMITIVE_TYPE
5835 #undef DEF_FUNCTION_TYPE_0
5836 #undef DEF_FUNCTION_TYPE_1
5837 #undef DEF_FUNCTION_TYPE_2
5838 #undef DEF_FUNCTION_TYPE_3
5839 #undef DEF_FUNCTION_TYPE_4
5840 #undef DEF_FUNCTION_TYPE_5
5841 #undef DEF_FUNCTION_TYPE_6
5842 #undef DEF_FUNCTION_TYPE_7
5843 #undef DEF_FUNCTION_TYPE_8
5844 #undef DEF_FUNCTION_TYPE_VAR_0
5845 #undef DEF_FUNCTION_TYPE_VAR_1
5846 #undef DEF_FUNCTION_TYPE_VAR_2
5847 #undef DEF_FUNCTION_TYPE_VAR_3
5848 #undef DEF_FUNCTION_TYPE_VAR_4
5849 #undef DEF_FUNCTION_TYPE_VAR_5
5850 #undef DEF_POINTER_TYPE
5851 BT_LAST
5854 typedef enum c_builtin_type builtin_type;
5856 /* A temporary array used in communication with def_fn_type. */
5857 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5859 /* A helper function for install_builtin_types. Build function type
5860 for DEF with return type RET and N arguments. If VAR is true, then the
5861 function should be variadic after those N arguments.
5863 Takes special care not to ICE if any of the types involved are
5864 error_mark_node, which indicates that said type is not in fact available
5865 (see builtin_type_for_size). In which case the function type as a whole
5866 should be error_mark_node. */
5868 static void
5869 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5871 tree t;
5872 tree *args = XALLOCAVEC (tree, n);
5873 va_list list;
5874 int i;
5876 va_start (list, n);
5877 for (i = 0; i < n; ++i)
5879 builtin_type a = (builtin_type) va_arg (list, int);
5880 t = builtin_types[a];
5881 if (t == error_mark_node)
5882 goto egress;
5883 args[i] = t;
5886 t = builtin_types[ret];
5887 if (t == error_mark_node)
5888 goto egress;
5889 if (var)
5890 t = build_varargs_function_type_array (t, n, args);
5891 else
5892 t = build_function_type_array (t, n, args);
5894 egress:
5895 builtin_types[def] = t;
5896 va_end (list);
5899 /* Build the builtin function types and install them in the builtin_types
5900 array for later use in builtin function decls. */
5902 static void
5903 install_builtin_function_types (void)
5905 tree va_list_ref_type_node;
5906 tree va_list_arg_type_node;
5908 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5910 va_list_arg_type_node = va_list_ref_type_node =
5911 build_pointer_type (TREE_TYPE (va_list_type_node));
5913 else
5915 va_list_arg_type_node = va_list_type_node;
5916 va_list_ref_type_node = build_reference_type (va_list_type_node);
5919 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5920 builtin_types[ENUM] = VALUE;
5921 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5922 def_fn_type (ENUM, RETURN, 0, 0);
5923 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5924 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5925 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5926 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5927 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5928 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5929 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5930 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5931 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5932 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5933 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5934 ARG6) \
5935 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5936 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5937 ARG6, ARG7) \
5938 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5939 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5940 ARG6, ARG7, ARG8) \
5941 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5942 ARG7, ARG8);
5943 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5944 def_fn_type (ENUM, RETURN, 1, 0);
5945 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5946 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5947 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5948 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5949 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5950 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5951 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5952 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5953 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5954 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5955 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5956 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5958 #include "builtin-types.def"
5960 #undef DEF_PRIMITIVE_TYPE
5961 #undef DEF_FUNCTION_TYPE_1
5962 #undef DEF_FUNCTION_TYPE_2
5963 #undef DEF_FUNCTION_TYPE_3
5964 #undef DEF_FUNCTION_TYPE_4
5965 #undef DEF_FUNCTION_TYPE_5
5966 #undef DEF_FUNCTION_TYPE_6
5967 #undef DEF_FUNCTION_TYPE_VAR_0
5968 #undef DEF_FUNCTION_TYPE_VAR_1
5969 #undef DEF_FUNCTION_TYPE_VAR_2
5970 #undef DEF_FUNCTION_TYPE_VAR_3
5971 #undef DEF_FUNCTION_TYPE_VAR_4
5972 #undef DEF_FUNCTION_TYPE_VAR_5
5973 #undef DEF_POINTER_TYPE
5974 builtin_types[(int) BT_LAST] = NULL_TREE;
5977 /* ----------------------------------------------------------------------- *
5978 * BUILTIN ATTRIBUTES *
5979 * ----------------------------------------------------------------------- */
5981 enum built_in_attribute
5983 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5984 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5985 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
5986 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5987 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5988 #include "builtin-attrs.def"
5989 #undef DEF_ATTR_NULL_TREE
5990 #undef DEF_ATTR_INT
5991 #undef DEF_ATTR_STRING
5992 #undef DEF_ATTR_IDENT
5993 #undef DEF_ATTR_TREE_LIST
5994 ATTR_LAST
5997 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5999 static void
6000 install_builtin_attributes (void)
6002 /* Fill in the built_in_attributes array. */
6003 #define DEF_ATTR_NULL_TREE(ENUM) \
6004 built_in_attributes[(int) ENUM] = NULL_TREE;
6005 #define DEF_ATTR_INT(ENUM, VALUE) \
6006 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
6007 #define DEF_ATTR_STRING(ENUM, VALUE) \
6008 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
6009 #define DEF_ATTR_IDENT(ENUM, STRING) \
6010 built_in_attributes[(int) ENUM] = get_identifier (STRING);
6011 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
6012 built_in_attributes[(int) ENUM] \
6013 = tree_cons (built_in_attributes[(int) PURPOSE], \
6014 built_in_attributes[(int) VALUE], \
6015 built_in_attributes[(int) CHAIN]);
6016 #include "builtin-attrs.def"
6017 #undef DEF_ATTR_NULL_TREE
6018 #undef DEF_ATTR_INT
6019 #undef DEF_ATTR_STRING
6020 #undef DEF_ATTR_IDENT
6021 #undef DEF_ATTR_TREE_LIST
6024 /* Handle a "const" attribute; arguments as in
6025 struct attribute_spec.handler. */
6027 static tree
6028 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
6029 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6030 bool *no_add_attrs)
6032 if (TREE_CODE (*node) == FUNCTION_DECL)
6033 TREE_READONLY (*node) = 1;
6034 else
6035 *no_add_attrs = true;
6037 return NULL_TREE;
6040 /* Handle a "nothrow" attribute; arguments as in
6041 struct attribute_spec.handler. */
6043 static tree
6044 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
6045 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6046 bool *no_add_attrs)
6048 if (TREE_CODE (*node) == FUNCTION_DECL)
6049 TREE_NOTHROW (*node) = 1;
6050 else
6051 *no_add_attrs = true;
6053 return NULL_TREE;
6056 /* Handle a "pure" attribute; arguments as in
6057 struct attribute_spec.handler. */
6059 static tree
6060 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6061 int ARG_UNUSED (flags), bool *no_add_attrs)
6063 if (TREE_CODE (*node) == FUNCTION_DECL)
6064 DECL_PURE_P (*node) = 1;
6065 /* ??? TODO: Support types. */
6066 else
6068 warning (OPT_Wattributes, "%qs attribute ignored",
6069 IDENTIFIER_POINTER (name));
6070 *no_add_attrs = true;
6073 return NULL_TREE;
6076 /* Handle a "no vops" attribute; arguments as in
6077 struct attribute_spec.handler. */
6079 static tree
6080 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
6081 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6082 bool *ARG_UNUSED (no_add_attrs))
6084 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
6085 DECL_IS_NOVOPS (*node) = 1;
6086 return NULL_TREE;
6089 /* Helper for nonnull attribute handling; fetch the operand number
6090 from the attribute argument list. */
6092 static bool
6093 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
6095 /* Verify the arg number is a constant. */
6096 if (TREE_CODE (arg_num_expr) != INTEGER_CST
6097 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
6098 return false;
6100 *valp = TREE_INT_CST_LOW (arg_num_expr);
6101 return true;
6104 /* Handle the "nonnull" attribute. */
6105 static tree
6106 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
6107 tree args, int ARG_UNUSED (flags),
6108 bool *no_add_attrs)
6110 tree type = *node;
6111 unsigned HOST_WIDE_INT attr_arg_num;
6113 /* If no arguments are specified, all pointer arguments should be
6114 non-null. Verify a full prototype is given so that the arguments
6115 will have the correct types when we actually check them later. */
6116 if (!args)
6118 if (!prototype_p (type))
6120 error ("nonnull attribute without arguments on a non-prototype");
6121 *no_add_attrs = true;
6123 return NULL_TREE;
6126 /* Argument list specified. Verify that each argument number references
6127 a pointer argument. */
6128 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
6130 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
6132 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
6134 error ("nonnull argument has invalid operand number (argument %lu)",
6135 (unsigned long) attr_arg_num);
6136 *no_add_attrs = true;
6137 return NULL_TREE;
6140 if (prototype_p (type))
6142 function_args_iterator iter;
6143 tree argument;
6145 function_args_iter_init (&iter, type);
6146 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
6148 argument = function_args_iter_cond (&iter);
6149 if (!argument || ck_num == arg_num)
6150 break;
6153 if (!argument
6154 || TREE_CODE (argument) == VOID_TYPE)
6156 error ("nonnull argument with out-of-range operand number "
6157 "(argument %lu, operand %lu)",
6158 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6159 *no_add_attrs = true;
6160 return NULL_TREE;
6163 if (TREE_CODE (argument) != POINTER_TYPE)
6165 error ("nonnull argument references non-pointer operand "
6166 "(argument %lu, operand %lu)",
6167 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6168 *no_add_attrs = true;
6169 return NULL_TREE;
6174 return NULL_TREE;
6177 /* Handle a "sentinel" attribute. */
6179 static tree
6180 handle_sentinel_attribute (tree *node, tree name, tree args,
6181 int ARG_UNUSED (flags), bool *no_add_attrs)
6183 if (!prototype_p (*node))
6185 warning (OPT_Wattributes,
6186 "%qs attribute requires prototypes with named arguments",
6187 IDENTIFIER_POINTER (name));
6188 *no_add_attrs = true;
6190 else
6192 if (!stdarg_p (*node))
6194 warning (OPT_Wattributes,
6195 "%qs attribute only applies to variadic functions",
6196 IDENTIFIER_POINTER (name));
6197 *no_add_attrs = true;
6201 if (args)
6203 tree position = TREE_VALUE (args);
6205 if (TREE_CODE (position) != INTEGER_CST)
6207 warning (0, "requested position is not an integer constant");
6208 *no_add_attrs = true;
6210 else
6212 if (tree_int_cst_lt (position, integer_zero_node))
6214 warning (0, "requested position is less than zero");
6215 *no_add_attrs = true;
6220 return NULL_TREE;
6223 /* Handle a "noreturn" attribute; arguments as in
6224 struct attribute_spec.handler. */
6226 static tree
6227 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6228 int ARG_UNUSED (flags), bool *no_add_attrs)
6230 tree type = TREE_TYPE (*node);
6232 /* See FIXME comment in c_common_attribute_table. */
6233 if (TREE_CODE (*node) == FUNCTION_DECL)
6234 TREE_THIS_VOLATILE (*node) = 1;
6235 else if (TREE_CODE (type) == POINTER_TYPE
6236 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
6237 TREE_TYPE (*node)
6238 = build_pointer_type
6239 (build_type_variant (TREE_TYPE (type),
6240 TYPE_READONLY (TREE_TYPE (type)), 1));
6241 else
6243 warning (OPT_Wattributes, "%qs attribute ignored",
6244 IDENTIFIER_POINTER (name));
6245 *no_add_attrs = true;
6248 return NULL_TREE;
6251 /* Handle a "leaf" attribute; arguments as in
6252 struct attribute_spec.handler. */
6254 static tree
6255 handle_leaf_attribute (tree *node, tree name,
6256 tree ARG_UNUSED (args),
6257 int ARG_UNUSED (flags), bool *no_add_attrs)
6259 if (TREE_CODE (*node) != FUNCTION_DECL)
6261 warning (OPT_Wattributes, "%qE attribute ignored", name);
6262 *no_add_attrs = true;
6264 if (!TREE_PUBLIC (*node))
6266 warning (OPT_Wattributes, "%qE attribute has no effect", name);
6267 *no_add_attrs = true;
6270 return NULL_TREE;
6273 /* Handle a "malloc" attribute; arguments as in
6274 struct attribute_spec.handler. */
6276 static tree
6277 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6278 int ARG_UNUSED (flags), bool *no_add_attrs)
6280 if (TREE_CODE (*node) == FUNCTION_DECL
6281 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6282 DECL_IS_MALLOC (*node) = 1;
6283 else
6285 warning (OPT_Wattributes, "%qs attribute ignored",
6286 IDENTIFIER_POINTER (name));
6287 *no_add_attrs = true;
6290 return NULL_TREE;
6293 /* Fake handler for attributes we don't properly support. */
6295 tree
6296 fake_attribute_handler (tree * ARG_UNUSED (node),
6297 tree ARG_UNUSED (name),
6298 tree ARG_UNUSED (args),
6299 int ARG_UNUSED (flags),
6300 bool * ARG_UNUSED (no_add_attrs))
6302 return NULL_TREE;
6305 /* Handle a "type_generic" attribute. */
6307 static tree
6308 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6309 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6310 bool * ARG_UNUSED (no_add_attrs))
6312 /* Ensure we have a function type. */
6313 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
6315 /* Ensure we have a variadic function. */
6316 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
6318 return NULL_TREE;
6321 /* Handle a "vector_size" attribute; arguments as in
6322 struct attribute_spec.handler. */
6324 static tree
6325 handle_vector_size_attribute (tree *node, tree name, tree args,
6326 int ARG_UNUSED (flags),
6327 bool *no_add_attrs)
6329 unsigned HOST_WIDE_INT vecsize, nunits;
6330 enum machine_mode orig_mode;
6331 tree type = *node, new_type, size;
6333 *no_add_attrs = true;
6335 size = TREE_VALUE (args);
6337 if (!tree_fits_uhwi_p (size))
6339 warning (OPT_Wattributes, "%qs attribute ignored",
6340 IDENTIFIER_POINTER (name));
6341 return NULL_TREE;
6344 /* Get the vector size (in bytes). */
6345 vecsize = tree_to_uhwi (size);
6347 /* We need to provide for vector pointers, vector arrays, and
6348 functions returning vectors. For example:
6350 __attribute__((vector_size(16))) short *foo;
6352 In this case, the mode is SI, but the type being modified is
6353 HI, so we need to look further. */
6355 while (POINTER_TYPE_P (type)
6356 || TREE_CODE (type) == FUNCTION_TYPE
6357 || TREE_CODE (type) == ARRAY_TYPE)
6358 type = TREE_TYPE (type);
6360 /* Get the mode of the type being modified. */
6361 orig_mode = TYPE_MODE (type);
6363 if ((!INTEGRAL_TYPE_P (type)
6364 && !SCALAR_FLOAT_TYPE_P (type)
6365 && !FIXED_POINT_TYPE_P (type))
6366 || (!SCALAR_FLOAT_MODE_P (orig_mode)
6367 && GET_MODE_CLASS (orig_mode) != MODE_INT
6368 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
6369 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
6370 || TREE_CODE (type) == BOOLEAN_TYPE)
6372 error ("invalid vector type for attribute %qs",
6373 IDENTIFIER_POINTER (name));
6374 return NULL_TREE;
6377 if (vecsize % tree_to_uhwi (TYPE_SIZE_UNIT (type)))
6379 error ("vector size not an integral multiple of component size");
6380 return NULL;
6383 if (vecsize == 0)
6385 error ("zero vector size");
6386 return NULL;
6389 /* Calculate how many units fit in the vector. */
6390 nunits = vecsize / tree_to_uhwi (TYPE_SIZE_UNIT (type));
6391 if (nunits & (nunits - 1))
6393 error ("number of components of the vector not a power of two");
6394 return NULL_TREE;
6397 new_type = build_vector_type (type, nunits);
6399 /* Build back pointers if needed. */
6400 *node = reconstruct_complex_type (*node, new_type);
6402 return NULL_TREE;
6405 /* Handle a "vector_type" attribute; arguments as in
6406 struct attribute_spec.handler. */
6408 static tree
6409 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6410 int ARG_UNUSED (flags),
6411 bool *no_add_attrs)
6413 /* Vector representative type and size. */
6414 tree rep_type = *node;
6415 tree rep_size = TYPE_SIZE_UNIT (rep_type);
6417 /* Vector size in bytes and number of units. */
6418 unsigned HOST_WIDE_INT vec_bytes, vec_units;
6420 /* Vector element type and mode. */
6421 tree elem_type;
6422 enum machine_mode elem_mode;
6424 *no_add_attrs = true;
6426 if (TREE_CODE (rep_type) != ARRAY_TYPE)
6428 error ("attribute %qs applies to array types only",
6429 IDENTIFIER_POINTER (name));
6430 return NULL_TREE;
6433 /* Silently punt on variable sizes. We can't make vector types for them,
6434 need to ignore them on front-end generated subtypes of unconstrained
6435 bases, and this attribute is for binding implementors, not end-users, so
6436 we should never get there from legitimate explicit uses. */
6438 if (!tree_fits_uhwi_p (rep_size))
6439 return NULL_TREE;
6441 /* Get the element type/mode and check this is something we know
6442 how to make vectors of. */
6444 elem_type = TREE_TYPE (rep_type);
6445 elem_mode = TYPE_MODE (elem_type);
6447 if ((!INTEGRAL_TYPE_P (elem_type)
6448 && !SCALAR_FLOAT_TYPE_P (elem_type)
6449 && !FIXED_POINT_TYPE_P (elem_type))
6450 || (!SCALAR_FLOAT_MODE_P (elem_mode)
6451 && GET_MODE_CLASS (elem_mode) != MODE_INT
6452 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode))
6453 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (elem_type)))
6455 error ("invalid element type for attribute %qs",
6456 IDENTIFIER_POINTER (name));
6457 return NULL_TREE;
6460 /* Sanity check the vector size and element type consistency. */
6462 vec_bytes = tree_to_uhwi (rep_size);
6464 if (vec_bytes % tree_to_uhwi (TYPE_SIZE_UNIT (elem_type)))
6466 error ("vector size not an integral multiple of component size");
6467 return NULL;
6470 if (vec_bytes == 0)
6472 error ("zero vector size");
6473 return NULL;
6476 vec_units = vec_bytes / tree_to_uhwi (TYPE_SIZE_UNIT (elem_type));
6477 if (vec_units & (vec_units - 1))
6479 error ("number of components of the vector not a power of two");
6480 return NULL_TREE;
6483 /* Build the vector type and replace. */
6485 *node = build_vector_type (elem_type, vec_units);
6486 TYPE_REPRESENTATIVE_ARRAY (*node) = rep_type;
6488 return NULL_TREE;
6491 /* ----------------------------------------------------------------------- *
6492 * BUILTIN FUNCTIONS *
6493 * ----------------------------------------------------------------------- */
6495 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6496 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
6497 if nonansi_p and flag_no_nonansi_builtin. */
6499 static void
6500 def_builtin_1 (enum built_in_function fncode,
6501 const char *name,
6502 enum built_in_class fnclass,
6503 tree fntype, tree libtype,
6504 bool both_p, bool fallback_p,
6505 bool nonansi_p ATTRIBUTE_UNUSED,
6506 tree fnattrs, bool implicit_p)
6508 tree decl;
6509 const char *libname;
6511 /* Preserve an already installed decl. It most likely was setup in advance
6512 (e.g. as part of the internal builtins) for specific reasons. */
6513 if (builtin_decl_explicit (fncode) != NULL_TREE)
6514 return;
6516 gcc_assert ((!both_p && !fallback_p)
6517 || !strncmp (name, "__builtin_",
6518 strlen ("__builtin_")));
6520 libname = name + strlen ("__builtin_");
6521 decl = add_builtin_function (name, fntype, fncode, fnclass,
6522 (fallback_p ? libname : NULL),
6523 fnattrs);
6524 if (both_p)
6525 /* ??? This is normally further controlled by command-line options
6526 like -fno-builtin, but we don't have them for Ada. */
6527 add_builtin_function (libname, libtype, fncode, fnclass,
6528 NULL, fnattrs);
6530 set_builtin_decl (fncode, decl, implicit_p);
6533 static int flag_isoc94 = 0;
6534 static int flag_isoc99 = 0;
6536 /* Install what the common builtins.def offers. */
6538 static void
6539 install_builtin_functions (void)
6541 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6542 NONANSI_P, ATTRS, IMPLICIT, COND) \
6543 if (NAME && COND) \
6544 def_builtin_1 (ENUM, NAME, CLASS, \
6545 builtin_types[(int) TYPE], \
6546 builtin_types[(int) LIBTYPE], \
6547 BOTH_P, FALLBACK_P, NONANSI_P, \
6548 built_in_attributes[(int) ATTRS], IMPLICIT);
6549 #include "builtins.def"
6550 #undef DEF_BUILTIN
6553 /* ----------------------------------------------------------------------- *
6554 * BUILTIN FUNCTIONS *
6555 * ----------------------------------------------------------------------- */
6557 /* Install the builtin functions we might need. */
6559 void
6560 gnat_install_builtins (void)
6562 install_builtin_elementary_types ();
6563 install_builtin_function_types ();
6564 install_builtin_attributes ();
6566 /* Install builtins used by generic middle-end pieces first. Some of these
6567 know about internal specificities and control attributes accordingly, for
6568 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
6569 the generic definition from builtins.def. */
6570 build_common_builtin_nodes ();
6572 /* Now, install the target specific builtins, such as the AltiVec family on
6573 ppc, and the common set as exposed by builtins.def. */
6574 targetm.init_builtins ();
6575 install_builtin_functions ();
6578 #include "gt-ada-utils.h"
6579 #include "gtype-ada.h"