[official-gcc.git] / gcc / ada / gcc-interface / utils.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2013, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "toplev.h"
33 #include "diagnostic-core.h"
34 #include "output.h"
35 #include "ggc.h"
36 #include "debug.h"
37 #include "convert.h"
38 #include "target.h"
39 #include "common/common-target.h"
40 #include "langhooks.h"
41 #include "cgraph.h"
42 #include "diagnostic.h"
43 #include "timevar.h"
44 #include "tree-dump.h"
45 #include "tree-inline.h"
46 #include "tree-iterator.h"
48 #include "ada.h"
49 #include "types.h"
50 #include "atree.h"
51 #include "elists.h"
52 #include "namet.h"
53 #include "nlists.h"
54 #include "stringt.h"
55 #include "uintp.h"
56 #include "fe.h"
57 #include "sinfo.h"
58 #include "einfo.h"
59 #include "ada-tree.h"
60 #include "gigi.h"
62 /* If nonzero, pretend we are allocating at global level. */
63 int force_global;
65 /* The default alignment of "double" floating-point types, i.e. floating
66 point types whose size is equal to 64 bits, or 0 if this alignment is
67 not specifically capped. */
68 int double_float_alignment;
70 /* The default alignment of "double" or larger scalar types, i.e. scalar
71 types whose size is greater or equal to 64 bits, or 0 if this alignment
72 is not specifically capped. */
73 int double_scalar_alignment;
75 /* Tree nodes for the various types and decls we create. */
76 tree gnat_std_decls[(int) ADT_LAST];
78 /* Functions to call for each of the possible raise reasons. */
79 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
81 /* Likewise, but with extra info for each of the possible raise reasons. */
82 tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
84 /* Forward declarations for handlers of attributes. */
85 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
86 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
94 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
98 /* Fake handler for attributes we don't properly support, typically because
99 they'd require dragging a lot of the common-c front-end circuitry. */
100 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
102 /* Table of machine-independent internal attributes for Ada. We support
103 this minimal set of attributes to accommodate the needs of builtins. */
104 const struct attribute_spec gnat_internal_attribute_table[] =
106 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
107 affects_type_identity } */
108 { "const", 0, 0, true, false, false, handle_const_attribute,
109 false },
110 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute,
111 false },
112 { "pure", 0, 0, true, false, false, handle_pure_attribute,
113 false },
114 { "no vops", 0, 0, true, false, false, handle_novops_attribute,
115 false },
116 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute,
117 false },
118 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute,
119 false },
120 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute,
121 false },
122 { "leaf", 0, 0, true, false, false, handle_leaf_attribute,
123 false },
124 { "malloc", 0, 0, true, false, false, handle_malloc_attribute,
125 false },
126 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute,
127 false },
129 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute,
130 false },
131 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute,
132 false },
133 { "may_alias", 0, 0, false, true, false, NULL, false },
 135 /* ??? format and format_arg are heavy and not supported, which actually
 136 prevents support for stdio builtins, even though we declare them as
 137 part of the common builtins.def contents. */
138 { "format", 3, 3, false, true, true, fake_attribute_handler, false },
139 { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },
141 { NULL, 0, 0, false, false, false, NULL, false }
144 /* Associates a GNAT tree node to a GCC tree node. It is used in
145 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
146 of `save_gnu_tree' for more info. */
147 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
149 #define GET_GNU_TREE(GNAT_ENTITY) \
150 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
152 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
153 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
155 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
156 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
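/* Illustrative note (not part of the original source): the macros above are
   the low-level accessors behind save_gnu_tree, get_gnu_tree and
   present_gnu_tree defined further down.  A typical sequence, with E a
   hypothetical GNAT entity and D the GCC decl built for it, would be:

     save_gnu_tree (E, D, false);
     ...
     if (present_gnu_tree (E))
       D = get_gnu_tree (E);  */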
158 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
159 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
161 #define GET_DUMMY_NODE(GNAT_ENTITY) \
162 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
164 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
165 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
167 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
168 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
 170 /* This variable keeps a table of types for each precision so that we only
 171 allocate each of them once. Signed and unsigned types are kept separate.
 173 Note that these types are only used when fold-const requests something
 174 special. Perhaps we should NOT share these types; we'll see how it
 175 goes later. */
176 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
178 /* Likewise for float types, but record these by mode. */
179 static GTY(()) tree float_types[NUM_MACHINE_MODES];
181 /* For each binding contour we allocate a binding_level structure to indicate
182 the binding depth. */
184 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
185 /* The binding level containing this one (the enclosing binding level). */
186 struct gnat_binding_level *chain;
187 /* The BLOCK node for this level. */
188 tree block;
189 /* If nonzero, the setjmp buffer that needs to be updated for any
190 variable-sized definition within this context. */
191 tree jmpbuf_decl;
194 /* The binding level currently in effect. */
195 static GTY(()) struct gnat_binding_level *current_binding_level;
197 /* A chain of gnat_binding_level structures awaiting reuse. */
198 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
200 /* The context to be used for global declarations. */
201 static GTY(()) tree global_context;
203 /* An array of global declarations. */
204 static GTY(()) vec<tree, va_gc> *global_decls;
206 /* An array of builtin function declarations. */
207 static GTY(()) vec<tree, va_gc> *builtin_decls;
209 /* An array of global renaming pointers. */
210 static GTY(()) vec<tree, va_gc> *global_renaming_pointers;
212 /* A chain of unused BLOCK nodes. */
213 static GTY((deletable)) tree free_block_chain;
215 static int pad_type_hash_marked_p (const void *p);
216 static hashval_t pad_type_hash_hash (const void *p);
217 static int pad_type_hash_eq (const void *p1, const void *p2);
219 /* A hash table of padded types. It is modelled on the generic type
220 hash table in tree.c, which must thus be used as a reference. */
221 struct GTY(()) pad_type_hash {
222 unsigned long hash;
223 tree type;
226 static GTY ((if_marked ("pad_type_hash_marked_p"),
227 param_is (struct pad_type_hash)))
228 htab_t pad_type_hash_table;
230 static tree merge_sizes (tree, tree, tree, bool, bool);
231 static tree compute_related_constant (tree, tree);
232 static tree split_plus (tree, tree *);
233 static tree float_type_for_precision (int, enum machine_mode);
234 static tree convert_to_fat_pointer (tree, tree);
235 static unsigned int scale_by_factor_of (tree, unsigned int);
236 static bool potential_alignment_gap (tree, tree, tree);
238 /* Initialize data structures of the utils.c module. */
240 void
241 init_gnat_utils (void)
243 /* Initialize the association of GNAT nodes to GCC trees. */
244 associate_gnat_to_gnu = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
246 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
247 dummy_node_table = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
249 /* Initialize the hash table of padded types. */
250 pad_type_hash_table = htab_create_ggc (512, pad_type_hash_hash,
251 pad_type_hash_eq, 0);
254 /* Destroy data structures of the utils.c module. */
256 void
257 destroy_gnat_utils (void)
259 /* Destroy the association of GNAT nodes to GCC trees. */
260 ggc_free (associate_gnat_to_gnu);
261 associate_gnat_to_gnu = NULL;
263 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
264 ggc_free (dummy_node_table);
265 dummy_node_table = NULL;
267 /* Destroy the hash table of padded types. */
268 htab_delete (pad_type_hash_table);
269 pad_type_hash_table = NULL;
271 /* Invalidate the global renaming pointers. */
272 invalidate_global_renaming_pointers ();
275 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
276 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
277 If NO_CHECK is true, the latter check is suppressed.
279 If GNU_DECL is zero, reset a previous association. */
281 void
282 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
284 /* Check that GNAT_ENTITY is not already defined and that it is being set
285 to something which is a decl. If that is not the case, this usually
286 means GNAT_ENTITY is defined twice, but occasionally is due to some
287 Gigi problem. */
288 gcc_assert (!(gnu_decl
289 && (PRESENT_GNU_TREE (gnat_entity)
290 || (!no_check && !DECL_P (gnu_decl)))));
292 SET_GNU_TREE (gnat_entity, gnu_decl);
295 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
296 that was associated with it. If there is no such tree node, abort.
298 In some cases, such as delayed elaboration or expressions that need to
299 be elaborated only once, GNAT_ENTITY is really not an entity. */
301 tree
302 get_gnu_tree (Entity_Id gnat_entity)
304 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
305 return GET_GNU_TREE (gnat_entity);
308 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
310 bool
311 present_gnu_tree (Entity_Id gnat_entity)
313 return PRESENT_GNU_TREE (gnat_entity);
316 /* Make a dummy type corresponding to GNAT_TYPE. */
318 tree
319 make_dummy_type (Entity_Id gnat_type)
321 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
322 tree gnu_type;
324 /* If there is an equivalent type, get its underlying type. */
325 if (Present (gnat_underlying))
326 gnat_underlying = Gigi_Equivalent_Type (Underlying_Type (gnat_underlying));
328 /* If there was no equivalent type (can only happen when just annotating
329 types) or underlying type, go back to the original type. */
330 if (No (gnat_underlying))
331 gnat_underlying = gnat_type;
 333 /* If there is already a dummy type, use that one. Else make one. */
334 if (PRESENT_DUMMY_NODE (gnat_underlying))
335 return GET_DUMMY_NODE (gnat_underlying);
337 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
338 an ENUMERAL_TYPE. */
339 gnu_type = make_node (Is_Record_Type (gnat_underlying)
340 ? tree_code_for_record_type (gnat_underlying)
341 : ENUMERAL_TYPE);
342 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
343 TYPE_DUMMY_P (gnu_type) = 1;
344 TYPE_STUB_DECL (gnu_type)
345 = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
346 if (Is_By_Reference_Type (gnat_underlying))
347 TYPE_BY_REFERENCE_P (gnu_type) = 1;
349 SET_DUMMY_NODE (gnat_underlying, gnu_type);
351 return gnu_type;
354 /* Return the dummy type that was made for GNAT_TYPE, if any. */
356 tree
357 get_dummy_type (Entity_Id gnat_type)
359 return GET_DUMMY_NODE (gnat_type);
362 /* Build dummy fat and thin pointer types whose designated type is specified
363 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
365 void
366 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
368 tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
369 tree gnu_fat_type, fields, gnu_object_type;
371 gnu_template_type = make_node (RECORD_TYPE);
372 TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
373 TYPE_DUMMY_P (gnu_template_type) = 1;
374 gnu_ptr_template = build_pointer_type (gnu_template_type);
376 gnu_array_type = make_node (ENUMERAL_TYPE);
377 TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
378 TYPE_DUMMY_P (gnu_array_type) = 1;
379 gnu_ptr_array = build_pointer_type (gnu_array_type);
381 gnu_fat_type = make_node (RECORD_TYPE);
382 /* Build a stub DECL to trigger the special processing for fat pointer types
383 in gnat_pushdecl. */
384 TYPE_NAME (gnu_fat_type)
385 = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
386 gnu_fat_type);
387 fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
388 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
389 DECL_CHAIN (fields)
390 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
391 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
392 finish_fat_pointer_type (gnu_fat_type, fields);
393 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
394 /* Suppress debug info until after the type is completed. */
395 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;
397 gnu_object_type = make_node (RECORD_TYPE);
398 TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
399 TYPE_DUMMY_P (gnu_object_type) = 1;
401 TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
402 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
405 /* Return true if we are in the global binding level. */
407 bool
408 global_bindings_p (void)
410 return force_global || current_function_decl == NULL_TREE;
413 /* Enter a new binding level. */
415 void
416 gnat_pushlevel (void)
418 struct gnat_binding_level *newlevel = NULL;
420 /* Reuse a struct for this binding level, if there is one. */
421 if (free_binding_level)
423 newlevel = free_binding_level;
424 free_binding_level = free_binding_level->chain;
426 else
427 newlevel = ggc_alloc_gnat_binding_level ();
429 /* Use a free BLOCK, if any; otherwise, allocate one. */
430 if (free_block_chain)
432 newlevel->block = free_block_chain;
433 free_block_chain = BLOCK_CHAIN (free_block_chain);
434 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
436 else
437 newlevel->block = make_node (BLOCK);
439 /* Point the BLOCK we just made to its parent. */
440 if (current_binding_level)
441 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
443 BLOCK_VARS (newlevel->block) = NULL_TREE;
444 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
445 TREE_USED (newlevel->block) = 1;
447 /* Add this level to the front of the chain (stack) of active levels. */
448 newlevel->chain = current_binding_level;
449 newlevel->jmpbuf_decl = NULL_TREE;
450 current_binding_level = newlevel;
453 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
454 and point FNDECL to this BLOCK. */
456 void
457 set_current_block_context (tree fndecl)
459 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
460 DECL_INITIAL (fndecl) = current_binding_level->block;
461 set_block_for_group (current_binding_level->block);
464 /* Set the jmpbuf_decl for the current binding level to DECL. */
466 void
467 set_block_jmpbuf_decl (tree decl)
469 current_binding_level->jmpbuf_decl = decl;
472 /* Get the jmpbuf_decl, if any, for the current binding level. */
474 tree
475 get_block_jmpbuf_decl (void)
477 return current_binding_level->jmpbuf_decl;
480 /* Exit a binding level. Set any BLOCK into the current code group. */
482 void
483 gnat_poplevel (void)
485 struct gnat_binding_level *level = current_binding_level;
486 tree block = level->block;
488 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
489 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
 491 /* If this is a function-level BLOCK, don't do anything. Otherwise, if there
 492 are no variables, free the block and merge its subblocks into those of its
 493 parent block. Otherwise, add it to the list of its parent. */
494 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
496 else if (BLOCK_VARS (block) == NULL_TREE)
498 BLOCK_SUBBLOCKS (level->chain->block)
499 = block_chainon (BLOCK_SUBBLOCKS (block),
500 BLOCK_SUBBLOCKS (level->chain->block));
501 BLOCK_CHAIN (block) = free_block_chain;
502 free_block_chain = block;
504 else
506 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
507 BLOCK_SUBBLOCKS (level->chain->block) = block;
508 TREE_USED (block) = 1;
509 set_block_for_group (block);
512 /* Free this binding structure. */
513 current_binding_level = level->chain;
514 level->chain = free_binding_level;
515 free_binding_level = level;
518 /* Exit a binding level and discard the associated BLOCK. */
520 void
521 gnat_zaplevel (void)
523 struct gnat_binding_level *level = current_binding_level;
524 tree block = level->block;
526 BLOCK_CHAIN (block) = free_block_chain;
527 free_block_chain = block;
529 /* Free this binding structure. */
530 current_binding_level = level->chain;
531 level->chain = free_binding_level;
532 free_binding_level = level;
535 /* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
537 static void
538 gnat_set_type_context (tree type, tree context)
540 tree decl = TYPE_STUB_DECL (type);
542 TYPE_CONTEXT (type) = context;
544 while (decl && DECL_PARALLEL_TYPE (decl))
546 TYPE_CONTEXT (DECL_PARALLEL_TYPE (decl)) = context;
547 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
551 /* Record DECL as belonging to the current lexical scope and use GNAT_NODE
552 for location information and flag propagation. */
554 void
555 gnat_pushdecl (tree decl, Node_Id gnat_node)
557 /* If DECL is public external or at top level, it has global context. */
558 if ((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || global_bindings_p ())
560 if (!global_context)
561 global_context = build_translation_unit_decl (NULL_TREE);
562 DECL_CONTEXT (decl) = global_context;
564 else
566 DECL_CONTEXT (decl) = current_function_decl;
568 /* Functions imported in another function are not really nested.
569 For really nested functions mark them initially as needing
570 a static chain for uses of that flag before unnesting;
571 lower_nested_functions will then recompute it. */
572 if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
573 DECL_STATIC_CHAIN (decl) = 1;
576 TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));
578 /* Set the location of DECL and emit a declaration for it. */
579 if (Present (gnat_node))
580 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
582 add_decl_expr (decl, gnat_node);
584 /* Put the declaration on the list. The list of declarations is in reverse
585 order. The list will be reversed later. Put global declarations in the
586 globals list and local ones in the current block. But skip TYPE_DECLs
587 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
588 with the debugger and aren't needed anyway. */
589 if (!(TREE_CODE (decl) == TYPE_DECL
590 && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
592 if (DECL_EXTERNAL (decl))
594 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
595 vec_safe_push (builtin_decls, decl);
597 else if (global_bindings_p ())
598 vec_safe_push (global_decls, decl);
599 else
601 DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
602 BLOCK_VARS (current_binding_level->block) = decl;
606 /* For the declaration of a type, set its name if it either is not already
607 set or if the previous type name was not derived from a source name.
608 We'd rather have the type named with a real name and all the pointer
609 types to the same object have the same POINTER_TYPE node. Code in the
610 equivalent function of c-decl.c makes a copy of the type node here, but
611 that may cause us trouble with incomplete types. We make an exception
612 for fat pointer types because the compiler automatically builds them
613 for unconstrained array types and the debugger uses them to represent
614 both these and pointers to these. */
615 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
617 tree t = TREE_TYPE (decl);
619 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
621 /* Array and pointer types aren't "tagged" types so we force the
622 type to be associated with its typedef in the DWARF back-end,
623 in order to make sure that the latter is always preserved. */
624 if (!DECL_ARTIFICIAL (decl)
625 && (TREE_CODE (t) == ARRAY_TYPE
626 || TREE_CODE (t) == POINTER_TYPE))
628 tree tt = build_distinct_type_copy (t);
629 if (TREE_CODE (t) == POINTER_TYPE)
630 TYPE_NEXT_PTR_TO (t) = tt;
631 TYPE_NAME (tt) = DECL_NAME (decl);
632 gnat_set_type_context (tt, DECL_CONTEXT (decl));
633 TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
634 DECL_ORIGINAL_TYPE (decl) = tt;
637 else if (TYPE_IS_FAT_POINTER_P (t))
639 /* We need a variant for the placeholder machinery to work. */
640 tree tt = build_variant_type_copy (t);
641 TYPE_NAME (tt) = decl;
642 gnat_set_type_context (tt, DECL_CONTEXT (decl));
643 TREE_USED (tt) = TREE_USED (t);
644 TREE_TYPE (decl) = tt;
645 if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
646 DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
647 else
648 DECL_ORIGINAL_TYPE (decl) = t;
649 DECL_ARTIFICIAL (decl) = 0;
650 t = NULL_TREE;
652 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
654 else
655 t = NULL_TREE;
657 /* Propagate the name to all the anonymous variants. This is needed
658 for the type qualifiers machinery to work properly. */
659 if (t)
660 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
661 if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
663 TYPE_NAME (t) = decl;
664 gnat_set_type_context (t, DECL_CONTEXT (decl));
669 /* Create a record type that contains a SIZE bytes long field of TYPE with a
670 starting bit position so that it is aligned to ALIGN bits, and leaving at
671 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
672 record is guaranteed to get. GNAT_NODE is used for the position of the
673 associated TYPE_DECL. */
675 tree
676 make_aligning_type (tree type, unsigned int align, tree size,
677 unsigned int base_align, int room, Node_Id gnat_node)
679 /* We will be crafting a record type with one field at a position set to be
680 the next multiple of ALIGN past record'address + room bytes. We use a
681 record placeholder to express record'address. */
682 tree record_type = make_node (RECORD_TYPE);
683 tree record = build0 (PLACEHOLDER_EXPR, record_type);
685 tree record_addr_st
686 = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));
688 /* The diagram below summarizes the shape of what we manipulate:
690 <--------- pos ---------->
691 { +------------+-------------+-----------------+
692 record =>{ |############| ... | field (type) |
693 { +------------+-------------+-----------------+
694 |<-- room -->|<- voffset ->|<---- size ----->|
697 record_addr vblock_addr
699 Every length is in sizetype bytes there, except "pos" which has to be
700 set as a bit position in the GCC tree for the record. */
701 tree room_st = size_int (room);
702 tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
703 tree voffset_st, pos, field;
705 tree name = TYPE_NAME (type);
707 if (TREE_CODE (name) == TYPE_DECL)
708 name = DECL_NAME (name);
709 name = concat_name (name, "ALIGN");
710 TYPE_NAME (record_type) = name;
712 /* Compute VOFFSET and then POS. The next byte position multiple of some
713 alignment after some address is obtained by "and"ing the alignment minus
714 1 with the two's complement of the address. */
715 voffset_st = size_binop (BIT_AND_EXPR,
716 fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
717 size_int ((align / BITS_PER_UNIT) - 1));
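/* Illustrative example (added for clarity, not in the original source): with
   BITS_PER_UNIT == 8 and ALIGN == 64 bits, a runtime vblock_addr of 0x1003
   gives voffset = -0x1003 & 7 = 5 bytes, which moves the field up to the
   next 8-byte boundary at 0x1008.  */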
 719 /* POS = (ROOM + VOFFSET) * BITS_PER_UNIT, in bitsizetype. */
720 pos = size_binop (MULT_EXPR,
721 convert (bitsizetype,
722 size_binop (PLUS_EXPR, room_st, voffset_st)),
723 bitsize_unit_node);
725 /* Craft the GCC record representation. We exceptionally do everything
726 manually here because 1) our generic circuitry is not quite ready to
727 handle the complex position/size expressions we are setting up, 2) we
728 have a strong simplifying factor at hand: we know the maximum possible
729 value of voffset, and 3) we have to set/reset at least the sizes in
730 accordance with this maximum value anyway, as we need them to convey
731 what should be "alloc"ated for this type.
733 Use -1 as the 'addressable' indication for the field to prevent the
734 creation of a bitfield. We don't need one, it would have damaging
735 consequences on the alignment computation, and create_field_decl would
736 make one without this special argument, for instance because of the
737 complex position expression. */
738 field = create_field_decl (get_identifier ("F"), type, record_type, size,
739 pos, 1, -1);
740 TYPE_FIELDS (record_type) = field;
742 TYPE_ALIGN (record_type) = base_align;
743 TYPE_USER_ALIGN (record_type) = 1;
745 TYPE_SIZE (record_type)
746 = size_binop (PLUS_EXPR,
747 size_binop (MULT_EXPR, convert (bitsizetype, size),
748 bitsize_unit_node),
749 bitsize_int (align + room * BITS_PER_UNIT));
750 TYPE_SIZE_UNIT (record_type)
751 = size_binop (PLUS_EXPR, size,
752 size_int (room + align / BITS_PER_UNIT));
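/* Descriptive note added for clarity: the sizes just set reflect the worst
   case, i.e. ROOM bytes of leading slack, an extra ALIGN bits budgeted for
   the dynamic offset (VOFFSET is at most ALIGN / BITS_PER_UNIT - 1 bytes),
   and the field itself.  */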
754 SET_TYPE_MODE (record_type, BLKmode);
755 relate_alias_sets (record_type, type, ALIAS_SET_COPY);
757 /* Declare it now since it will never be declared otherwise. This is
758 necessary to ensure that its subtrees are properly marked. */
759 create_type_decl (name, record_type, true, false, gnat_node);
761 return record_type;
764 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
765 as the field type of a packed record if IN_RECORD is true, or as the
766 component type of a packed array if IN_RECORD is false. See if we can
767 rewrite it either as a type that has a non-BLKmode, which we can pack
768 tighter in the packed record case, or as a smaller type. If so, return
769 the new type. If not, return the original type. */
771 tree
772 make_packable_type (tree type, bool in_record)
774 unsigned HOST_WIDE_INT size = tree_low_cst (TYPE_SIZE (type), 1);
775 unsigned HOST_WIDE_INT new_size;
776 tree new_type, old_field, field_list = NULL_TREE;
777 unsigned int align;
779 /* No point in doing anything if the size is zero. */
780 if (size == 0)
781 return type;
783 new_type = make_node (TREE_CODE (type));
785 /* Copy the name and flags from the old type to that of the new.
786 Note that we rely on the pointer equality created here for
787 TYPE_NAME to look through conversions in various places. */
788 TYPE_NAME (new_type) = TYPE_NAME (type);
789 TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
790 TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
791 if (TREE_CODE (type) == RECORD_TYPE)
792 TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
794 /* If we are in a record and have a small size, set the alignment to
795 try for an integral mode. Otherwise set it to try for a smaller
796 type with BLKmode. */
797 if (in_record && size <= MAX_FIXED_MODE_SIZE)
799 align = ceil_pow2 (size);
800 TYPE_ALIGN (new_type) = align;
801 new_size = (size + align - 1) & -align;
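/* For example (illustrative numbers only): a 5-bit record yields
   align == ceil_pow2 (5) == 8 and new_size == (5 + 7) & -8 == 8, i.e. a
   size that fits an 8-bit integral mode.  */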
803 else
805 unsigned HOST_WIDE_INT align;
807 /* Do not try to shrink the size if the RM size is not constant. */
808 if (TYPE_CONTAINS_TEMPLATE_P (type)
809 || !host_integerp (TYPE_ADA_SIZE (type), 1))
810 return type;
812 /* Round the RM size up to a unit boundary to get the minimal size
813 for a BLKmode record. Give up if it's already the size. */
814 new_size = TREE_INT_CST_LOW (TYPE_ADA_SIZE (type));
815 new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
816 if (new_size == size)
817 return type;
819 align = new_size & -new_size;
820 TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
823 TYPE_USER_ALIGN (new_type) = 1;
825 /* Now copy the fields, keeping the position and size as we don't want
826 to change the layout by propagating the packedness downwards. */
827 for (old_field = TYPE_FIELDS (type); old_field;
828 old_field = DECL_CHAIN (old_field))
830 tree new_field_type = TREE_TYPE (old_field);
831 tree new_field, new_size;
833 if (RECORD_OR_UNION_TYPE_P (new_field_type)
834 && !TYPE_FAT_POINTER_P (new_field_type)
835 && host_integerp (TYPE_SIZE (new_field_type), 1))
836 new_field_type = make_packable_type (new_field_type, true);
838 /* However, for the last field in a not already packed record type
839 that is of an aggregate type, we need to use the RM size in the
840 packable version of the record type, see finish_record_type. */
841 if (!DECL_CHAIN (old_field)
842 && !TYPE_PACKED (type)
843 && RECORD_OR_UNION_TYPE_P (new_field_type)
844 && !TYPE_FAT_POINTER_P (new_field_type)
845 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
846 && TYPE_ADA_SIZE (new_field_type))
847 new_size = TYPE_ADA_SIZE (new_field_type);
848 else
849 new_size = DECL_SIZE (old_field);
851 new_field
852 = create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
853 new_size, bit_position (old_field),
854 TYPE_PACKED (type),
855 !DECL_NONADDRESSABLE_P (old_field));
857 DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
858 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
859 if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
860 DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);
862 DECL_CHAIN (new_field) = field_list;
863 field_list = new_field;
866 finish_record_type (new_type, nreverse (field_list), 2, false);
867 relate_alias_sets (new_type, type, ALIAS_SET_COPY);
868 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
869 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
871 /* If this is a padding record, we never want to make the size smaller
872 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
873 if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
875 TYPE_SIZE (new_type) = TYPE_SIZE (type);
876 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
877 new_size = size;
879 else
881 TYPE_SIZE (new_type) = bitsize_int (new_size);
882 TYPE_SIZE_UNIT (new_type)
883 = size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
886 if (!TYPE_CONTAINS_TEMPLATE_P (type))
887 SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
889 compute_record_mode (new_type);
891 /* Try harder to get a packable type if necessary, for example
892 in case the record itself contains a BLKmode field. */
893 if (in_record && TYPE_MODE (new_type) == BLKmode)
894 SET_TYPE_MODE (new_type,
895 mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));
897 /* If neither the mode nor the size has shrunk, return the old type. */
898 if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
899 return type;
901 return new_type;
904 /* Given a type TYPE, return a new type whose size is appropriate for SIZE.
905 If TYPE is the best type, return it. Otherwise, make a new type. We
906 only support new integral and pointer types. FOR_BIASED is true if
907 we are making a biased type. */
909 tree
910 make_type_from_size (tree type, tree size_tree, bool for_biased)
912 unsigned HOST_WIDE_INT size;
913 bool biased_p;
914 tree new_type;
916 /* If size indicates an error, just return TYPE to avoid propagating
917 the error. Likewise if it's too large to represent. */
918 if (!size_tree || !host_integerp (size_tree, 1))
919 return type;
921 size = tree_low_cst (size_tree, 1);
923 switch (TREE_CODE (type))
925 case INTEGER_TYPE:
926 case ENUMERAL_TYPE:
927 case BOOLEAN_TYPE:
928 biased_p = (TREE_CODE (type) == INTEGER_TYPE
929 && TYPE_BIASED_REPRESENTATION_P (type));
931 /* Integer types with precision 0 are forbidden. */
932 if (size == 0)
933 size = 1;
935 /* Only do something if the type isn't a packed array type and doesn't
936 already have the proper size and the size isn't too large. */
937 if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
938 || (TYPE_PRECISION (type) == size && biased_p == for_biased)
939 || size > LONG_LONG_TYPE_SIZE)
940 break;
942 biased_p |= for_biased;
943 if (TYPE_UNSIGNED (type) || biased_p)
944 new_type = make_unsigned_type (size);
945 else
946 new_type = make_signed_type (size);
947 TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
948 SET_TYPE_RM_MIN_VALUE (new_type,
949 convert (TREE_TYPE (new_type),
950 TYPE_MIN_VALUE (type)));
951 SET_TYPE_RM_MAX_VALUE (new_type,
952 convert (TREE_TYPE (new_type),
953 TYPE_MAX_VALUE (type)));
954 /* Copy the name to show that it's essentially the same type and
955 not a subrange type. */
956 TYPE_NAME (new_type) = TYPE_NAME (type);
957 TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
958 SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
959 return new_type;
961 case RECORD_TYPE:
962 /* Do something if this is a fat pointer, in which case we
963 may need to return the thin pointer. */
964 if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
966 enum machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
967 if (!targetm.valid_pointer_mode (p_mode))
968 p_mode = ptr_mode;
969 return
970 build_pointer_type_for_mode
971 (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
972 p_mode, 0);
974 break;
976 case POINTER_TYPE:
977 /* Only do something if this is a thin pointer, in which case we
978 may need to return the fat pointer. */
979 if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
980 return
981 build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
982 break;
984 default:
985 break;
988 return type;
991 /* See if the data pointed to by the hash table slot is marked. */
993 static int
994 pad_type_hash_marked_p (const void *p)
996 const_tree const type = ((const struct pad_type_hash *) p)->type;
998 return ggc_marked_p (type);
1001 /* Return the cached hash value. */
1003 static hashval_t
1004 pad_type_hash_hash (const void *p)
1006 return ((const struct pad_type_hash *) p)->hash;
1009 /* Return 1 iff the padded types are equivalent. */
1011 static int
1012 pad_type_hash_eq (const void *p1, const void *p2)
1014 const struct pad_type_hash *const t1 = (const struct pad_type_hash *) p1;
1015 const struct pad_type_hash *const t2 = (const struct pad_type_hash *) p2;
1016 tree type1, type2;
1018 if (t1->hash != t2->hash)
1019 return 0;
1021 type1 = t1->type;
1022 type2 = t2->type;
1024 /* We consider that the padded types are equivalent if they pad the same
1025 type and have the same size, alignment and RM size. Taking the mode
1026 into account is redundant since it is determined by the others. */
1027 return
1028 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1029 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1030 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1031 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2);
1034 /* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
1035 if needed. We have already verified that SIZE and TYPE are large enough.
1036 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1037 IS_COMPONENT_TYPE is true if this is being done for the component type of
1038 an array. IS_USER_TYPE is true if the original type needs to be completed.
1039 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
1040 the RM size of the resulting type is to be set to SIZE too. */
1042 tree
1043 maybe_pad_type (tree type, tree size, unsigned int align,
1044 Entity_Id gnat_entity, bool is_component_type,
1045 bool is_user_type, bool definition, bool set_rm_size)
1047 tree orig_size = TYPE_SIZE (type);
1048 tree record, field;
1050 /* If TYPE is a padded type, see if it agrees with any size and alignment
1051 we were given. If so, return the original type. Otherwise, strip
1052 off the padding, since we will either be returning the inner type
1053 or repadding it. If no size or alignment is specified, use that of
1054 the original padded type. */
1055 if (TYPE_IS_PADDING_P (type))
1057 if ((!size
1058 || operand_equal_p (round_up (size,
1059 MAX (align, TYPE_ALIGN (type))),
1060 round_up (TYPE_SIZE (type),
1061 MAX (align, TYPE_ALIGN (type))),
1063 && (align == 0 || align == TYPE_ALIGN (type)))
1064 return type;
1066 if (!size)
1067 size = TYPE_SIZE (type);
1068 if (align == 0)
1069 align = TYPE_ALIGN (type);
1071 type = TREE_TYPE (TYPE_FIELDS (type));
1072 orig_size = TYPE_SIZE (type);
1075 /* If the size is either not being changed or is being made smaller (which
1076 is not done here and is only valid for bitfields anyway), show the size
1077 isn't changing. Likewise, clear the alignment if it isn't being
1078 changed. Then return if we aren't doing anything. */
1079 if (size
1080 && (operand_equal_p (size, orig_size, 0)
1081 || (TREE_CODE (orig_size) == INTEGER_CST
1082 && tree_int_cst_lt (size, orig_size))))
1083 size = NULL_TREE;
1085 if (align == TYPE_ALIGN (type))
1086 align = 0;
1088 if (align == 0 && !size)
1089 return type;
1091 /* If requested, complete the original type and give it a name. */
1092 if (is_user_type)
1093 create_type_decl (get_entity_name (gnat_entity), type,
1094 !Comes_From_Source (gnat_entity),
1095 !(TYPE_NAME (type)
1096 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1097 && DECL_IGNORED_P (TYPE_NAME (type))),
1098 gnat_entity);
1100 /* We used to modify the record in place in some cases, but that could
1101 generate incorrect debugging information. So make a new record
1102 type and name. */
1103 record = make_node (RECORD_TYPE);
1104 TYPE_PADDING_P (record) = 1;
1106 if (Present (gnat_entity))
1107 TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");
1109 TYPE_ALIGN (record) = align;
1110 TYPE_SIZE (record) = size ? size : orig_size;
1111 TYPE_SIZE_UNIT (record)
1112 = convert (sizetype,
1113 size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
1114 bitsize_unit_node));
1116 /* If we are changing the alignment and the input type is a record with
1117 BLKmode and a small constant size, try to make a form that has an
1118 integral mode. This might allow the padding record to also have an
1119 integral mode, which will be much more efficient. There is no point
1120 in doing so if a size is specified unless it is also a small constant
1121 size and it is incorrect to do so if we cannot guarantee that the mode
1122 will be naturally aligned since the field must always be addressable.
1124 ??? This might not always be a win when done for a stand-alone object:
1125 since the nominal and the effective type of the object will now have
1126 different modes, a VIEW_CONVERT_EXPR will be required for converting
1127 between them and it might be hard to overcome afterwards, including
1128 at the RTL level when the stand-alone object is accessed as a whole. */
1129 if (align != 0
1130 && RECORD_OR_UNION_TYPE_P (type)
1131 && TYPE_MODE (type) == BLKmode
1132 && !TYPE_BY_REFERENCE_P (type)
1133 && TREE_CODE (orig_size) == INTEGER_CST
1134 && !TREE_OVERFLOW (orig_size)
1135 && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
1136 && (!size
1137 || (TREE_CODE (size) == INTEGER_CST
1138 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
1140 tree packable_type = make_packable_type (type, true);
1141 if (TYPE_MODE (packable_type) != BLKmode
1142 && align >= TYPE_ALIGN (packable_type))
1143 type = packable_type;
1146 /* Now create the field with the original size. */
1147 field = create_field_decl (get_identifier ("F"), type, record, orig_size,
1148 bitsize_zero_node, 0, 1);
1149 DECL_INTERNAL_P (field) = 1;
1151 /* Do not emit debug info until after the auxiliary record is built. */
1152 finish_record_type (record, field, 1, false);
1154 /* Set the RM size if requested. */
1155 if (set_rm_size)
1157 SET_TYPE_ADA_SIZE (record, size ? size : orig_size);
1159 /* If the padded type is complete and has constant size, we canonicalize
1160 it by means of the hash table. This is consistent with the language
1161 semantics and ensures that gigi and the middle-end have a common view
1162 of these padded types. */
1163 if (TREE_CONSTANT (TYPE_SIZE (record)))
1165 hashval_t hashcode;
1166 struct pad_type_hash in, *h;
1167 void **loc;
1169 hashcode = iterative_hash_object (TYPE_HASH (type), 0);
1170 hashcode = iterative_hash_expr (TYPE_SIZE (record), hashcode);
1171 hashcode = iterative_hash_hashval_t (TYPE_ALIGN (record), hashcode);
1172 hashcode = iterative_hash_expr (TYPE_ADA_SIZE (record), hashcode);
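/* Descriptive note: the hash folds in the same four criteria compared by
   pad_type_hash_eq above, namely the padded type, the size, the alignment
   and the RM size of the record.  */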
1174 in.hash = hashcode;
1175 in.type = record;
1176 h = (struct pad_type_hash *)
1177 htab_find_with_hash (pad_type_hash_table, &in, hashcode);
1178 if (h)
1180 record = h->type;
1181 goto built;
1184 h = ggc_alloc_pad_type_hash ();
1185 h->hash = hashcode;
1186 h->type = record;
1187 loc = htab_find_slot_with_hash (pad_type_hash_table, h, hashcode,
1188 INSERT);
1189 *loc = (void *)h;
 1193 /* If debugging information is being written for the input type, write a
 1194 record that shows what we are a subtype of and also make a variable
 1195 that indicates our size, if it is still variable. */
1196 if (TREE_CODE (orig_size) != INTEGER_CST
1197 && TYPE_NAME (record)
1198 && TYPE_NAME (type)
1199 && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1200 && DECL_IGNORED_P (TYPE_NAME (type))))
1202 tree marker = make_node (RECORD_TYPE);
1203 tree name = TYPE_NAME (record);
1204 tree orig_name = TYPE_NAME (type);
1206 if (TREE_CODE (name) == TYPE_DECL)
1207 name = DECL_NAME (name);
1209 if (TREE_CODE (orig_name) == TYPE_DECL)
1210 orig_name = DECL_NAME (orig_name);
1212 TYPE_NAME (marker) = concat_name (name, "XVS");
1213 finish_record_type (marker,
1214 create_field_decl (orig_name,
1215 build_reference_type (type),
1216 marker, NULL_TREE, NULL_TREE,
1217 0, 0),
1218 0, true);
1220 add_parallel_type (record, marker);
1222 if (definition && size && TREE_CODE (size) != INTEGER_CST)
1223 TYPE_SIZE_UNIT (marker)
1224 = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
1225 TYPE_SIZE_UNIT (record), false, false, false,
1226 false, NULL, gnat_entity);
1229 rest_of_record_type_compilation (record);
1231 built:
1232 /* If the size was widened explicitly, maybe give a warning. Take the
1233 original size as the maximum size of the input if there was an
1234 unconstrained record involved and round it up to the specified alignment,
1235 if one was specified. But don't do it if we are just annotating types
1236 and the type is tagged, since tagged types aren't fully laid out in this
1237 mode. */
1238 if (!size
1239 || TREE_CODE (size) == COND_EXPR
1240 || TREE_CODE (size) == MAX_EXPR
1241 || No (gnat_entity)
1242 || (type_annotate_only && Is_Tagged_Type (Etype (gnat_entity))))
1243 return record;
1245 if (CONTAINS_PLACEHOLDER_P (orig_size))
1246 orig_size = max_size (orig_size, true);
1248 if (align)
1249 orig_size = round_up (orig_size, align);
1251 if (!operand_equal_p (size, orig_size, 0)
1252 && !(TREE_CODE (size) == INTEGER_CST
1253 && TREE_CODE (orig_size) == INTEGER_CST
1254 && (TREE_OVERFLOW (size)
1255 || TREE_OVERFLOW (orig_size)
1256 || tree_int_cst_lt (size, orig_size))))
1258 Node_Id gnat_error_node = Empty;
1260 if (Is_Packed_Array_Type (gnat_entity))
1261 gnat_entity = Original_Array_Type (gnat_entity);
1263 if ((Ekind (gnat_entity) == E_Component
1264 || Ekind (gnat_entity) == E_Discriminant)
1265 && Present (Component_Clause (gnat_entity)))
1266 gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
1267 else if (Present (Size_Clause (gnat_entity)))
1268 gnat_error_node = Expression (Size_Clause (gnat_entity));
1270 /* Generate message only for entities that come from source, since
1271 if we have an entity created by expansion, the message will be
1272 generated for some other corresponding source entity. */
1273 if (Comes_From_Source (gnat_entity))
1275 if (Present (gnat_error_node))
1276 post_error_ne_tree ("{^ }bits of & unused?",
1277 gnat_error_node, gnat_entity,
1278 size_diffop (size, orig_size));
1279 else if (is_component_type)
1280 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1281 gnat_entity, gnat_entity,
1282 size_diffop (size, orig_size));
1286 return record;
1289 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1290 If this is a multi-dimensional array type, do this recursively.
1292 OP may be
1293 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1294 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1295 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1297 void
1298 relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1300 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1301 of a one-dimensional array, since the padding has the same alias set
1302 as the field type, but if it's a multi-dimensional array, we need to
1303 see the inner types. */
1304 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1305 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1306 || TYPE_PADDING_P (gnu_old_type)))
1307 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1309 /* Unconstrained array types are deemed incomplete and would thus be given
1310 alias set 0. Retrieve the underlying array type. */
1311 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1312 gnu_old_type
1313 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1314 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1315 gnu_new_type
1316 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
1318 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1319 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1320 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1321 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1323 switch (op)
1325 case ALIAS_SET_COPY:
1326 /* The alias set shouldn't be copied between array types with different
1327 aliasing settings because this can break the aliasing relationship
1328 between the array type and its element type. */
1329 #ifndef ENABLE_CHECKING
1330 if (flag_strict_aliasing)
1331 #endif
1332 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1333 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1334 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1335 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1337 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1338 break;
1340 case ALIAS_SET_SUBSET:
1341 case ALIAS_SET_SUPERSET:
1343 alias_set_type old_set = get_alias_set (gnu_old_type);
1344 alias_set_type new_set = get_alias_set (gnu_new_type);
1346 /* Do nothing if the alias sets conflict. This ensures that we
1347 never call record_alias_subset several times for the same pair
1348 or at all for alias set 0. */
1349 if (!alias_sets_conflict_p (old_set, new_set))
1351 if (op == ALIAS_SET_SUBSET)
1352 record_alias_subset (old_set, new_set);
1353 else
1354 record_alias_subset (new_set, old_set);
1357 break;
1359 default:
1360 gcc_unreachable ();
1363 record_component_aliases (gnu_new_type);
1366 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1367 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
1369 void
1370 record_builtin_type (const char *name, tree type, bool artificial_p)
1372 tree type_decl = build_decl (input_location,
1373 TYPE_DECL, get_identifier (name), type);
1374 DECL_ARTIFICIAL (type_decl) = artificial_p;
1375 TYPE_ARTIFICIAL (type) = artificial_p;
1376 gnat_pushdecl (type_decl, Empty);
1378 if (debug_hooks->type_decl)
1379 debug_hooks->type_decl (type_decl, false);
1382 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1383 finish constructing the record type as a fat pointer type. */
1385 void
1386 finish_fat_pointer_type (tree record_type, tree field_list)
1388 /* Make sure we can put it into a register. */
1389 if (STRICT_ALIGNMENT)
1390 TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);
1392 /* Show what it really is. */
1393 TYPE_FAT_POINTER_P (record_type) = 1;
1395 /* Do not emit debug info for it since the types of its fields may still be
1396 incomplete at this point. */
1397 finish_record_type (record_type, field_list, 0, false);
1399 /* Force type_contains_placeholder_p to return true on it. Although the
1400 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1401 type but the representation of the unconstrained array. */
1402 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1405 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1406 finish constructing the record or union type. If REP_LEVEL is zero, this
1407 record has no representation clause and so will be entirely laid out here.
1408 If REP_LEVEL is one, this record has a representation clause and has been
1409 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1410 this record is derived from a parent record and thus inherits its layout;
1411 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1412 we need to write debug information about this type. */
1414 void
1415 finish_record_type (tree record_type, tree field_list, int rep_level,
1416 bool debug_info_p)
1418 enum tree_code code = TREE_CODE (record_type);
1419 tree name = TYPE_NAME (record_type);
1420 tree ada_size = bitsize_zero_node;
1421 tree size = bitsize_zero_node;
1422 bool had_size = TYPE_SIZE (record_type) != 0;
1423 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1424 bool had_align = TYPE_ALIGN (record_type) != 0;
1425 tree field;
1427 TYPE_FIELDS (record_type) = field_list;
1429 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1430 generate debug info and have a parallel type. */
1431 if (name && TREE_CODE (name) == TYPE_DECL)
1432 name = DECL_NAME (name);
1433 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
1435 /* Globally initialize the record first. If this is a rep'ed record,
1436 that just means some initializations; otherwise, layout the record. */
1437 if (rep_level > 0)
1439 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
1441 if (!had_size_unit)
1442 TYPE_SIZE_UNIT (record_type) = size_zero_node;
1444 if (!had_size)
1445 TYPE_SIZE (record_type) = bitsize_zero_node;
1447 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1448 out just like a UNION_TYPE, since the size will be fixed. */
1449 else if (code == QUAL_UNION_TYPE)
1450 code = UNION_TYPE;
1452 else
1454 /* Ensure there isn't a size already set. There can be in an error
1455 case where there is a rep clause but all fields have errors and
1456 no longer have a position. */
1457 TYPE_SIZE (record_type) = 0;
1459 /* Ensure we use the traditional GCC layout for bitfields when we need
1460 to pack the record type or have a representation clause. The other
1461 possible layout (Microsoft C compiler), if available, would prevent
1462 efficient packing in almost all cases. */
1463 #ifdef TARGET_MS_BITFIELD_LAYOUT
1464 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1465 decl_attributes (&record_type,
1466 tree_cons (get_identifier ("gcc_struct"),
1467 NULL_TREE, NULL_TREE),
1468 ATTR_FLAG_TYPE_IN_PLACE);
1469 #endif
1471 layout_type (record_type);
1474 /* At this point, the position and size of each field is known. It was
1475 either set before entry by a rep clause, or by laying out the type above.
1477 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1478 to compute the Ada size; the GCC size and alignment (for rep'ed records
1479 that are not padding types); and the mode (for rep'ed records). We also
1480 clear the DECL_BIT_FIELD indication for the cases we know have not been
1481 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1483 if (code == QUAL_UNION_TYPE)
1484 field_list = nreverse (field_list);
1486 for (field = field_list; field; field = DECL_CHAIN (field))
1488 tree type = TREE_TYPE (field);
1489 tree pos = bit_position (field);
1490 tree this_size = DECL_SIZE (field);
1491 tree this_ada_size;
1493 if (RECORD_OR_UNION_TYPE_P (type)
1494 && !TYPE_FAT_POINTER_P (type)
1495 && !TYPE_CONTAINS_TEMPLATE_P (type)
1496 && TYPE_ADA_SIZE (type))
1497 this_ada_size = TYPE_ADA_SIZE (type);
1498 else
1499 this_ada_size = this_size;
1501 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1502 if (DECL_BIT_FIELD (field)
1503 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1505 unsigned int align = TYPE_ALIGN (type);
1507 /* In the general case, type alignment is required. */
1508 if (value_factor_p (pos, align))
1510 /* The enclosing record type must be sufficiently aligned.
1511 Otherwise, if no alignment was specified for it and it
1512 has been laid out already, bump its alignment to the
1513 desired one if this is compatible with its size. */
1514 if (TYPE_ALIGN (record_type) >= align)
1516 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1517 DECL_BIT_FIELD (field) = 0;
1519 else if (!had_align
1520 && rep_level == 0
1521 && value_factor_p (TYPE_SIZE (record_type), align))
1523 TYPE_ALIGN (record_type) = align;
1524 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1525 DECL_BIT_FIELD (field) = 0;
 1529 /* In the non-strict alignment case, only byte alignment is required. */
1530 if (!STRICT_ALIGNMENT
1531 && DECL_BIT_FIELD (field)
1532 && value_factor_p (pos, BITS_PER_UNIT))
1533 DECL_BIT_FIELD (field) = 0;
1536 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1537 field is technically not addressable. Except that it can actually
1538 be addressed if it is BLKmode and happens to be properly aligned. */
1539 if (DECL_BIT_FIELD (field)
1540 && !(DECL_MODE (field) == BLKmode
1541 && value_factor_p (pos, BITS_PER_UNIT)))
1542 DECL_NONADDRESSABLE_P (field) = 1;
1544 /* A type must be as aligned as its most aligned field that is not
1545 a bit-field. But this is already enforced by layout_type. */
1546 if (rep_level > 0 && !DECL_BIT_FIELD (field))
1547 TYPE_ALIGN (record_type)
1548 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
1550 switch (code)
1552 case UNION_TYPE:
1553 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1554 size = size_binop (MAX_EXPR, size, this_size);
1555 break;
1557 case QUAL_UNION_TYPE:
1558 ada_size
1559 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1560 this_ada_size, ada_size);
1561 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1562 this_size, size);
1563 break;
1565 case RECORD_TYPE:
1566 /* Since we know here that all fields are sorted in order of
1567 increasing bit position, the size of the record is one
1568 higher than the ending bit of the last field processed
1569 unless we have a rep clause, since in that case we might
1570 have a field outside a QUAL_UNION_TYPE that has a higher ending
1571 position. So use a MAX in that case. Also, if this field is a
1572 QUAL_UNION_TYPE, we need to take into account the previous size in
1573 the case of empty variants. */
1574 ada_size
1575 = merge_sizes (ada_size, pos, this_ada_size,
1576 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1577 size
1578 = merge_sizes (size, pos, this_size,
1579 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1580 break;
1582 default:
1583 gcc_unreachable ();
1587 if (code == QUAL_UNION_TYPE)
1588 nreverse (field_list);
1590 if (rep_level < 2)
1592 /* If this is a padding record, we never want to make the size smaller
1593 than what was specified in it, if any. */
1594 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
1595 size = TYPE_SIZE (record_type);
1597 /* Now set any of the values we've just computed that apply. */
1598 if (!TYPE_FAT_POINTER_P (record_type)
1599 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1600 SET_TYPE_ADA_SIZE (record_type, ada_size);
1602 if (rep_level > 0)
1604 tree size_unit = had_size_unit
1605 ? TYPE_SIZE_UNIT (record_type)
1606 : convert (sizetype,
1607 size_binop (CEIL_DIV_EXPR, size,
1608 bitsize_unit_node));
1609 unsigned int align = TYPE_ALIGN (record_type);
1611 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1612 TYPE_SIZE_UNIT (record_type)
1613 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1615 compute_record_mode (record_type);
1619 if (debug_info_p)
1620 rest_of_record_type_compilation (record_type);
1623 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. */
1625 void
1626 add_parallel_type (tree type, tree parallel_type)
1628 tree decl = TYPE_STUB_DECL (type);
1630 while (DECL_PARALLEL_TYPE (decl))
1631 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1633 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1636 /* Return true if TYPE has a parallel type. */
1638 static bool
1639 has_parallel_type (tree type)
1641 tree decl = TYPE_STUB_DECL (type);
1643 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1646 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1647 associated with it. It need not be invoked directly in most cases since
1648 finish_record_type takes care of doing so, but this can be necessary if
1649 a parallel type is to be attached to the record type. */
1651 void
1652 rest_of_record_type_compilation (tree record_type)
1654 bool var_size = false;
1655 tree field;
1657 /* If this is a padded type, the bulk of the debug info has already been
1658 generated for the field's type. */
1659 if (TYPE_IS_PADDING_P (record_type))
1660 return;
1662 /* If the type already has a parallel type (XVS type), then we're done. */
1663 if (has_parallel_type (record_type))
1664 return;
1666 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
1668 /* We need to make an XVE/XVU record if any field has variable size,
1669 whether or not the record does. For example, if we have a union,
1670 it may be that all fields, rounded up to the alignment, have the
1671 same size, in which case we'll use that size. But the debug
1672 output routines (except Dwarf2) won't be able to output the fields,
1673 so we need to make the special record. */
1674 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1675 /* If a field has a non-constant qualifier, the record will have
1676 variable size too. */
1677 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
1678 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1680 var_size = true;
1681 break;
1685 /* If this record type is of variable size, make a parallel record type that
1686 will tell the debugger how the former is laid out (see exp_dbug.ads). */
1687 if (var_size)
1689 tree new_record_type
1690 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1691 ? UNION_TYPE : TREE_CODE (record_type));
1692 tree orig_name = TYPE_NAME (record_type), new_name;
1693 tree last_pos = bitsize_zero_node;
1694 tree old_field, prev_old_field = NULL_TREE;
1696 if (TREE_CODE (orig_name) == TYPE_DECL)
1697 orig_name = DECL_NAME (orig_name);
1699 new_name
1700 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
1701 ? "XVU" : "XVE");
1702 TYPE_NAME (new_record_type) = new_name;
1703 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1704 TYPE_STUB_DECL (new_record_type)
1705 = create_type_stub_decl (new_name, new_record_type);
1706 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1707 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1708 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1709 TYPE_SIZE_UNIT (new_record_type)
1710 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1712 /* Now scan all the fields, replacing each field with a new field
1713 corresponding to the new encoding. */
1714 for (old_field = TYPE_FIELDS (record_type); old_field;
1715 old_field = DECL_CHAIN (old_field))
1717 tree field_type = TREE_TYPE (old_field);
1718 tree field_name = DECL_NAME (old_field);
1719 tree curpos = bit_position (old_field);
1720 tree pos, new_field;
1721 bool var = false;
1722 unsigned int align = 0;
1724 /* We're going to do some pattern matching below so remove as many
1725 conversions as possible. */
1726 curpos = remove_conversions (curpos, true);
1728 /* See how the position was modified from the last position.
1730 There are two basic cases we support: a value was added
1731 to the last position or the last position was rounded to
1732 a boundary and then something was added. Check for the
1733 first case first. If not, see if there is any evidence
1734 of rounding. If so, round the last position and retry.
1736 If this is a union, the position can be taken as zero. */
1737 if (TREE_CODE (new_record_type) == UNION_TYPE)
1738 pos = bitsize_zero_node;
1739 else
1740 pos = compute_related_constant (curpos, last_pos);
1742 if (!pos
1743 && TREE_CODE (curpos) == MULT_EXPR
1744 && host_integerp (TREE_OPERAND (curpos, 1), 1))
1746 tree offset = TREE_OPERAND (curpos, 0);
1747 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
1748 align = scale_by_factor_of (offset, align);
1749 last_pos = round_up (last_pos, align);
1750 pos = compute_related_constant (curpos, last_pos);
1752 else if (!pos
1753 && TREE_CODE (curpos) == PLUS_EXPR
1754 && host_integerp (TREE_OPERAND (curpos, 1), 1)
1755 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1756 && host_integerp
1757 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1))
1759 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
1760 unsigned HOST_WIDE_INT addend
1761 = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
1762 align
1763 = tree_low_cst (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
1764 align = scale_by_factor_of (offset, align);
1765 align = MIN (align, addend & -addend);
1766 last_pos = round_up (last_pos, align);
1767 pos = compute_related_constant (curpos, last_pos);
1769 else if (potential_alignment_gap (prev_old_field, old_field, pos))
1771 align = TYPE_ALIGN (field_type);
1772 last_pos = round_up (last_pos, align);
1773 pos = compute_related_constant (curpos, last_pos);
1776 /* If we can't compute a position, set it to zero.
1778 ??? We really should abort here, but it's too much work
1779 to get this correct for all cases. */
1780 if (!pos)
1781 pos = bitsize_zero_node;
1783 /* See if this type is variable-sized and make a pointer type
1784 and indicate the indirection if so. Beware that the debug
1785 back-end may adjust the position computed above according
1786 to the alignment of the field type, i.e. the pointer type
1787 in this case, if we don't preventively counter that. */
1788 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1790 field_type = build_pointer_type (field_type);
1791 if (align != 0 && TYPE_ALIGN (field_type) > align)
1793 field_type = copy_node (field_type);
1794 TYPE_ALIGN (field_type) = align;
1796 var = true;
1799 /* Make a new field name, if necessary. */
1800 if (var || align != 0)
1802 char suffix[16];
1804 if (align != 0)
1805 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1806 align / BITS_PER_UNIT);
1807 else
1808 strcpy (suffix, "XVL");
1810 field_name = concat_name (field_name, suffix);
1813 new_field
1814 = create_field_decl (field_name, field_type, new_record_type,
1815 DECL_SIZE (old_field), pos, 0, 0);
1816 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1817 TYPE_FIELDS (new_record_type) = new_field;
1819 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1820 zero. The only time it's not the last field of the record
1821 is when there are other components at fixed positions after
1822 it (meaning there was a rep clause for every field) and we
1823 want to be able to encode them. */
1824 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1825 (TREE_CODE (TREE_TYPE (old_field))
1826 == QUAL_UNION_TYPE)
1827 ? bitsize_zero_node
1828 : DECL_SIZE (old_field));
1829 prev_old_field = old_field;
1832 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
1834 add_parallel_type (record_type, new_record_type);
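/* A minimal standalone sketch of the rounding step in the position pattern
   matching above, assuming the current position has already been decomposed
   as OFFSET * MULTIPLIER + ADDEND on plain unsigned integers rather than GCC
   trees, and that OFFSET contributes no extra known factor; the name is
   illustrative only.  */
#if 0
static unsigned long long
round_last_pos_example (unsigned long long last_pos,
                        unsigned long long multiplier,
                        unsigned long long addend)
{
  /* The provable alignment of the position is the multiplier, capped by
     the largest power of 2 dividing the addend (if any).  */
  unsigned long long align = multiplier;
  if (addend != 0 && (addend & -addend) < align)
    align = addend & -addend;

  /* Round LAST_POS up to the next multiple of ALIGN, as round_up does.  */
  if (align != 0 && last_pos % align != 0)
    last_pos += align - last_pos % align;
  return last_pos;
}
#endif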
1838 /* Utility function of above to merge LAST_SIZE, the previous size of a record,
1839 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1840 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1841 replace a value of zero with the old size. If HAS_REP is true, we take the
1842 MAX of the end position of this field with LAST_SIZE. In all other cases,
1843 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1845 static tree
1846 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1847 bool has_rep)
1849 tree type = TREE_TYPE (last_size);
1850 tree new_size;
1852 if (!special || TREE_CODE (size) != COND_EXPR)
1854 new_size = size_binop (PLUS_EXPR, first_bit, size);
1855 if (has_rep)
1856 new_size = size_binop (MAX_EXPR, last_size, new_size);
1859 else
1860 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1861 integer_zerop (TREE_OPERAND (size, 1))
1862 ? last_size : merge_sizes (last_size, first_bit,
1863 TREE_OPERAND (size, 1),
1864 1, has_rep),
1865 integer_zerop (TREE_OPERAND (size, 2))
1866 ? last_size : merge_sizes (last_size, first_bit,
1867 TREE_OPERAND (size, 2),
1868 1, has_rep));
1870 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
1871 when fed through substitute_in_expr) into thinking that a constant
1872 size is not constant. */
1873 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
1874 new_size = TREE_OPERAND (new_size, 0);
1876 return new_size;
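/* A minimal standalone sketch of the merging above, assuming constant sizes
   (plain unsigned integers instead of GCC size trees) and no QUAL_UNION_TYPE
   variants; the name is illustrative only.  Without a rep clause the new
   size is simply the end of the current field, while with one the fields may
   come in any order, so a running maximum is kept.  */
#if 0
static unsigned long long
merge_sizes_example (unsigned long long last_size,
                     unsigned long long first_bit,
                     unsigned long long size, int has_rep)
{
  unsigned long long new_size = first_bit + size;
  if (has_rep && last_size > new_size)
    new_size = last_size;
  return new_size;
}
#endif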
1879 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1880 related by the addition of a constant. Return that constant if so. */
1882 static tree
1883 compute_related_constant (tree op0, tree op1)
1885 tree op0_var, op1_var;
1886 tree op0_con = split_plus (op0, &op0_var);
1887 tree op1_con = split_plus (op1, &op1_var);
1888 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1890 if (operand_equal_p (op0_var, op1_var, 0))
1891 return result;
1892 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1893 return result;
1894 else
1895 return 0;
1898 /* Utility function of above to split a tree OP which may be a sum, into a
1899 constant part, which is returned, and a variable part, which is stored
1900 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1901 bitsizetype. */
1903 static tree
1904 split_plus (tree in, tree *pvar)
1906 /* Strip conversions in order to ease the tree traversal and maximize the
1907 potential for constant or plus/minus discovery. We need to be careful
1908 to always return and set *pvar to bitsizetype trees, but it's worth
1909 the effort. */
1910 in = remove_conversions (in, false);
1912 *pvar = convert (bitsizetype, in);
1914 if (TREE_CODE (in) == INTEGER_CST)
1916 *pvar = bitsize_zero_node;
1917 return convert (bitsizetype, in);
1919 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1921 tree lhs_var, rhs_var;
1922 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1923 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1925 if (lhs_var == TREE_OPERAND (in, 0)
1926 && rhs_var == TREE_OPERAND (in, 1))
1927 return bitsize_zero_node;
1929 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1930 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1932 else
1933 return bitsize_zero_node;
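/* A minimal standalone sketch of the two helpers above, assuming each
   position has already been split into a variable part (identified here by a
   plain integer id) and a constant part; the names are illustrative only.
   compute_related_constant succeeds only when the variable parts match, and
   then the difference of the constant parts is the answer.  */
#if 0
struct split_pos_example
{
  long long var_id;   /* Stands for the variable part returned in *PVAR.  */
  long long con;      /* Stands for the constant part.  */
};

static int
related_constant_example (struct split_pos_example op0,
                          struct split_pos_example op1,
                          long long *result)
{
  if (op0.var_id != op1.var_id)
    return 0;                    /* Unrelated variable parts: give up.  */
  *result = op0.con - op1.con;   /* OP0 == OP1 + *RESULT.  */
  return 1;
}
#endif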
1936 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1937 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1938 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1939 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1940 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1941 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1942 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1943 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1944 invisible reference. */
1946 tree
1947 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1948 bool return_unconstrained_p, bool return_by_direct_ref_p,
1949 bool return_by_invisi_ref_p)
1951 /* A list of the data type nodes of the subprogram formal parameters.
1952 This list is generated by traversing the input list of PARM_DECL
1953 nodes. */
1954 vec<tree, va_gc> *param_type_list = NULL;
1955 tree t, type;
1957 for (t = param_decl_list; t; t = DECL_CHAIN (t))
1958 vec_safe_push (param_type_list, TREE_TYPE (t));
1960 type = build_function_type_vec (return_type, param_type_list);
1962 /* TYPE may have been shared since GCC hashes types. If it has a different
1963 CICO_LIST, make a copy. Likewise for the various flags. */
1964 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
1965 return_by_direct_ref_p, return_by_invisi_ref_p))
1967 type = copy_type (type);
1968 TYPE_CI_CO_LIST (type) = cico_list;
1969 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
1970 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
1971 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
1974 return type;
1977 /* Return a copy of TYPE but safe to modify in any way. */
1979 tree
1980 copy_type (tree type)
1982 tree new_type = copy_node (type);
1984 /* Unshare the language-specific data. */
1985 if (TYPE_LANG_SPECIFIC (type))
1987 TYPE_LANG_SPECIFIC (new_type) = NULL;
1988 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
1991 /* And the contents of the language-specific slot if needed. */
1992 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
1993 && TYPE_RM_VALUES (type))
1995 TYPE_RM_VALUES (new_type) = NULL_TREE;
1996 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
1997 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
1998 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2001 /* copy_node clears this field instead of copying it, because it is
2002 aliased with TREE_CHAIN. */
2003 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
2005 TYPE_POINTER_TO (new_type) = 0;
2006 TYPE_REFERENCE_TO (new_type) = 0;
2007 TYPE_MAIN_VARIANT (new_type) = new_type;
2008 TYPE_NEXT_VARIANT (new_type) = 0;
2010 return new_type;
2013 /* Return a subtype of sizetype with range MIN to MAX and whose
2014 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2015 of the associated TYPE_DECL. */
2017 tree
2018 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2020 /* First build a type for the desired range. */
2021 tree type = build_nonshared_range_type (sizetype, min, max);
2023 /* Then set the index type. */
2024 SET_TYPE_INDEX_TYPE (type, index);
2025 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2027 return type;
2030 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2031 sizetype is used. */
2033 tree
2034 create_range_type (tree type, tree min, tree max)
2036 tree range_type;
2038 if (type == NULL_TREE)
2039 type = sizetype;
2041 /* First build a type with the base range. */
2042 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2043 TYPE_MAX_VALUE (type));
2045 /* Then set the actual range. */
2046 SET_TYPE_RM_MIN_VALUE (range_type, convert (type, min));
2047 SET_TYPE_RM_MAX_VALUE (range_type, convert (type, max));
2049 return range_type;
2052 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2053 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2054 its data type. */
2056 tree
2057 create_type_stub_decl (tree type_name, tree type)
2059 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2060 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2061 emitted in DWARF. */
2062 tree type_decl = build_decl (input_location,
2063 TYPE_DECL, type_name, type);
2064 DECL_ARTIFICIAL (type_decl) = 1;
2065 TYPE_ARTIFICIAL (type) = 1;
2066 return type_decl;
2069 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
2070 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
2071 is a declaration that was generated by the compiler. DEBUG_INFO_P is
2072 true if we need to write debug information about this type. GNAT_NODE
2073 is used for the position of the decl. */
2075 tree
2076 create_type_decl (tree type_name, tree type, bool artificial_p,
2077 bool debug_info_p, Node_Id gnat_node)
2079 enum tree_code code = TREE_CODE (type);
2080 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2081 tree type_decl;
2083 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2084 gcc_assert (!TYPE_IS_DUMMY_P (type));
2086 /* If the type hasn't been named yet, we're naming it; preserve an existing
2087 TYPE_STUB_DECL that has been attached to it for some purpose. */
2088 if (!named && TYPE_STUB_DECL (type))
2090 type_decl = TYPE_STUB_DECL (type);
2091 DECL_NAME (type_decl) = type_name;
2093 else
2094 type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
2096 DECL_ARTIFICIAL (type_decl) = artificial_p;
2097 TYPE_ARTIFICIAL (type) = artificial_p;
2099 /* Add this decl to the current binding level. */
2100 gnat_pushdecl (type_decl, gnat_node);
2102 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
2103 This causes the name to be also viewed as a "tag" by the debug
2104 back-end, with the advantage that no DW_TAG_typedef is emitted
2105 for artificial "tagged" types in DWARF. */
2106 if (!named)
2107 TYPE_STUB_DECL (type) = type_decl;
2109 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2110 back-end doesn't support, and for others if we don't need to. */
2111 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2112 DECL_IGNORED_P (type_decl) = 1;
2114 return type_decl;
2117 /* Return a VAR_DECL or CONST_DECL node.
2119 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
2120 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
2121 the GCC tree for an optional initial expression; NULL_TREE if none.
2123 CONST_FLAG is true if this variable is constant, in which case we might
2124 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2126 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2127 definition to be made visible outside of the current compilation unit, for
2128 instance variable definitions in a package specification.
2130 EXTERN_FLAG is true when processing an external variable declaration (as
2131 opposed to a definition: no storage is to be allocated for the variable).
2133 STATIC_FLAG is only relevant when not at top level. In that case
2134 it indicates whether to always allocate storage to the variable.
2136 GNAT_NODE is used for the position of the decl. */
2138 tree
2139 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
2140 bool const_flag, bool public_flag, bool extern_flag,
2141 bool static_flag, bool const_decl_allowed_p,
2142 struct attrib *attr_list, Node_Id gnat_node)
2144 /* Whether the initializer is a constant initializer. At the global level
2145 or for an external object or an object to be allocated in static memory,
2146 we check that it is a valid constant expression for use in initializing
2147 a static variable; otherwise, we only check that it is constant. */
2148 bool init_const
2149 = (var_init != 0
2150 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
2151 && (global_bindings_p () || extern_flag || static_flag
2152 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
2153 : TREE_CONSTANT (var_init)));
2155 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2156 case the initializer may be used in lieu of the DECL node (as done in
2157 Identifier_to_gnu). This is useful to prevent the need of elaboration
2158 code when an identifier for which such a decl is made is in turn used as
2159 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
2160 but extra constraints apply to this choice (see below) and are not
2161 relevant to the distinction we wish to make. */
2162 bool constant_p = const_flag && init_const;
2164 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2165 and may be used for scalars in general but not for aggregates. */
2166 tree var_decl
2167 = build_decl (input_location,
2168 (constant_p && const_decl_allowed_p
2169 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
2170 var_name, type);
2172 /* If this is external, throw away any initializations (they will be done
2173 elsewhere) unless this is a constant for which we would like to remain
2174 able to get the initializer. If we are defining a global here, leave a
2175 constant initialization and save any variable elaborations for the
2176 elaboration routine. If we are just annotating types, throw away the
2177 initialization if it isn't a constant. */
2178 if ((extern_flag && !constant_p)
2179 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
2180 var_init = NULL_TREE;
2182 /* At the global level, an initializer requiring code to be generated
2183 produces elaboration statements. Check that such statements are allowed,
2184 that is, not violating a No_Elaboration_Code restriction. */
2185 if (global_bindings_p () && var_init != 0 && !init_const)
2186 Check_Elaboration_Code_Allowed (gnat_node);
2188 DECL_INITIAL (var_decl) = var_init;
2189 TREE_READONLY (var_decl) = const_flag;
2190 DECL_EXTERNAL (var_decl) = extern_flag;
2191 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
2192 TREE_CONSTANT (var_decl) = constant_p;
2193 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
2194 = TYPE_VOLATILE (type);
2196 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2197 try to fiddle with DECL_COMMON. However, on platforms that don't
2198 support global BSS sections, uninitialized global variables would
2199 go in DATA instead, thus increasing the size of the executable. */
2200 if (!flag_no_common
2201 && TREE_CODE (var_decl) == VAR_DECL
2202 && TREE_PUBLIC (var_decl)
2203 && !have_global_bss_p ())
2204 DECL_COMMON (var_decl) = 1;
2206 /* At the global binding level, we need to allocate static storage for the
2207 variable if it isn't external. Otherwise, we allocate automatic storage
2208 unless requested not to. */
2209 TREE_STATIC (var_decl)
2210 = !extern_flag && (static_flag || global_bindings_p ());
2212 /* For an external constant whose initializer is not absolute, do not emit
2213 debug info. In DWARF this would mean a global relocation in a read-only
2214 section which runs afoul of the PE-COFF run-time relocation mechanism. */
2215 if (extern_flag
2216 && constant_p
2217 && var_init
2218 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
2219 != null_pointer_node)
2220 DECL_IGNORED_P (var_decl) = 1;
2222 if (TREE_SIDE_EFFECTS (var_decl))
2223 TREE_ADDRESSABLE (var_decl) = 1;
2225 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2226 if (TREE_CODE (var_decl) == VAR_DECL)
2227 process_attributes (&var_decl, &attr_list, true, gnat_node);
2229 /* Add this decl to the current binding level. */
2230 gnat_pushdecl (var_decl, gnat_node);
2232 if (TREE_CODE (var_decl) == VAR_DECL)
2234 if (asm_name)
2235 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
2237 if (global_bindings_p ())
2238 rest_of_decl_compilation (var_decl, true, 0);
2241 return var_decl;
2244 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2246 static bool
2247 aggregate_type_contains_array_p (tree type)
2249 switch (TREE_CODE (type))
2251 case RECORD_TYPE:
2252 case UNION_TYPE:
2253 case QUAL_UNION_TYPE:
2255 tree field;
2256 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2257 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2258 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2259 return true;
2260 return false;
2263 case ARRAY_TYPE:
2264 return true;
2266 default:
2267 gcc_unreachable ();
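/* A minimal standalone sketch of the walk above, assuming a toy type
   descriptor instead of GCC trees; the names are illustrative only.  A type
   either is an array or is a record/union whose component types are visited
   recursively.  */
#if 0
struct toy_type_example
{
  int is_array;
  int nfields;
  const struct toy_type_example *const *fields;
};

static int
contains_array_example (const struct toy_type_example *t)
{
  int i;
  if (t->is_array)
    return 1;
  for (i = 0; i < t->nfields; i++)
    if (contains_array_example (t->fields[i]))
      return 1;
  return 0;
}
#endif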
2271 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
2272 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
2273 nonzero, it is the specified size of the field. If POS is nonzero, it is
2274 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
2275 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2276 means we are allowed to take the address of the field; if it is negative,
2277 we should not make a bitfield, which is used by make_aligning_type. */
2279 tree
2280 create_field_decl (tree field_name, tree field_type, tree record_type,
2281 tree size, tree pos, int packed, int addressable)
2283 tree field_decl = build_decl (input_location,
2284 FIELD_DECL, field_name, field_type);
2286 DECL_CONTEXT (field_decl) = record_type;
2287 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
2289 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2290 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2291 Likewise for an aggregate without specified position that contains an
2292 array, because in this case slices of variable length of this array
2293 must be handled by GCC and variable-sized objects need to be aligned
2294 to at least a byte boundary. */
2295 if (packed && (TYPE_MODE (field_type) == BLKmode
2296 || (!pos
2297 && AGGREGATE_TYPE_P (field_type)
2298 && aggregate_type_contains_array_p (field_type))))
2299 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
2301 /* If a size is specified, use it. Otherwise, if the record type is packed
2302 compute a size to use, which may differ from the object's natural size.
2303 We always set a size in this case to trigger the checks for bitfield
2304 creation below, which is typically required when no position has been
2305 specified. */
2306 if (size)
2307 size = convert (bitsizetype, size);
2308 else if (packed == 1)
2310 size = rm_size (field_type);
2311 if (TYPE_MODE (field_type) == BLKmode)
2312 size = round_up (size, BITS_PER_UNIT);
2315 /* If ADDRESSABLE allows it, we make a bitfield when a size is specified,
2316 for two reasons: first, if the size differs from the natural size;
2317 second, if the alignment is insufficient. There are a number of
2318 ways the latter can be true.
2320 We never make a bitfield if the type of the field has a nonconstant size,
2321 because no such entity requiring bitfield operations should reach here.
2323 We do *preventively* make a bitfield when there might be the need for it
2324 but we don't have all the necessary information to decide, as is the case
2325 of a field with no specified position in a packed record.
2327 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2328 in layout_decl or finish_record_type to clear the bit_field indication if
2329 it is in fact not needed. */
2330 if (addressable >= 0
2331 && size
2332 && TREE_CODE (size) == INTEGER_CST
2333 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
2334 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
2335 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
2336 || packed
2337 || (TYPE_ALIGN (record_type) != 0
2338 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
2340 DECL_BIT_FIELD (field_decl) = 1;
2341 DECL_SIZE (field_decl) = size;
2342 if (!packed && !pos)
2344 if (TYPE_ALIGN (record_type) != 0
2345 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
2346 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
2347 else
2348 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2352 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2354 /* Bump the alignment if need be, either for bitfield/packing purposes or
2355 to satisfy the type requirements if no such consideration applies. When
2356 we get the alignment from the type, indicate if this is from an explicit
2357 user request, which prevents stor-layout from lowering it later on. */
2359 unsigned int bit_align
2360 = (DECL_BIT_FIELD (field_decl) ? 1
2361 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
2363 if (bit_align > DECL_ALIGN (field_decl))
2364 DECL_ALIGN (field_decl) = bit_align;
2365 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
2367 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2368 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
2372 if (pos)
2374 /* We need to pass in the alignment the DECL is known to have.
2375 This is the lowest-order bit set in POS, but no more than
2376 the alignment of the record, if one is specified. Note
2377 that an alignment of 0 is taken as infinite. */
2378 unsigned int known_align;
2380 if (host_integerp (pos, 1))
2381 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
2382 else
2383 known_align = BITS_PER_UNIT;
2385 if (TYPE_ALIGN (record_type)
2386 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2387 known_align = TYPE_ALIGN (record_type);
2389 layout_decl (field_decl, known_align);
2390 SET_DECL_OFFSET_ALIGN (field_decl,
2391 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
2392 : BITS_PER_UNIT);
2393 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2394 &DECL_FIELD_BIT_OFFSET (field_decl),
2395 DECL_OFFSET_ALIGN (field_decl), pos);
2398 /* In addition to what our caller says, claim the field is addressable if we
2399 know that its type is not suitable.
2401 The field may also be "technically" nonaddressable, meaning that even if
2402 we attempt to take the field's address we will actually get the address
2403 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2404 value we have at this point is not accurate enough, so we don't account
2405 for this here and let finish_record_type decide. */
2406 if (!addressable && !type_for_nonaliased_component_p (field_type))
2407 addressable = 1;
2409 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2411 return field_decl;
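/* A minimal standalone sketch of the bitfield decision above, assuming every
   quantity is a known constant, a position was specified, and the caller
   allows bitfields (ADDRESSABLE >= 0); the names are illustrative only.  A
   bitfield is needed whenever the field cannot be accessed as a whole object
   of its type.  */
#if 0
static int
make_bitfield_example (unsigned long long size,
                       unsigned long long type_size,
                       unsigned long long pos,
                       unsigned int type_align,
                       unsigned int record_align,
                       int packed)
{
  return size != type_size                  /* Size differs from the type's.  */
         || pos % type_align != 0           /* Misaligned position.  */
         || packed                          /* Packed enclosing record.  */
         || (record_align != 0
             && record_align < type_align); /* Under-aligned record.  */
}
#endif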
2414 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
2415 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
2416 (either an In parameter or an address of a pass-by-ref parameter). */
2418 tree
2419 create_param_decl (tree param_name, tree param_type, bool readonly)
2421 tree param_decl = build_decl (input_location,
2422 PARM_DECL, param_name, param_type);
2424 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2425 can lead to various ABI violations. */
2426 if (targetm.calls.promote_prototypes (NULL_TREE)
2427 && INTEGRAL_TYPE_P (param_type)
2428 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
2430 /* We have to be careful about biased types here. Make a subtype
2431 of integer_type_node with the proper biasing. */
2432 if (TREE_CODE (param_type) == INTEGER_TYPE
2433 && TYPE_BIASED_REPRESENTATION_P (param_type))
2435 tree subtype
2436 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2437 TREE_TYPE (subtype) = integer_type_node;
2438 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2439 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
2440 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
2441 param_type = subtype;
2443 else
2444 param_type = integer_type_node;
2447 DECL_ARG_TYPE (param_decl) = param_type;
2448 TREE_READONLY (param_decl) = readonly;
2449 return param_decl;
2452 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2453 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2454 changed. GNAT_NODE is used for the position of error messages. */
2456 void
2457 process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2458 Node_Id gnat_node)
2460 struct attrib *attr;
2462 for (attr = *attr_list; attr; attr = attr->next)
2463 switch (attr->type)
2465 case ATTR_MACHINE_ATTRIBUTE:
2466 Sloc_to_locus (Sloc (gnat_node), &input_location);
2467 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2468 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
2469 break;
2471 case ATTR_LINK_ALIAS:
2472 if (!DECL_EXTERNAL (*node))
2474 TREE_STATIC (*node) = 1;
2475 assemble_alias (*node, attr->name);
2477 break;
2479 case ATTR_WEAK_EXTERNAL:
2480 if (SUPPORTS_WEAK)
2481 declare_weak (*node);
2482 else
2483 post_error ("?weak declarations not supported on this target",
2484 attr->error_point);
2485 break;
2487 case ATTR_LINK_SECTION:
2488 if (targetm_common.have_named_sections)
2490 DECL_SECTION_NAME (*node)
2491 = build_string (IDENTIFIER_LENGTH (attr->name),
2492 IDENTIFIER_POINTER (attr->name));
2493 DECL_COMMON (*node) = 0;
2495 else
2496 post_error ("?section attributes are not supported for this target",
2497 attr->error_point);
2498 break;
2500 case ATTR_LINK_CONSTRUCTOR:
2501 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2502 TREE_USED (*node) = 1;
2503 break;
2505 case ATTR_LINK_DESTRUCTOR:
2506 DECL_STATIC_DESTRUCTOR (*node) = 1;
2507 TREE_USED (*node) = 1;
2508 break;
2510 case ATTR_THREAD_LOCAL_STORAGE:
2511 DECL_TLS_MODEL (*node) = decl_default_tls_model (*node);
2512 DECL_COMMON (*node) = 0;
2513 break;
2516 *attr_list = NULL;
2519 /* Record DECL as a global renaming pointer. */
2521 void
2522 record_global_renaming_pointer (tree decl)
2524 gcc_assert (!DECL_LOOP_PARM_P (decl) && DECL_RENAMED_OBJECT (decl));
2525 vec_safe_push (global_renaming_pointers, decl);
2528 /* Invalidate the global renaming pointers. */
2530 void
2531 invalidate_global_renaming_pointers (void)
2533 unsigned int i;
2534 tree iter;
2536 if (global_renaming_pointers == NULL)
2537 return;
2539 FOR_EACH_VEC_ELT (*global_renaming_pointers, i, iter)
2540 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
2542 vec_free (global_renaming_pointers);
2545 /* Return true if VALUE is known to be a multiple of FACTOR, which must be
2546 a power of 2. */
2548 bool
2549 value_factor_p (tree value, HOST_WIDE_INT factor)
2551 if (host_integerp (value, 1))
2552 return tree_low_cst (value, 1) % factor == 0;
2554 if (TREE_CODE (value) == MULT_EXPR)
2555 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2556 || value_factor_p (TREE_OPERAND (value, 1), factor));
2558 return false;
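/* A minimal standalone sketch of the check above on plain integers; the name
   is illustrative only.  For the MULT_EXPR case it is enough that one operand
   be a multiple of FACTOR: if a == k * FACTOR, then a * b == (k * b) * FACTOR
   for any b.  */
#if 0
static int
value_factor_example (unsigned long long value, unsigned long long factor)
{
  return value % factor == 0;
}
#endif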
2561 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
2563 static unsigned int
2564 scale_by_factor_of (tree expr, unsigned int value)
2566 expr = remove_conversions (expr, true);
2568 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
2569 corresponding to the number of trailing zeros of the mask. */
2570 if (TREE_CODE (expr) == BIT_AND_EXPR
2571 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
2573 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
2574 unsigned int i = 0;
2576 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
2578 mask >>= 1;
2579 value *= 2;
2580 i++;
2584 return value;
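/* A minimal standalone sketch of the loop above on plain integers; the name
   is illustrative only.  Each trailing zero bit of MASK contributes a factor
   of 2 to the provable alignment, so VALUE is doubled once per such bit.  */
#if 0
static unsigned int
scale_by_mask_example (unsigned long long mask, unsigned int value)
{
  while (mask != 0 && (mask & 1) == 0)
    {
      mask >>= 1;
      value *= 2;
    }
  return value;
}
#endif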
2587 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
2588 unless we can prove these 2 fields are laid out in such a way that no gap
2589 exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
2590 is the distance in bits between the end of PREV_FIELD and the starting
2591 position of CURR_FIELD. It is ignored if null. */
2593 static bool
2594 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
2596 /* If this is the first field of the record, there cannot be any gap */
2597 if (!prev_field)
2598 return false;
2600 /* If the previous field is a union type, then return false: The only
2601 time when such a field is not the last field of the record is when
2602 there are other components at fixed positions after it (meaning there
2603 was a rep clause for every field), in which case we don't want the
2604 alignment constraint to override them. */
2605 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
2606 return false;
2608 /* If the distance between the end of prev_field and the beginning of
2609 curr_field is constant, then there is a gap if the value of this
2610 constant is nonzero. */
2611 if (offset && host_integerp (offset, 1))
2612 return !integer_zerop (offset);
2614 /* If the size and position of the previous field are constant,
2615 then check the sum of this size and position. There will be a gap
2616 iff it is not a multiple of the current field alignment. */
2617 if (host_integerp (DECL_SIZE (prev_field), 1)
2618 && host_integerp (bit_position (prev_field), 1))
2619 return ((tree_low_cst (bit_position (prev_field), 1)
2620 + tree_low_cst (DECL_SIZE (prev_field), 1))
2621 % DECL_ALIGN (curr_field) != 0);
2623 /* If both the position and size of the previous field are multiples
2624 of the current field alignment, there cannot be any gap. */
2625 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
2626 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
2627 return false;
2629 /* Fallback: return that there may be a gap. */
2630 return true;
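/* A minimal standalone sketch of the constant case above; the name is
   illustrative only.  With the previous field ending at PREV_POS + PREV_SIZE
   bits, a gap can only be ruled out when that end position is a multiple of
   the current field's alignment.  */
#if 0
static int
potential_gap_example (unsigned long long prev_pos,
                       unsigned long long prev_size,
                       unsigned long long curr_align)
{
  return (prev_pos + prev_size) % curr_align != 0;
}
#endif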
2633 /* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
2634 of the decl. */
2636 tree
2637 create_label_decl (tree label_name, Node_Id gnat_node)
2639 tree label_decl
2640 = build_decl (input_location, LABEL_DECL, label_name, void_type_node);
2642 DECL_MODE (label_decl) = VOIDmode;
2644 /* Add this decl to the current binding level. */
2645 gnat_pushdecl (label_decl, gnat_node);
2647 return label_decl;
2650 /* Return a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
2651 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
2652 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
2653 PARM_DECL nodes chained through the DECL_CHAIN field).
2655 INLINE_STATUS, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
2656 used to set the appropriate fields in the FUNCTION_DECL. GNAT_NODE is
2657 used for the position of the decl. */
2659 tree
2660 create_subprog_decl (tree subprog_name, tree asm_name, tree subprog_type,
2661 tree param_decl_list, enum inline_status_t inline_status,
2662 bool public_flag, bool extern_flag, bool artificial_flag,
2663 struct attrib *attr_list, Node_Id gnat_node)
2665 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
2666 subprog_type);
2667 tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
2668 TREE_TYPE (subprog_type));
2669 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
2671 /* If this is a non-inline function nested inside an inlined external
2672 function, we cannot honor both requests without cloning the nested
2673 function in the current unit since it is private to the other unit.
2674 We could inline the nested function as well but it's probably better
2675 to err on the side of too little inlining. */
2676 if (inline_status != is_enabled
2677 && !public_flag
2678 && current_function_decl
2679 && DECL_DECLARED_INLINE_P (current_function_decl)
2680 && DECL_EXTERNAL (current_function_decl))
2681 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
2683 DECL_ARTIFICIAL (subprog_decl) = artificial_flag;
2684 DECL_EXTERNAL (subprog_decl) = extern_flag;
2686 switch (inline_status)
2688 case is_suppressed:
2689 DECL_UNINLINABLE (subprog_decl) = 1;
2690 break;
2692 case is_disabled:
2693 break;
2695 case is_enabled:
2696 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
2697 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_flag;
2698 break;
2700 default:
2701 gcc_unreachable ();
2704 TREE_PUBLIC (subprog_decl) = public_flag;
2705 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
2706 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
2707 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
2709 DECL_ARTIFICIAL (result_decl) = 1;
2710 DECL_IGNORED_P (result_decl) = 1;
2711 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
2712 DECL_RESULT (subprog_decl) = result_decl;
2714 if (asm_name)
2716 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
2718 /* The expand_main_function circuitry expects "main_identifier_node" to
2719 designate the DECL_NAME of the 'main' entry point, in turn expected
2720 to be declared as the "main" function literally by default. Ada
2721 program entry points are typically declared with a different name
2722 within the binder generated file, exported as 'main' to satisfy the
2723 system expectations. Force main_identifier_node in this case. */
2724 if (asm_name == main_identifier_node)
2725 DECL_NAME (subprog_decl) = main_identifier_node;
2728 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
2730 /* Add this decl to the current binding level. */
2731 gnat_pushdecl (subprog_decl, gnat_node);
2733 /* Output the assembler code and/or RTL for the declaration. */
2734 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
2736 return subprog_decl;
2739 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
2740 body. This routine needs to be invoked before processing the declarations
2741 appearing in the subprogram. */
2743 void
2744 begin_subprog_body (tree subprog_decl)
2746 tree param_decl;
2748 announce_function (subprog_decl);
2750 /* This function is being defined. */
2751 TREE_STATIC (subprog_decl) = 1;
2753 current_function_decl = subprog_decl;
2755 /* Enter a new binding level and show that all the parameters belong to
2756 this function. */
2757 gnat_pushlevel ();
2759 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
2760 param_decl = DECL_CHAIN (param_decl))
2761 DECL_CONTEXT (param_decl) = subprog_decl;
2763 make_decl_rtl (subprog_decl);
2766 /* Finish translating the current subprogram and set its BODY. */
2768 void
2769 end_subprog_body (tree body)
2771 tree fndecl = current_function_decl;
2773 /* Attach the BLOCK for this level to the function and pop the level. */
2774 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2775 DECL_INITIAL (fndecl) = current_binding_level->block;
2776 gnat_poplevel ();
2778 /* Mark the RESULT_DECL as being in this subprogram. */
2779 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2781 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
2782 if (TREE_CODE (body) == BIND_EXPR)
2784 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
2785 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
2788 DECL_SAVED_TREE (fndecl) = body;
2790 current_function_decl = decl_function_context (fndecl);
2793 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
2795 void
2796 rest_of_subprog_body_compilation (tree subprog_decl)
2798 /* We cannot track the location of errors past this point. */
2799 error_gnat_node = Empty;
2801 /* If we're only annotating types, don't actually compile this function. */
2802 if (type_annotate_only)
2803 return;
2805 /* Dump functions before gimplification. */
2806 dump_function (TDI_original, subprog_decl);
2808 if (!decl_function_context (subprog_decl))
2809 cgraph_finalize_function (subprog_decl, false);
2810 else
2811 /* Register this function with cgraph just far enough to get it
2812 added to our parent's nested function list. */
2813 (void) cgraph_get_create_node (subprog_decl);
2816 tree
2817 gnat_builtin_function (tree decl)
2819 gnat_pushdecl (decl, Empty);
2820 return decl;
2823 /* Return an integer type with the number of bits of precision given by
2824 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2825 it is a signed type. */
2827 tree
2828 gnat_type_for_size (unsigned precision, int unsignedp)
2830 tree t;
2831 char type_name[20];
2833 if (precision <= 2 * MAX_BITS_PER_WORD
2834 && signed_and_unsigned_types[precision][unsignedp])
2835 return signed_and_unsigned_types[precision][unsignedp];
2837 if (unsignedp)
2838 t = make_unsigned_type (precision);
2839 else
2840 t = make_signed_type (precision);
2842 if (precision <= 2 * MAX_BITS_PER_WORD)
2843 signed_and_unsigned_types[precision][unsignedp] = t;
2845 if (!TYPE_NAME (t))
2847 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
2848 TYPE_NAME (t) = get_identifier (type_name);
2851 return t;
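/* A minimal standalone sketch of the caching scheme above, assuming a
   hypothetical builder function and an opaque pointer standing for the type
   node; the names are illustrative only.  Each (precision, signedness) pair
   is built at most once and reused afterwards.  */
#if 0
#define EXAMPLE_MAX_PRECISION 128

static void *example_type_cache[EXAMPLE_MAX_PRECISION + 1][2];

static void *
type_for_size_example (unsigned precision, int unsignedp,
                       void *(*build_example_type) (unsigned, int))
{
  void *t;

  if (precision <= EXAMPLE_MAX_PRECISION
      && example_type_cache[precision][unsignedp & 1])
    return example_type_cache[precision][unsignedp & 1];

  t = build_example_type (precision, unsignedp);

  if (precision <= EXAMPLE_MAX_PRECISION)
    example_type_cache[precision][unsignedp & 1] = t;

  return t;
}
#endif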
2854 /* Likewise for floating-point types. */
2856 static tree
2857 float_type_for_precision (int precision, enum machine_mode mode)
2859 tree t;
2860 char type_name[20];
2862 if (float_types[(int) mode])
2863 return float_types[(int) mode];
2865 float_types[(int) mode] = t = make_node (REAL_TYPE);
2866 TYPE_PRECISION (t) = precision;
2867 layout_type (t);
2869 gcc_assert (TYPE_MODE (t) == mode);
2870 if (!TYPE_NAME (t))
2872 sprintf (type_name, "FLOAT_%d", precision);
2873 TYPE_NAME (t) = get_identifier (type_name);
2876 return t;
2879 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2880 an unsigned type; otherwise a signed type is returned. */
2882 tree
2883 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2885 if (mode == BLKmode)
2886 return NULL_TREE;
2888 if (mode == VOIDmode)
2889 return void_type_node;
2891 if (COMPLEX_MODE_P (mode))
2892 return NULL_TREE;
2894 if (SCALAR_FLOAT_MODE_P (mode))
2895 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2897 if (SCALAR_INT_MODE_P (mode))
2898 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2900 if (VECTOR_MODE_P (mode))
2902 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2903 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2904 if (inner_type)
2905 return build_vector_type_for_mode (inner_type, mode);
2908 return NULL_TREE;
2911 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2913 tree
2914 gnat_unsigned_type (tree type_node)
2916 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2918 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2920 type = copy_node (type);
2921 TREE_TYPE (type) = type_node;
2923 else if (TREE_TYPE (type_node)
2924 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2925 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2927 type = copy_node (type);
2928 TREE_TYPE (type) = TREE_TYPE (type_node);
2931 return type;
2934 /* Return the signed version of a TYPE_NODE, a scalar type. */
2936 tree
2937 gnat_signed_type (tree type_node)
2939 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2941 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2943 type = copy_node (type);
2944 TREE_TYPE (type) = type_node;
2946 else if (TREE_TYPE (type_node)
2947 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2948 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2950 type = copy_node (type);
2951 TREE_TYPE (type) = TREE_TYPE (type_node);
2954 return type;
2957 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2958 transparently converted to each other. */
2961 gnat_types_compatible_p (tree t1, tree t2)
2963 enum tree_code code;
2965 /* This is the default criterion. */
2966 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2967 return 1;
2969 /* We only check structural equivalence here. */
2970 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2971 return 0;
2973 /* Vector types are also compatible if they have the same number of subparts
2974 and the same form of (scalar) element type. */
2975 if (code == VECTOR_TYPE
2976 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
2977 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
2978 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
2979 return 1;
2981 /* Array types are also compatible if they are constrained and have the same
2982 domain(s) and the same component type. */
2983 if (code == ARRAY_TYPE
2984 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2985 || (TYPE_DOMAIN (t1)
2986 && TYPE_DOMAIN (t2)
2987 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2988 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2989 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2990 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
2991 && (TREE_TYPE (t1) == TREE_TYPE (t2)
2992 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
2993 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
2994 return 1;
2996 return 0;
2999 /* Return true if EXPR is a useless type conversion. */
3001 bool
3002 gnat_useless_type_conversion (tree expr)
3004 if (CONVERT_EXPR_P (expr)
3005 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3006 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3007 return gnat_types_compatible_p (TREE_TYPE (expr),
3008 TREE_TYPE (TREE_OPERAND (expr, 0)));
3010 return false;
3013 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3015 bool
3016 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3017 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3019 return TYPE_CI_CO_LIST (t) == cico_list
3020 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3021 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3022 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3025 /* EXP is an expression for the size of an object. If this size contains
3026 discriminant references, replace them with the maximum (if MAX_P) or
3027 minimum (if !MAX_P) possible value of the discriminant. */
3029 tree
3030 max_size (tree exp, bool max_p)
3032 enum tree_code code = TREE_CODE (exp);
3033 tree type = TREE_TYPE (exp);
3035 switch (TREE_CODE_CLASS (code))
3037 case tcc_declaration:
3038 case tcc_constant:
3039 return exp;
3041 case tcc_vl_exp:
3042 if (code == CALL_EXPR)
3044 tree t, *argarray;
3045 int n, i;
3047 t = maybe_inline_call_in_expr (exp);
3048 if (t)
3049 return max_size (t, max_p);
3051 n = call_expr_nargs (exp);
3052 gcc_assert (n > 0);
3053 argarray = XALLOCAVEC (tree, n);
3054 for (i = 0; i < n; i++)
3055 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3056 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3058 break;
3060 case tcc_reference:
3061 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3062 modify. Otherwise, we treat it like a variable. */
3063 if (!CONTAINS_PLACEHOLDER_P (exp))
3064 return exp;
3066 type = TREE_TYPE (TREE_OPERAND (exp, 1));
3067 return
3068 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
3070 case tcc_comparison:
3071 return max_p ? size_one_node : size_zero_node;
3073 case tcc_unary:
3074 if (code == NON_LVALUE_EXPR)
3075 return max_size (TREE_OPERAND (exp, 0), max_p);
3077 return fold_build1 (code, type,
3078 max_size (TREE_OPERAND (exp, 0),
3079 code == NEGATE_EXPR ? !max_p : max_p));
3081 case tcc_binary:
3083 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
3084 tree rhs = max_size (TREE_OPERAND (exp, 1),
3085 code == MINUS_EXPR ? !max_p : max_p);
3087 /* Special-case wanting the maximum value of a MIN_EXPR.
3088 In that case, if one side overflows, return the other. */
3089 if (max_p && code == MIN_EXPR)
3091 if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
3092 return lhs;
3094 if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
3095 return rhs;
3098 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3099 overflowing and the RHS a variable. */
3100 if ((code == MINUS_EXPR || code == PLUS_EXPR)
3101 && TREE_CODE (lhs) == INTEGER_CST
3102 && TREE_OVERFLOW (lhs)
3103 && !TREE_CONSTANT (rhs))
3104 return lhs;
3106 return size_binop (code, lhs, rhs);
3109 case tcc_expression:
3110 switch (TREE_CODE_LENGTH (code))
3112 case 1:
3113 if (code == SAVE_EXPR)
3114 return exp;
3116 return fold_build1 (code, type,
3117 max_size (TREE_OPERAND (exp, 0), max_p));
3119 case 2:
3120 if (code == COMPOUND_EXPR)
3121 return max_size (TREE_OPERAND (exp, 1), max_p);
3123 return fold_build2 (code, type,
3124 max_size (TREE_OPERAND (exp, 0), max_p),
3125 max_size (TREE_OPERAND (exp, 1), max_p));
3127 case 3:
3128 if (code == COND_EXPR)
3129 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
3130 max_size (TREE_OPERAND (exp, 1), max_p),
3131 max_size (TREE_OPERAND (exp, 2), max_p));
3133 default:
3134 break;
3137 /* Other tree classes cannot happen. */
3138 default:
3139 break;
3142 gcc_unreachable ();
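/* A minimal standalone sketch of the binary case above on plain integer
   bounds; the name is illustrative only.  The maximum of a difference is
   reached by maximizing the left operand and minimizing the right one, which
   is why MAX_P is inverted for the second operand of MINUS_EXPR.  */
#if 0
static long long
max_of_difference_example (long long lhs_max, long long rhs_min)
{
  /* max (a - b) over a <= LHS_MAX and b >= RHS_MIN.  */
  return lhs_max - rhs_min;
}
#endif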
3145 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3146 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3147 Return a constructor for the template. */
3149 tree
3150 build_template (tree template_type, tree array_type, tree expr)
3152 vec<constructor_elt, va_gc> *template_elts = NULL;
3153 tree bound_list = NULL_TREE;
3154 tree field;
3156 while (TREE_CODE (array_type) == RECORD_TYPE
3157 && (TYPE_PADDING_P (array_type)
3158 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3159 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3161 if (TREE_CODE (array_type) == ARRAY_TYPE
3162 || (TREE_CODE (array_type) == INTEGER_TYPE
3163 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3164 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3166 /* First make the list for a CONSTRUCTOR for the template. Go down the
3167 field list of the template instead of the type chain because this
3168 array might be an Ada array of arrays and we can't tell where the
3169 nested arrays stop being the underlying object. */
3171 for (field = TYPE_FIELDS (template_type); field;
3172 (bound_list
3173 ? (bound_list = TREE_CHAIN (bound_list))
3174 : (array_type = TREE_TYPE (array_type))),
3175 field = DECL_CHAIN (DECL_CHAIN (field)))
3177 tree bounds, min, max;
3179 /* If we have a bound list, get the bounds from there. Likewise
3180 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3181 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3182 This will give us a maximum range. */
3183 if (bound_list)
3184 bounds = TREE_VALUE (bound_list);
3185 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3186 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3187 else if (expr && TREE_CODE (expr) == PARM_DECL
3188 && DECL_BY_COMPONENT_PTR_P (expr))
3189 bounds = TREE_TYPE (field);
3190 else
3191 gcc_unreachable ();
3193 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
3194 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
3196 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3197 substitute it from OBJECT. */
3198 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3199 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3201 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
3202 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
3205 return gnat_build_constructor (template_type, template_elts);
3208 /* Helper routine to make a descriptor field. FIELD_LIST is the list of decls
3209 being built; the new decl is chained on to the front of the list. */
3211 static tree
3212 make_descriptor_field (const char *name, tree type, tree rec_type,
3213 tree initial, tree field_list)
3215 tree field
3216 = create_field_decl (get_identifier (name), type, rec_type, NULL_TREE,
3217 NULL_TREE, 0, 0);
3219 DECL_INITIAL (field) = initial;
3220 DECL_CHAIN (field) = field_list;
3221 return field;
3224 /* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
3225 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3226 type contains in its DECL_INITIAL the expression to use when a constructor
3227 is made for the type. GNAT_ENTITY is an entity used to print out an error
3228 message if the mechanism cannot be applied to an object of that type and
3229 also for the name. */
3231 tree
3232 build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
3234 tree record_type = make_node (RECORD_TYPE);
3235 tree pointer32_type, pointer64_type;
3236 tree field_list = NULL_TREE;
3237 int klass, ndim, i, dtype = 0;
3238 tree inner_type, tem;
3239 tree *idx_arr;
3241 /* If TYPE is an unconstrained array, use the underlying array type. */
3242 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3243 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
3245 /* If this is an array, compute the number of dimensions in the array,
3246 get the index types, and point to the inner type. */
3247 if (TREE_CODE (type) != ARRAY_TYPE)
3248 ndim = 0;
3249 else
3250 for (ndim = 1, inner_type = type;
3251 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
3252 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
3253 ndim++, inner_type = TREE_TYPE (inner_type))
3256 idx_arr = XALLOCAVEC (tree, ndim);
3258 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
3259 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
3260 for (i = ndim - 1, inner_type = type;
3261 i >= 0;
3262 i--, inner_type = TREE_TYPE (inner_type))
3263 idx_arr[i] = TYPE_DOMAIN (inner_type);
3264 else
3265 for (i = 0, inner_type = type;
3266 i < ndim;
3267 i++, inner_type = TREE_TYPE (inner_type))
3268 idx_arr[i] = TYPE_DOMAIN (inner_type);
3270 /* Now get the DTYPE value. */
3271 switch (TREE_CODE (type))
3273 case INTEGER_TYPE:
3274 case ENUMERAL_TYPE:
3275 case BOOLEAN_TYPE:
3276 if (TYPE_VAX_FLOATING_POINT_P (type))
3277 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3279 case 6:
3280 dtype = 10;
3281 break;
3282 case 9:
3283 dtype = 11;
3284 break;
3285 case 15:
3286 dtype = 27;
3287 break;
3289 else
3290 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
3292 case 8:
3293 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
3294 break;
3295 case 16:
3296 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
3297 break;
3298 case 32:
3299 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
3300 break;
3301 case 64:
3302 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
3303 break;
3304 case 128:
3305 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
3306 break;
3308 break;
3310 case REAL_TYPE:
3311 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
3312 break;
3314 case COMPLEX_TYPE:
3315 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
3316 && TYPE_VAX_FLOATING_POINT_P (type))
3317 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3319 case 6:
3320 dtype = 12;
3321 break;
3322 case 9:
3323 dtype = 13;
3324 break;
3325 case 15:
3326 dtype = 29;
3328 else
3329 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54 : 55;
3330 break;
3332 case ARRAY_TYPE:
3333 dtype = 14;
3334 break;
3336 default:
3337 break;
3340 /* Get the CLASS value. */
3341 switch (mech)
3343 case By_Descriptor_A:
3344 case By_Short_Descriptor_A:
3345 klass = 4;
3346 break;
3347 case By_Descriptor_NCA:
3348 case By_Short_Descriptor_NCA:
3349 klass = 10;
3350 break;
3351 case By_Descriptor_SB:
3352 case By_Short_Descriptor_SB:
3353 klass = 15;
3354 break;
3355 case By_Descriptor:
3356 case By_Short_Descriptor:
3357 case By_Descriptor_S:
3358 case By_Short_Descriptor_S:
3359 default:
3360 klass = 1;
3361 break;
3364 /* Make the type for a descriptor for VMS. The first four fields are the
3365 same for all types. */
3366 field_list
3367 = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type,
3368 size_in_bytes ((mech == By_Descriptor_A
3369 || mech == By_Short_Descriptor_A)
3370 ? inner_type : type),
3371 field_list);
3372 field_list
3373 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type,
3374 size_int (dtype), field_list);
3375 field_list
3376 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type,
3377 size_int (klass), field_list);
3379 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
3380 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
3382 /* Ensure that only 32-bit pointers are passed in 32-bit descriptors. Note
3383 that we cannot build a template call to the CE routine as it would get a
3384 wrong source location; instead we use a second placeholder for it. */
3385 tem = build_unary_op (ADDR_EXPR, pointer64_type,
3386 build0 (PLACEHOLDER_EXPR, type));
3387 tem = build3 (COND_EXPR, pointer32_type,
3388 Pmode != SImode
3389 ? build_binary_op (GE_EXPR, boolean_type_node, tem,
3390 build_int_cstu (pointer64_type, 0x80000000))
3391 : boolean_false_node,
3392 build0 (PLACEHOLDER_EXPR, void_type_node),
3393 convert (pointer32_type, tem));
3395 field_list
3396 = make_descriptor_field ("POINTER", pointer32_type, record_type, tem,
3397 field_list);
3399 switch (mech)
3401 case By_Descriptor:
3402 case By_Short_Descriptor:
3403 case By_Descriptor_S:
3404 case By_Short_Descriptor_S:
3405 break;
3407 case By_Descriptor_SB:
3408 case By_Short_Descriptor_SB:
3409 field_list
3410 = make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
3411 record_type,
3412 (TREE_CODE (type) == ARRAY_TYPE
3413 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
3414 : size_zero_node),
3415 field_list);
3416 field_list
3417 = make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
3418 record_type,
3419 (TREE_CODE (type) == ARRAY_TYPE
3420 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
3421 : size_zero_node),
3422 field_list);
3423 break;
3425 case By_Descriptor_A:
3426 case By_Short_Descriptor_A:
3427 case By_Descriptor_NCA:
3428 case By_Short_Descriptor_NCA:
3429 field_list
3430 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
3431 record_type, size_zero_node, field_list);
3433 field_list
3434 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
3435 record_type, size_zero_node, field_list);
3437 field_list
3438 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
3439 record_type,
3440 size_int ((mech == By_Descriptor_NCA
3441 || mech == By_Short_Descriptor_NCA)
3442 ? 0
3443 /* Set FL_COLUMN, FL_COEFF, and
3444 FL_BOUNDS. */
3445 : (TREE_CODE (type) == ARRAY_TYPE
3446 && TYPE_CONVENTION_FORTRAN_P
3447 (type)
3448 ? 224 : 192)),
3449 field_list);
3451 field_list
3452 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
3453 record_type, size_int (ndim), field_list);
3455 field_list
3456 = make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
3457 record_type, size_in_bytes (type),
3458 field_list);
3460 /* Now build a pointer to the 0,0,0... element. */
3461 tem = build0 (PLACEHOLDER_EXPR, type);
3462 for (i = 0, inner_type = type; i < ndim;
3463 i++, inner_type = TREE_TYPE (inner_type))
3464 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3465 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3466 NULL_TREE, NULL_TREE);
3468 field_list
3469 = make_descriptor_field ("A0", pointer32_type, record_type,
3470 build1 (ADDR_EXPR, pointer32_type, tem),
3471 field_list);
3473 /* Next come the addressing coefficients. */
3474 tem = size_one_node;
3475 for (i = 0; i < ndim; i++)
3477 char fname[3];
3478 tree idx_length
3479 = size_binop (MULT_EXPR, tem,
3480 size_binop (PLUS_EXPR,
3481 size_binop (MINUS_EXPR,
3482 TYPE_MAX_VALUE (idx_arr[i]),
3483 TYPE_MIN_VALUE (idx_arr[i])),
3484 size_int (1)));
3486 fname[0] = ((mech == By_Descriptor_NCA
3487 || mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
3488 fname[1] = '0' + i, fname[2] = 0;
3489 field_list
3490 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
3491 record_type, idx_length, field_list);
3493 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
3494 tem = idx_length;
3497 /* Finally here are the bounds. */
3498 for (i = 0; i < ndim; i++)
3500 char fname[3];
3502 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3503 field_list
3504 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
3505 record_type, TYPE_MIN_VALUE (idx_arr[i]),
3506 field_list);
3508 fname[0] = 'U';
3509 field_list
3510 = make_descriptor_field (fname, gnat_type_for_size (32, 1),
3511 record_type, TYPE_MAX_VALUE (idx_arr[i]),
3512 field_list);
3514 break;
3516 default:
3517 post_error ("unsupported descriptor type for &", gnat_entity);
3520 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
3521 finish_record_type (record_type, nreverse (field_list), 0, false);
3522 return record_type;
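/* Illustrative sketch (not in the original source): for the default class S
   case, the record built above roughly matches the classic 32-bit VMS layout

     struct dsc32
     {
       unsigned short LENGTH;   -- size in bytes of the data
       unsigned char  DTYPE;    -- data type code
       unsigned char  CLASS;    -- descriptor class code, 1 for S
       void          *POINTER;  -- 32-bit address of the data
     };

   with the extra bound, stride and flag fields appended for classes SB, A
   and NCA as done in the switch statement above.  */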
3525 /* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
3526 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3527 type contains in its DECL_INITIAL the expression to use when a constructor
3528 is made for the type. GNAT_ENTITY is an entity used to print out an error
3529 message if the mechanism cannot be applied to an object of that type and
3530 also for the name. */
3532 tree
3533 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
3535 tree record_type = make_node (RECORD_TYPE);
3536 tree pointer64_type;
3537 tree field_list = NULL_TREE;
3538 int klass, ndim, i, dtype = 0;
3539 tree inner_type, tem;
3540 tree *idx_arr;
3542 /* If TYPE is an unconstrained array, use the underlying array type. */
3543 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3544 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
3546 /* If this is an array, compute the number of dimensions in the array,
3547 get the index types, and point to the inner type. */
3548 if (TREE_CODE (type) != ARRAY_TYPE)
3549 ndim = 0;
3550 else
3551 for (ndim = 1, inner_type = type;
3552 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
3553 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
3554 ndim++, inner_type = TREE_TYPE (inner_type))
3557 idx_arr = XALLOCAVEC (tree, ndim);
3559 if (mech != By_Descriptor_NCA
3560 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
3561 for (i = ndim - 1, inner_type = type;
3562 i >= 0;
3563 i--, inner_type = TREE_TYPE (inner_type))
3564 idx_arr[i] = TYPE_DOMAIN (inner_type);
3565 else
3566 for (i = 0, inner_type = type;
3567 i < ndim;
3568 i++, inner_type = TREE_TYPE (inner_type))
3569 idx_arr[i] = TYPE_DOMAIN (inner_type);
3571 /* Now get the DTYPE value. */
3572 switch (TREE_CODE (type))
3574 case INTEGER_TYPE:
3575 case ENUMERAL_TYPE:
3576 case BOOLEAN_TYPE:
3577 if (TYPE_VAX_FLOATING_POINT_P (type))
3578 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3580 case 6:
3581 dtype = 10;
3582 break;
3583 case 9:
3584 dtype = 11;
3585 break;
3586 case 15:
3587 dtype = 27;
3588 break;
3590 else
3591 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
3593 case 8:
3594 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
3595 break;
3596 case 16:
3597 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
3598 break;
3599 case 32:
3600 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
3601 break;
3602 case 64:
3603 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
3604 break;
3605 case 128:
3606 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
3607 break;
3609 break;
3611 case REAL_TYPE:
3612 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
3613 break;
3615 case COMPLEX_TYPE:
3616 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
3617 && TYPE_VAX_FLOATING_POINT_P (type))
3618 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3620 case 6:
3621 dtype = 12;
3622 break;
3623 case 9:
3624 dtype = 13;
3625 break;
3626 case 15:
3627 dtype = 29;
3629 else
3630 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54 : 55;
3631 break;
3633 case ARRAY_TYPE:
3634 dtype = 14;
3635 break;
3637 default:
3638 break;
3641 /* Get the CLASS value. */
3642 switch (mech)
3644 case By_Descriptor_A:
3645 klass = 4;
3646 break;
3647 case By_Descriptor_NCA:
3648 klass = 10;
3649 break;
3650 case By_Descriptor_SB:
3651 klass = 15;
3652 break;
3653 case By_Descriptor:
3654 case By_Descriptor_S:
3655 default:
3656 klass = 1;
3657 break;
3660 /* Make the type for a 64-bit descriptor for VMS. The first six fields
3661 are the same for all types. */
3662 field_list
3663 = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
3664 record_type, size_int (1), field_list);
3665 field_list
3666 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
3667 record_type, size_int (dtype), field_list);
3668 field_list
3669 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
3670 record_type, size_int (klass), field_list);
3671 field_list
3672 = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
3673 record_type, size_int (-1), field_list);
3674 field_list
3675 = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
3676 record_type,
3677 size_in_bytes (mech == By_Descriptor_A
3678 ? inner_type : type),
3679 field_list);
3681 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
3683 field_list
3684 = make_descriptor_field ("POINTER", pointer64_type, record_type,
3685 build_unary_op (ADDR_EXPR, pointer64_type,
3686 build0 (PLACEHOLDER_EXPR, type)),
3687 field_list);
3689 switch (mech)
3691 case By_Descriptor:
3692 case By_Descriptor_S:
3693 break;
3695 case By_Descriptor_SB:
3696 field_list
3697 = make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
3698 record_type,
3699 (TREE_CODE (type) == ARRAY_TYPE
3700 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
3701 : size_zero_node),
3702 field_list);
3703 field_list
3704 = make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
3705 record_type,
3706 (TREE_CODE (type) == ARRAY_TYPE
3707 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
3708 : size_zero_node),
3709 field_list);
3710 break;
3712 case By_Descriptor_A:
3713 case By_Descriptor_NCA:
3714 field_list
3715 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
3716 record_type, size_zero_node, field_list);
3718 field_list
3719 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
3720 record_type, size_zero_node, field_list);
3722 dtype = (mech == By_Descriptor_NCA
3723 ? 0
3724 /* Set FL_COLUMN, FL_COEFF, and
3725 FL_BOUNDS. */
3726 : (TREE_CODE (type) == ARRAY_TYPE
3727 && TYPE_CONVENTION_FORTRAN_P (type)
3728 ? 224 : 192));
3729 field_list
3730 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
3731 record_type, size_int (dtype),
3732 field_list);
3734 field_list
3735 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
3736 record_type, size_int (ndim), field_list);
3738 field_list
3739 = make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
3740 record_type, size_int (0), field_list);
3741 field_list
3742 = make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
3743 record_type, size_in_bytes (type),
3744 field_list);
3746 /* Now build a pointer to the 0,0,0... element. */
3747 tem = build0 (PLACEHOLDER_EXPR, type);
3748 for (i = 0, inner_type = type; i < ndim;
3749 i++, inner_type = TREE_TYPE (inner_type))
3750 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3751 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3752 NULL_TREE, NULL_TREE);
3754 field_list
3755 = make_descriptor_field ("A0", pointer64_type, record_type,
3756 build1 (ADDR_EXPR, pointer64_type, tem),
3757 field_list);
3759 /* Next come the addressing coefficients. */
3760 tem = size_one_node;
3761 for (i = 0; i < ndim; i++)
3763 char fname[3];
3764 tree idx_length
3765 = size_binop (MULT_EXPR, tem,
3766 size_binop (PLUS_EXPR,
3767 size_binop (MINUS_EXPR,
3768 TYPE_MAX_VALUE (idx_arr[i]),
3769 TYPE_MIN_VALUE (idx_arr[i])),
3770 size_int (1)));
3772 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3773 fname[1] = '0' + i, fname[2] = 0;
3774 field_list
3775 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
3776 record_type, idx_length, field_list);
3778 if (mech == By_Descriptor_NCA)
3779 tem = idx_length;
3782 /* Finally here are the bounds. */
3783 for (i = 0; i < ndim; i++)
3785 char fname[3];
3787 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3788 field_list
3789 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
3790 record_type,
3791 TYPE_MIN_VALUE (idx_arr[i]), field_list);
3793 fname[0] = 'U';
3794 field_list
3795 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
3796 record_type,
3797 TYPE_MAX_VALUE (idx_arr[i]), field_list);
3799 break;
3801 default:
3802 post_error ("unsupported descriptor type for &", gnat_entity);
3805 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC64");
3806 finish_record_type (record_type, nreverse (field_list), 0, false);
3807 return record_type;
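/* Illustrative sketch (not in the original source): the common header built
   above corresponds to the 64-bit VMS descriptor prologue, roughly

     struct dsc64
     {
       unsigned short     MBO;      -- must be one
       unsigned char      DTYPE;    -- data type code
       unsigned char      CLASS;    -- descriptor class code
       int                MBMO;     -- must be minus one
       unsigned long long LENGTH;   -- size in bytes of the data
       void              *POINTER;  -- 64-bit address of the data
     };

   The MBO/MBMO pair is what convert_vms_descriptor below tests at run time
   to tell a 64-bit descriptor from a 32-bit one.  */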
3810 /* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
3811 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
3813 tree
3814 fill_vms_descriptor (tree gnu_type, tree gnu_expr, Node_Id gnat_actual)
3816 vec<constructor_elt, va_gc> *v = NULL;
3817 tree field;
3819 gnu_expr = maybe_unconstrained_array (gnu_expr);
3820 gnu_expr = gnat_protect_expr (gnu_expr);
3821 gnat_mark_addressable (gnu_expr);
3823 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
3824 routine in case we have a 32-bit descriptor. */
3825 gnu_expr = build2 (COMPOUND_EXPR, void_type_node,
3826 build_call_raise (CE_Range_Check_Failed, gnat_actual,
3827 N_Raise_Constraint_Error),
3828 gnu_expr);
3830 for (field = TYPE_FIELDS (gnu_type); field; field = DECL_CHAIN (field))
3832 tree value
3833 = convert (TREE_TYPE (field),
3834 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field),
3835 gnu_expr));
3836 CONSTRUCTOR_APPEND_ELT (v, field, value);
3839 return gnat_build_constructor (gnu_type, v);
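/* Illustrative sketch (not in the original source): a typical flow builds the
   descriptor type once for the subprogram profile and fills it at each call
   site, along the lines of

     tree desc_type
       = build_vms_descriptor (gnu_type, By_Descriptor_A, gnat_entity);
     ...
     tree desc = fill_vms_descriptor (desc_type, gnu_actual, gnat_actual);

   where GNU_ACTUAL and GNAT_ACTUAL stand for the actual parameter; each
   field of DESC is the field's DECL_INITIAL with the placeholder replaced
   by GNU_ACTUAL, e.g. LENGTH becomes the actual's size in bytes.  */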
3842 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3843 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3844 which the VMS descriptor is passed. */
3846 static tree
3847 convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3849 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3850 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3851 /* The CLASS field is the 3rd field in the descriptor. */
3852 tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
3853 /* The POINTER field is the 6th field in the descriptor. */
3854 tree pointer = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass)));
3856 /* Retrieve the value of the POINTER field. */
3857 tree gnu_expr64
3858 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3860 if (POINTER_TYPE_P (gnu_type))
3861 return convert (gnu_type, gnu_expr64);
3863 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
3865 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3866 tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
3867 tree template_type = TREE_TYPE (p_bounds_type);
3868 tree min_field = TYPE_FIELDS (template_type);
3869 tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
3870 tree template_tree, template_addr, aflags, dimct, t, u;
3871 /* See the head comment of build_vms_descriptor. */
3872 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
3873 tree lfield, ufield;
3874 vec<constructor_elt, va_gc> *v;
3876 /* Convert POINTER to the pointer-to-array type. */
3877 gnu_expr64 = convert (p_array_type, gnu_expr64);
3879 switch (iklass)
3881 case 1: /* Class S */
3882 case 15: /* Class SB */
3883 /* Build {1, LENGTH} template; LENGTH is the 5th field. */
3884 vec_alloc (v, 2);
3885 t = DECL_CHAIN (DECL_CHAIN (klass));
3886 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3887 CONSTRUCTOR_APPEND_ELT (v, min_field,
3888 convert (TREE_TYPE (min_field),
3889 integer_one_node));
3890 CONSTRUCTOR_APPEND_ELT (v, max_field,
3891 convert (TREE_TYPE (max_field), t));
3892 template_tree = gnat_build_constructor (template_type, v);
3893 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
3895 /* For class S, we are done. */
3896 if (iklass == 1)
3897 break;
3899 /* Test that we really have a SB descriptor, like DEC Ada. */
3900 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
3901 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
3902 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
3903 /* If so, there is already a template in the descriptor and
3904 it is located right after the POINTER field. The fields are
3905 64 bits wide so they must be repacked. */
3906 t = DECL_CHAIN (pointer);
3907 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3908 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3910 t = DECL_CHAIN (t);
3911 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3912 ufield = convert
3913 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
3915 /* Build the template in the form of a constructor. */
3916 vec_alloc (v, 2);
3917 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
3918 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
3919 ufield);
3920 template_tree = gnat_build_constructor (template_type, v);
3922 /* Otherwise use the {1, LENGTH} template we built above. */
3923 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3924 build_unary_op (ADDR_EXPR, p_bounds_type,
3925 template_tree),
3926 template_addr);
3927 break;
3929 case 4: /* Class A */
3930 /* The AFLAGS field is the 3rd field after the pointer in the
3931 descriptor. */
3932 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
3933 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3934 /* The DIMCT field is the next field in the descriptor after
3935 aflags. */
3936 t = DECL_CHAIN (t);
3937 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3938 /* Raise CONSTRAINT_ERROR if there is more than one dimension,
3939 or if FL_COEFF or FL_BOUNDS is not set. */
3940 u = build_int_cst (TREE_TYPE (aflags), 192);
3941 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
3942 build_binary_op (NE_EXPR, boolean_type_node,
3943 dimct,
3944 convert (TREE_TYPE (dimct),
3945 size_one_node)),
3946 build_binary_op (NE_EXPR, boolean_type_node,
3947 build2 (BIT_AND_EXPR,
3948 TREE_TYPE (aflags),
3949 aflags, u),
3950 u));
3951 /* There is already a template in the descriptor and it is located
3952 in block 3. The fields are 64 bits wide so they must be repacked. */
3953 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
3954 (t)))));
3955 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3956 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3958 t = DECL_CHAIN (t);
3959 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3960 ufield = convert
3961 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
3963 /* Build the template in the form of a constructor. */
3964 vec_alloc (v, 2);
3965 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
3966 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
3967 ufield);
3968 template_tree = gnat_build_constructor (template_type, v);
3969 template_tree = build3 (COND_EXPR, template_type, u,
3970 build_call_raise (CE_Length_Check_Failed, Empty,
3971 N_Raise_Constraint_Error),
3972 template_tree);
3973 template_addr
3974 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
3975 break;
3977 case 10: /* Class NCA */
3978 default:
3979 post_error ("unsupported descriptor type for &", gnat_subprog);
3980 template_addr = integer_zero_node;
3981 break;
3984 /* Build the fat pointer in the form of a constructor. */
3985 vec_alloc (v, 2);
3986 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr64);
3987 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
3988 template_addr);
3989 return gnat_build_constructor (gnu_type, v);
3992 else
3993 gcc_unreachable ();
3996 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3997 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3998 which the VMS descriptor is passed. */
4000 static tree
4001 convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
4003 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
4004 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
4005 /* The CLASS field is the 3rd field in the descriptor. */
4006 tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
4007 /* The POINTER field is the 4th field in the descriptor. */
4008 tree pointer = DECL_CHAIN (klass);
4010 /* Retrieve the value of the POINTER field. */
4011 tree gnu_expr32
4012 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
4014 if (POINTER_TYPE_P (gnu_type))
4015 return convert (gnu_type, gnu_expr32);
4017 else if (TYPE_IS_FAT_POINTER_P (gnu_type))
4019 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
4020 tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
4021 tree template_type = TREE_TYPE (p_bounds_type);
4022 tree min_field = TYPE_FIELDS (template_type);
4023 tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
4024 tree template_tree, template_addr, aflags, dimct, t, u;
4025 /* See the head comment of build_vms_descriptor. */
4026 int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
4027 vec<constructor_elt, va_gc> *v;
4029 /* Convert POINTER to the pointer-to-array type. */
4030 gnu_expr32 = convert (p_array_type, gnu_expr32);
4032 switch (iklass)
4034 case 1: /* Class S */
4035 case 15: /* Class SB */
4036 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
4037 vec_alloc (v, 2);
4038 t = TYPE_FIELDS (desc_type);
4039 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4040 CONSTRUCTOR_APPEND_ELT (v, min_field,
4041 convert (TREE_TYPE (min_field),
4042 integer_one_node));
4043 CONSTRUCTOR_APPEND_ELT (v, max_field,
4044 convert (TREE_TYPE (max_field), t));
4045 template_tree = gnat_build_constructor (template_type, v);
4046 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);
4048 /* For class S, we are done. */
4049 if (iklass == 1)
4050 break;
4052 /* Test that we really have a SB descriptor, like DEC Ada. */
4053 t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
4054 u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
4055 u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
4056 /* If so, there is already a template in the descriptor and
4057 it is located right after the POINTER field. */
4058 t = DECL_CHAIN (pointer);
4059 template_tree
4060 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4061 /* Otherwise use the {1, LENGTH} template we built above. */
4062 template_addr = build3 (COND_EXPR, p_bounds_type, u,
4063 build_unary_op (ADDR_EXPR, p_bounds_type,
4064 template_tree),
4065 template_addr);
4066 break;
4068 case 4: /* Class A */
4069 /* The AFLAGS field is the 7th field in the descriptor. */
4070 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
4071 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4072 /* The DIMCT field is the 8th field in the descriptor. */
4073 t = DECL_CHAIN (t);
4074 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4075 /* Raise CONSTRAINT_ERROR if there is more than one dimension,
4076 or if FL_COEFF or FL_BOUNDS is not set. */
4077 u = build_int_cst (TREE_TYPE (aflags), 192);
4078 u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
4079 build_binary_op (NE_EXPR, boolean_type_node,
4080 dimct,
4081 convert (TREE_TYPE (dimct),
4082 size_one_node)),
4083 build_binary_op (NE_EXPR, boolean_type_node,
4084 build2 (BIT_AND_EXPR,
4085 TREE_TYPE (aflags),
4086 aflags, u),
4087 u));
4088 /* There is already a template in the descriptor and it is
4089 located at the start of block 3 (12th field). */
4090 t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t))));
4091 template_tree
4092 = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
4093 template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
4094 build_call_raise (CE_Length_Check_Failed, Empty,
4095 N_Raise_Constraint_Error),
4096 template_tree);
4097 template_addr
4098 = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
4099 break;
4101 case 10: /* Class NCA */
4102 default:
4103 post_error ("unsupported descriptor type for &", gnat_subprog);
4104 template_addr = integer_zero_node;
4105 break;
4108 /* Build the fat pointer in the form of a constructor. */
4109 vec_alloc (v, 2);
4110 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr32);
4111 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
4112 template_addr);
4114 return gnat_build_constructor (gnu_type, v);
4117 else
4118 gcc_unreachable ();
4121 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
4122 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
4123 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
4124 descriptor is passed. */
4126 tree
4127 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
4128 Entity_Id gnat_subprog)
4130 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
4131 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
4132 tree mbo = TYPE_FIELDS (desc_type);
4133 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
4134 tree mbmo = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo)));
4135 tree is64bit, gnu_expr32, gnu_expr64;
4137 /* If the field name is not MBO, this must be a 32-bit descriptor and there
4138 is no alternate. Otherwise the primary is 64-bit and the alternate 32-bit. */
4139 if (strcmp (mbostr, "MBO") != 0)
4141 tree ret = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
4142 return ret;
4145 /* Build the test for 64-bit descriptor. */
4146 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
4147 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
4148 is64bit
4149 = build_binary_op (TRUTH_ANDIF_EXPR, boolean_type_node,
4150 build_binary_op (EQ_EXPR, boolean_type_node,
4151 convert (integer_type_node, mbo),
4152 integer_one_node),
4153 build_binary_op (EQ_EXPR, boolean_type_node,
4154 convert (integer_type_node, mbmo),
4155 integer_minus_one_node));
4157 /* Build the 2 possible end results. */
4158 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
4159 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
4160 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
4161 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
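/* Illustrative sketch (not in the original source): the tree returned above
   has the overall shape

     (MBO == 1 && MBMO == -1)
       ? <pointer rebuilt from the 64-bit layout>
       : <pointer rebuilt from the 32-bit layout>

   so the same imported subprogram can be passed either descriptor form and
   the choice is made at run time.  */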
4164 /* Build a type to be used to represent an aliased object whose nominal type
4165 is an unconstrained array. This consists of a RECORD_TYPE containing a
4166 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
4167 If OBJECT_TYPE is that of an unconstrained array, this is used to represent
4168 an arbitrary unconstrained object. Use NAME as the name of the record.
4169 DEBUG_INFO_P is true if we need to write debug information for the type. */
4171 tree
4172 build_unc_object_type (tree template_type, tree object_type, tree name,
4173 bool debug_info_p)
4175 tree type = make_node (RECORD_TYPE);
4176 tree template_field
4177 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
4178 NULL_TREE, NULL_TREE, 0, 1);
4179 tree array_field
4180 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
4181 NULL_TREE, NULL_TREE, 0, 1);
4183 TYPE_NAME (type) = name;
4184 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
4185 DECL_CHAIN (template_field) = array_field;
4186 finish_record_type (type, template_field, 0, true);
4188 /* Declare it now since it will never be declared otherwise. This is
4189 necessary to ensure that its subtrees are properly marked. */
4190 create_type_decl (name, type, true, debug_info_p, Empty);
4192 return type;
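/* Illustrative sketch (not in the original source): the record built above
   has the shape

     struct UNC_object
     {
       template_type BOUNDS;   -- the bounds
       object_type   ARRAY;    -- the array data itself
     };

   A shifted thin pointer designates the ARRAY field rather than BOUNDS; see
   the offset adjustments in convert_to_fat_pointer and in the POINTER_TYPE
   case of convert below.  */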
4195 /* Same, taking a thin or fat pointer type instead of a template type. */
4197 tree
4198 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
4199 tree name, bool debug_info_p)
4201 tree template_type;
4203 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
4205 template_type
4206 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
4207 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
4208 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
4210 return
4211 build_unc_object_type (template_type, object_type, name, debug_info_p);
4214 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
4215 In the normal case this is just two adjustments, but we have more to
4216 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
4218 void
4219 update_pointer_to (tree old_type, tree new_type)
4221 tree ptr = TYPE_POINTER_TO (old_type);
4222 tree ref = TYPE_REFERENCE_TO (old_type);
4223 tree t;
4225 /* If this is the main variant, process all the other variants first. */
4226 if (TYPE_MAIN_VARIANT (old_type) == old_type)
4227 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
4228 update_pointer_to (t, new_type);
4230 /* If no pointers and no references, we are done. */
4231 if (!ptr && !ref)
4232 return;
4234 /* Merge the old type qualifiers in the new type.
4236 Each old variant has qualifiers for specific reasons, and the new
4237 designated type as well. Each set of qualifiers represents useful
4238 information grabbed at some point, and merging the two simply unifies
4239 these inputs into the final type description.
4241 Consider for instance a volatile type frozen after an access to constant
4242 type designating it; after the designated type's freeze, we get here with
4243 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
4244 when the access type was processed. We will make a volatile and readonly
4245 designated type, because that's what it really is.
4247 We might also get here for a non-dummy OLD_TYPE variant with different
4248 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
4249 to private record type elaboration (see the comments around the call to
4250 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
4251 the qualifiers in those cases too, to avoid accidentally discarding the
4252 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
4253 new_type
4254 = build_qualified_type (new_type,
4255 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
4257 /* If old type and new type are identical, there is nothing to do. */
4258 if (old_type == new_type)
4259 return;
4261 /* Otherwise, first handle the simple case. */
4262 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
4264 tree new_ptr, new_ref;
4266 /* If pointer or reference already points to new type, nothing to do.
4267 This can happen as update_pointer_to can be invoked multiple times
4268 on the same couple of types because of the type variants. */
4269 if ((ptr && TREE_TYPE (ptr) == new_type)
4270 || (ref && TREE_TYPE (ref) == new_type))
4271 return;
4273 /* Chain PTR and its variants at the end. */
4274 new_ptr = TYPE_POINTER_TO (new_type);
4275 if (new_ptr)
4277 while (TYPE_NEXT_PTR_TO (new_ptr))
4278 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
4279 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
4281 else
4282 TYPE_POINTER_TO (new_type) = ptr;
4284 /* Now adjust them. */
4285 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
4286 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
4288 TREE_TYPE (t) = new_type;
4289 if (TYPE_NULL_BOUNDS (t))
4290 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
4293 /* Chain REF and its variants at the end. */
4294 new_ref = TYPE_REFERENCE_TO (new_type);
4295 if (new_ref)
4297 while (TYPE_NEXT_REF_TO (new_ref))
4298 new_ref = TYPE_NEXT_REF_TO (new_ref);
4299 TYPE_NEXT_REF_TO (new_ref) = ref;
4301 else
4302 TYPE_REFERENCE_TO (new_type) = ref;
4304 /* Now adjust them. */
4305 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
4306 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
4307 TREE_TYPE (t) = new_type;
4309 TYPE_POINTER_TO (old_type) = NULL_TREE;
4310 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
4313 /* Now deal with the unconstrained array case. In this case the pointer
4314 is actually a record where both fields are pointers to dummy nodes.
4315 Turn them into pointers to the correct types using update_pointer_to.
4316 Likewise for the pointer to the object record (thin pointer). */
4317 else
4319 tree new_ptr = TYPE_POINTER_TO (new_type);
4321 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
4323 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
4324 since update_pointer_to can be invoked multiple times on the same
4325 couple of types because of the type variants. */
4326 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
4327 return;
4329 update_pointer_to
4330 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
4331 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
4333 update_pointer_to
4334 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
4335 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
4337 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
4338 TYPE_OBJECT_RECORD_TYPE (new_type));
4340 TYPE_POINTER_TO (old_type) = NULL_TREE;
4344 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
4345 unconstrained one. This involves making or finding a template. */
4347 static tree
4348 convert_to_fat_pointer (tree type, tree expr)
4350 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
4351 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
4352 tree etype = TREE_TYPE (expr);
4353 tree template_tree;
4354 vec<constructor_elt, va_gc> *v;
4355 vec_alloc (v, 2);
4357 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4358 array (compare_fat_pointers ensures that this is the full discriminant)
4359 and a valid pointer to the bounds. This latter property is necessary
4360 since the compiler can hoist the load of the bounds done through it. */
4361 if (integer_zerop (expr))
4363 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4364 tree null_bounds, t;
4366 if (TYPE_NULL_BOUNDS (ptr_template_type))
4367 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
4368 else
4370 /* The template type can still be dummy at this point so we build an
4371 empty constructor. The middle-end will fill it in with zeros. */
4372 t = build_constructor (template_type,
4373 NULL);
4374 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
4375 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
4376 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
4379 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4380 fold_convert (p_array_type, null_pointer_node));
4381 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
4382 t = build_constructor (type, v);
4383 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4384 TREE_CONSTANT (t) = 0;
4385 TREE_STATIC (t) = 1;
4387 return t;
4390 /* If EXPR is a thin pointer, make template and data from the record. */
4391 if (TYPE_IS_THIN_POINTER_P (etype))
4393 tree field = TYPE_FIELDS (TREE_TYPE (etype));
4395 expr = gnat_protect_expr (expr);
4396 if (TREE_CODE (expr) == ADDR_EXPR)
4397 expr = TREE_OPERAND (expr, 0);
4398 else
4400 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4401 the thin pointer value has been shifted so we first need to shift
4402 it back to get the template address. */
4403 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
4404 expr
4405 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
4406 fold_build1 (NEGATE_EXPR, sizetype,
4407 byte_position
4408 (DECL_CHAIN (field))));
4409 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
4412 template_tree = build_component_ref (expr, NULL_TREE, field, false);
4413 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
4414 build_component_ref (expr, NULL_TREE,
4415 DECL_CHAIN (field), false));
4418 /* Otherwise, build the constructor for the template. */
4419 else
4420 template_tree = build_template (template_type, TREE_TYPE (etype), expr);
4422 /* The final result is a constructor for the fat pointer.
4424 If EXPR is an argument of a foreign convention subprogram, the type it
4425 points to is directly the component type. In this case, the expression
4426 type may not match the corresponding FIELD_DECL type at this point, so we
4427 call "convert" here to fix that up if necessary. This type consistency is
4428 required, for instance because it ensures that possible later folding of
4429 COMPONENT_REFs against this constructor always yields something of the
4430 same type as the initial reference.
4432 Note that the call to "build_template" above is still fine because it
4433 will only refer to the provided TEMPLATE_TYPE in this case. */
4434 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4435 convert (p_array_type, expr));
4436 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4437 build_unary_op (ADDR_EXPR, NULL_TREE,
4438 template_tree));
4439 return gnat_build_constructor (type, v);
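/* Illustrative sketch (not in the original source): the fat pointer returned
   above is a two-field constructor of the form

     { P_ARRAY  => (pointer-to-array)  EXPR,
       P_BOUNDS => (pointer-to-bounds) &template }

   assuming the usual P_ARRAY/P_BOUNDS naming of the fat pointer fields; the
   template is either reused from the thin pointer's record or freshly built
   by build_template above.  */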
4442 /* Create an expression whose value is that of EXPR,
4443 converted to type TYPE. The TREE_TYPE of the value
4444 is always TYPE. This function implements all reasonable
4445 conversions; callers should filter out those that are
4446 not permitted by the language being compiled. */
4448 tree
4449 convert (tree type, tree expr)
4451 tree etype = TREE_TYPE (expr);
4452 enum tree_code ecode = TREE_CODE (etype);
4453 enum tree_code code = TREE_CODE (type);
4455 /* If the expression is already of the right type, we are done. */
4456 if (etype == type)
4457 return expr;
4459 /* If both input and output have padding and are of variable size, do this
4460 as an unchecked conversion. Likewise if one is a mere variant of the
4461 other, so we avoid a pointless unpad/repad sequence. */
4462 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
4463 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4464 && (!TREE_CONSTANT (TYPE_SIZE (type))
4465 || !TREE_CONSTANT (TYPE_SIZE (etype))
4466 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4467 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4468 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4471 /* If the output type has padding, convert to the inner type and make a
4472 constructor to build the record, unless a variable size is involved. */
4473 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
4475 vec<constructor_elt, va_gc> *v;
4477 /* If we previously converted from another type and our type is
4478 of variable size, remove the conversion to avoid the need for
4479 variable-sized temporaries. Likewise for a conversion between
4480 original and packable version. */
4481 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4482 && (!TREE_CONSTANT (TYPE_SIZE (type))
4483 || (ecode == RECORD_TYPE
4484 && TYPE_NAME (etype)
4485 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4486 expr = TREE_OPERAND (expr, 0);
4488 /* If we are just removing the padding from expr, convert the original
4489 object if we have variable size in order to avoid the need for some
4490 variable-sized temporaries. Likewise if the padding is a variant
4491 of the other, so we avoid a pointless unpad/repad sequence. */
4492 if (TREE_CODE (expr) == COMPONENT_REF
4493 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4494 && (!TREE_CONSTANT (TYPE_SIZE (type))
4495 || TYPE_MAIN_VARIANT (type)
4496 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
4497 || (ecode == RECORD_TYPE
4498 && TYPE_NAME (etype)
4499 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4500 return convert (type, TREE_OPERAND (expr, 0));
4502 /* If the inner type is of self-referential size and the expression type
4503 is a record, do this as an unchecked conversion. But first pad the
4504 expression if possible to have the same size on both sides. */
4505 if (ecode == RECORD_TYPE
4506 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
4508 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
4509 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4510 false, false, false, true),
4511 expr);
4512 return unchecked_convert (type, expr, false);
4515 /* If we are converting between array types with variable size, do the
4516 final conversion as an unchecked conversion, again to avoid the need
4517 for some variable-sized temporaries. If valid, this conversion is
4518 very likely purely technical and without real effects. */
4519 if (ecode == ARRAY_TYPE
4520 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4521 && !TREE_CONSTANT (TYPE_SIZE (etype))
4522 && !TREE_CONSTANT (TYPE_SIZE (type)))
4523 return unchecked_convert (type,
4524 convert (TREE_TYPE (TYPE_FIELDS (type)),
4525 expr),
4526 false);
4528 vec_alloc (v, 1);
4529 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4530 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4531 return gnat_build_constructor (type, v);
4534 /* If the input type has padding, remove it and convert to the output type.
4535 The ordering of the conditions is arranged to ensure that the output type
4536 is not a padding type here, as it is not clear whether the conversion
4537 would always be correct if this were to happen. */
4538 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4540 tree unpadded;
4542 /* If we have just converted to this padded type, just get the
4543 inner expression. */
4544 if (TREE_CODE (expr) == CONSTRUCTOR
4545 && !vec_safe_is_empty (CONSTRUCTOR_ELTS (expr))
4546 && (*CONSTRUCTOR_ELTS (expr))[0].index == TYPE_FIELDS (etype))
4547 unpadded = (*CONSTRUCTOR_ELTS (expr))[0].value;
4549 /* Otherwise, build an explicit component reference. */
4550 else
4551 unpadded
4552 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4554 return convert (type, unpadded);
4557 /* If the input is a biased type, adjust first. */
4558 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4559 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4560 fold_convert (TREE_TYPE (etype),
4561 expr),
4562 TYPE_MIN_VALUE (etype)));
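/* Illustrative example (not in the original source): for a biased subtype
   with TYPE_MIN_VALUE 100 and a stored (biased) value of 5, the conversion
   above computes 5 + 100 = 105 in the base type of ETYPE and then converts
   that result to TYPE.  */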
4564 /* If the input is a justified modular type, we need to extract the actual
4565 object before converting it to any other type with the exceptions of an
4566 unconstrained array or of a mere type variant. It is useful to avoid the
4567 extraction and conversion in the type variant case because it could end
4568 up replacing a VAR_DECL expr by a constructor and we might be about to
4569 take the address of the result. */
4570 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4571 && code != UNCONSTRAINED_ARRAY_TYPE
4572 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4573 return convert (type, build_component_ref (expr, NULL_TREE,
4574 TYPE_FIELDS (etype), false));
4576 /* If converting to a type that contains a template, convert to the data
4577 type and then build the template. */
4578 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4580 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4581 vec<constructor_elt, va_gc> *v;
4582 vec_alloc (v, 2);
4584 /* If the source already has a template, get a reference to the
4585 associated array only, as we are going to rebuild a template
4586 for the target type anyway. */
4587 expr = maybe_unconstrained_array (expr);
4589 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4590 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4591 obj_type, NULL_TREE));
4592 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4593 convert (obj_type, expr));
4594 return gnat_build_constructor (type, v);
4597 /* There are some cases of expressions that we process specially. */
4598 switch (TREE_CODE (expr))
4600 case ERROR_MARK:
4601 return expr;
4603 case NULL_EXPR:
4604 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4605 conversion in gnat_expand_expr. NULL_EXPR does not represent
4606 an actual value, so no conversion is needed. */
4607 expr = copy_node (expr);
4608 TREE_TYPE (expr) = type;
4609 return expr;
4611 case STRING_CST:
4612 /* If we are converting a STRING_CST to another constrained array type,
4613 just make a new one in the proper type. */
4614 if (code == ecode && AGGREGATE_TYPE_P (etype)
4615 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4616 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4618 expr = copy_node (expr);
4619 TREE_TYPE (expr) = type;
4620 return expr;
4622 break;
4624 case VECTOR_CST:
4625 /* If we are converting a VECTOR_CST to a mere variant type, just make
4626 a new one in the proper type. */
4627 if (code == ecode && gnat_types_compatible_p (type, etype))
4629 expr = copy_node (expr);
4630 TREE_TYPE (expr) = type;
4631 return expr;
4634 case CONSTRUCTOR:
4635 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
4636 a new one in the proper type. */
4637 if (code == ecode && gnat_types_compatible_p (type, etype))
4639 expr = copy_node (expr);
4640 TREE_TYPE (expr) = type;
4641 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4642 return expr;
4645 /* Likewise for a conversion between original and packable version, or
4646 conversion between types of the same size and with the same list of
4647 fields, but we have to work harder to preserve type consistency. */
4648 if (code == ecode
4649 && code == RECORD_TYPE
4650 && (TYPE_NAME (type) == TYPE_NAME (etype)
4651 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4654 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4655 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4656 vec<constructor_elt, va_gc> *v;
4657 vec_alloc (v, len);
4658 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4659 unsigned HOST_WIDE_INT idx;
4660 tree index, value;
4662 /* Whether we need to clear TREE_CONSTANT et al. on the output
4663 constructor when we convert in place. */
4664 bool clear_constant = false;
4666 FOR_EACH_CONSTRUCTOR_ELT (e, idx, index, value)
4668 /* We expect only simple constructors. */
4669 if (!SAME_FIELD_P (index, efield))
4670 break;
4671 /* The field must be the same. */
4672 if (!SAME_FIELD_P (efield, field))
4673 break;
4674 constructor_elt elt = {field, convert (TREE_TYPE (field), value)};
4675 v->quick_push (elt);
4677 /* If packing has made this field a bitfield and the input
4678 value couldn't be emitted statically any more, we need to
4679 clear TREE_CONSTANT on our output. */
4680 if (!clear_constant
4681 && TREE_CONSTANT (expr)
4682 && !CONSTRUCTOR_BITFIELD_P (efield)
4683 && CONSTRUCTOR_BITFIELD_P (field)
4684 && !initializer_constant_valid_for_bitfield_p (value))
4685 clear_constant = true;
4687 efield = DECL_CHAIN (efield);
4688 field = DECL_CHAIN (field);
4691 /* If we have been able to match and convert all the input fields
4692 to their output type, convert in place now. We'll fallback to a
4693 view conversion downstream otherwise. */
4694 if (idx == len)
4696 expr = copy_node (expr);
4697 TREE_TYPE (expr) = type;
4698 CONSTRUCTOR_ELTS (expr) = v;
4699 if (clear_constant)
4700 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4701 return expr;
4705 /* Likewise for a conversion between array type and vector type with a
4706 compatible representative array. */
4707 else if (code == VECTOR_TYPE
4708 && ecode == ARRAY_TYPE
4709 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4710 etype))
4712 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4713 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4714 vec<constructor_elt, va_gc> *v;
4715 unsigned HOST_WIDE_INT ix;
4716 tree value;
4718 /* Build a VECTOR_CST from a *constant* array constructor. */
4719 if (TREE_CONSTANT (expr))
4721 bool constant_p = true;
4723 /* Iterate through elements and check if all constructor
4724 elements are *_CSTs. */
4725 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4726 if (!CONSTANT_CLASS_P (value))
4728 constant_p = false;
4729 break;
4732 if (constant_p)
4733 return build_vector_from_ctor (type,
4734 CONSTRUCTOR_ELTS (expr));
4737 /* Otherwise, build a regular vector constructor. */
4738 vec_alloc (v, len);
4739 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4741 constructor_elt elt = {NULL_TREE, value};
4742 v->quick_push (elt);
4744 expr = copy_node (expr);
4745 TREE_TYPE (expr) = type;
4746 CONSTRUCTOR_ELTS (expr) = v;
4747 return expr;
4749 break;
4751 case UNCONSTRAINED_ARRAY_REF:
4752 /* First retrieve the underlying array. */
4753 expr = maybe_unconstrained_array (expr);
4754 etype = TREE_TYPE (expr);
4755 ecode = TREE_CODE (etype);
4756 break;
4758 case VIEW_CONVERT_EXPR:
4760 /* GCC 4.x is very sensitive to type consistency overall, and view
4761 conversions thus are very frequent. Even though just "convert"ing
4762 the inner operand to the output type is fine in most cases, it
4763 might expose unexpected input/output type mismatches in special
4764 circumstances so we avoid such recursive calls when we can. */
4765 tree op0 = TREE_OPERAND (expr, 0);
4767 /* If we are converting back to the original type, we can just
4768 lift the input conversion. This is a common occurrence with
4769 switches back-and-forth amongst type variants. */
4770 if (type == TREE_TYPE (op0))
4771 return op0;
4773 /* Otherwise, if we're converting between two aggregate or vector
4774 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4775 target type in place or to just convert the inner expression. */
4776 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4777 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4779 /* If we are converting between mere variants, we can just
4780 substitute the VIEW_CONVERT_EXPR in place. */
4781 if (gnat_types_compatible_p (type, etype))
4782 return build1 (VIEW_CONVERT_EXPR, type, op0);
4784 /* Otherwise, we may just bypass the input view conversion unless
4785 one of the types is a fat pointer, which is handled by
4786 specialized code below which relies on exact type matching. */
4787 else if (!TYPE_IS_FAT_POINTER_P (type)
4788 && !TYPE_IS_FAT_POINTER_P (etype))
4789 return convert (type, op0);
4792 break;
4795 default:
4796 break;
4799 /* Check for converting to a pointer to an unconstrained array. */
4800 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4801 return convert_to_fat_pointer (type, expr);
4803 /* If we are converting between two aggregate or vector types that are mere
4804 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4805 to a vector type from its representative array type. */
4806 else if ((code == ecode
4807 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4808 && gnat_types_compatible_p (type, etype))
4809 || (code == VECTOR_TYPE
4810 && ecode == ARRAY_TYPE
4811 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4812 etype)))
4813 return build1 (VIEW_CONVERT_EXPR, type, expr);
4815 /* If we are converting between tagged types, try to upcast properly. */
4816 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4817 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4819 tree child_etype = etype;
4820 do {
4821 tree field = TYPE_FIELDS (child_etype);
4822 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4823 return build_component_ref (expr, NULL_TREE, field, false);
4824 child_etype = TREE_TYPE (field);
4825 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4828 /* If we are converting from a smaller form of record type back to it, just
4829 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4830 size on both sides. */
4831 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4832 && smaller_form_type_p (etype, type))
4834 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4835 false, false, false, true),
4836 expr);
4837 return build1 (VIEW_CONVERT_EXPR, type, expr);
4840 /* In all other cases of related types, make a NOP_EXPR. */
4841 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4842 return fold_convert (type, expr);
4844 switch (code)
4846 case VOID_TYPE:
4847 return fold_build1 (CONVERT_EXPR, type, expr);
4849 case INTEGER_TYPE:
4850 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4851 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4852 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4853 return unchecked_convert (type, expr, false);
4854 else if (TYPE_BIASED_REPRESENTATION_P (type))
4855 return fold_convert (type,
4856 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4857 convert (TREE_TYPE (type), expr),
4858 TYPE_MIN_VALUE (type)));
4860 /* ... fall through ... */
4862 case ENUMERAL_TYPE:
4863 case BOOLEAN_TYPE:
4864 /* If we are converting an additive expression to an integer type
4865 with lower precision, be wary of the optimization that can be
4866 applied by convert_to_integer. There are 2 problematic cases:
4867 - if the first operand was originally of a biased type,
4868 because we could be recursively called to convert it
4869 to an intermediate type and thus rematerialize the
4870 additive operator endlessly,
4871 - if the expression contains a placeholder, because an
4872 intermediate conversion that changes the sign could
4873 be inserted and thus introduce an artificial overflow
4874 at compile time when the placeholder is substituted. */
4875 if (code == INTEGER_TYPE
4876 && ecode == INTEGER_TYPE
4877 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4878 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4880 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4882 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4883 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4884 || CONTAINS_PLACEHOLDER_P (expr))
4885 return build1 (NOP_EXPR, type, expr);
4888 return fold (convert_to_integer (type, expr));
4890 case POINTER_TYPE:
4891 case REFERENCE_TYPE:
4892 /* If converting between two thin pointers, adjust if needed to account
4893 for differing offsets from the base pointer, depending on whether
4894 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4895 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4897 tree etype_pos
4898 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)) != NULL_TREE
4899 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4900 : size_zero_node;
4901 tree type_pos
4902 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != NULL_TREE
4903 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4904 : size_zero_node;
4905 tree byte_diff = size_diffop (type_pos, etype_pos);
4907 expr = build1 (NOP_EXPR, type, expr);
4908 if (integer_zerop (byte_diff))
4909 return expr;
4911 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4912 fold_convert (sizetype, byte_diff));
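	  /* Rough sketch of the adjustment, assuming a layout along the
	     lines of

	       struct thin_rec { struct bounds b; char data[]; };

	     one flavor of thin pointer designates the record itself and the
	     other designates the data field, so converting between them
	     amounts to adding or subtracting
	     offsetof (struct thin_rec, data) bytes (offsetof from
	     <stddef.h>), which is what the POINTER_PLUS_EXPR above
	     computes.  */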
4915 /* If converting fat pointer to normal or thin pointer, get the pointer
4916 to the array and then convert it. */
4917 if (TYPE_IS_FAT_POINTER_P (etype))
4918 expr
4919 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4921 return fold (convert_to_pointer (type, expr));
4923 case REAL_TYPE:
4924 return fold (convert_to_real (type, expr));
4926 case RECORD_TYPE:
4927 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4929 vec<constructor_elt, va_gc> *v;
4930 vec_alloc (v, 1);
4932 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4933 convert (TREE_TYPE (TYPE_FIELDS (type)),
4934 expr));
4935 return gnat_build_constructor (type, v);
4938 /* ... fall through ... */
4940 case ARRAY_TYPE:
4941 /* In these cases, assume the front-end has validated the conversion.
4942 If the conversion is valid, it will be a bit-wise conversion, so
4943 it can be viewed as an unchecked conversion. */
4944 return unchecked_convert (type, expr, false);
4946 case UNION_TYPE:
4947       /* This is either a conversion between a tagged type and some
4948 	 subtype, which we have to mark as a UNION_TYPE because of
4949 	 overlapping fields, or a conversion of an Unchecked_Union.  */
4950 return unchecked_convert (type, expr, false);
4952 case UNCONSTRAINED_ARRAY_TYPE:
4953 /* If the input is a VECTOR_TYPE, convert to the representative
4954 array type first. */
4955 if (ecode == VECTOR_TYPE)
4957 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4958 etype = TREE_TYPE (expr);
4959 ecode = TREE_CODE (etype);
4962 /* If EXPR is a constrained array, take its address, convert it to a
4963 fat pointer, and then dereference it. Likewise if EXPR is a
4964 record containing both a template and a constrained array.
4965 Note that a record representing a justified modular type
4966 always represents a packed constrained array. */
4967 if (ecode == ARRAY_TYPE
4968 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4969 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4970 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4971 return
4972 build_unary_op
4973 (INDIRECT_REF, NULL_TREE,
4974 convert_to_fat_pointer (TREE_TYPE (type),
4975 build_unary_op (ADDR_EXPR,
4976 NULL_TREE, expr)));
4978 /* Do something very similar for converting one unconstrained
4979 array to another. */
4980 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4981 return
4982 build_unary_op (INDIRECT_REF, NULL_TREE,
4983 convert (TREE_TYPE (type),
4984 build_unary_op (ADDR_EXPR,
4985 NULL_TREE, expr)));
4986 else
4987 gcc_unreachable ();
4989 case COMPLEX_TYPE:
4990 return fold (convert_to_complex (type, expr));
4992 default:
4993 gcc_unreachable ();
4997 /* Create an expression whose value is that of EXPR converted to the common
4998 index type, which is sizetype. EXPR is supposed to be in the base type
4999 of the GNAT index type. Calling it is equivalent to doing
5001 convert (sizetype, expr)
5003 but we try to distribute the type conversion with the knowledge that EXPR
5004 cannot overflow in its type. This is a best-effort approach and we fall
5005 back to the above expression as soon as difficulties are encountered.
5007 This is necessary to overcome issues that arise when the GNAT base index
5008 type and the GCC common index type (sizetype) don't have the same size,
5009 which is quite frequent on 64-bit architectures. In this case, and if
5010 the GNAT base index type is signed but the iteration type of the loop has
5011 been forced to unsigned, the loop scalar evolution engine cannot compute
5012 a simple evolution for the general induction variables associated with the
5013 array indices, because it will preserve the wrap-around semantics in the
5014 unsigned type of their "inner" part. As a result, many loop optimizations
5015 are blocked.
5017 The solution is to use a special (basic) induction variable that is at
5018 least as large as sizetype, and to express the aforementioned general
5019 induction variables in terms of this induction variable, eliminating
5020 the problematic intermediate truncation to the GNAT base index type.
5021 This is possible as long as the original expression doesn't overflow
5022 and if the middle-end hasn't introduced artificial overflows in the
5023    course of the various simplifications it can make to the expression.  */
5025 tree
5026 convert_to_index_type (tree expr)
5028 enum tree_code code = TREE_CODE (expr);
5029 tree type = TREE_TYPE (expr);
5031 /* If the type is unsigned, overflow is allowed so we cannot be sure that
5032 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
5033 if (TYPE_UNSIGNED (type) || !optimize)
5034 return convert (sizetype, expr);
5036 switch (code)
5038 case VAR_DECL:
5039 /* The main effect of the function: replace a loop parameter with its
5040 associated special induction variable. */
5041 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
5042 expr = DECL_INDUCTION_VAR (expr);
5043 break;
5045 CASE_CONVERT:
5047 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
5048 /* Bail out as soon as we suspect some sort of type frobbing. */
5049 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
5050 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
5051 break;
5054 /* ... fall through ... */
5056 case NON_LVALUE_EXPR:
5057 return fold_build1 (code, sizetype,
5058 convert_to_index_type (TREE_OPERAND (expr, 0)));
5060 case PLUS_EXPR:
5061 case MINUS_EXPR:
5062 case MULT_EXPR:
5063 return fold_build2 (code, sizetype,
5064 convert_to_index_type (TREE_OPERAND (expr, 0)),
5065 convert_to_index_type (TREE_OPERAND (expr, 1)));
5067 case COMPOUND_EXPR:
5068 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
5069 convert_to_index_type (TREE_OPERAND (expr, 1)));
5071 case COND_EXPR:
5072 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
5073 convert_to_index_type (TREE_OPERAND (expr, 1)),
5074 convert_to_index_type (TREE_OPERAND (expr, 2)));
5076 default:
5077 break;
5080 return convert (sizetype, expr);
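/* For instance, with a signed loop parameter I that is known not to overflow,
   the distribution performed above turns

     (sizetype) (I + 1)   into   (sizetype) I + (sizetype) 1

   with I further replaced by its special induction variable, so that the
   scalar evolution of the index remains a simple affine function in sizetype.
   A user-level C analogy, assuming i is nonnegative and does not overflow:

     size_t widen_index (int i) { return (size_t) i + 1; }  */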
5083 /* Remove all conversions that are done in EXP. This includes converting
5084 from a padded type or to a justified modular type. If TRUE_ADDRESS
5085 is true, always return the address of the containing object even if
5086 the address is not bit-aligned. */
5088 tree
5089 remove_conversions (tree exp, bool true_address)
5091 switch (TREE_CODE (exp))
5093 case CONSTRUCTOR:
5094 if (true_address
5095 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
5096 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
5097 return
5098 remove_conversions ((*CONSTRUCTOR_ELTS (exp))[0].value, true);
5099 break;
5101 case COMPONENT_REF:
5102 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
5103 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5104 break;
5106 CASE_CONVERT:
5107 case VIEW_CONVERT_EXPR:
5108 case NON_LVALUE_EXPR:
5109 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5111 default:
5112 break;
5115 return exp;
5118 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
5119 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
5120 likewise return an expression pointing to the underlying array. */
5122 tree
5123 maybe_unconstrained_array (tree exp)
5125 enum tree_code code = TREE_CODE (exp);
5126 tree type = TREE_TYPE (exp);
5128 switch (TREE_CODE (type))
5130 case UNCONSTRAINED_ARRAY_TYPE:
5131 if (code == UNCONSTRAINED_ARRAY_REF)
5133 const bool read_only = TREE_READONLY (exp);
5134 const bool no_trap = TREE_THIS_NOTRAP (exp);
5136 exp = TREE_OPERAND (exp, 0);
5137 type = TREE_TYPE (exp);
5139 if (TREE_CODE (exp) == COND_EXPR)
5141 tree op1
5142 = build_unary_op (INDIRECT_REF, NULL_TREE,
5143 build_component_ref (TREE_OPERAND (exp, 1),
5144 NULL_TREE,
5145 TYPE_FIELDS (type),
5146 false));
5147 tree op2
5148 = build_unary_op (INDIRECT_REF, NULL_TREE,
5149 build_component_ref (TREE_OPERAND (exp, 2),
5150 NULL_TREE,
5151 TYPE_FIELDS (type),
5152 false));
5154 exp = build3 (COND_EXPR,
5155 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
5156 TREE_OPERAND (exp, 0), op1, op2);
5158 else
5160 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
5161 build_component_ref (exp, NULL_TREE,
5162 TYPE_FIELDS (type),
5163 false));
5164 TREE_READONLY (exp) = read_only;
5165 TREE_THIS_NOTRAP (exp) = no_trap;
5169 else if (code == NULL_EXPR)
5170 exp = build1 (NULL_EXPR,
5171 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
5172 TREE_OPERAND (exp, 0));
5173 break;
5175 case RECORD_TYPE:
5176 /* If this is a padded type and it contains a template, convert to the
5177 unpadded type first. */
5178 if (TYPE_PADDING_P (type)
5179 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
5180 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
5182 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5183 type = TREE_TYPE (exp);
5186 if (TYPE_CONTAINS_TEMPLATE_P (type))
5188 exp = build_component_ref (exp, NULL_TREE,
5189 DECL_CHAIN (TYPE_FIELDS (type)),
5190 false);
5191 type = TREE_TYPE (exp);
5193 /* If the array type is padded, convert to the unpadded type. */
5194 if (TYPE_IS_PADDING_P (type))
5195 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5197 break;
5199 default:
5200 break;
5203 return exp;
5206 /* Return true if EXPR is an expression that can be folded as an operand
5207 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
5209 static bool
5210 can_fold_for_view_convert_p (tree expr)
5212 tree t1, t2;
5214 /* The folder will fold NOP_EXPRs between integral types with the same
5215 precision (in the middle-end's sense). We cannot allow it if the
5216 types don't have the same precision in the Ada sense as well. */
5217 if (TREE_CODE (expr) != NOP_EXPR)
5218 return true;
5220 t1 = TREE_TYPE (expr);
5221 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
5223 /* Defer to the folder for non-integral conversions. */
5224 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
5225 return true;
5227 /* Only fold conversions that preserve both precisions. */
5228 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
5229 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
5230 return true;
5232 return false;
5235 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
5236 If NOTRUNC_P is true, truncation operations should be suppressed.
5238 Special care is required with (source or target) integral types whose
5239 precision is not equal to their size, to make sure we fetch or assign
5240 the value bits whose location might depend on the endianness, e.g.
5242 Rmsize : constant := 8;
5243 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5245 type Bit_Array is array (1 .. Rmsize) of Boolean;
5246 pragma Pack (Bit_Array);
5248 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5250 Value : Int := 2#1000_0001#;
5251 Vbits : Bit_Array := To_Bit_Array (Value);
5253 we expect the 8 bits at Vbits'Address to always contain Value, while
5254 their original location depends on the endianness, at Value'Address
5255 on a little-endian architecture but not on a big-endian one. */
5257 tree
5258 unchecked_convert (tree type, tree expr, bool notrunc_p)
5260 tree etype = TREE_TYPE (expr);
5261 enum tree_code ecode = TREE_CODE (etype);
5262 enum tree_code code = TREE_CODE (type);
5263 int c;
5265 /* If the expression is already of the right type, we are done. */
5266 if (etype == type)
5267 return expr;
5269   /* If both types are integral, just do a normal conversion.
5270 Likewise for a conversion to an unconstrained array. */
5271 if ((((INTEGRAL_TYPE_P (type)
5272 && !(code == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (type)))
5273 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
5274 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
5275 && ((INTEGRAL_TYPE_P (etype)
5276 && !(ecode == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (etype)))
5277 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
5278 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
5279 || code == UNCONSTRAINED_ARRAY_TYPE)
5281 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
5283 tree ntype = copy_type (etype);
5284 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
5285 TYPE_MAIN_VARIANT (ntype) = ntype;
5286 expr = build1 (NOP_EXPR, ntype, expr);
5289 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
5291 tree rtype = copy_type (type);
5292 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
5293 TYPE_MAIN_VARIANT (rtype) = rtype;
5294 expr = convert (rtype, expr);
5295 expr = build1 (NOP_EXPR, type, expr);
5297 else
5298 expr = convert (type, expr);
5301 /* If we are converting to an integral type whose precision is not equal
5302      to its size, first unchecked convert to a record type that contains a
5303 field of the given precision. Then extract the field. */
5304 else if (INTEGRAL_TYPE_P (type)
5305 && TYPE_RM_SIZE (type)
5306 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
5307 GET_MODE_BITSIZE (TYPE_MODE (type))))
5309 tree rec_type = make_node (RECORD_TYPE);
5310 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
5311 tree field_type, field;
5313 if (TYPE_UNSIGNED (type))
5314 field_type = make_unsigned_type (prec);
5315 else
5316 field_type = make_signed_type (prec);
5317 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
5319 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5320 NULL_TREE, NULL_TREE, 1, 0);
5322 TYPE_FIELDS (rec_type) = field;
5323 layout_type (rec_type);
5325 expr = unchecked_convert (rec_type, expr, notrunc_p);
5326 expr = build_component_ref (expr, NULL_TREE, field, false);
5327 expr = fold_build1 (NOP_EXPR, type, expr);
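      /* Loosely analogous C sketch, assuming a 5-bit target precision inside
	 a 32-bit container: go through a record with a single field of the
	 exact precision and read that field back, e.g.

	   struct obj_rec { int obj : 5; };     // the "OBJ" field above
	   struct obj_rec r;
	   memcpy (&r, &source, sizeof (r));    // the unchecked conversion
	   int value = r.obj;                   // extract the field

	 (memcpy from <string.h>; 'source' stands for the converted object
	 and is only an illustration.)  */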
5330 /* Similarly if we are converting from an integral type whose precision is
5331 not equal to its size, first copy into a field of the given precision
5332 and unchecked convert the record type. */
5333 else if (INTEGRAL_TYPE_P (etype)
5334 && TYPE_RM_SIZE (etype)
5335 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
5336 GET_MODE_BITSIZE (TYPE_MODE (etype))))
5338 tree rec_type = make_node (RECORD_TYPE);
5339 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
5340 vec<constructor_elt, va_gc> *v;
5341 vec_alloc (v, 1);
5342 tree field_type, field;
5344 if (TYPE_UNSIGNED (etype))
5345 field_type = make_unsigned_type (prec);
5346 else
5347 field_type = make_signed_type (prec);
5348 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
5350 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5351 NULL_TREE, NULL_TREE, 1, 0);
5353 TYPE_FIELDS (rec_type) = field;
5354 layout_type (rec_type);
5356 expr = fold_build1 (NOP_EXPR, field_type, expr);
5357 CONSTRUCTOR_APPEND_ELT (v, field, expr);
5358 expr = gnat_build_constructor (rec_type, v);
5359 expr = unchecked_convert (type, expr, notrunc_p);
5362 /* If we are converting from a scalar type to a type with a different size,
5363 we need to pad to have the same size on both sides.
5365 ??? We cannot do it unconditionally because unchecked conversions are
5366 used liberally by the front-end to implement polymorphism, e.g. in:
5368 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5369 return p___size__4 (p__object!(S191s.all));
5371 so we skip all expressions that are references. */
5372 else if (!REFERENCE_CLASS_P (expr)
5373 && !AGGREGATE_TYPE_P (etype)
5374 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5375 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
5377 if (c < 0)
5379 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5380 false, false, false, true),
5381 expr);
5382 expr = unchecked_convert (type, expr, notrunc_p);
5384 else
5386 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5387 false, false, false, true);
5388 expr = unchecked_convert (rec_type, expr, notrunc_p);
5389 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
5390 false);
5394 /* We have a special case when we are converting between two unconstrained
5395 array types. In that case, take the address, convert the fat pointer
5396 types, and dereference. */
5397 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
5398 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5399 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5400 build_unary_op (ADDR_EXPR, NULL_TREE,
5401 expr)));
5403 /* Another special case is when we are converting to a vector type from its
5404 representative array type; this a regular conversion. */
5405 else if (code == VECTOR_TYPE
5406 && ecode == ARRAY_TYPE
5407 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5408 etype))
5409 expr = convert (type, expr);
5411 else
5413 expr = maybe_unconstrained_array (expr);
5414 etype = TREE_TYPE (expr);
5415 ecode = TREE_CODE (etype);
5416 if (can_fold_for_view_convert_p (expr))
5417 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5418 else
5419 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
5422 /* If the result is an integral type whose precision is not equal to its
5423 size, sign- or zero-extend the result. We need not do this if the input
5424 is an integral type of the same precision and signedness or if the output
5425 is a biased type or if both the input and output are unsigned. */
5426 if (!notrunc_p
5427 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
5428 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
5429 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
5430 GET_MODE_BITSIZE (TYPE_MODE (type)))
5431 && !(INTEGRAL_TYPE_P (etype)
5432 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
5433 && operand_equal_p (TYPE_RM_SIZE (type),
5434 (TYPE_RM_SIZE (etype) != 0
5435 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
5437 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
5439 tree base_type
5440 = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
5441 tree shift_expr
5442 = convert (base_type,
5443 size_binop (MINUS_EXPR,
5444 bitsize_int
5445 (GET_MODE_BITSIZE (TYPE_MODE (type))),
5446 TYPE_RM_SIZE (type)));
5447 expr
5448 = convert (type,
5449 build_binary_op (RSHIFT_EXPR, base_type,
5450 build_binary_op (LSHIFT_EXPR, base_type,
5451 convert (base_type, expr),
5452 shift_expr),
5453 shift_expr));
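      /* The extension above is the classic shift-left-then-shift-right idiom.
	 A C sketch for a 5-bit signed value kept in a 32-bit container,
	 assuming the right shift of a negative value is arithmetic (as it is
	 on the vast majority of targets); int32_t/uint32_t from <stdint.h>:

	   int32_t extend_5bit (uint32_t raw)
	   {
	     const int shift = 32 - 5;              // mode size - RM size
	     return (int32_t) (raw << shift) >> shift;
	   }  */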
5456 /* An unchecked conversion should never raise Constraint_Error. The code
5457 below assumes that GCC's conversion routines overflow the same way that
5458 the underlying hardware does. This is probably true. In the rare case
5459 when it is false, we can rely on the fact that such conversions are
5460 erroneous anyway. */
5461 if (TREE_CODE (expr) == INTEGER_CST)
5462 TREE_OVERFLOW (expr) = 0;
5464   /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
5465      mark the expression as no longer constant.  */
5466 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5467 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5468 OEP_ONLY_CONST))
5469 TREE_CONSTANT (expr) = 0;
5471 return expr;
5474 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5475 the latter being a record type as predicated by Is_Record_Type. */
5477 enum tree_code
5478 tree_code_for_record_type (Entity_Id gnat_type)
5480 Node_Id component_list, component;
5482 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5483 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5484 if (!Is_Unchecked_Union (gnat_type))
5485 return RECORD_TYPE;
5487 gnat_type = Implementation_Base_Type (gnat_type);
5488 component_list
5489 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5491 for (component = First_Non_Pragma (Component_Items (component_list));
5492 Present (component);
5493 component = Next_Non_Pragma (component))
5494 if (Ekind (Defining_Entity (component)) == E_Component)
5495 return RECORD_TYPE;
5497 return UNION_TYPE;
5500 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5501 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5502 according to the presence of an alignment clause on the type or, if it
5503 is an array, on the component type. */
5505 bool
5506 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5508 gnat_type = Underlying_Type (gnat_type);
5510 *align_clause = Present (Alignment_Clause (gnat_type));
5512 if (Is_Array_Type (gnat_type))
5514 gnat_type = Underlying_Type (Component_Type (gnat_type));
5515 if (Present (Alignment_Clause (gnat_type)))
5516 *align_clause = true;
5519 if (!Is_Floating_Point_Type (gnat_type))
5520 return false;
5522 if (UI_To_Int (Esize (gnat_type)) != 64)
5523 return false;
5525 return true;
5528 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5529 size is greater or equal to 64 bits, or an array of such a type. Set
5530 ALIGN_CLAUSE according to the presence of an alignment clause on the
5531 type or, if it is an array, on the component type. */
5533 bool
5534 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5536 gnat_type = Underlying_Type (gnat_type);
5538 *align_clause = Present (Alignment_Clause (gnat_type));
5540 if (Is_Array_Type (gnat_type))
5542 gnat_type = Underlying_Type (Component_Type (gnat_type));
5543 if (Present (Alignment_Clause (gnat_type)))
5544 *align_clause = true;
5547 if (!Is_Scalar_Type (gnat_type))
5548 return false;
5550 if (UI_To_Int (Esize (gnat_type)) < 64)
5551 return false;
5553 return true;
5556 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5557 component of an aggregate type. */
5559 bool
5560 type_for_nonaliased_component_p (tree gnu_type)
5562 /* If the type is passed by reference, we may have pointers to the
5563 component so it cannot be made non-aliased. */
5564 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5565 return false;
5567 /* We used to say that any component of aggregate type is aliased
5568 because the front-end may take 'Reference of it. The front-end
5569 has been enhanced in the meantime so as to use a renaming instead
5570 in most cases, but the back-end can probably take the address of
5571 such a component too so we go for the conservative stance.
5573 For instance, we might need the address of any array type, even
5574 if normally passed by copy, to construct a fat pointer if the
5575 component is used as an actual for an unconstrained formal.
5577 Likewise for record types: even if a specific record subtype is
5578 passed by copy, the parent type might be passed by ref (e.g. if
5579 it's of variable size) and we might take the address of a child
5580 component to pass to a parent formal. We have no way to check
5581 for such conditions here. */
5582 if (AGGREGATE_TYPE_P (gnu_type))
5583 return false;
5585 return true;
5588 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5590 bool
5591 smaller_form_type_p (tree type, tree orig_type)
5593 tree size, osize;
5595 /* We're not interested in variants here. */
5596 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5597 return false;
5599 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5600 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5601 return false;
5603 size = TYPE_SIZE (type);
5604 osize = TYPE_SIZE (orig_type);
5606 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5607 return false;
5609 return tree_int_cst_lt (size, osize) != 0;
5612 /* Perform final processing on global variables. */
5614 static GTY (()) tree dummy_global;
5616 void
5617 gnat_write_global_declarations (void)
5619 unsigned int i;
5620 tree iter;
5622 /* If we have declared types as used at the global level, insert them in
5623 the global hash table. We use a dummy variable for this purpose. */
5624 if (types_used_by_cur_var_decl && !types_used_by_cur_var_decl->is_empty ())
5626 struct varpool_node *node;
5627 char *label;
5629 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
5630 dummy_global
5631 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5632 void_type_node);
5633 TREE_STATIC (dummy_global) = 1;
5634 TREE_ASM_WRITTEN (dummy_global) = 1;
5635 node = varpool_node_for_decl (dummy_global);
5636 node->force_output = 1;
5638 while (!types_used_by_cur_var_decl->is_empty ())
5640 tree t = types_used_by_cur_var_decl->pop ();
5641 types_used_by_var_decl_insert (t, dummy_global);
5645 /* Output debug information for all global type declarations first. This
5646 ensures that global types whose compilation hasn't been finalized yet,
5647 for example pointers to Taft amendment types, have their compilation
5648 finalized in the right context. */
5649 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5650 if (TREE_CODE (iter) == TYPE_DECL)
5651 debug_hooks->global_decl (iter);
5653 /* Proceed to optimize and emit assembly. */
5654 finalize_compilation_unit ();
5656 /* After cgraph has had a chance to emit everything that's going to
5657 be emitted, output debug information for the rest of globals. */
5658 if (!seen_error ())
5660 timevar_push (TV_SYMOUT);
5661 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5662 if (TREE_CODE (iter) != TYPE_DECL)
5663 debug_hooks->global_decl (iter);
5664 timevar_pop (TV_SYMOUT);
5668 /* ************************************************************************
5669 * * GCC builtins support *
5670 * ************************************************************************ */
5672 /* The general scheme is fairly simple:
5674 For each builtin function/type to be declared, gnat_install_builtins calls
5675 internal facilities which eventually get to gnat_push_decl, which in turn
5676    tracks the builtin function decls thus declared in the 'builtin_decls'
5677    global data structure.  When an Intrinsic subprogram declaration is
5678    processed, we search this global data structure to retrieve the associated BUILT_IN DECL
5679 node. */
5681 /* Search the chain of currently available builtin declarations for a node
5682 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5683 found, if any, or NULL_TREE otherwise. */
5684 tree
5685 builtin_decl_for (tree name)
5687 unsigned i;
5688 tree decl;
5690 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5691 if (DECL_NAME (decl) == name)
5692 return decl;
5694 return NULL_TREE;
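/* A hypothetical use, when binding an Ada Intrinsic subprogram to the
   corresponding GCC builtin (the identifier below is only an example):

     tree decl = builtin_decl_for (get_identifier ("__builtin_memcpy"));
     if (decl != NULL_TREE)
       ;  // use the BUILT_IN decl, e.g. via DECL_FUNCTION_CODE (decl)
*/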
5697 /* The code below eventually exposes gnat_install_builtins, which declares
5698 the builtin types and functions we might need, either internally or as
5699 user accessible facilities.
5701 ??? This is a first implementation shot, still in rough shape. It is
5702    heavily inspired by the "C" family implementation, with chunks copied
5703 verbatim from there.
5705 Two obvious TODO candidates are
5706 o Use a more efficient name/decl mapping scheme
5707 o Devise a middle-end infrastructure to avoid having to copy
5708 pieces between front-ends. */
5710 /* ----------------------------------------------------------------------- *
5711 * BUILTIN ELEMENTARY TYPES *
5712 * ----------------------------------------------------------------------- */
5714 /* Standard data types to be used in builtin argument declarations. */
5716 enum c_tree_index
5718 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5719 CTI_STRING_TYPE,
5720 CTI_CONST_STRING_TYPE,
5722 CTI_MAX
5725 static tree c_global_trees[CTI_MAX];
5727 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5728 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5729 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5731 /* ??? In addition to some attribute handlers, we currently don't support a
5732    (small) number of builtin types, which in turn inhibits support for a
5733    number of builtin functions.  */
5734 #define wint_type_node void_type_node
5735 #define intmax_type_node void_type_node
5736 #define uintmax_type_node void_type_node
5738 /* Build the void_list_node (void_type_node having been created). */
5740 static tree
5741 build_void_list_node (void)
5743 tree t = build_tree_list (NULL_TREE, void_type_node);
5744 return t;
5747 /* Used to help initialize the builtin-types.def table. When a type of
5748 the correct size doesn't exist, use error_mark_node instead of NULL.
5749    The latter results in segfaults even when a decl using the type doesn't
5750 get invoked. */
5752 static tree
5753 builtin_type_for_size (int size, bool unsignedp)
5755 tree type = gnat_type_for_size (size, unsignedp);
5756 return type ? type : error_mark_node;
5759 /* Build/push the elementary type decls that builtin functions/types
5760 will need. */
5762 static void
5763 install_builtin_elementary_types (void)
5765 signed_size_type_node = gnat_signed_type (size_type_node);
5766 pid_type_node = integer_type_node;
5767 void_list_node = build_void_list_node ();
5769 string_type_node = build_pointer_type (char_type_node);
5770 const_string_type_node
5771 = build_pointer_type (build_qualified_type
5772 (char_type_node, TYPE_QUAL_CONST));
5775 /* ----------------------------------------------------------------------- *
5776 * BUILTIN FUNCTION TYPES *
5777 * ----------------------------------------------------------------------- */
5779 /* Now, builtin function types per se. */
5781 enum c_builtin_type
5783 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5784 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5785 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5786 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5787 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5788 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5789 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5790 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
5791 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
5792 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7, ARG8) NAME,
5793 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5794 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5795 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5796 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5797 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5798 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5799 NAME,
5800 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5801 #include "builtin-types.def"
5802 #undef DEF_PRIMITIVE_TYPE
5803 #undef DEF_FUNCTION_TYPE_0
5804 #undef DEF_FUNCTION_TYPE_1
5805 #undef DEF_FUNCTION_TYPE_2
5806 #undef DEF_FUNCTION_TYPE_3
5807 #undef DEF_FUNCTION_TYPE_4
5808 #undef DEF_FUNCTION_TYPE_5
5809 #undef DEF_FUNCTION_TYPE_6
5810 #undef DEF_FUNCTION_TYPE_7
5811 #undef DEF_FUNCTION_TYPE_8
5812 #undef DEF_FUNCTION_TYPE_VAR_0
5813 #undef DEF_FUNCTION_TYPE_VAR_1
5814 #undef DEF_FUNCTION_TYPE_VAR_2
5815 #undef DEF_FUNCTION_TYPE_VAR_3
5816 #undef DEF_FUNCTION_TYPE_VAR_4
5817 #undef DEF_FUNCTION_TYPE_VAR_5
5818 #undef DEF_POINTER_TYPE
5819 BT_LAST
5822 typedef enum c_builtin_type builtin_type;
5824 /* A temporary array used in communication with def_fn_type. */
5825 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5827 /* A helper function for install_builtin_types. Build function type
5828 for DEF with return type RET and N arguments. If VAR is true, then the
5829 function should be variadic after those N arguments.
5831 Takes special care not to ICE if any of the types involved are
5832 error_mark_node, which indicates that said type is not in fact available
5833 (see builtin_type_for_size). In which case the function type as a whole
5834 should be error_mark_node. */
5836 static void
5837 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5839 tree t;
5840 tree *args = XALLOCAVEC (tree, n);
5841 va_list list;
5842 int i;
5844 va_start (list, n);
5845 for (i = 0; i < n; ++i)
5847 builtin_type a = (builtin_type) va_arg (list, int);
5848 t = builtin_types[a];
5849 if (t == error_mark_node)
5850 goto egress;
5851 args[i] = t;
5854 t = builtin_types[ret];
5855 if (t == error_mark_node)
5856 goto egress;
5857 if (var)
5858 t = build_varargs_function_type_array (t, n, args);
5859 else
5860 t = build_function_type_array (t, n, args);
5862 egress:
5863 builtin_types[def] = t;
5864 va_end (list);
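/* For example, a builtin-types.def entry along the lines of

     DEF_FUNCTION_TYPE_2 (BT_FN_DOUBLE_DOUBLE_DOUBLE,
			  BT_DOUBLE, BT_DOUBLE, BT_DOUBLE)

   is expanded by the macros below into

     def_fn_type (BT_FN_DOUBLE_DOUBLE_DOUBLE, BT_DOUBLE, 0, 2,
		  BT_DOUBLE, BT_DOUBLE);

   which records the function type "double (*) (double, double)" in
   builtin_types.  */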
5867 /* Build the builtin function types and install them in the builtin_types
5868 array for later use in builtin function decls. */
5870 static void
5871 install_builtin_function_types (void)
5873 tree va_list_ref_type_node;
5874 tree va_list_arg_type_node;
5876 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5878 va_list_arg_type_node = va_list_ref_type_node =
5879 build_pointer_type (TREE_TYPE (va_list_type_node));
5881 else
5883 va_list_arg_type_node = va_list_type_node;
5884 va_list_ref_type_node = build_reference_type (va_list_type_node);
5887 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5888 builtin_types[ENUM] = VALUE;
5889 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5890 def_fn_type (ENUM, RETURN, 0, 0);
5891 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5892 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5893 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5894 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5895 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5896 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5897 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5898 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5899 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5900 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5901 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5902 ARG6) \
5903 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5904 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5905 ARG6, ARG7) \
5906 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5907 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5908 ARG6, ARG7, ARG8) \
5909 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5910 ARG7, ARG8);
5911 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5912 def_fn_type (ENUM, RETURN, 1, 0);
5913 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5914 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5915 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5916 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5917 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5918 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5919 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5920 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5921 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5922 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5923 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5924 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5926 #include "builtin-types.def"
5928 #undef DEF_PRIMITIVE_TYPE
5929 #undef DEF_FUNCTION_TYPE_1
5930 #undef DEF_FUNCTION_TYPE_2
5931 #undef DEF_FUNCTION_TYPE_3
5932 #undef DEF_FUNCTION_TYPE_4
5933 #undef DEF_FUNCTION_TYPE_5
5934 #undef DEF_FUNCTION_TYPE_6
5935 #undef DEF_FUNCTION_TYPE_VAR_0
5936 #undef DEF_FUNCTION_TYPE_VAR_1
5937 #undef DEF_FUNCTION_TYPE_VAR_2
5938 #undef DEF_FUNCTION_TYPE_VAR_3
5939 #undef DEF_FUNCTION_TYPE_VAR_4
5940 #undef DEF_FUNCTION_TYPE_VAR_5
5941 #undef DEF_POINTER_TYPE
5942 builtin_types[(int) BT_LAST] = NULL_TREE;
5945 /* ----------------------------------------------------------------------- *
5946 * BUILTIN ATTRIBUTES *
5947 * ----------------------------------------------------------------------- */
5949 enum built_in_attribute
5951 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5952 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5953 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
5954 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5955 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5956 #include "builtin-attrs.def"
5957 #undef DEF_ATTR_NULL_TREE
5958 #undef DEF_ATTR_INT
5959 #undef DEF_ATTR_STRING
5960 #undef DEF_ATTR_IDENT
5961 #undef DEF_ATTR_TREE_LIST
5962 ATTR_LAST
5965 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5967 static void
5968 install_builtin_attributes (void)
5970 /* Fill in the built_in_attributes array. */
5971 #define DEF_ATTR_NULL_TREE(ENUM) \
5972 built_in_attributes[(int) ENUM] = NULL_TREE;
5973 #define DEF_ATTR_INT(ENUM, VALUE) \
5974 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5975 #define DEF_ATTR_STRING(ENUM, VALUE) \
5976 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
5977 #define DEF_ATTR_IDENT(ENUM, STRING) \
5978 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5979 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5980 built_in_attributes[(int) ENUM] \
5981 = tree_cons (built_in_attributes[(int) PURPOSE], \
5982 built_in_attributes[(int) VALUE], \
5983 built_in_attributes[(int) CHAIN]);
5984 #include "builtin-attrs.def"
5985 #undef DEF_ATTR_NULL_TREE
5986 #undef DEF_ATTR_INT
5987 #undef DEF_ATTR_STRING
5988 #undef DEF_ATTR_IDENT
5989 #undef DEF_ATTR_TREE_LIST
5992 /* Handle a "const" attribute; arguments as in
5993 struct attribute_spec.handler. */
5995 static tree
5996 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5997 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5998 bool *no_add_attrs)
6000 if (TREE_CODE (*node) == FUNCTION_DECL)
6001 TREE_READONLY (*node) = 1;
6002 else
6003 *no_add_attrs = true;
6005 return NULL_TREE;
6008 /* Handle a "nothrow" attribute; arguments as in
6009 struct attribute_spec.handler. */
6011 static tree
6012 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
6013 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6014 bool *no_add_attrs)
6016 if (TREE_CODE (*node) == FUNCTION_DECL)
6017 TREE_NOTHROW (*node) = 1;
6018 else
6019 *no_add_attrs = true;
6021 return NULL_TREE;
6024 /* Handle a "pure" attribute; arguments as in
6025 struct attribute_spec.handler. */
6027 static tree
6028 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6029 int ARG_UNUSED (flags), bool *no_add_attrs)
6031 if (TREE_CODE (*node) == FUNCTION_DECL)
6032 DECL_PURE_P (*node) = 1;
6033 /* ??? TODO: Support types. */
6034 else
6036 warning (OPT_Wattributes, "%qs attribute ignored",
6037 IDENTIFIER_POINTER (name));
6038 *no_add_attrs = true;
6041 return NULL_TREE;
6044 /* Handle a "no vops" attribute; arguments as in
6045 struct attribute_spec.handler. */
6047 static tree
6048 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
6049 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6050 bool *ARG_UNUSED (no_add_attrs))
6052 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
6053 DECL_IS_NOVOPS (*node) = 1;
6054 return NULL_TREE;
6057 /* Helper for nonnull attribute handling; fetch the operand number
6058 from the attribute argument list. */
6060 static bool
6061 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
6063 /* Verify the arg number is a constant. */
6064 if (TREE_CODE (arg_num_expr) != INTEGER_CST
6065 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
6066 return false;
6068 *valp = TREE_INT_CST_LOW (arg_num_expr);
6069 return true;
6072 /* Handle the "nonnull" attribute. */
6073 static tree
6074 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
6075 tree args, int ARG_UNUSED (flags),
6076 bool *no_add_attrs)
6078 tree type = *node;
6079 unsigned HOST_WIDE_INT attr_arg_num;
6081 /* If no arguments are specified, all pointer arguments should be
6082 non-null. Verify a full prototype is given so that the arguments
6083 will have the correct types when we actually check them later. */
6084 if (!args)
6086 if (!prototype_p (type))
6088 error ("nonnull attribute without arguments on a non-prototype");
6089 *no_add_attrs = true;
6091 return NULL_TREE;
6094 /* Argument list specified. Verify that each argument number references
6095 a pointer argument. */
6096 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
6098 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
6100 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
6102 error ("nonnull argument has invalid operand number (argument %lu)",
6103 (unsigned long) attr_arg_num);
6104 *no_add_attrs = true;
6105 return NULL_TREE;
6108 if (prototype_p (type))
6110 function_args_iterator iter;
6111 tree argument;
6113 function_args_iter_init (&iter, type);
6114 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
6116 argument = function_args_iter_cond (&iter);
6117 if (!argument || ck_num == arg_num)
6118 break;
6121 if (!argument
6122 || TREE_CODE (argument) == VOID_TYPE)
6124 error ("nonnull argument with out-of-range operand number "
6125 "(argument %lu, operand %lu)",
6126 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6127 *no_add_attrs = true;
6128 return NULL_TREE;
6131 if (TREE_CODE (argument) != POINTER_TYPE)
6133 error ("nonnull argument references non-pointer operand "
6134 "(argument %lu, operand %lu)",
6135 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6136 *no_add_attrs = true;
6137 return NULL_TREE;
6142 return NULL_TREE;
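/* For reference, the attribute being validated here is typically written on
   the C side as, e.g.

     extern void *my_copy (void *dst, const void *src, size_t n)
       __attribute__ ((nonnull (1, 2)));

   where the handler checks that operands 1 and 2 indeed designate pointer
   parameters of the prototype ('my_copy' is just an example name).  */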
6145 /* Handle a "sentinel" attribute. */
6147 static tree
6148 handle_sentinel_attribute (tree *node, tree name, tree args,
6149 int ARG_UNUSED (flags), bool *no_add_attrs)
6151 if (!prototype_p (*node))
6153 warning (OPT_Wattributes,
6154 "%qs attribute requires prototypes with named arguments",
6155 IDENTIFIER_POINTER (name));
6156 *no_add_attrs = true;
6158 else
6160 if (!stdarg_p (*node))
6162 warning (OPT_Wattributes,
6163 "%qs attribute only applies to variadic functions",
6164 IDENTIFIER_POINTER (name));
6165 *no_add_attrs = true;
6169 if (args)
6171 tree position = TREE_VALUE (args);
6173 if (TREE_CODE (position) != INTEGER_CST)
6175 warning (0, "requested position is not an integer constant");
6176 *no_add_attrs = true;
6178 else
6180 if (tree_int_cst_lt (position, integer_zero_node))
6182 warning (0, "requested position is less than zero");
6183 *no_add_attrs = true;
6188 return NULL_TREE;
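/* Typical use of the attribute, on a variadic function whose last argument
   must be a NULL pointer ('my_execl' is just an example name):

     extern int my_execl (const char *path, const char *arg, ...)
       __attribute__ ((sentinel));

   The optional integer argument validated above selects a sentinel position
   counted backwards from the end of the argument list.  */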
6191 /* Handle a "noreturn" attribute; arguments as in
6192 struct attribute_spec.handler. */
6194 static tree
6195 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6196 int ARG_UNUSED (flags), bool *no_add_attrs)
6198 tree type = TREE_TYPE (*node);
6200 /* See FIXME comment in c_common_attribute_table. */
6201 if (TREE_CODE (*node) == FUNCTION_DECL)
6202 TREE_THIS_VOLATILE (*node) = 1;
6203 else if (TREE_CODE (type) == POINTER_TYPE
6204 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
6205 TREE_TYPE (*node)
6206 = build_pointer_type
6207 (build_type_variant (TREE_TYPE (type),
6208 TYPE_READONLY (TREE_TYPE (type)), 1));
6209 else
6211 warning (OPT_Wattributes, "%qs attribute ignored",
6212 IDENTIFIER_POINTER (name));
6213 *no_add_attrs = true;
6216 return NULL_TREE;
6219 /* Handle a "leaf" attribute; arguments as in
6220 struct attribute_spec.handler. */
6222 static tree
6223 handle_leaf_attribute (tree *node, tree name,
6224 tree ARG_UNUSED (args),
6225 int ARG_UNUSED (flags), bool *no_add_attrs)
6227 if (TREE_CODE (*node) != FUNCTION_DECL)
6229 warning (OPT_Wattributes, "%qE attribute ignored", name);
6230 *no_add_attrs = true;
6232 if (!TREE_PUBLIC (*node))
6234 warning (OPT_Wattributes, "%qE attribute has no effect", name);
6235 *no_add_attrs = true;
6238 return NULL_TREE;
6241 /* Handle a "malloc" attribute; arguments as in
6242 struct attribute_spec.handler. */
6244 static tree
6245 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6246 int ARG_UNUSED (flags), bool *no_add_attrs)
6248 if (TREE_CODE (*node) == FUNCTION_DECL
6249 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6250 DECL_IS_MALLOC (*node) = 1;
6251 else
6253 warning (OPT_Wattributes, "%qs attribute ignored",
6254 IDENTIFIER_POINTER (name));
6255 *no_add_attrs = true;
6258 return NULL_TREE;
6261 /* Fake handler for attributes we don't properly support. */
6263 tree
6264 fake_attribute_handler (tree * ARG_UNUSED (node),
6265 tree ARG_UNUSED (name),
6266 tree ARG_UNUSED (args),
6267 int ARG_UNUSED (flags),
6268 bool * ARG_UNUSED (no_add_attrs))
6270 return NULL_TREE;
6273 /* Handle a "type_generic" attribute. */
6275 static tree
6276 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6277 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6278 bool * ARG_UNUSED (no_add_attrs))
6280 /* Ensure we have a function type. */
6281 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
6283 /* Ensure we have a variadic function. */
6284 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
6286 return NULL_TREE;
6289 /* Handle a "vector_size" attribute; arguments as in
6290 struct attribute_spec.handler. */
6292 static tree
6293 handle_vector_size_attribute (tree *node, tree name, tree args,
6294 int ARG_UNUSED (flags),
6295 bool *no_add_attrs)
6297 unsigned HOST_WIDE_INT vecsize, nunits;
6298 enum machine_mode orig_mode;
6299 tree type = *node, new_type, size;
6301 *no_add_attrs = true;
6303 size = TREE_VALUE (args);
6305 if (!host_integerp (size, 1))
6307 warning (OPT_Wattributes, "%qs attribute ignored",
6308 IDENTIFIER_POINTER (name));
6309 return NULL_TREE;
6312 /* Get the vector size (in bytes). */
6313 vecsize = tree_low_cst (size, 1);
6315 /* We need to provide for vector pointers, vector arrays, and
6316 functions returning vectors. For example:
6318 __attribute__((vector_size(16))) short *foo;
6320 In this case, the mode is SI, but the type being modified is
6321 HI, so we need to look further. */
6323 while (POINTER_TYPE_P (type)
6324 || TREE_CODE (type) == FUNCTION_TYPE
6325 || TREE_CODE (type) == ARRAY_TYPE)
6326 type = TREE_TYPE (type);
6328 /* Get the mode of the type being modified. */
6329 orig_mode = TYPE_MODE (type);
6331 if ((!INTEGRAL_TYPE_P (type)
6332 && !SCALAR_FLOAT_TYPE_P (type)
6333 && !FIXED_POINT_TYPE_P (type))
6334 || (!SCALAR_FLOAT_MODE_P (orig_mode)
6335 && GET_MODE_CLASS (orig_mode) != MODE_INT
6336 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
6337 || !host_integerp (TYPE_SIZE_UNIT (type), 1)
6338 || TREE_CODE (type) == BOOLEAN_TYPE)
6340 error ("invalid vector type for attribute %qs",
6341 IDENTIFIER_POINTER (name));
6342 return NULL_TREE;
6345 if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
6347 error ("vector size not an integral multiple of component size");
6348 return NULL;
6351 if (vecsize == 0)
6353 error ("zero vector size");
6354 return NULL;
6357 /* Calculate how many units fit in the vector. */
6358 nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
6359 if (nunits & (nunits - 1))
6361 error ("number of components of the vector not a power of two");
6362 return NULL_TREE;
6365 new_type = build_vector_type (type, nunits);
6367 /* Build back pointers if needed. */
6368 *node = reconstruct_complex_type (*node, new_type);
6370 return NULL_TREE;
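/* The attribute handled above is the one documented for the C family, e.g.

     typedef int v4si __attribute__ ((vector_size (16)));

   which makes v4si a vector of four ints, provided int is 32 bits wide on
   the target.  */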
6373 /* Handle a "vector_type" attribute; arguments as in
6374 struct attribute_spec.handler. */
6376 static tree
6377 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6378 int ARG_UNUSED (flags),
6379 bool *no_add_attrs)
6381 /* Vector representative type and size. */
6382 tree rep_type = *node;
6383 tree rep_size = TYPE_SIZE_UNIT (rep_type);
6385 /* Vector size in bytes and number of units. */
6386 unsigned HOST_WIDE_INT vec_bytes, vec_units;
6388 /* Vector element type and mode. */
6389 tree elem_type;
6390 enum machine_mode elem_mode;
6392 *no_add_attrs = true;
6394 if (TREE_CODE (rep_type) != ARRAY_TYPE)
6396 error ("attribute %qs applies to array types only",
6397 IDENTIFIER_POINTER (name));
6398 return NULL_TREE;
6401 /* Silently punt on variable sizes. We can't make vector types for them,
6402 need to ignore them on front-end generated subtypes of unconstrained
6403 bases, and this attribute is for binding implementors, not end-users, so
6404 we should never get there from legitimate explicit uses. */
6406 if (!host_integerp (rep_size, 1))
6407 return NULL_TREE;
6409 /* Get the element type/mode and check this is something we know
6410 how to make vectors of. */
6412 elem_type = TREE_TYPE (rep_type);
6413 elem_mode = TYPE_MODE (elem_type);
6415 if ((!INTEGRAL_TYPE_P (elem_type)
6416 && !SCALAR_FLOAT_TYPE_P (elem_type)
6417 && !FIXED_POINT_TYPE_P (elem_type))
6418 || (!SCALAR_FLOAT_MODE_P (elem_mode)
6419 && GET_MODE_CLASS (elem_mode) != MODE_INT
6420 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode))
6421 || !host_integerp (TYPE_SIZE_UNIT (elem_type), 1))
6423 error ("invalid element type for attribute %qs",
6424 IDENTIFIER_POINTER (name));
6425 return NULL_TREE;
6428 /* Sanity check the vector size and element type consistency. */
6430 vec_bytes = tree_low_cst (rep_size, 1);
6432 if (vec_bytes % tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1))
6434 error ("vector size not an integral multiple of component size");
6435 return NULL;
6438 if (vec_bytes == 0)
6440 error ("zero vector size");
6441 return NULL;
6444 vec_units = vec_bytes / tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1);
6445 if (vec_units & (vec_units - 1))
6447 error ("number of components of the vector not a power of two");
6448 return NULL_TREE;
6451 /* Build the vector type and replace. */
6453 *node = build_vector_type (elem_type, vec_units);
6454 TYPE_REPRESENTATIVE_ARRAY (*node) = rep_type;
6456 return NULL_TREE;
6459 /* ----------------------------------------------------------------------- *
6460 * BUILTIN FUNCTIONS *
6461 * ----------------------------------------------------------------------- */
6463 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6464 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
6465 if nonansi_p and flag_no_nonansi_builtin. */
6467 static void
6468 def_builtin_1 (enum built_in_function fncode,
6469 const char *name,
6470 enum built_in_class fnclass,
6471 tree fntype, tree libtype,
6472 bool both_p, bool fallback_p,
6473 bool nonansi_p ATTRIBUTE_UNUSED,
6474 tree fnattrs, bool implicit_p)
6476 tree decl;
6477 const char *libname;
6479   /* Preserve an already installed decl.  It most likely was set up in advance
6480 (e.g. as part of the internal builtins) for specific reasons. */
6481 if (builtin_decl_explicit (fncode) != NULL_TREE)
6482 return;
6484 gcc_assert ((!both_p && !fallback_p)
6485 || !strncmp (name, "__builtin_",
6486 strlen ("__builtin_")));
6488 libname = name + strlen ("__builtin_");
6489 decl = add_builtin_function (name, fntype, fncode, fnclass,
6490 (fallback_p ? libname : NULL),
6491 fnattrs);
6492 if (both_p)
6493 /* ??? This is normally further controlled by command-line options
6494 like -fno-builtin, but we don't have them for Ada. */
6495 add_builtin_function (libname, libtype, fncode, fnclass,
6496 NULL, fnattrs);
6498 set_builtin_decl (fncode, decl, implicit_p);
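/* Illustrative call, with hypothetical but typical arguments: declaring both
   __builtin_memcpy and memcpy from a single builtins.def entry boils down to
   something like

     def_builtin_1 (BUILT_IN_MEMCPY, "__builtin_memcpy", BUILT_IN_NORMAL,
		    builtin_types[(int) BT_FN_PTR_PTR_CONST_PTR_SIZE],
		    builtin_types[(int) BT_FN_PTR_PTR_CONST_PTR_SIZE],
		    true, true, false, NULL_TREE, true);

   the exact types and attributes come from builtins.def itself.  */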
6501 static int flag_isoc94 = 0;
6502 static int flag_isoc99 = 0;
6504 /* Install what the common builtins.def offers. */
6506 static void
6507 install_builtin_functions (void)
6509 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6510 NONANSI_P, ATTRS, IMPLICIT, COND) \
6511 if (NAME && COND) \
6512 def_builtin_1 (ENUM, NAME, CLASS, \
6513 builtin_types[(int) TYPE], \
6514 builtin_types[(int) LIBTYPE], \
6515 BOTH_P, FALLBACK_P, NONANSI_P, \
6516 built_in_attributes[(int) ATTRS], IMPLICIT);
6517 #include "builtins.def"
6518 #undef DEF_BUILTIN
6521 /* ----------------------------------------------------------------------- *
6522 * BUILTIN FUNCTIONS *
6523 * ----------------------------------------------------------------------- */
6525 /* Install the builtin functions we might need. */
6527 void
6528 gnat_install_builtins (void)
6530 install_builtin_elementary_types ();
6531 install_builtin_function_types ();
6532 install_builtin_attributes ();
6534 /* Install builtins used by generic middle-end pieces first. Some of these
6535 know about internal specificities and control attributes accordingly, for
6536 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
6537 the generic definition from builtins.def. */
6538 build_common_builtin_nodes ();
6540 /* Now, install the target specific builtins, such as the AltiVec family on
6541 ppc, and the common set as exposed by builtins.def. */
6542 targetm.init_builtins ();
6543 install_builtin_functions ();
6546 #include "gt-ada-utils.h"
6547 #include "gtype-ada.h"