[official-gcc.git] / gcc / ada / gcc-interface / utils.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2008, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
26 /* We have attribute handlers using C specific format specifiers in warning
27 messages. Make sure they are properly recognized. */
28 #define GCC_DIAG_STYLE __gcc_cdiag__
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "tree.h"
35 #include "flags.h"
36 #include "defaults.h"
37 #include "toplev.h"
38 #include "output.h"
39 #include "ggc.h"
40 #include "debug.h"
41 #include "convert.h"
42 #include "target.h"
43 #include "function.h"
44 #include "cgraph.h"
45 #include "tree-inline.h"
46 #include "tree-iterator.h"
47 #include "gimple.h"
48 #include "tree-dump.h"
49 #include "pointer-set.h"
50 #include "langhooks.h"
52 #include "ada.h"
53 #include "types.h"
54 #include "atree.h"
55 #include "elists.h"
56 #include "namet.h"
57 #include "nlists.h"
58 #include "stringt.h"
59 #include "uintp.h"
60 #include "fe.h"
61 #include "sinfo.h"
62 #include "einfo.h"
63 #include "ada-tree.h"
64 #include "gigi.h"
66 #ifndef MAX_FIXED_MODE_SIZE
67 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
68 #endif
70 #ifndef MAX_BITS_PER_WORD
71 #define MAX_BITS_PER_WORD BITS_PER_WORD
72 #endif
74 /* If nonzero, pretend we are allocating at global level. */
75 int force_global;
77 /* Tree nodes for the various types and decls we create. */
78 tree gnat_std_decls[(int) ADT_LAST];
80 /* Functions to call for each of the possible raise reasons. */
81 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
83 /* Forward declarations for handlers of attributes. */
84 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
85 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
86 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
94 /* Fake handler for attributes we don't properly support, typically because
 95 they'd require dragging in a lot of the common C front-end circuitry. */
96 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
98 /* Table of machine-independent internal attributes for Ada. We support
99 this minimal set of attributes to accommodate the needs of builtins. */
100 const struct attribute_spec gnat_internal_attribute_table[] =
102 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
103 { "const", 0, 0, true, false, false, handle_const_attribute },
104 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
105 { "pure", 0, 0, true, false, false, handle_pure_attribute },
106 { "no vops", 0, 0, true, false, false, handle_novops_attribute },
107 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute },
108 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute },
109 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute },
110 { "malloc", 0, 0, true, false, false, handle_malloc_attribute },
111 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute },
 113 /* ??? format and format_arg are heavy and not supported, which actually
 114 prevents support for the stdio builtins, which we nevertheless declare
 115 as part of the common builtins.def contents. */
116 { "format", 3, 3, false, true, true, fake_attribute_handler },
117 { "format_arg", 1, 1, false, true, true, fake_attribute_handler },
119 { NULL, 0, 0, false, false, false, NULL }
 122 /* Associates a GNAT tree node with a GCC tree node. It is used in
123 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
124 of `save_gnu_tree' for more info. */
125 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
127 #define GET_GNU_TREE(GNAT_ENTITY) \
128 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
130 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
131 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
133 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
134 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
 136 /* Associates a GNAT entity with a GCC tree node used as a dummy, if any. */
137 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
139 #define GET_DUMMY_NODE(GNAT_ENTITY) \
140 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
142 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
143 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
145 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
146 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
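/* A minimal sketch of how these accessors are meant to be used, assuming
   a node id GNAT_ENTITY handed out by the front end and hypothetical
   local variables gnu_decl and gnu_type:

     if (PRESENT_GNU_TREE (gnat_entity))
       gnu_decl = GET_GNU_TREE (gnat_entity);
     else if (PRESENT_DUMMY_NODE (gnat_entity))
       gnu_type = GET_DUMMY_NODE (gnat_entity);

   The first table yields the real ..._DECL node, the second a placeholder
   type.  Both are indexed by GNAT_ENTITY - First_Node_Id and thus hold
   max_gnat_nodes entries.  */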
 148 /* This variable keeps a table of types for each precision so that we only
149 allocate each of them once. Signed and unsigned types are kept separate.
151 Note that these types are only used when fold-const requests something
152 special. Perhaps we should NOT share these types; we'll see how it
153 goes later. */
154 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
156 /* Likewise for float types, but record these by mode. */
157 static GTY(()) tree float_types[NUM_MACHINE_MODES];
159 /* For each binding contour we allocate a binding_level structure to indicate
160 the binding depth. */
162 struct gnat_binding_level GTY((chain_next ("%h.chain")))
164 /* The binding level containing this one (the enclosing binding level). */
165 struct gnat_binding_level *chain;
166 /* The BLOCK node for this level. */
167 tree block;
168 /* If nonzero, the setjmp buffer that needs to be updated for any
169 variable-sized definition within this context. */
170 tree jmpbuf_decl;
173 /* The binding level currently in effect. */
174 static GTY(()) struct gnat_binding_level *current_binding_level;
176 /* A chain of gnat_binding_level structures awaiting reuse. */
177 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
179 /* An array of global declarations. */
180 static GTY(()) VEC(tree,gc) *global_decls;
182 /* An array of builtin function declarations. */
183 static GTY(()) VEC(tree,gc) *builtin_decls;
185 /* An array of global renaming pointers. */
186 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
188 /* A chain of unused BLOCK nodes. */
189 static GTY((deletable)) tree free_block_chain;
191 static void gnat_install_builtins (void);
192 static tree merge_sizes (tree, tree, tree, bool, bool);
193 static tree compute_related_constant (tree, tree);
194 static tree split_plus (tree, tree *);
195 static void gnat_gimplify_function (tree);
196 static tree float_type_for_precision (int, enum machine_mode);
197 static tree convert_to_fat_pointer (tree, tree);
198 static tree convert_to_thin_pointer (tree, tree);
199 static tree make_descriptor_field (const char *,tree, tree, tree);
200 static bool potential_alignment_gap (tree, tree, tree);
202 /* Initialize the association of GNAT nodes to GCC trees. */
204 void
205 init_gnat_to_gnu (void)
207 associate_gnat_to_gnu
208 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
211 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
212 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
213 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
215 If GNU_DECL is zero, a previous association is to be reset. */
217 void
218 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
220 /* Check that GNAT_ENTITY is not already defined and that it is being set
221 to something which is a decl. Raise gigi 401 if not. Usually, this
222 means GNAT_ENTITY is defined twice, but occasionally is due to some
223 Gigi problem. */
224 gcc_assert (!(gnu_decl
225 && (PRESENT_GNU_TREE (gnat_entity)
226 || (!no_check && !DECL_P (gnu_decl)))));
228 SET_GNU_TREE (gnat_entity, gnu_decl);
231 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
232 Return the ..._DECL node that was associated with it. If there is no tree
233 node associated with GNAT_ENTITY, abort.
235 In some cases, such as delayed elaboration or expressions that need to
236 be elaborated only once, GNAT_ENTITY is really not an entity. */
238 tree
239 get_gnu_tree (Entity_Id gnat_entity)
241 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
242 return GET_GNU_TREE (gnat_entity);
245 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
247 bool
248 present_gnu_tree (Entity_Id gnat_entity)
250 return PRESENT_GNU_TREE (gnat_entity);
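/* A short usage sketch for the three routines above, assuming a caller in
   gigi that has just built a hypothetical ..._DECL node gnu_decl for the
   entity gnat_entity:

     save_gnu_tree (gnat_entity, gnu_decl, false);
     ...
     if (present_gnu_tree (gnat_entity))
       gnu_decl = get_gnu_tree (gnat_entity);

   Calling save_gnu_tree again with a null GNU_DECL resets the
   association, as noted above.  */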
253 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
255 void
256 init_dummy_type (void)
258 dummy_node_table
259 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
262 /* Make a dummy type corresponding to GNAT_TYPE. */
264 tree
265 make_dummy_type (Entity_Id gnat_type)
267 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
268 tree gnu_type;
270 /* If there is an equivalent type, get its underlying type. */
271 if (Present (gnat_underlying))
272 gnat_underlying = Underlying_Type (gnat_underlying);
274 /* If there was no equivalent type (can only happen when just annotating
275 types) or underlying type, go back to the original type. */
276 if (No (gnat_underlying))
277 gnat_underlying = gnat_type;
 279 /* If there is already a dummy type, use that one. Else make a new one. */
280 if (PRESENT_DUMMY_NODE (gnat_underlying))
281 return GET_DUMMY_NODE (gnat_underlying);
283 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
284 an ENUMERAL_TYPE. */
285 gnu_type = make_node (Is_Record_Type (gnat_underlying)
286 ? tree_code_for_record_type (gnat_underlying)
287 : ENUMERAL_TYPE);
288 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
289 TYPE_DUMMY_P (gnu_type) = 1;
290 if (AGGREGATE_TYPE_P (gnu_type))
292 TYPE_STUB_DECL (gnu_type) = build_decl (TYPE_DECL, NULL_TREE, gnu_type);
293 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
296 SET_DUMMY_NODE (gnat_underlying, gnu_type);
298 return gnu_type;
301 /* Return nonzero if we are currently in the global binding level. */
 303 int
 304 global_bindings_p (void)
306 return ((force_global || !current_function_decl) ? -1 : 0);
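/* Note that the nonzero value returned above is -1, not 1, so callers
   should only test the result for zero versus nonzero.  A minimal sketch,
   with a hypothetical decl being processed:

     if (global_bindings_p ())
       ...   (give the decl static storage, no DECL_CONTEXT)
     else
       ...   (attach it to current_binding_level->block)

   Setting force_global makes this report global level even while a
   current_function_decl exists.  */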
309 /* Enter a new binding level. */
311 void
312 gnat_pushlevel ()
314 struct gnat_binding_level *newlevel = NULL;
316 /* Reuse a struct for this binding level, if there is one. */
317 if (free_binding_level)
319 newlevel = free_binding_level;
320 free_binding_level = free_binding_level->chain;
322 else
323 newlevel
324 = (struct gnat_binding_level *)
325 ggc_alloc (sizeof (struct gnat_binding_level));
327 /* Use a free BLOCK, if any; otherwise, allocate one. */
328 if (free_block_chain)
330 newlevel->block = free_block_chain;
331 free_block_chain = BLOCK_CHAIN (free_block_chain);
332 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
334 else
335 newlevel->block = make_node (BLOCK);
337 /* Point the BLOCK we just made to its parent. */
338 if (current_binding_level)
339 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
341 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
342 TREE_USED (newlevel->block) = 1;
344 /* Add this level to the front of the chain (stack) of levels that are
345 active. */
346 newlevel->chain = current_binding_level;
347 newlevel->jmpbuf_decl = NULL_TREE;
348 current_binding_level = newlevel;
351 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
352 and point FNDECL to this BLOCK. */
354 void
355 set_current_block_context (tree fndecl)
357 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
358 DECL_INITIAL (fndecl) = current_binding_level->block;
361 /* Set the jmpbuf_decl for the current binding level to DECL. */
363 void
364 set_block_jmpbuf_decl (tree decl)
366 current_binding_level->jmpbuf_decl = decl;
369 /* Get the jmpbuf_decl, if any, for the current binding level. */
371 tree
372 get_block_jmpbuf_decl ()
374 return current_binding_level->jmpbuf_decl;
377 /* Exit a binding level. Set any BLOCK into the current code group. */
379 void
380 gnat_poplevel ()
382 struct gnat_binding_level *level = current_binding_level;
383 tree block = level->block;
385 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
386 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
 388 /* If this is a function-level BLOCK, don't do anything. Otherwise, if
 389 there are no variables, free the block and merge its subblocks into
 390 those of its parent block; otherwise, add it to the list of its parent. */
391 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
393 else if (BLOCK_VARS (block) == NULL_TREE)
395 BLOCK_SUBBLOCKS (level->chain->block)
396 = chainon (BLOCK_SUBBLOCKS (block),
397 BLOCK_SUBBLOCKS (level->chain->block));
398 BLOCK_CHAIN (block) = free_block_chain;
399 free_block_chain = block;
401 else
403 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
404 BLOCK_SUBBLOCKS (level->chain->block) = block;
405 TREE_USED (block) = 1;
406 set_block_for_group (block);
409 /* Free this binding structure. */
410 current_binding_level = level->chain;
411 level->chain = free_binding_level;
412 free_binding_level = level;
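/* A minimal sketch of how the level routines above pair up, for a
   hypothetical caller translating some scope:

     gnat_pushlevel ();
     ...                    (gnat_pushdecl the declarations of the scope)
     gnat_poplevel ();

   Every gnat_pushlevel must be matched by a gnat_poplevel; the
   gnat_binding_level structure is then recycled through
   free_binding_level and, if its BLOCK ends up holding no variables, the
   BLOCK goes back onto free_block_chain as coded above.  */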
416 /* Records a ..._DECL node DECL as belonging to the current lexical scope
417 and uses GNAT_NODE for location information and propagating flags. */
419 void
420 gnat_pushdecl (tree decl, Node_Id gnat_node)
422 /* If this decl is public external or at toplevel, there is no context.
 423 But a PARM_DECL always goes in the level of its function. */
424 if (TREE_CODE (decl) != PARM_DECL
425 && ((DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
426 || global_bindings_p ()))
427 DECL_CONTEXT (decl) = 0;
428 else
430 DECL_CONTEXT (decl) = current_function_decl;
432 /* Functions imported in another function are not really nested. */
433 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
434 DECL_NO_STATIC_CHAIN (decl) = 1;
437 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
439 /* Set the location of DECL and emit a declaration for it. */
440 if (Present (gnat_node))
441 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
442 add_decl_expr (decl, gnat_node);
444 /* Put the declaration on the list. The list of declarations is in reverse
445 order. The list will be reversed later. Put global variables in the
446 globals list and builtin functions in a dedicated list to speed up
447 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
448 the list, as they will cause trouble with the debugger and aren't needed
449 anyway. */
450 if (TREE_CODE (decl) != TYPE_DECL
451 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
453 if (global_bindings_p ())
455 VEC_safe_push (tree, gc, global_decls, decl);
457 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
458 VEC_safe_push (tree, gc, builtin_decls, decl);
460 else
462 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
463 BLOCK_VARS (current_binding_level->block) = decl;
467 /* For the declaration of a type, set its name if it either is not already
468 set, was set to an IDENTIFIER_NODE, indicating an internal name,
469 or if the previous type name was not derived from a source name.
470 We'd rather have the type named with a real name and all the pointer
471 types to the same object have the same POINTER_TYPE node. Code in the
472 equivalent function of c-decl.c makes a copy of the type node here, but
473 that may cause us trouble with incomplete types. We make an exception
474 for fat pointer types because the compiler automatically builds them
475 for unconstrained array types and the debugger uses them to represent
476 both these and pointers to these. */
477 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
479 tree t = TREE_TYPE (decl);
481 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) == IDENTIFIER_NODE)
483 else if (TYPE_FAT_POINTER_P (t))
485 tree tt = build_variant_type_copy (t);
486 TYPE_NAME (tt) = decl;
487 TREE_USED (tt) = TREE_USED (t);
488 TREE_TYPE (decl) = tt;
489 DECL_ORIGINAL_TYPE (decl) = t;
490 t = NULL_TREE;
492 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
494 else
495 t = NULL_TREE;
497 /* Propagate the name to all the variants. This is needed for
498 the type qualifiers machinery to work properly. */
499 if (t)
500 for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
501 TYPE_NAME (t) = decl;
505 /* Do little here. Set up the standard declarations later after the
506 front end has been run. */
508 void
509 gnat_init_decl_processing (void)
511 /* Make the binding_level structure for global names. */
512 current_function_decl = 0;
513 current_binding_level = 0;
514 free_binding_level = 0;
515 gnat_pushlevel ();
517 build_common_tree_nodes (true, true);
519 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
520 corresponding to the size of Pmode. In most cases when ptr_mode and
521 Pmode differ, C will use the width of ptr_mode as sizetype. But we get
522 far better code using the width of Pmode. Make this here since we need
523 this before we can expand the GNAT types. */
524 size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
525 set_sizetype (size_type_node);
527 /* In Ada, we use an unsigned 8-bit type for the default boolean type. */
528 boolean_type_node = make_node (BOOLEAN_TYPE);
529 TYPE_PRECISION (boolean_type_node) = 1;
530 fixup_unsigned_type (boolean_type_node);
531 TYPE_RM_SIZE_NUM (boolean_type_node) = bitsize_int (1);
533 build_common_tree_nodes_2 (0);
535 ptr_void_type_node = build_pointer_type (void_type_node);
538 /* Create the predefined scalar types such as `integer_type_node' needed
539 in the gcc back-end and initialize the global binding level. */
541 void
542 init_gigi_decls (tree long_long_float_type, tree exception_type)
544 tree endlink, decl;
545 tree int64_type = gnat_type_for_size (64, 0);
546 unsigned int i;
548 /* Set the types that GCC and Gigi use from the front end. We would like
549 to do this for char_type_node, but it needs to correspond to the C
550 char type. */
551 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
553 /* In this case, the builtin floating point types are VAX float,
554 so make up a type for use. */
555 longest_float_type_node = make_node (REAL_TYPE);
556 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
557 layout_type (longest_float_type_node);
558 create_type_decl (get_identifier ("longest float type"),
559 longest_float_type_node, NULL, false, true, Empty);
561 else
562 longest_float_type_node = TREE_TYPE (long_long_float_type);
564 except_type_node = TREE_TYPE (exception_type);
566 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
567 create_type_decl (get_identifier ("unsigned int"), unsigned_type_node,
568 NULL, false, true, Empty);
570 void_type_decl_node = create_type_decl (get_identifier ("void"),
571 void_type_node, NULL, false, true,
572 Empty);
574 void_ftype = build_function_type (void_type_node, NULL_TREE);
575 ptr_void_ftype = build_pointer_type (void_ftype);
577 /* Build the special descriptor type and its null node if needed. */
578 if (TARGET_VTABLE_USES_DESCRIPTORS)
580 tree null_node = fold_convert (ptr_void_ftype, null_pointer_node);
581 tree field_list = NULL_TREE, null_list = NULL_TREE;
582 int j;
584 fdesc_type_node = make_node (RECORD_TYPE);
586 for (j = 0; j < TARGET_VTABLE_USES_DESCRIPTORS; j++)
588 tree field = create_field_decl (NULL_TREE, ptr_void_ftype,
589 fdesc_type_node, 0, 0, 0, 1);
590 TREE_CHAIN (field) = field_list;
591 field_list = field;
592 null_list = tree_cons (field, null_node, null_list);
595 finish_record_type (fdesc_type_node, nreverse (field_list), 0, false);
596 null_fdesc_node = gnat_build_constructor (fdesc_type_node, null_list);
599 /* Now declare runtime functions. */
600 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
602 /* malloc is a function declaration tree for a function to allocate
603 memory. */
604 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
605 NULL_TREE,
606 build_function_type (ptr_void_type_node,
607 tree_cons (NULL_TREE,
608 sizetype,
609 endlink)),
610 NULL_TREE, false, true, true, NULL,
611 Empty);
612 DECL_IS_MALLOC (malloc_decl) = 1;
614 /* malloc32 is a function declaration tree for a function to allocate
615 32bit memory on a 64bit system. Needed only on 64bit VMS. */
616 malloc32_decl = create_subprog_decl (get_identifier ("__gnat_malloc32"),
617 NULL_TREE,
618 build_function_type (ptr_void_type_node,
619 tree_cons (NULL_TREE,
620 sizetype,
621 endlink)),
622 NULL_TREE, false, true, true, NULL,
623 Empty);
624 DECL_IS_MALLOC (malloc32_decl) = 1;
626 /* free is a function declaration tree for a function to free memory. */
627 free_decl
628 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
629 build_function_type (void_type_node,
630 tree_cons (NULL_TREE,
631 ptr_void_type_node,
632 endlink)),
633 NULL_TREE, false, true, true, NULL, Empty);
635 /* This is used for 64-bit multiplication with overflow checking. */
636 mulv64_decl
637 = create_subprog_decl (get_identifier ("__gnat_mulv64"), NULL_TREE,
638 build_function_type_list (int64_type, int64_type,
639 int64_type, NULL_TREE),
640 NULL_TREE, false, true, true, NULL, Empty);
642 /* Make the types and functions used for exception processing. */
643 jmpbuf_type
644 = build_array_type (gnat_type_for_mode (Pmode, 0),
645 build_index_type (build_int_cst (NULL_TREE, 5)));
646 create_type_decl (get_identifier ("JMPBUF_T"), jmpbuf_type, NULL,
647 true, true, Empty);
648 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
650 /* Functions to get and set the jumpbuf pointer for the current thread. */
651 get_jmpbuf_decl
652 = create_subprog_decl
653 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
654 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
655 NULL_TREE, false, true, true, NULL, Empty);
656 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
657 DECL_PURE_P (get_jmpbuf_decl) = 1;
659 set_jmpbuf_decl
660 = create_subprog_decl
661 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
662 NULL_TREE,
663 build_function_type (void_type_node,
664 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
665 NULL_TREE, false, true, true, NULL, Empty);
667 /* Function to get the current exception. */
668 get_excptr_decl
669 = create_subprog_decl
670 (get_identifier ("system__soft_links__get_gnat_exception"),
671 NULL_TREE,
672 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
673 NULL_TREE, false, true, true, NULL, Empty);
674 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
675 DECL_PURE_P (get_excptr_decl) = 1;
677 /* Functions that raise exceptions. */
678 raise_nodefer_decl
679 = create_subprog_decl
680 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
681 build_function_type (void_type_node,
682 tree_cons (NULL_TREE,
683 build_pointer_type (except_type_node),
684 endlink)),
685 NULL_TREE, false, true, true, NULL, Empty);
687 /* Dummy objects to materialize "others" and "all others" in the exception
688 tables. These are exported by a-exexpr.adb, so see this unit for the
689 types to use. */
691 others_decl
692 = create_var_decl (get_identifier ("OTHERS"),
693 get_identifier ("__gnat_others_value"),
694 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
696 all_others_decl
697 = create_var_decl (get_identifier ("ALL_OTHERS"),
698 get_identifier ("__gnat_all_others_value"),
699 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
701 /* Hooks to call when entering/leaving an exception handler. */
702 begin_handler_decl
703 = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
704 build_function_type (void_type_node,
705 tree_cons (NULL_TREE,
706 ptr_void_type_node,
707 endlink)),
708 NULL_TREE, false, true, true, NULL, Empty);
710 end_handler_decl
711 = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
712 build_function_type (void_type_node,
713 tree_cons (NULL_TREE,
714 ptr_void_type_node,
715 endlink)),
716 NULL_TREE, false, true, true, NULL, Empty);
718 /* If in no exception handlers mode, all raise statements are redirected to
719 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
720 this procedure will never be called in this mode. */
721 if (No_Exception_Handlers_Set ())
723 decl
724 = create_subprog_decl
725 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
726 build_function_type (void_type_node,
727 tree_cons (NULL_TREE,
728 build_pointer_type (char_type_node),
729 tree_cons (NULL_TREE,
730 integer_type_node,
731 endlink))),
732 NULL_TREE, false, true, true, NULL, Empty);
734 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
735 gnat_raise_decls[i] = decl;
737 else
738 /* Otherwise, make one decl for each exception reason. */
739 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
741 char name[17];
743 sprintf (name, "__gnat_rcheck_%.2d", i);
744 gnat_raise_decls[i]
745 = create_subprog_decl
746 (get_identifier (name), NULL_TREE,
747 build_function_type (void_type_node,
748 tree_cons (NULL_TREE,
749 build_pointer_type
750 (char_type_node),
751 tree_cons (NULL_TREE,
752 integer_type_node,
753 endlink))),
754 NULL_TREE, false, true, true, NULL, Empty);
757 /* Indicate that these never return. */
758 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
759 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
760 TREE_TYPE (raise_nodefer_decl)
761 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
762 TYPE_QUAL_VOLATILE);
764 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
766 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
767 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
768 TREE_TYPE (gnat_raise_decls[i])
769 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
770 TYPE_QUAL_VOLATILE);
773 /* setjmp returns an integer and has one operand, which is a pointer to
774 a jmpbuf. */
775 setjmp_decl
776 = create_subprog_decl
777 (get_identifier ("__builtin_setjmp"), NULL_TREE,
778 build_function_type (integer_type_node,
779 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
780 NULL_TREE, false, true, true, NULL, Empty);
782 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
783 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
785 /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
786 address. */
787 update_setjmp_buf_decl
788 = create_subprog_decl
789 (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
790 build_function_type (void_type_node,
791 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
792 NULL_TREE, false, true, true, NULL, Empty);
794 DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
795 DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;
797 main_identifier_node = get_identifier ("main");
799 /* Install the builtins we might need, either internally or as
800 user available facilities for Intrinsic imports. */
801 gnat_install_builtins ();
804 /* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
805 finish constructing the record or union type. If REP_LEVEL is zero, this
806 record has no representation clause and so will be entirely laid out here.
807 If REP_LEVEL is one, this record has a representation clause and has been
808 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
809 this record is derived from a parent record and thus inherits its layout;
810 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
811 true, the record type is expected to be modified afterwards so it will
812 not be sent to the back-end for finalization. */
814 void
815 finish_record_type (tree record_type, tree fieldlist, int rep_level,
816 bool do_not_finalize)
818 enum tree_code code = TREE_CODE (record_type);
819 tree name = TYPE_NAME (record_type);
820 tree ada_size = bitsize_zero_node;
821 tree size = bitsize_zero_node;
822 bool had_size = TYPE_SIZE (record_type) != 0;
823 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
824 bool had_align = TYPE_ALIGN (record_type) != 0;
825 tree field;
827 if (name && TREE_CODE (name) == TYPE_DECL)
828 name = DECL_NAME (name);
830 TYPE_FIELDS (record_type) = fieldlist;
831 TYPE_STUB_DECL (record_type) = build_decl (TYPE_DECL, name, record_type);
833 /* We don't need both the typedef name and the record name output in
834 the debugging information, since they are the same. */
835 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
837 /* Globally initialize the record first. If this is a rep'ed record,
838 that just means some initializations; otherwise, layout the record. */
839 if (rep_level > 0)
841 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
842 SET_TYPE_MODE (record_type, BLKmode);
844 if (!had_size_unit)
845 TYPE_SIZE_UNIT (record_type) = size_zero_node;
846 if (!had_size)
847 TYPE_SIZE (record_type) = bitsize_zero_node;
849 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
850 out just like a UNION_TYPE, since the size will be fixed. */
851 else if (code == QUAL_UNION_TYPE)
852 code = UNION_TYPE;
854 else
 856 /* Ensure there isn't a size already set. There can be one in an error
857 case where there is a rep clause but all fields have errors and
858 no longer have a position. */
859 TYPE_SIZE (record_type) = 0;
860 layout_type (record_type);
863 /* At this point, the position and size of each field is known. It was
864 either set before entry by a rep clause, or by laying out the type above.
866 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
867 to compute the Ada size; the GCC size and alignment (for rep'ed records
868 that are not padding types); and the mode (for rep'ed records). We also
869 clear the DECL_BIT_FIELD indication for the cases we know have not been
870 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
872 if (code == QUAL_UNION_TYPE)
873 fieldlist = nreverse (fieldlist);
875 for (field = fieldlist; field; field = TREE_CHAIN (field))
877 tree type = TREE_TYPE (field);
878 tree pos = bit_position (field);
879 tree this_size = DECL_SIZE (field);
880 tree this_ada_size;
882 if ((TREE_CODE (type) == RECORD_TYPE
883 || TREE_CODE (type) == UNION_TYPE
884 || TREE_CODE (type) == QUAL_UNION_TYPE)
885 && !TYPE_IS_FAT_POINTER_P (type)
886 && !TYPE_CONTAINS_TEMPLATE_P (type)
887 && TYPE_ADA_SIZE (type))
888 this_ada_size = TYPE_ADA_SIZE (type);
889 else
890 this_ada_size = this_size;
892 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
893 if (DECL_BIT_FIELD (field)
894 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
896 unsigned int align = TYPE_ALIGN (type);
898 /* In the general case, type alignment is required. */
899 if (value_factor_p (pos, align))
901 /* The enclosing record type must be sufficiently aligned.
902 Otherwise, if no alignment was specified for it and it
903 has been laid out already, bump its alignment to the
904 desired one if this is compatible with its size. */
905 if (TYPE_ALIGN (record_type) >= align)
907 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
908 DECL_BIT_FIELD (field) = 0;
910 else if (!had_align
911 && rep_level == 0
912 && value_factor_p (TYPE_SIZE (record_type), align))
914 TYPE_ALIGN (record_type) = align;
915 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
916 DECL_BIT_FIELD (field) = 0;
 920 /* In the non-strict alignment case, only byte alignment is required. */
921 if (!STRICT_ALIGNMENT
922 && DECL_BIT_FIELD (field)
923 && value_factor_p (pos, BITS_PER_UNIT))
924 DECL_BIT_FIELD (field) = 0;
927 /* If we still have DECL_BIT_FIELD set at this point, we know the field
928 is technically not addressable. Except that it can actually be
929 addressed if the field is BLKmode and happens to be properly
930 aligned. */
931 DECL_NONADDRESSABLE_P (field)
932 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
934 /* A type must be as aligned as its most aligned field that is not
935 a bit-field. But this is already enforced by layout_type. */
936 if (rep_level > 0 && !DECL_BIT_FIELD (field))
937 TYPE_ALIGN (record_type)
938 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
940 switch (code)
942 case UNION_TYPE:
943 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
944 size = size_binop (MAX_EXPR, size, this_size);
945 break;
947 case QUAL_UNION_TYPE:
948 ada_size
949 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
950 this_ada_size, ada_size);
951 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
952 this_size, size);
953 break;
955 case RECORD_TYPE:
956 /* Since we know here that all fields are sorted in order of
957 increasing bit position, the size of the record is one
958 higher than the ending bit of the last field processed
959 unless we have a rep clause, since in that case we might
960 have a field outside a QUAL_UNION_TYPE that has a higher ending
961 position. So use a MAX in that case. Also, if this field is a
962 QUAL_UNION_TYPE, we need to take into account the previous size in
963 the case of empty variants. */
964 ada_size
965 = merge_sizes (ada_size, pos, this_ada_size,
966 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
967 size
968 = merge_sizes (size, pos, this_size,
969 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
970 break;
972 default:
973 gcc_unreachable ();
977 if (code == QUAL_UNION_TYPE)
978 nreverse (fieldlist);
980 if (rep_level < 2)
982 /* If this is a padding record, we never want to make the size smaller
983 than what was specified in it, if any. */
984 if (TREE_CODE (record_type) == RECORD_TYPE
985 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
986 size = TYPE_SIZE (record_type);
988 /* Now set any of the values we've just computed that apply. */
989 if (!TYPE_IS_FAT_POINTER_P (record_type)
990 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
991 SET_TYPE_ADA_SIZE (record_type, ada_size);
993 if (rep_level > 0)
995 tree size_unit = had_size_unit
996 ? TYPE_SIZE_UNIT (record_type)
997 : convert (sizetype,
998 size_binop (CEIL_DIV_EXPR, size,
999 bitsize_unit_node));
1000 unsigned int align = TYPE_ALIGN (record_type);
1002 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1003 TYPE_SIZE_UNIT (record_type)
1004 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1006 compute_record_mode (record_type);
1010 if (!do_not_finalize)
1011 rest_of_record_type_compilation (record_type);
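/* A small sketch of a typical call, assuming a hypothetical record with no
   representation clause whose FIELD_DECLs are already chained into
   gnu_field_list:

     tree gnu_record = make_node (RECORD_TYPE);
     ...                       (create_field_decl the fields into the list)
     finish_record_type (gnu_record, gnu_field_list, 0, false);

   With REP_LEVEL 0 the record is laid out here; with REP_LEVEL 1 only the
   sizes and alignment are set from the existing layout, and with
   REP_LEVEL 2 only the finalizing pass over the fields is made, as the
   comment above describes.  */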
1014 /* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
1015 the debug information associated with it. It need not be invoked
1016 directly in most cases since finish_record_type takes care of doing
1017 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
1019 void
1020 rest_of_record_type_compilation (tree record_type)
1022 tree fieldlist = TYPE_FIELDS (record_type);
1023 tree field;
1024 enum tree_code code = TREE_CODE (record_type);
1025 bool var_size = false;
1027 for (field = fieldlist; field; field = TREE_CHAIN (field))
1029 /* We need to make an XVE/XVU record if any field has variable size,
1030 whether or not the record does. For example, if we have a union,
1031 it may be that all fields, rounded up to the alignment, have the
1032 same size, in which case we'll use that size. But the debug
1033 output routines (except Dwarf2) won't be able to output the fields,
1034 so we need to make the special record. */
1035 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1036 /* If a field has a non-constant qualifier, the record will have
1037 variable size too. */
1038 || (code == QUAL_UNION_TYPE
1039 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1041 var_size = true;
1042 break;
1046 /* If this record is of variable size, rename it so that the
1047 debugger knows it is and make a new, parallel, record
1048 that tells the debugger how the record is laid out. See
1049 exp_dbug.ads. But don't do this for records that are padding
1050 since they confuse GDB. */
1051 if (var_size
1052 && !(TREE_CODE (record_type) == RECORD_TYPE
1053 && TYPE_IS_PADDING_P (record_type)))
1055 tree new_record_type
1056 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1057 ? UNION_TYPE : TREE_CODE (record_type));
1058 tree orig_name = TYPE_NAME (record_type);
1059 tree orig_id
1060 = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
1061 : orig_name);
1062 tree new_id
1063 = concat_id_with_name (orig_id,
1064 TREE_CODE (record_type) == QUAL_UNION_TYPE
1065 ? "XVU" : "XVE");
1066 tree last_pos = bitsize_zero_node;
1067 tree old_field;
1068 tree prev_old_field = 0;
1070 TYPE_NAME (new_record_type) = new_id;
1071 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1072 TYPE_STUB_DECL (new_record_type)
1073 = build_decl (TYPE_DECL, new_id, new_record_type);
1074 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
1075 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1076 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1077 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1078 TYPE_SIZE_UNIT (new_record_type)
1079 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1081 add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
1083 /* Now scan all the fields, replacing each field with a new
1084 field corresponding to the new encoding. */
1085 for (old_field = TYPE_FIELDS (record_type); old_field;
1086 old_field = TREE_CHAIN (old_field))
1088 tree field_type = TREE_TYPE (old_field);
1089 tree field_name = DECL_NAME (old_field);
1090 tree new_field;
1091 tree curpos = bit_position (old_field);
1092 bool var = false;
1093 unsigned int align = 0;
1094 tree pos;
1096 /* See how the position was modified from the last position.
1098 There are two basic cases we support: a value was added
1099 to the last position or the last position was rounded to
 1100 a boundary and then something was added. Check for the
1101 first case first. If not, see if there is any evidence
1102 of rounding. If so, round the last position and try
1103 again.
1105 If this is a union, the position can be taken as zero. */
1107 /* Some computations depend on the shape of the position expression,
1108 so strip conversions to make sure it's exposed. */
1109 curpos = remove_conversions (curpos, true);
1111 if (TREE_CODE (new_record_type) == UNION_TYPE)
1112 pos = bitsize_zero_node, align = 0;
1113 else
1114 pos = compute_related_constant (curpos, last_pos);
1116 if (!pos && TREE_CODE (curpos) == MULT_EXPR
1117 && host_integerp (TREE_OPERAND (curpos, 1), 1))
1119 tree offset = TREE_OPERAND (curpos, 0);
1120 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
1122 /* An offset which is a bitwise AND with a negative power of 2
1123 means an alignment corresponding to this power of 2. */
1124 offset = remove_conversions (offset, true);
1125 if (TREE_CODE (offset) == BIT_AND_EXPR
1126 && host_integerp (TREE_OPERAND (offset, 1), 0)
1127 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
1129 unsigned int pow
1130 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
1131 if (exact_log2 (pow) > 0)
1132 align *= pow;
1135 pos = compute_related_constant (curpos,
1136 round_up (last_pos, align));
1138 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
1139 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
1140 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1141 && host_integerp (TREE_OPERAND
1142 (TREE_OPERAND (curpos, 0), 1),
1145 align
1146 = tree_low_cst
1147 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
1148 pos = compute_related_constant (curpos,
1149 round_up (last_pos, align));
1151 else if (potential_alignment_gap (prev_old_field, old_field,
1152 pos))
1154 align = TYPE_ALIGN (field_type);
1155 pos = compute_related_constant (curpos,
1156 round_up (last_pos, align));
1159 /* If we can't compute a position, set it to zero.
1161 ??? We really should abort here, but it's too much work
1162 to get this correct for all cases. */
1164 if (!pos)
1165 pos = bitsize_zero_node;
1167 /* See if this type is variable-sized and make a pointer type
1168 and indicate the indirection if so. Beware that the debug
1169 back-end may adjust the position computed above according
1170 to the alignment of the field type, i.e. the pointer type
1171 in this case, if we don't preventively counter that. */
1172 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1174 field_type = build_pointer_type (field_type);
1175 if (align != 0 && TYPE_ALIGN (field_type) > align)
1177 field_type = copy_node (field_type);
1178 TYPE_ALIGN (field_type) = align;
1180 var = true;
1183 /* Make a new field name, if necessary. */
1184 if (var || align != 0)
1186 char suffix[16];
1188 if (align != 0)
1189 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1190 align / BITS_PER_UNIT);
1191 else
1192 strcpy (suffix, "XVL");
1194 field_name = concat_id_with_name (field_name, suffix);
1197 new_field = create_field_decl (field_name, field_type,
1198 new_record_type, 0,
1199 DECL_SIZE (old_field), pos, 0);
1200 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1201 TYPE_FIELDS (new_record_type) = new_field;
1203 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1204 zero. The only time it's not the last field of the record
1205 is when there are other components at fixed positions after
1206 it (meaning there was a rep clause for every field) and we
1207 want to be able to encode them. */
1208 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1209 (TREE_CODE (TREE_TYPE (old_field))
1210 == QUAL_UNION_TYPE)
1211 ? bitsize_zero_node
1212 : DECL_SIZE (old_field));
1213 prev_old_field = old_field;
1216 TYPE_FIELDS (new_record_type)
1217 = nreverse (TYPE_FIELDS (new_record_type));
1219 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
1222 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
 1225 /* Append PARALLEL_TYPE on the chain of parallel types for DECL. */
1227 void
1228 add_parallel_type (tree decl, tree parallel_type)
1230 tree d = decl;
1232 while (DECL_PARALLEL_TYPE (d))
1233 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
1235 SET_DECL_PARALLEL_TYPE (d, parallel_type);
1238 /* Return the parallel type associated to a type, if any. */
1240 tree
1241 get_parallel_type (tree type)
1243 if (TYPE_STUB_DECL (type))
1244 return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type));
1245 else
1246 return NULL_TREE;
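/* A minimal sketch of how the two routines above cooperate; record_type
   and xv_type are hypothetical and stand for a variable-sized record and
   the XVE/XVU encoding built for it in rest_of_record_type_compilation:

     add_parallel_type (TYPE_STUB_DECL (record_type), xv_type);
     ...
     tree gnu_parallel = get_parallel_type (record_type);

   gnu_parallel is then xv_type; further calls to add_parallel_type chain
   additional parallel types off the previous one through its
   TYPE_STUB_DECL, as coded above.  */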
1249 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1250 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1251 if this represents a QUAL_UNION_TYPE in which case we must look for
1252 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1253 is nonzero, we must take the MAX of the end position of this field
1254 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1256 We return an expression for the size. */
1258 static tree
1259 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1260 bool has_rep)
1262 tree type = TREE_TYPE (last_size);
1263 tree new;
1265 if (!special || TREE_CODE (size) != COND_EXPR)
1267 new = size_binop (PLUS_EXPR, first_bit, size);
1268 if (has_rep)
1269 new = size_binop (MAX_EXPR, last_size, new);
1272 else
1273 new = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1274 integer_zerop (TREE_OPERAND (size, 1))
1275 ? last_size : merge_sizes (last_size, first_bit,
1276 TREE_OPERAND (size, 1),
1277 1, has_rep),
1278 integer_zerop (TREE_OPERAND (size, 2))
1279 ? last_size : merge_sizes (last_size, first_bit,
1280 TREE_OPERAND (size, 2),
1281 1, has_rep));
 1283 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
1284 when fed through substitute_in_expr) into thinking that a constant
1285 size is not constant. */
1286 while (TREE_CODE (new) == NON_LVALUE_EXPR)
1287 new = TREE_OPERAND (new, 0);
1289 return new;
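/* A worked example for the routine above, using constant bit sizes for
   readability: for a record without rep clause whose current field starts
   at bit 32 and is 16 bits wide, merge_sizes (last_size, bitsize_int (32),
   bitsize_int (16), false, false) yields 48, i.e. FIRST_BIT plus SIZE.
   With HAS_REP true the result is instead MAX (last_size, 48), since a
   rep clause may have placed an earlier field beyond this one.  */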
1292 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1293 related by the addition of a constant. Return that constant if so. */
1295 static tree
1296 compute_related_constant (tree op0, tree op1)
1298 tree op0_var, op1_var;
1299 tree op0_con = split_plus (op0, &op0_var);
1300 tree op1_con = split_plus (op1, &op1_var);
1301 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1303 if (operand_equal_p (op0_var, op1_var, 0))
1304 return result;
1305 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1306 return result;
1307 else
1308 return 0;
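/* A worked example for the routine above, with N and M standing for
   distinct variable bitsizetype expressions:
   compute_related_constant (N + 48, N + 16) returns 32, whereas
   compute_related_constant (N + 48, M + 16) returns 0 because the
   variable parts differ and no constant offset relates the operands.  */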
1311 /* Utility function of above to split a tree OP which may be a sum, into a
1312 constant part, which is returned, and a variable part, which is stored
1313 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1314 bitsizetype. */
1316 static tree
1317 split_plus (tree in, tree *pvar)
1319 /* Strip NOPS in order to ease the tree traversal and maximize the
1320 potential for constant or plus/minus discovery. We need to be careful
1321 to always return and set *pvar to bitsizetype trees, but it's worth
1322 the effort. */
1323 STRIP_NOPS (in);
1325 *pvar = convert (bitsizetype, in);
1327 if (TREE_CODE (in) == INTEGER_CST)
1329 *pvar = bitsize_zero_node;
1330 return convert (bitsizetype, in);
1332 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1334 tree lhs_var, rhs_var;
1335 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1336 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1338 if (lhs_var == TREE_OPERAND (in, 0)
1339 && rhs_var == TREE_OPERAND (in, 1))
1340 return bitsize_zero_node;
1342 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1343 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1345 else
1346 return bitsize_zero_node;
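/* A worked example for the routine above: for IN = (N + 8) - 4, with N a
   variable bitsizetype expression, split_plus returns 4 as the constant
   part and sets *PVAR to N; for a purely variable IN it returns
   bitsize_zero_node and leaves the converted input in *PVAR.  */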
1349 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1350 subprogram. If it is void_type_node, then we are dealing with a procedure,
1351 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1352 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1353 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1354 RETURNS_UNCONSTRAINED is true if the function returns an unconstrained
1355 object. RETURNS_BY_REF is true if the function returns by reference.
1356 RETURNS_BY_TARGET_PTR is true if the function is to be passed (as its
1357 first parameter) the address of the place to copy its result. */
1359 tree
1360 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1361 bool returns_unconstrained, bool returns_by_ref,
1362 bool returns_by_target_ptr)
1364 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1365 the subprogram formal parameters. This list is generated by traversing the
1366 input list of PARM_DECL nodes. */
1367 tree param_type_list = NULL;
1368 tree param_decl;
1369 tree type;
1371 for (param_decl = param_decl_list; param_decl;
1372 param_decl = TREE_CHAIN (param_decl))
1373 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1374 param_type_list);
1376 /* The list of the function parameter types has to be terminated by the void
1377 type to signal to the back-end that we are not dealing with a variable
1378 parameter subprogram, but that the subprogram has a fixed number of
1379 parameters. */
1380 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1382 /* The list of argument types has been created in reverse
1383 so nreverse it. */
1384 param_type_list = nreverse (param_type_list);
1386 type = build_function_type (return_type, param_type_list);
1388 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
 1389 or the new type should have one, make a copy of TYPE. Likewise for
1390 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1391 if (TYPE_CI_CO_LIST (type) || cico_list
1392 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1393 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1394 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1395 type = copy_type (type);
1397 TYPE_CI_CO_LIST (type) = cico_list;
1398 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1399 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1400 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1401 return type;
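/* A minimal sketch of the resulting FUNCTION_TYPE, assuming a hypothetical
   Ada function taking one Integer parameter and returning Integer, with no
   copy-in/copy-out formals; gnu_param_list is the chain of PARM_DECLs:

     type = create_subprog_type (integer_type_node, gnu_param_list,
                                 NULL_TREE, false, false, false);

   TYPE_ARG_TYPES of the result then reads integer_type_node followed by
   void_type_node, the trailing void marking the fixed number of
   parameters as explained above.  */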
1404 /* Return a copy of TYPE but safe to modify in any way. */
1406 tree
1407 copy_type (tree type)
1409 tree new = copy_node (type);
1411 /* copy_node clears this field instead of copying it, because it is
1412 aliased with TREE_CHAIN. */
1413 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1415 TYPE_POINTER_TO (new) = 0;
1416 TYPE_REFERENCE_TO (new) = 0;
1417 TYPE_MAIN_VARIANT (new) = new;
1418 TYPE_NEXT_VARIANT (new) = 0;
1420 return new;
1423 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1424 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position of
1425 the decl. */
1427 tree
1428 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1430 /* First build a type for the desired range. */
1431 tree type = build_index_2_type (min, max);
1433 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1434 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1435 is set, but not to INDEX, make a copy of this type with the requested
1436 index type. Note that we have no way of sharing these types, but that's
1437 only a small hole. */
1438 if (TYPE_INDEX_TYPE (type) == index)
1439 return type;
1440 else if (TYPE_INDEX_TYPE (type))
1441 type = copy_type (type);
1443 SET_TYPE_INDEX_TYPE (type, index);
1444 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1445 return type;
1448 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1449 string) and TYPE is a ..._TYPE node giving its data type.
1450 ARTIFICIAL_P is true if this is a declaration that was generated
1451 by the compiler. DEBUG_INFO_P is true if we need to write debugging
1452 information about this type. GNAT_NODE is used for the position of
1453 the decl. */
1455 tree
1456 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1457 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1459 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1460 enum tree_code code = TREE_CODE (type);
1462 DECL_ARTIFICIAL (type_decl) = artificial_p;
1464 if (!TYPE_IS_DUMMY_P (type))
1465 gnat_pushdecl (type_decl, gnat_node);
1467 process_attributes (type_decl, attr_list);
1469 /* Pass type declaration information to the debugger unless this is an
 1470 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support, an
 1471 ENUMERAL_TYPE or RECORD_TYPE, which are handled separately, or a
 1472 type for which debugging information was not requested. */
1473 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1474 DECL_IGNORED_P (type_decl) = 1;
1475 else if (code != ENUMERAL_TYPE
1476 && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
1477 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1478 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1479 rest_of_type_decl_compilation (type_decl);
1481 return type_decl;
1484 /* Return a VAR_DECL or CONST_DECL node.
1486 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1487 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1488 the GCC tree for an optional initial expression; NULL_TREE if none.
1490 CONST_FLAG is true if this variable is constant, in which case we might
1491 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1493 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1494 definition to be made visible outside of the current compilation unit, for
1495 instance variable definitions in a package specification.
1497 EXTERN_FLAG is nonzero when processing an external variable declaration (as
1498 opposed to a definition: no storage is to be allocated for the variable).
1500 STATIC_FLAG is only relevant when not at top level. In that case
1501 it indicates whether to always allocate storage to the variable.
1503 GNAT_NODE is used for the position of the decl. */
1505 tree
1506 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
1507 bool const_flag, bool public_flag, bool extern_flag,
1508 bool static_flag, bool const_decl_allowed_p,
1509 struct attrib *attr_list, Node_Id gnat_node)
1511 bool init_const
1512 = (var_init != 0
1513 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
1514 && (global_bindings_p () || static_flag
1515 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
1516 : TREE_CONSTANT (var_init)));
1518 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
 1519 case the initializer may be used in lieu of the DECL node (as done in
 1520 Identifier_to_gnu). This is useful to prevent the need for elaboration
1521 code when an identifier for which such a decl is made is in turn used as
1522 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1523 but extra constraints apply to this choice (see below) and are not
1524 relevant to the distinction we wish to make. */
1525 bool constant_p = const_flag && init_const;
1527 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1528 and may be used for scalars in general but not for aggregates. */
1529 tree var_decl
1530 = build_decl ((constant_p && const_decl_allowed_p
1531 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
1532 var_name, type);
1534 /* If this is external, throw away any initializations (they will be done
1535 elsewhere) unless this is a constant for which we would like to remain
1536 able to get the initializer. If we are defining a global here, leave a
1537 constant initialization and save any variable elaborations for the
1538 elaboration routine. If we are just annotating types, throw away the
1539 initialization if it isn't a constant. */
1540 if ((extern_flag && !constant_p)
1541 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1542 var_init = NULL_TREE;
1544 /* At the global level, an initializer requiring code to be generated
1545 produces elaboration statements. Check that such statements are allowed,
1546 that is, not violating a No_Elaboration_Code restriction. */
1547 if (global_bindings_p () && var_init != 0 && ! init_const)
1548 Check_Elaboration_Code_Allowed (gnat_node);
1550 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1551 try to fiddle with DECL_COMMON. However, on platforms that don't
1552 support global BSS sections, uninitialized global variables would
1553 go in DATA instead, thus increasing the size of the executable. */
1554 if (!flag_no_common
1555 && TREE_CODE (var_decl) == VAR_DECL
1556 && !have_global_bss_p ())
1557 DECL_COMMON (var_decl) = 1;
1558 DECL_INITIAL (var_decl) = var_init;
1559 TREE_READONLY (var_decl) = const_flag;
1560 DECL_EXTERNAL (var_decl) = extern_flag;
1561 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1562 TREE_CONSTANT (var_decl) = constant_p;
1563 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1564 = TYPE_VOLATILE (type);
1566 /* If it's public and not external, always allocate storage for it.
1567 At the global binding level we need to allocate static storage for the
1568 variable if and only if it's not external. If we are not at the top level
1569 we allocate automatic storage unless requested not to. */
1570 TREE_STATIC (var_decl)
1571 = !extern_flag && (public_flag || static_flag || global_bindings_p ());
1573 if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
1574 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1576 process_attributes (var_decl, attr_list);
1578 /* Add this decl to the current binding level. */
1579 gnat_pushdecl (var_decl, gnat_node);
1581 if (TREE_SIDE_EFFECTS (var_decl))
1582 TREE_ADDRESSABLE (var_decl) = 1;
1584 if (TREE_CODE (var_decl) != CONST_DECL)
1586 if (global_bindings_p ())
1587 rest_of_decl_compilation (var_decl, true, 0);
1589 else
1590 expand_decl (var_decl);
1592 return var_decl;
1595 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1597 static bool
1598 aggregate_type_contains_array_p (tree type)
1600 switch (TREE_CODE (type))
1602 case RECORD_TYPE:
1603 case UNION_TYPE:
1604 case QUAL_UNION_TYPE:
1606 tree field;
1607 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1608 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1609 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1610 return true;
1611 return false;
1614 case ARRAY_TYPE:
1615 return true;
1617 default:
1618 gcc_unreachable ();
1622 /* Returns a FIELD_DECL node. FIELD_NAME is the field name, FIELD_TYPE is its
1623 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1624 this field is in a record type with a "pragma pack". If SIZE is nonzero
1625 it is the specified size for this field. If POS is nonzero, it is the bit
1626 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1627 the address of this field for aliasing purposes. If it is negative, we
1628 should not make a bitfield, which is used by make_aligning_type. */
1630 tree
1631 create_field_decl (tree field_name, tree field_type, tree record_type,
1632 int packed, tree size, tree pos, int addressable)
1634 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1636 DECL_CONTEXT (field_decl) = record_type;
1637 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1639 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1640 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1641 Likewise for an aggregate without specified position that contains an
1642 array, because in this case slices of variable length of this array
1643 must be handled by GCC and variable-sized objects need to be aligned
1644 to at least a byte boundary. */
1645 if (packed && (TYPE_MODE (field_type) == BLKmode
1646 || (!pos
1647 && AGGREGATE_TYPE_P (field_type)
1648 && aggregate_type_contains_array_p (field_type))))
1649 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1651 /* If a size is specified, use it. Otherwise, if the record type is packed,
1652 compute a size to use, which may differ from the object's natural size.
1653 We always set a size in this case to trigger the checks for bitfield
1654 creation below, which is typically required when no position has been
1655 specified. */
1656 if (size)
1657 size = convert (bitsizetype, size);
1658 else if (packed == 1)
1660 size = rm_size (field_type);
1662 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1663 a byte boundary. */
1664 if (TREE_CODE (size) == INTEGER_CST
1665 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1666 size = round_up (size, BITS_PER_UNIT);
1669 /* If ADDRESSABLE permits it (i.e. is non-negative), make a bitfield when a
1670 size is specified, for two reasons: first, if the size differs from the
1671 natural size; second, if the alignment is insufficient. There are a
1672 number of ways the latter can be true.
1674 We never make a bitfield if the type of the field has a nonconstant size,
1675 because no such entity requiring bitfield operations should reach here.
1677 We do *preventively* make a bitfield when there might be the need for it
1678 but we don't have all the necessary information to decide, as is the case
1679 of a field with no specified position in a packed record.
1681 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1682 in layout_decl or finish_record_type to clear the bit_field indication if
1683 it is in fact not needed. */
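  /* For example, a field whose type has a natural size of 8 bits but which is
     given a 3-bit size by a representation clause, or a field placed at a bit
     position that is not a multiple of its type's alignment, is made a
     bitfield here.  */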
1684 if (addressable >= 0
1685 && size
1686 && TREE_CODE (size) == INTEGER_CST
1687 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1688 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
1689 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1690 || packed
1691 || (TYPE_ALIGN (record_type) != 0
1692 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1694 DECL_BIT_FIELD (field_decl) = 1;
1695 DECL_SIZE (field_decl) = size;
1696 if (!packed && !pos)
1697 DECL_ALIGN (field_decl)
1698 = (TYPE_ALIGN (record_type) != 0
1699 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1700 : TYPE_ALIGN (field_type));
1703 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1705 /* Bump the alignment if need be, either for bitfield/packing purposes or
1706 to satisfy the type requirements if no such consideration applies. When
1707 we get the alignment from the type, indicate if this is from an explicit
1708 user request, which prevents stor-layout from lowering it later on. */
1710 unsigned int bit_align
1711 = (DECL_BIT_FIELD (field_decl) ? 1
1712 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
1714 if (bit_align > DECL_ALIGN (field_decl))
1715 DECL_ALIGN (field_decl) = bit_align;
1716 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
1718 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
1719 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
1723 if (pos)
1725 /* We need to pass in the alignment the DECL is known to have.
1726 This is the lowest-order bit set in POS, but no more than
1727 the alignment of the record, if one is specified. Note
1728 that an alignment of 0 is taken as infinite. */
1729 unsigned int known_align;
1731 if (host_integerp (pos, 1))
1732 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1733 else
1734 known_align = BITS_PER_UNIT;
1736 if (TYPE_ALIGN (record_type)
1737 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1738 known_align = TYPE_ALIGN (record_type);
1740 layout_decl (field_decl, known_align);
1741 SET_DECL_OFFSET_ALIGN (field_decl,
1742 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1743 : BITS_PER_UNIT);
1744 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1745 &DECL_FIELD_BIT_OFFSET (field_decl),
1746 DECL_OFFSET_ALIGN (field_decl), pos);
1748 DECL_HAS_REP_P (field_decl) = 1;
1751 /* In addition to what our caller says, claim the field is addressable if we
1752 know that its type is not suitable for a nonaliased component.
1754 The field may also be "technically" nonaddressable, meaning that even if
1755 we attempt to take the field's address we will actually get the address
1756 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1757 value we have at this point is not accurate enough, so we don't account
1758 for this here and let finish_record_type decide. */
1759 if (!addressable && !type_for_nonaliased_component_p (field_type))
1760 addressable = 1;
1762 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1764 return field_decl;
1767 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1768 PARAM_TYPE is its type. READONLY is true if the parameter is
1769 readonly (either an In parameter or an address of a pass-by-ref
1770 parameter). */
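/* For example, on a target where promote_prototypes is true, a parameter of
   an 8-bit integer type keeps its declared type but gets integer_type_node
   (or a biased subtype thereof) as its DECL_ARG_TYPE below, so that caller
   and callee agree on how the argument is actually passed.  */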
1772 tree
1773 create_param_decl (tree param_name, tree param_type, bool readonly)
1775 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1777 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1778 lead to various ABI violations. */
1779 if (targetm.calls.promote_prototypes (param_type)
1780 && (TREE_CODE (param_type) == INTEGER_TYPE
1781 || TREE_CODE (param_type) == ENUMERAL_TYPE
1782 || TREE_CODE (param_type) == BOOLEAN_TYPE)
1783 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1785 /* We have to be careful about biased types here. Make a subtype
1786 of integer_type_node with the proper biasing. */
1787 if (TREE_CODE (param_type) == INTEGER_TYPE
1788 && TYPE_BIASED_REPRESENTATION_P (param_type))
1790 param_type
1791 = copy_type (build_range_type (integer_type_node,
1792 TYPE_MIN_VALUE (param_type),
1793 TYPE_MAX_VALUE (param_type)));
1795 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1797 else
1798 param_type = integer_type_node;
1801 DECL_ARG_TYPE (param_decl) = param_type;
1802 TREE_READONLY (param_decl) = readonly;
1803 return param_decl;
1806 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1808 void
1809 process_attributes (tree decl, struct attrib *attr_list)
1811 for (; attr_list; attr_list = attr_list->next)
1812 switch (attr_list->type)
1814 case ATTR_MACHINE_ATTRIBUTE:
1815 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1816 NULL_TREE),
1817 ATTR_FLAG_TYPE_IN_PLACE);
1818 break;
1820 case ATTR_LINK_ALIAS:
1821 if (! DECL_EXTERNAL (decl))
1823 TREE_STATIC (decl) = 1;
1824 assemble_alias (decl, attr_list->name);
1826 break;
1828 case ATTR_WEAK_EXTERNAL:
1829 if (SUPPORTS_WEAK)
1830 declare_weak (decl);
1831 else
1832 post_error ("?weak declarations not supported on this target",
1833 attr_list->error_point);
1834 break;
1836 case ATTR_LINK_SECTION:
1837 if (targetm.have_named_sections)
1839 DECL_SECTION_NAME (decl)
1840 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1841 IDENTIFIER_POINTER (attr_list->name));
1842 DECL_COMMON (decl) = 0;
1844 else
1845 post_error ("?section attributes are not supported for this target",
1846 attr_list->error_point);
1847 break;
1849 case ATTR_LINK_CONSTRUCTOR:
1850 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1851 TREE_USED (decl) = 1;
1852 break;
1854 case ATTR_LINK_DESTRUCTOR:
1855 DECL_STATIC_DESTRUCTOR (decl) = 1;
1856 TREE_USED (decl) = 1;
1857 break;
1861 /* Record a global renaming pointer. */
1863 void
1864 record_global_renaming_pointer (tree decl)
1866 gcc_assert (DECL_RENAMED_OBJECT (decl));
1867 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1870 /* Invalidate the global renaming pointers. */
1872 void
1873 invalidate_global_renaming_pointers (void)
1875 unsigned int i;
1876 tree iter;
1878 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1879 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1881 VEC_free (tree, gc, global_renaming_pointers);
1884 /* Return true if VALUE is known to be a multiple of FACTOR, which must be
1885 a power of 2. */
1887 bool
1888 value_factor_p (tree value, HOST_WIDE_INT factor)
1890 if (host_integerp (value, 1))
1891 return tree_low_cst (value, 1) % factor == 0;
1893 if (TREE_CODE (value) == MULT_EXPR)
1894 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1895 || value_factor_p (TREE_OPERAND (value, 1), factor));
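  /* In all other cases, be conservative: VALUE may or may not be a multiple
     of FACTOR, so do not claim that it is.  */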
1897 return false;
1900 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
1901 unless we can prove these two fields are laid out in such a way that no gap
1902 exists between the end of PREV_FIELD and the beginning of CURR_FIELD.
1903 OFFSET is the distance in bits between the end of PREV_FIELD and the
1904 starting position of CURR_FIELD. It is ignored if null. */
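/* For example, if PREV_FIELD is an 8-bit field at bit position 0 and
   CURR_FIELD requires 32-bit alignment, the sum 0 + 8 is not a multiple of
   32, so a potential gap is reported.  */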
1906 static bool
1907 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1909 /* If this is the first field of the record, there cannot be any gap. */
1910 if (!prev_field)
1911 return false;
1913 /* If the previous field is a union type, then return false: the only
1914 time such a field is not the last field of the record is when
1915 there are other components at fixed positions after it (meaning there
1916 was a rep clause for every field), in which case we don't want the
1917 alignment constraint to override them. */
1918 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1919 return false;
1921 /* If the distance between the end of prev_field and the beginning of
1922 curr_field is constant, then there is a gap if the value of this
1923 constant is nonzero. */
1924 if (offset && host_integerp (offset, 1))
1925 return !integer_zerop (offset);
1927 /* If the size and position of the previous field are constant,
1928 then check the sum of this size and position. There will be a gap
1929 iff it is not a multiple of the current field alignment. */
1930 if (host_integerp (DECL_SIZE (prev_field), 1)
1931 && host_integerp (bit_position (prev_field), 1))
1932 return ((tree_low_cst (bit_position (prev_field), 1)
1933 + tree_low_cst (DECL_SIZE (prev_field), 1))
1934 % DECL_ALIGN (curr_field) != 0);
1936 /* If both the position and size of the previous field are multiples
1937 of the current field alignment, there cannot be any gap. */
1938 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1939 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1940 return false;
1942 /* Fallback: return that there may be a gap. */
1943 return true;
1946 /* Returns a LABEL_DECL node for LABEL_NAME. */
1948 tree
1949 create_label_decl (tree label_name)
1951 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1953 DECL_CONTEXT (label_decl) = current_function_decl;
1954 DECL_MODE (label_decl) = VOIDmode;
1955 DECL_SOURCE_LOCATION (label_decl) = input_location;
1957 return label_decl;
1960 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1961 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1962 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1963 PARM_DECL nodes chained through the TREE_CHAIN field).
1965 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1966 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1968 tree
1969 create_subprog_decl (tree subprog_name, tree asm_name,
1970 tree subprog_type, tree param_decl_list, bool inline_flag,
1971 bool public_flag, bool extern_flag,
1972 struct attrib *attr_list, Node_Id gnat_node)
1974 tree return_type = TREE_TYPE (subprog_type);
1975 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1977 /* If this is a non-inline function nested inside an inlined external
1978 function, we cannot honor both requests without cloning the nested
1979 function in the current unit since it is private to the other unit.
1980 We could inline the nested function as well but it's probably better
1981 to err on the side of too little inlining. */
1982 if (!inline_flag
1983 && current_function_decl
1984 && DECL_DECLARED_INLINE_P (current_function_decl)
1985 && DECL_EXTERNAL (current_function_decl))
1986 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
1988 DECL_EXTERNAL (subprog_decl) = extern_flag;
1989 TREE_PUBLIC (subprog_decl) = public_flag;
1990 TREE_STATIC (subprog_decl) = 1;
1991 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1992 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1993 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1994 DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
1995 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1996 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1997 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1998 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
2000 /* TREE_ADDRESSABLE is set on the result type to request the use of the
2001 target by-reference return mechanism. This is not supported all the
2002 way down to RTL expansion with GCC 4, which ICEs on temporary creation
2003 attempts with such a type and expects DECL_BY_REFERENCE to be set on
2004 the RESULT_DECL instead - see gnat_genericize for more details. */
2005 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
2007 tree result_decl = DECL_RESULT (subprog_decl);
2009 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
2010 DECL_BY_REFERENCE (result_decl) = 1;
2013 if (asm_name)
2015 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
2017 /* The expand_main_function circuitry expects "main_identifier_node" to
2018 designate the DECL_NAME of the 'main' entry point, which by default is
2019 expected to be literally declared as the "main" function. Ada
2020 program entry points are typically declared under a different name
2021 within the binder-generated file and exported as 'main' to satisfy the
2022 system expectations. Redirect main_identifier_node in this case. */
2023 if (asm_name == main_identifier_node)
2024 main_identifier_node = DECL_NAME (subprog_decl);
2027 process_attributes (subprog_decl, attr_list);
2029 /* Add this decl to the current binding level. */
2030 gnat_pushdecl (subprog_decl, gnat_node);
2032 /* Output the assembler code and/or RTL for the declaration. */
2033 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
2035 return subprog_decl;
2038 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
2039 body. This routine needs to be invoked before processing the declarations
2040 appearing in the subprogram. */
2042 void
2043 begin_subprog_body (tree subprog_decl)
2045 tree param_decl;
2047 current_function_decl = subprog_decl;
2048 announce_function (subprog_decl);
2050 /* Enter a new binding level and show that all the parameters belong to
2051 this function. */
2052 gnat_pushlevel ();
2053 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
2054 param_decl = TREE_CHAIN (param_decl))
2055 DECL_CONTEXT (param_decl) = subprog_decl;
2057 make_decl_rtl (subprog_decl);
2059 /* We handle pending sizes via the elaboration of types, so we don't need to
2060 save them. This causes them to be marked as part of the outer function
2061 and then discarded. */
2062 get_pending_sizes ();
2066 /* Helper for the genericization callback. Return a dereference of VAL
2067 if it is of a reference type. */
2069 static tree
2070 convert_from_reference (tree val)
2072 tree value_type, ref;
2074 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
2075 return val;
2077 value_type = TREE_TYPE (TREE_TYPE (val));
2078 ref = build1 (INDIRECT_REF, value_type, val);
2080 /* See if what we reference is CONST or VOLATILE, which requires
2081 looking into array types to get to the component type. */
2083 while (TREE_CODE (value_type) == ARRAY_TYPE)
2084 value_type = TREE_TYPE (value_type);
2086 TREE_READONLY (ref)
2087 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
2088 TREE_THIS_VOLATILE (ref)
2089 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
2091 TREE_SIDE_EFFECTS (ref)
2092 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
2094 return ref;
2097 /* Helper for the genericization callback. Returns true if T denotes
2098 a RESULT_DECL with DECL_BY_REFERENCE set. */
2100 static inline bool
2101 is_byref_result (tree t)
2103 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
2107 /* Tree walking callback for gnat_genericize. Currently ...
2109 o Adjust references to the function's DECL_RESULT if it is marked
2110 DECL_BY_REFERENCE and so has had its type turned into a reference
2111 type at the end of the function compilation. */
2113 static tree
2114 gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
2116 /* This implementation is modeled after what the C++ front-end does, which
2117 is the basis for the behavior expected by the downstream passes. */
2119 tree stmt = *stmt_p;
2120 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
2122 /* If we have a direct mention of the result decl, dereference. */
2123 if (is_byref_result (stmt))
2125 *stmt_p = convert_from_reference (stmt);
2126 *walk_subtrees = 0;
2127 return NULL;
2130 /* Otherwise, no need to walk the same tree twice. */
2131 if (pointer_set_contains (p_set, stmt))
2133 *walk_subtrees = 0;
2134 return NULL_TREE;
2137 /* If we are taking the address of what now is a reference, just get the
2138 reference value. */
2139 if (TREE_CODE (stmt) == ADDR_EXPR
2140 && is_byref_result (TREE_OPERAND (stmt, 0)))
2142 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
2143 *walk_subtrees = 0;
2146 /* Don't dereference a by-reference RESULT_DECL inside a RETURN_EXPR. */
2147 else if (TREE_CODE (stmt) == RETURN_EXPR
2148 && TREE_OPERAND (stmt, 0)
2149 && is_byref_result (TREE_OPERAND (stmt, 0)))
2150 *walk_subtrees = 0;
2152 /* Don't look inside trees that cannot embed references of interest. */
2153 else if (IS_TYPE_OR_DECL_P (stmt))
2154 *walk_subtrees = 0;
2156 pointer_set_insert (p_set, *stmt_p);
2158 return NULL;
2161 /* Perform lowering of Ada trees to GENERIC. In particular:
2163 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
2164 and adjust all the references to this decl accordingly. */
2166 static void
2167 gnat_genericize (tree fndecl)
2169 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
2170 was handled by simply setting TREE_ADDRESSABLE on the result type.
2171 Everything required to actually pass by invisible ref using the target
2172 mechanism (e.g. extra parameter) was handled at RTL expansion time.
2174 This doesn't work with GCC 4 any more for several reasons. First, the
2175 gimplification process might need the creation of temporaries of this
2176 type, and the gimplifier ICEs on such attempts. Second, the middle-end
2177 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
2178 RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
2179 be explicitly accounted for by the front-end in the function body.
2181 We achieve the complete transformation in two steps:
2183 1/ create_subprog_decl performs early attribute tweaks: it clears
2184 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
2185 the result decl. The former ensures that the bit isn't set in the GCC
2186 tree saved for the function, so prevents ICEs on temporary creation.
2187 The latter we use here to trigger the rest of the processing.
2189 2/ This function performs the type transformation on the result decl
2190 and adjusts all the references to this decl from the function body
2191 accordingly.
2193 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
2194 strategy, which escapes the gimplifier temporary creation issues by
2195 creating its own temporaries using TARGET_EXPR nodes. Our way relies
2196 on simple specific support code in aggregate_value_p to look at the
2197 target function result decl explicitly. */
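  /* Concretely: for a function whose result type had TREE_ADDRESSABLE set,
     the RESULT_DECL of type T is given a reference type below via
     build_reference_type, and gnat_genericize_r then rewrites every use of
     the result in the saved tree into a dereference of it.  */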
2199 struct pointer_set_t *p_set;
2200 tree decl_result = DECL_RESULT (fndecl);
2202 if (!DECL_BY_REFERENCE (decl_result))
2203 return;
2205 /* Make the DECL_RESULT explicitly by-reference and adjust all the
2206 occurrences in the function body using the common tree-walking facility.
2207 We want to see every occurrence of the result decl to adjust the
2208 referencing tree, so need to use our own pointer set to control which
2209 trees should be visited again or not. */
2211 p_set = pointer_set_create ();
2213 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
2214 TREE_ADDRESSABLE (decl_result) = 0;
2215 relayout_decl (decl_result);
2217 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
2219 pointer_set_destroy (p_set);
2222 /* Finish the definition of the current subprogram BODY and compile it all the
2223 way to assembler language output. ELAB_P tells whether this is called for
2224 an elaboration routine, which is to be discarded entirely if empty. */
2226 void
2227 end_subprog_body (tree body, bool elab_p)
2229 tree fndecl = current_function_decl;
2231 /* Mark the BLOCK for this level as being for this function and pop the
2232 level. Since the vars in it are the parameters, clear them. */
2233 BLOCK_VARS (current_binding_level->block) = 0;
2234 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2235 DECL_INITIAL (fndecl) = current_binding_level->block;
2236 gnat_poplevel ();
2238 /* We handle pending sizes via the elaboration of types, so we don't
2239 need to save them. */
2240 get_pending_sizes ();
2242 /* Mark the RESULT_DECL as being in this subprogram. */
2243 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2245 DECL_SAVED_TREE (fndecl) = body;
2247 current_function_decl = DECL_CONTEXT (fndecl);
2248 set_cfun (NULL);
2250 /* We cannot track the location of errors past this point. */
2251 error_gnat_node = Empty;
2253 /* If we're only annotating types, don't actually compile this function. */
2254 if (type_annotate_only)
2255 return;
2257 /* Perform the required pre-gimplification transformations on the tree. */
2258 gnat_genericize (fndecl);
2260 /* We do different things for nested and non-nested functions.
2261 ??? This should be in cgraph. */
2262 if (!DECL_CONTEXT (fndecl))
2264 gnat_gimplify_function (fndecl);
2266 /* If this is an empty elaboration proc, just discard the node.
2267 Otherwise, compile further. */
2268 if (elab_p && empty_body_p (gimple_body (fndecl)))
2269 cgraph_remove_node (cgraph_node (fndecl));
2270 else
2271 cgraph_finalize_function (fndecl, false);
2273 else
2274 /* Register this function with cgraph just far enough to get it
2275 added to our parent's nested function list. */
2276 (void) cgraph_node (fndecl);
2279 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
2281 static void
2282 gnat_gimplify_function (tree fndecl)
2284 struct cgraph_node *cgn;
2286 dump_function (TDI_original, fndecl);
2287 gimplify_function_tree (fndecl);
2288 dump_function (TDI_generic, fndecl);
2290 /* Convert all nested functions to GIMPLE now. We do things in this order
2291 so that items like VLA sizes are expanded properly in the context of the
2292 correct function. */
2293 cgn = cgraph_node (fndecl);
2294 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
2295 gnat_gimplify_function (cgn->decl);
2299 tree
2300 gnat_builtin_function (tree decl)
2302 gnat_pushdecl (decl, Empty);
2303 return decl;
2306 /* Return an integer type with the number of bits of precision given by
2307 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2308 it is a signed type. */
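/* For example, gnat_type_for_size (8, 1) returns an unsigned 8-bit type,
   naming it "UNSIGNED_8" if it is created here, and caches the result in
   signed_and_unsigned_types so later requests for the same precision and
   signedness reuse it.  */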
2310 tree
2311 gnat_type_for_size (unsigned precision, int unsignedp)
2313 tree t;
2314 char type_name[20];
2316 if (precision <= 2 * MAX_BITS_PER_WORD
2317 && signed_and_unsigned_types[precision][unsignedp])
2318 return signed_and_unsigned_types[precision][unsignedp];
2320 if (unsignedp)
2321 t = make_unsigned_type (precision);
2322 else
2323 t = make_signed_type (precision);
2325 if (precision <= 2 * MAX_BITS_PER_WORD)
2326 signed_and_unsigned_types[precision][unsignedp] = t;
2328 if (!TYPE_NAME (t))
2330 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2331 TYPE_NAME (t) = get_identifier (type_name);
2334 return t;
2337 /* Likewise for floating-point types. */
2339 static tree
2340 float_type_for_precision (int precision, enum machine_mode mode)
2342 tree t;
2343 char type_name[20];
2345 if (float_types[(int) mode])
2346 return float_types[(int) mode];
2348 float_types[(int) mode] = t = make_node (REAL_TYPE);
2349 TYPE_PRECISION (t) = precision;
2350 layout_type (t);
2352 gcc_assert (TYPE_MODE (t) == mode);
2353 if (!TYPE_NAME (t))
2355 sprintf (type_name, "FLOAT_%d", precision);
2356 TYPE_NAME (t) = get_identifier (type_name);
2359 return t;
2362 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2363 an unsigned type; otherwise a signed type is returned. */
2365 tree
2366 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2368 if (mode == BLKmode)
2369 return NULL_TREE;
2370 else if (mode == VOIDmode)
2371 return void_type_node;
2372 else if (COMPLEX_MODE_P (mode))
2373 return NULL_TREE;
2374 else if (SCALAR_FLOAT_MODE_P (mode))
2375 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2376 else if (SCALAR_INT_MODE_P (mode))
2377 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2378 else
2379 return NULL_TREE;
2382 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2384 tree
2385 gnat_unsigned_type (tree type_node)
2387 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2389 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2391 type = copy_node (type);
2392 TREE_TYPE (type) = type_node;
2394 else if (TREE_TYPE (type_node)
2395 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2396 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2398 type = copy_node (type);
2399 TREE_TYPE (type) = TREE_TYPE (type_node);
2402 return type;
2405 /* Return the signed version of a TYPE_NODE, a scalar type. */
2407 tree
2408 gnat_signed_type (tree type_node)
2410 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2412 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2414 type = copy_node (type);
2415 TREE_TYPE (type) = type_node;
2417 else if (TREE_TYPE (type_node)
2418 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2419 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2421 type = copy_node (type);
2422 TREE_TYPE (type) = TREE_TYPE (type_node);
2425 return type;
2428 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2429 transparently converted to each other. */
2431 int
2432 gnat_types_compatible_p (tree t1, tree t2)
2434 enum tree_code code;
2436 /* This is the default criterion. */
2437 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2438 return 1;
2440 /* We only check structural equivalence here. */
2441 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2442 return 0;
2444 /* Array types are also compatible if they are constrained and have
2445 the same component type and the same domain. */
2446 if (code == ARRAY_TYPE
2447 && TREE_TYPE (t1) == TREE_TYPE (t2)
2448 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2449 || (TYPE_DOMAIN (t1)
2450 && TYPE_DOMAIN (t2)
2451 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2452 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2453 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2454 TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))))
2455 return 1;
2457 /* Padding record types are also compatible if they pad the same
2458 type and have the same constant size. */
2459 if (code == RECORD_TYPE
2460 && TYPE_IS_PADDING_P (t1) && TYPE_IS_PADDING_P (t2)
2461 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2462 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2463 return 1;
2465 return 0;
2468 /* EXP is an expression for the size of an object. If this size contains
2469 discriminant references, replace them with the maximum (if MAX_P) or
2470 minimum (if !MAX_P) possible value of the discriminant. */
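/* For instance, if EXP is "8 * D" where D is a reference to a discriminant
   whose subtype ranges over 1 .. 10, the reference is replaced by 10 when
   MAX_P is true (by 1 otherwise) and the expression is folded, yielding 80
   (respectively 8).  */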
2472 tree
2473 max_size (tree exp, bool max_p)
2475 enum tree_code code = TREE_CODE (exp);
2476 tree type = TREE_TYPE (exp);
2478 switch (TREE_CODE_CLASS (code))
2480 case tcc_declaration:
2481 case tcc_constant:
2482 return exp;
2484 case tcc_vl_exp:
2485 if (code == CALL_EXPR)
2487 tree *argarray;
2488 int i, n = call_expr_nargs (exp);
2489 gcc_assert (n > 0);
2491 argarray = (tree *) alloca (n * sizeof (tree));
2492 for (i = 0; i < n; i++)
2493 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2494 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2496 break;
2498 case tcc_reference:
2499 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2500 modify. Otherwise, we treat it like a variable. */
2501 if (!CONTAINS_PLACEHOLDER_P (exp))
2502 return exp;
2504 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2505 return
2506 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2508 case tcc_comparison:
2509 return max_p ? size_one_node : size_zero_node;
2511 case tcc_unary:
2512 case tcc_binary:
2513 case tcc_expression:
2514 switch (TREE_CODE_LENGTH (code))
2516 case 1:
2517 if (code == NON_LVALUE_EXPR)
2518 return max_size (TREE_OPERAND (exp, 0), max_p);
2519 else
2520 return
2521 fold_build1 (code, type,
2522 max_size (TREE_OPERAND (exp, 0),
2523 code == NEGATE_EXPR ? !max_p : max_p));
2525 case 2:
2526 if (code == COMPOUND_EXPR)
2527 return max_size (TREE_OPERAND (exp, 1), max_p);
2529 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2530 may provide a tighter bound on max_size. */
2531 if (code == MINUS_EXPR
2532 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2534 tree lhs = fold_build2 (MINUS_EXPR, type,
2535 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2536 TREE_OPERAND (exp, 1));
2537 tree rhs = fold_build2 (MINUS_EXPR, type,
2538 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2539 TREE_OPERAND (exp, 1));
2540 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2541 max_size (lhs, max_p),
2542 max_size (rhs, max_p));
2546 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2547 tree rhs = max_size (TREE_OPERAND (exp, 1),
2548 code == MINUS_EXPR ? !max_p : max_p);
2550 /* Special-case wanting the maximum value of a MIN_EXPR.
2551 In that case, if one side overflows, return the other.
2552 sizetype is signed, but we know sizes are non-negative.
2553 Likewise, handle a MINUS_EXPR or PLUS_EXPR whose LHS is
2554 overflowing or is the maximum possible value while the RHS
2555 is a variable. */
2556 if (max_p
2557 && code == MIN_EXPR
2558 && TREE_CODE (rhs) == INTEGER_CST
2559 && TREE_OVERFLOW (rhs))
2560 return lhs;
2561 else if (max_p
2562 && code == MIN_EXPR
2563 && TREE_CODE (lhs) == INTEGER_CST
2564 && TREE_OVERFLOW (lhs))
2565 return rhs;
2566 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2567 && ((TREE_CODE (lhs) == INTEGER_CST
2568 && TREE_OVERFLOW (lhs))
2569 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2570 && !TREE_CONSTANT (rhs))
2571 return lhs;
2572 else
2573 return fold_build2 (code, type, lhs, rhs);
2576 case 3:
2577 if (code == SAVE_EXPR)
2578 return exp;
2579 else if (code == COND_EXPR)
2580 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2581 max_size (TREE_OPERAND (exp, 1), max_p),
2582 max_size (TREE_OPERAND (exp, 2), max_p));
2585 /* Other tree classes cannot happen. */
2586 default:
2587 break;
2590 gcc_unreachable ();
2593 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2594 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2595 Return a constructor for the template. */
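/* The template is the bounds record associated with an unconstrained array:
   for each dimension it contains a (low bound, high bound) pair, built below
   by pairing consecutive FIELD_DECLs of TEMPLATE_TYPE with the corresponding
   bounds in a CONSTRUCTOR.  */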
2597 tree
2598 build_template (tree template_type, tree array_type, tree expr)
2600 tree template_elts = NULL_TREE;
2601 tree bound_list = NULL_TREE;
2602 tree field;
2604 while (TREE_CODE (array_type) == RECORD_TYPE
2605 && (TYPE_IS_PADDING_P (array_type)
2606 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2607 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2609 if (TREE_CODE (array_type) == ARRAY_TYPE
2610 || (TREE_CODE (array_type) == INTEGER_TYPE
2611 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2612 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2614 /* First make the list for a CONSTRUCTOR for the template. Go down the
2615 field list of the template instead of the type chain because this
2616 array might be an Ada array of arrays and we can't tell where the
2617 nested arrays stop being the underlying object. */
2619 for (field = TYPE_FIELDS (template_type); field;
2620 (bound_list
2621 ? (bound_list = TREE_CHAIN (bound_list))
2622 : (array_type = TREE_TYPE (array_type))),
2623 field = TREE_CHAIN (TREE_CHAIN (field)))
2625 tree bounds, min, max;
2627 /* If we have a bound list, get the bounds from there. Likewise
2628 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2629 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2630 This will give us a maximum range. */
2631 if (bound_list)
2632 bounds = TREE_VALUE (bound_list);
2633 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2634 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2635 else if (expr && TREE_CODE (expr) == PARM_DECL
2636 && DECL_BY_COMPONENT_PTR_P (expr))
2637 bounds = TREE_TYPE (field);
2638 else
2639 gcc_unreachable ();
2641 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2642 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2644 /* If either MIN or MAX involves a PLACEHOLDER_EXPR, we must
2645 substitute it from EXPR. */
2646 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2647 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2649 template_elts = tree_cons (TREE_CHAIN (field), max,
2650 tree_cons (field, min, template_elts));
2653 return gnat_build_constructor (template_type, nreverse (template_elts));
2656 /* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify
2657 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2658 in the type contains in its DECL_INITIAL the expression to use when
2659 a constructor is made for the type. GNAT_ENTITY is an entity used
2660 to print out an error message if the mechanism cannot be applied to
2661 an object of that type and also for the name. */
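/* The descriptor built below starts with the fields common to all classes:
   LENGTH (16 bits), DTYPE (8 bits), CLASS (8 bits) and a 32-bit POINTER to
   the object.  Class SB adds the SB_L1/SB_U1 bounds; classes A and NCA add
   SCALE, DIGITS, AFLAGS, DIMCT, ARSIZE, the A0 pointer to the first element,
   the addressing coefficients and the per-dimension bounds.  */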
2663 tree
2664 build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2666 tree record_type = make_node (RECORD_TYPE);
2667 tree pointer32_type;
2668 tree field_list = 0;
2669 int class;
2670 int dtype = 0;
2671 tree inner_type;
2672 int ndim;
2673 int i;
2674 tree *idx_arr;
2675 tree tem;
2677 /* If TYPE is an unconstrained array, use the underlying array type. */
2678 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2679 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2681 /* If this is an array, compute the number of dimensions in the array,
2682 get the index types, and point to the inner type. */
2683 if (TREE_CODE (type) != ARRAY_TYPE)
2684 ndim = 0;
2685 else
2686 for (ndim = 1, inner_type = type;
2687 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2688 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2689 ndim++, inner_type = TREE_TYPE (inner_type))
2692 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2694 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
2695 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2696 for (i = ndim - 1, inner_type = type;
2697 i >= 0;
2698 i--, inner_type = TREE_TYPE (inner_type))
2699 idx_arr[i] = TYPE_DOMAIN (inner_type);
2700 else
2701 for (i = 0, inner_type = type;
2702 i < ndim;
2703 i++, inner_type = TREE_TYPE (inner_type))
2704 idx_arr[i] = TYPE_DOMAIN (inner_type);
2706 /* Now get the DTYPE value. */
2707 switch (TREE_CODE (type))
2709 case INTEGER_TYPE:
2710 case ENUMERAL_TYPE:
2711 case BOOLEAN_TYPE:
2712 if (TYPE_VAX_FLOATING_POINT_P (type))
2713 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2715 case 6:
2716 dtype = 10;
2717 break;
2718 case 9:
2719 dtype = 11;
2720 break;
2721 case 15:
2722 dtype = 27;
2723 break;
2725 else
2726 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2728 case 8:
2729 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2730 break;
2731 case 16:
2732 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2733 break;
2734 case 32:
2735 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2736 break;
2737 case 64:
2738 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2739 break;
2740 case 128:
2741 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2742 break;
2744 break;
2746 case REAL_TYPE:
2747 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2748 break;
2750 case COMPLEX_TYPE:
2751 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2752 && TYPE_VAX_FLOATING_POINT_P (type))
2753 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2755 case 6:
2756 dtype = 12;
2757 break;
2758 case 9:
2759 dtype = 13;
2760 break;
2761 case 15:
2762 dtype = 29;
2764 else
2765 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2766 break;
2768 case ARRAY_TYPE:
2769 dtype = 14;
2770 break;
2772 default:
2773 break;
2776 /* Get the CLASS value. */
2777 switch (mech)
2779 case By_Descriptor_A:
2780 case By_Short_Descriptor_A:
2781 class = 4;
2782 break;
2783 case By_Descriptor_NCA:
2784 case By_Short_Descriptor_NCA:
2785 class = 10;
2786 break;
2787 case By_Descriptor_SB:
2788 case By_Short_Descriptor_SB:
2789 class = 15;
2790 break;
2791 case By_Descriptor:
2792 case By_Short_Descriptor:
2793 case By_Descriptor_S:
2794 case By_Short_Descriptor_S:
2795 default:
2796 class = 1;
2797 break;
2800 /* Make the type for a descriptor for VMS. The first four fields
2801 are the same for all types. */
2803 field_list
2804 = chainon (field_list,
2805 make_descriptor_field
2806 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2807 size_in_bytes ((mech == By_Descriptor_A ||
2808 mech == By_Short_Descriptor_A)
2809 ? inner_type : type)));
2811 field_list = chainon (field_list,
2812 make_descriptor_field ("DTYPE",
2813 gnat_type_for_size (8, 1),
2814 record_type, size_int (dtype)));
2815 field_list = chainon (field_list,
2816 make_descriptor_field ("CLASS",
2817 gnat_type_for_size (8, 1),
2818 record_type, size_int (class)));
2820 /* Of course this will crash at run-time if the address space is not
2821 within the low 32 bits, but there is nothing else we can do. */
2822 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2824 field_list
2825 = chainon (field_list,
2826 make_descriptor_field
2827 ("POINTER", pointer32_type, record_type,
2828 build_unary_op (ADDR_EXPR,
2829 pointer32_type,
2830 build0 (PLACEHOLDER_EXPR, type))));
2832 switch (mech)
2834 case By_Descriptor:
2835 case By_Short_Descriptor:
2836 case By_Descriptor_S:
2837 case By_Short_Descriptor_S:
2838 break;
2840 case By_Descriptor_SB:
2841 case By_Short_Descriptor_SB:
2842 field_list
2843 = chainon (field_list,
2844 make_descriptor_field
2845 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2846 TREE_CODE (type) == ARRAY_TYPE
2847 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2848 field_list
2849 = chainon (field_list,
2850 make_descriptor_field
2851 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2852 TREE_CODE (type) == ARRAY_TYPE
2853 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2854 break;
2856 case By_Descriptor_A:
2857 case By_Short_Descriptor_A:
2858 case By_Descriptor_NCA:
2859 case By_Short_Descriptor_NCA:
2860 field_list = chainon (field_list,
2861 make_descriptor_field ("SCALE",
2862 gnat_type_for_size (8, 1),
2863 record_type,
2864 size_zero_node));
2866 field_list = chainon (field_list,
2867 make_descriptor_field ("DIGITS",
2868 gnat_type_for_size (8, 1),
2869 record_type,
2870 size_zero_node));
2872 field_list
2873 = chainon (field_list,
2874 make_descriptor_field
2875 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2876 size_int ((mech == By_Descriptor_NCA ||
2877 mech == By_Short_Descriptor_NCA)
2879 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2880 : (TREE_CODE (type) == ARRAY_TYPE
2881 && TYPE_CONVENTION_FORTRAN_P (type)
2882 ? 224 : 192))));
2884 field_list = chainon (field_list,
2885 make_descriptor_field ("DIMCT",
2886 gnat_type_for_size (8, 1),
2887 record_type,
2888 size_int (ndim)));
2890 field_list = chainon (field_list,
2891 make_descriptor_field ("ARSIZE",
2892 gnat_type_for_size (32, 1),
2893 record_type,
2894 size_in_bytes (type)));
2896 /* Now build a pointer to the 0,0,0... element. */
2897 tem = build0 (PLACEHOLDER_EXPR, type);
2898 for (i = 0, inner_type = type; i < ndim;
2899 i++, inner_type = TREE_TYPE (inner_type))
2900 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2901 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2902 NULL_TREE, NULL_TREE);
2904 field_list
2905 = chainon (field_list,
2906 make_descriptor_field
2907 ("A0",
2908 build_pointer_type_for_mode (inner_type, SImode, false),
2909 record_type,
2910 build1 (ADDR_EXPR,
2911 build_pointer_type_for_mode (inner_type, SImode,
2912 false),
2913 tem)));
2915 /* Next come the addressing coefficients. */
2916 tem = size_one_node;
2917 for (i = 0; i < ndim; i++)
2919 char fname[3];
2920 tree idx_length
2921 = size_binop (MULT_EXPR, tem,
2922 size_binop (PLUS_EXPR,
2923 size_binop (MINUS_EXPR,
2924 TYPE_MAX_VALUE (idx_arr[i]),
2925 TYPE_MIN_VALUE (idx_arr[i])),
2926 size_int (1)));
2928 fname[0] = ((mech == By_Descriptor_NCA ||
2929 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
2930 fname[1] = '0' + i, fname[2] = 0;
2931 field_list
2932 = chainon (field_list,
2933 make_descriptor_field (fname,
2934 gnat_type_for_size (32, 1),
2935 record_type, idx_length));
2937 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
2938 tem = idx_length;
2941 /* Finally here are the bounds. */
2942 for (i = 0; i < ndim; i++)
2944 char fname[3];
2946 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2947 field_list
2948 = chainon (field_list,
2949 make_descriptor_field
2950 (fname, gnat_type_for_size (32, 1), record_type,
2951 TYPE_MIN_VALUE (idx_arr[i])));
2953 fname[0] = 'U';
2954 field_list
2955 = chainon (field_list,
2956 make_descriptor_field
2957 (fname, gnat_type_for_size (32, 1), record_type,
2958 TYPE_MAX_VALUE (idx_arr[i])));
2960 break;
2962 default:
2963 post_error ("unsupported descriptor type for &", gnat_entity);
2966 finish_record_type (record_type, field_list, 0, true);
2967 create_type_decl (create_concat_name (gnat_entity, "DESC"), record_type,
2968 NULL, true, false, gnat_entity);
2970 return record_type;
2973 /* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify
2974 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2975 in the type contains in its DECL_INITIAL the expression to use when
2976 a constructor is made for the type. GNAT_ENTITY is an entity used
2977 to print out an error message if the mechanism cannot be applied to
2978 an object of that type and also for the name. */
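/* The 64-bit layout built below differs from the 32-bit one in that it
   starts with the MBO and MBMO marker fields (set to 1 and -1 respectively),
   uses a 64-bit LENGTH and POINTER, and inserts an MBZ field before ARSIZE
   in the array classes.  */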
2980 tree
2981 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2983 tree record64_type = make_node (RECORD_TYPE);
2984 tree pointer64_type;
2985 tree field_list64 = 0;
2986 int class;
2987 int dtype = 0;
2988 tree inner_type;
2989 int ndim;
2990 int i;
2991 tree *idx_arr;
2992 tree tem;
2994 /* If TYPE is an unconstrained array, use the underlying array type. */
2995 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2996 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2998 /* If this is an array, compute the number of dimensions in the array,
2999 get the index types, and point to the inner type. */
3000 if (TREE_CODE (type) != ARRAY_TYPE)
3001 ndim = 0;
3002 else
3003 for (ndim = 1, inner_type = type;
3004 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
3005 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
3006 ndim++, inner_type = TREE_TYPE (inner_type))
3009 idx_arr = (tree *) alloca (ndim * sizeof (tree));
3011 if (mech != By_Descriptor_NCA
3012 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
3013 for (i = ndim - 1, inner_type = type;
3014 i >= 0;
3015 i--, inner_type = TREE_TYPE (inner_type))
3016 idx_arr[i] = TYPE_DOMAIN (inner_type);
3017 else
3018 for (i = 0, inner_type = type;
3019 i < ndim;
3020 i++, inner_type = TREE_TYPE (inner_type))
3021 idx_arr[i] = TYPE_DOMAIN (inner_type);
3023 /* Now get the DTYPE value. */
3024 switch (TREE_CODE (type))
3026 case INTEGER_TYPE:
3027 case ENUMERAL_TYPE:
3028 case BOOLEAN_TYPE:
3029 if (TYPE_VAX_FLOATING_POINT_P (type))
3030 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3032 case 6:
3033 dtype = 10;
3034 break;
3035 case 9:
3036 dtype = 11;
3037 break;
3038 case 15:
3039 dtype = 27;
3040 break;
3042 else
3043 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
3045 case 8:
3046 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
3047 break;
3048 case 16:
3049 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
3050 break;
3051 case 32:
3052 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
3053 break;
3054 case 64:
3055 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
3056 break;
3057 case 128:
3058 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
3059 break;
3061 break;
3063 case REAL_TYPE:
3064 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
3065 break;
3067 case COMPLEX_TYPE:
3068 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
3069 && TYPE_VAX_FLOATING_POINT_P (type))
3070 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3072 case 6:
3073 dtype = 12;
3074 break;
3075 case 9:
3076 dtype = 13;
3077 break;
3078 case 15:
3079 dtype = 29;
3081 else
3082 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
3083 break;
3085 case ARRAY_TYPE:
3086 dtype = 14;
3087 break;
3089 default:
3090 break;
3093 /* Get the CLASS value. */
3094 switch (mech)
3096 case By_Descriptor_A:
3097 class = 4;
3098 break;
3099 case By_Descriptor_NCA:
3100 class = 10;
3101 break;
3102 case By_Descriptor_SB:
3103 class = 15;
3104 break;
3105 case By_Descriptor:
3106 case By_Descriptor_S:
3107 default:
3108 class = 1;
3109 break;
3112 /* Make the type for a 64bit descriptor for VMS. The first six fields
3113 are the same for all types. */
3115 field_list64 = chainon (field_list64,
3116 make_descriptor_field ("MBO",
3117 gnat_type_for_size (16, 1),
3118 record64_type, size_int (1)));
3120 field_list64 = chainon (field_list64,
3121 make_descriptor_field ("DTYPE",
3122 gnat_type_for_size (8, 1),
3123 record64_type, size_int (dtype)));
3124 field_list64 = chainon (field_list64,
3125 make_descriptor_field ("CLASS",
3126 gnat_type_for_size (8, 1),
3127 record64_type, size_int (class)));
3129 field_list64 = chainon (field_list64,
3130 make_descriptor_field ("MBMO",
3131 gnat_type_for_size (32, 1),
3132 record64_type, ssize_int (-1)));
3134 field_list64
3135 = chainon (field_list64,
3136 make_descriptor_field
3137 ("LENGTH", gnat_type_for_size (64, 1), record64_type,
3138 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
3140 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
3142 field_list64
3143 = chainon (field_list64,
3144 make_descriptor_field
3145 ("POINTER", pointer64_type, record64_type,
3146 build_unary_op (ADDR_EXPR,
3147 pointer64_type,
3148 build0 (PLACEHOLDER_EXPR, type))));
3150 switch (mech)
3152 case By_Descriptor:
3153 case By_Descriptor_S:
3154 break;
3156 case By_Descriptor_SB:
3157 field_list64
3158 = chainon (field_list64,
3159 make_descriptor_field
3160 ("SB_L1", gnat_type_for_size (64, 1), record64_type,
3161 TREE_CODE (type) == ARRAY_TYPE
3162 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
3163 field_list64
3164 = chainon (field_list64,
3165 make_descriptor_field
3166 ("SB_U1", gnat_type_for_size (64, 1), record64_type,
3167 TREE_CODE (type) == ARRAY_TYPE
3168 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
3169 break;
3171 case By_Descriptor_A:
3172 case By_Descriptor_NCA:
3173 field_list64 = chainon (field_list64,
3174 make_descriptor_field ("SCALE",
3175 gnat_type_for_size (8, 1),
3176 record64_type,
3177 size_zero_node));
3179 field_list64 = chainon (field_list64,
3180 make_descriptor_field ("DIGITS",
3181 gnat_type_for_size (8, 1),
3182 record64_type,
3183 size_zero_node));
3185 field_list64
3186 = chainon (field_list64,
3187 make_descriptor_field
3188 ("AFLAGS", gnat_type_for_size (8, 1), record64_type,
3189 size_int (mech == By_Descriptor_NCA
3191 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
3192 : (TREE_CODE (type) == ARRAY_TYPE
3193 && TYPE_CONVENTION_FORTRAN_P (type)
3194 ? 224 : 192))));
3196 field_list64 = chainon (field_list64,
3197 make_descriptor_field ("DIMCT",
3198 gnat_type_for_size (8, 1),
3199 record64_type,
3200 size_int (ndim)));
3202 field_list64 = chainon (field_list64,
3203 make_descriptor_field ("MBZ",
3204 gnat_type_for_size (32, 1),
3205 record64_type,
3206 size_int (0)));
3207 field_list64 = chainon (field_list64,
3208 make_descriptor_field ("ARSIZE",
3209 gnat_type_for_size (64, 1),
3210 record64_type,
3211 size_in_bytes (type)));
3213 /* Now build a pointer to the 0,0,0... element. */
3214 tem = build0 (PLACEHOLDER_EXPR, type);
3215 for (i = 0, inner_type = type; i < ndim;
3216 i++, inner_type = TREE_TYPE (inner_type))
3217 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3218 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3219 NULL_TREE, NULL_TREE);
3221 field_list64
3222 = chainon (field_list64,
3223 make_descriptor_field
3224 ("A0",
3225 build_pointer_type_for_mode (inner_type, DImode, false),
3226 record64_type,
3227 build1 (ADDR_EXPR,
3228 build_pointer_type_for_mode (inner_type, DImode,
3229 false),
3230 tem)));
3232 /* Next come the addressing coefficients. */
3233 tem = size_one_node;
3234 for (i = 0; i < ndim; i++)
3236 char fname[3];
3237 tree idx_length
3238 = size_binop (MULT_EXPR, tem,
3239 size_binop (PLUS_EXPR,
3240 size_binop (MINUS_EXPR,
3241 TYPE_MAX_VALUE (idx_arr[i]),
3242 TYPE_MIN_VALUE (idx_arr[i])),
3243 size_int (1)));
3245 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3246 fname[1] = '0' + i, fname[2] = 0;
3247 field_list64
3248 = chainon (field_list64,
3249 make_descriptor_field (fname,
3250 gnat_type_for_size (64, 1),
3251 record64_type, idx_length));
3253 if (mech == By_Descriptor_NCA)
3254 tem = idx_length;
3257 /* Finally here are the bounds. */
3258 for (i = 0; i < ndim; i++)
3260 char fname[3];
3262 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3263 field_list64
3264 = chainon (field_list64,
3265 make_descriptor_field
3266 (fname, gnat_type_for_size (64, 1), record64_type,
3267 TYPE_MIN_VALUE (idx_arr[i])));
3269 fname[0] = 'U';
3270 field_list64
3271 = chainon (field_list64,
3272 make_descriptor_field
3273 (fname, gnat_type_for_size (64, 1), record64_type,
3274 TYPE_MAX_VALUE (idx_arr[i])));
3276 break;
3278 default:
3279 post_error ("unsupported descriptor type for &", gnat_entity);
3282 finish_record_type (record64_type, field_list64, 0, true);
3283 create_type_decl (create_concat_name (gnat_entity, "DESC64"), record64_type,
3284 NULL, true, false, gnat_entity);
3286 return record64_type;
3289 /* Utility routine for the code above: make a descriptor field. */
3291 static tree
3292 make_descriptor_field (const char *name, tree type,
3293 tree rec_type, tree initial)
3295 tree field
3296 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
3298 DECL_INITIAL (field) = initial;
3299 return field;
3302 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3303 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3304 which the VMS descriptor is passed. */
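/* For a fat pointer target type, the conversion pairs the descriptor's
   POINTER value (as the P_ARRAY field) with the address of a bounds
   template (as the P_BOUNDS field), the latter being either rebuilt from
   the bounds stored in the descriptor or synthesized as {1, LENGTH}.  */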
3306 static tree
3307 convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3309 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3310 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3311 /* The CLASS field is the 3rd field in the descriptor. */
3312 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3313 /* The POINTER field is the 6th field in the descriptor. */
3314 tree pointer64 = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (class)));
3316 /* Retrieve the value of the POINTER field. */
3317 tree gnu_expr64
3318 = build3 (COMPONENT_REF, TREE_TYPE (pointer64), desc, pointer64, NULL_TREE);
3320 if (POINTER_TYPE_P (gnu_type))
3321 return convert (gnu_type, gnu_expr64);
3323 else if (TYPE_FAT_POINTER_P (gnu_type))
3325 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3326 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3327 tree template_type = TREE_TYPE (p_bounds_type);
3328 tree min_field = TYPE_FIELDS (template_type);
3329 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3330 tree template, template_addr, aflags, dimct, t, u;
3331 /* See the head comment of build_vms_descriptor. */
3332 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
3333 tree lfield, ufield;
3335 /* Convert POINTER to the type of the P_ARRAY field. */
3336 gnu_expr64 = convert (p_array_type, gnu_expr64);
3338 switch (iclass)
3340 case 1: /* Class S */
3341 case 15: /* Class SB */
3342 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3343 t = TREE_CHAIN (TREE_CHAIN (class));
3344 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3345 t = tree_cons (min_field,
3346 convert (TREE_TYPE (min_field), integer_one_node),
3347 tree_cons (max_field,
3348 convert (TREE_TYPE (max_field), t),
3349 NULL_TREE));
3350 template = gnat_build_constructor (template_type, t);
3351 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3353 /* For class S, we are done. */
3354 if (iclass == 1)
3355 break;
3357 /* Test that we really have an SB descriptor, as DEC Ada does. */
3358 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
3359 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
3360 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3361 /* If so, there is already a template in the descriptor and
3362 it is located right after the POINTER field. The fields are
3363 64-bit, so they must be repacked. */
3364 t = TREE_CHAIN (pointer64);
3365 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3366 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3368 t = TREE_CHAIN (t);
3369 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3370 ufield = convert
3371 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3373 /* Build the template in the form of a constructor. */
3374 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3375 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3376 ufield, NULL_TREE));
3377 template = gnat_build_constructor (template_type, t);
3379 /* Otherwise use the {1, LENGTH} template we built above. */
3380 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3381 build_unary_op (ADDR_EXPR, p_bounds_type,
3382 template),
3383 template_addr);
3384 break;
3386 case 4: /* Class A */
3387 /* The AFLAGS field is the 3rd field after the pointer in the
3388 descriptor. */
3389 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer64)));
3390 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3391 /* The DIMCT field is the next field in the descriptor after
3392 aflags. */
3393 t = TREE_CHAIN (t);
3394 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3395 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3396 or FL_COEFF or FL_BOUNDS not set. */
3397 u = build_int_cst (TREE_TYPE (aflags), 192);
3398 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3399 build_binary_op (NE_EXPR, integer_type_node,
3400 dimct,
3401 convert (TREE_TYPE (dimct),
3402 size_one_node)),
3403 build_binary_op (NE_EXPR, integer_type_node,
3404 build2 (BIT_AND_EXPR,
3405 TREE_TYPE (aflags),
3406 aflags, u),
3407 u));
3408 /* There is already a template in the descriptor and it is located
3409 in block 3. The fields are 64-bit so they must be repacked. */
3410 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN
3411 (t)))));
3412 lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3413 lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);
3415 t = TREE_CHAIN (t);
3416 ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3417 ufield = convert
3418 (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);
3420 /* Build the template in the form of a constructor. */
3421 t = tree_cons (TYPE_FIELDS (template_type), lfield,
3422 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
3423 ufield, NULL_TREE));
3424 template = gnat_build_constructor (template_type, t);
3425 template = build3 (COND_EXPR, p_bounds_type, u,
3426 build_call_raise (CE_Length_Check_Failed, Empty,
3427 N_Raise_Constraint_Error),
3428 template);
3429 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
3430 break;
3432 case 10: /* Class NCA */
3433 default:
3434 post_error ("unsupported descriptor type for &", gnat_subprog);
3435 template_addr = integer_zero_node;
3436 break;
3439 /* Build the fat pointer in the form of a constructor. */
3440 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr64,
3441 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3442 template_addr, NULL_TREE));
3443 return gnat_build_constructor (gnu_type, t);
3446 else
3447 gcc_unreachable ();
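/* Illustrative sketch, not part of the original source: for a class S
   (string) descriptor, the {1, LENGTH} template built above gives the
   array the bounds (1 .. LENGTH).  Assuming a hypothetical C rendering of
   the bounds record, the constructor amounts to

     struct bounds { long long lb, ub; };
     struct bounds t = { 1, desc->LENGTH64 };

   For class SB, the template already stored in the descriptor is used
   instead, after its two 64-bit fields have been repacked into the
   template type expected by the fat pointer.  */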
3450 /* Convert GNU_EXPR, a pointer to a 32-bit VMS descriptor, to GNU_TYPE, a
3451 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3452 which the VMS descriptor is passed. */
3454 static tree
3455 convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
3457 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3458 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3459 /* The CLASS field is the 3rd field in the descriptor. */
3460 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
3461 /* The POINTER field is the 4th field in the descriptor. */
3462 tree pointer = TREE_CHAIN (class);
3464 /* Retrieve the value of the POINTER field. */
3465 tree gnu_expr32
3466 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
3468 if (POINTER_TYPE_P (gnu_type))
3469 return convert (gnu_type, gnu_expr32);
3471 else if (TYPE_FAT_POINTER_P (gnu_type))
3473 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
3474 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
3475 tree template_type = TREE_TYPE (p_bounds_type);
3476 tree min_field = TYPE_FIELDS (template_type);
3477 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
3478 tree template, template_addr, aflags, dimct, t, u;
3479 /* See the head comment of build_vms_descriptor. */
3480 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
3482 /* Convert POINTER to the type of the P_ARRAY field. */
3483 gnu_expr32 = convert (p_array_type, gnu_expr32);
3485 switch (iclass)
3487 case 1: /* Class S */
3488 case 15: /* Class SB */
3489 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3490 t = TYPE_FIELDS (desc_type);
3491 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3492 t = tree_cons (min_field,
3493 convert (TREE_TYPE (min_field), integer_one_node),
3494 tree_cons (max_field,
3495 convert (TREE_TYPE (max_field), t),
3496 NULL_TREE));
3497 template = gnat_build_constructor (template_type, t);
3498 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3500 /* For class S, we are done. */
3501 if (iclass == 1)
3502 break;
3504 /* Test that we really have a SB descriptor, like DEC Ada. */
3505 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
3506 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
3507 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
3508 /* If so, there is already a template in the descriptor and
3509 it is located right after the POINTER field. */
3510 t = TREE_CHAIN (pointer);
3511 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3512 /* Otherwise use the {1, LENGTH} template we build above. */
3513 template_addr = build3 (COND_EXPR, p_bounds_type, u,
3514 build_unary_op (ADDR_EXPR, p_bounds_type,
3515 template),
3516 template_addr);
3517 break;
3519 case 4: /* Class A */
3520 /* The AFLAGS field is the 7th field in the descriptor. */
3521 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
3522 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3523 /* The DIMCT field is the 8th field in the descriptor. */
3524 t = TREE_CHAIN (t);
3525 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3526 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3527 or FL_COEFF or FL_BOUNDS not set. */
3528 u = build_int_cst (TREE_TYPE (aflags), 192);
3529 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
3530 build_binary_op (NE_EXPR, integer_type_node,
3531 dimct,
3532 convert (TREE_TYPE (dimct),
3533 size_one_node)),
3534 build_binary_op (NE_EXPR, integer_type_node,
3535 build2 (BIT_AND_EXPR,
3536 TREE_TYPE (aflags),
3537 aflags, u),
3538 u));
3539 /* There is already a template in the descriptor and it is
3540 located at the start of block 3 (12th field). */
3541 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
3542 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
3543 template = build3 (COND_EXPR, p_bounds_type, u,
3544 build_call_raise (CE_Length_Check_Failed, Empty,
3545 N_Raise_Constraint_Error),
3546 template);
3547 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
3548 break;
3550 case 10: /* Class NCA */
3551 default:
3552 post_error ("unsupported descriptor type for &", gnat_subprog);
3553 template_addr = integer_zero_node;
3554 break;
3557 /* Build the fat pointer in the form of a constructor. */
3558 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr32,
3559 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
3560 template_addr, NULL_TREE));
3562 return gnat_build_constructor (gnu_type, t);
3565 else
3566 gcc_unreachable ();
3569 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3570 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3571 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
3572 VMS descriptor is passed. */
3574 static tree
3575 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3576 Entity_Id gnat_subprog)
3578 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3579 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3580 tree mbo = TYPE_FIELDS (desc_type);
3581 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3582 tree mbmo = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (mbo)));
3583 tree is64bit, gnu_expr32, gnu_expr64;
3585 /* If the field name is not MBO, the descriptor must be 32-bit with no
3586 alternate form. Otherwise the primary must be 64-bit and the alternate 32-bit. */
3587 if (strcmp (mbostr, "MBO") != 0)
3588 return convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3590 /* Build the test for 64-bit descriptor. */
3591 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3592 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
3593 is64bit
3594 = build_binary_op (TRUTH_ANDIF_EXPR, integer_type_node,
3595 build_binary_op (EQ_EXPR, integer_type_node,
3596 convert (integer_type_node, mbo),
3597 integer_one_node),
3598 build_binary_op (EQ_EXPR, integer_type_node,
3599 convert (integer_type_node, mbmo),
3600 integer_minus_one_node));
3602 /* Build the 2 possible end results. */
3603 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
3604 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3605 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3607 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
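/* Illustrative sketch, not part of the original source: on a hypothetical
   C rendering of the descriptor prefix, the run-time dispatch built above
   amounts to

     int is64bit = prefix->MBO == 1 && prefix->MBMO == -1;
     result = is64bit ? from_descriptor64 (p) : from_descriptor32 (p32);

   where from_descriptor64 and from_descriptor32 stand for the expansions
   produced by convert_vms_descriptor64 and convert_vms_descriptor32.  */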
3610 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3611 and the GNAT node GNAT_SUBPROG. */
3613 void
3614 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
3616 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
3617 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
3618 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
3619 tree gnu_body;
3621 gnu_subprog_type = TREE_TYPE (gnu_subprog);
3622 gnu_param_list = NULL_TREE;
3624 begin_subprog_body (gnu_stub_decl);
3625 gnat_pushlevel ();
3627 start_stmt_group ();
3629 /* Loop over the parameters of the stub and translate any of them
3630 passed by descriptor into a by reference one. */
3631 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
3632 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
3633 gnu_stub_param;
3634 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
3635 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
3637 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
3638 gnu_param
3639 = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
3640 gnu_stub_param,
3641 DECL_PARM_ALT_TYPE (gnu_stub_param),
3642 gnat_subprog);
3643 else
3644 gnu_param = gnu_stub_param;
3646 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
3649 gnu_body = end_stmt_group ();
3651 /* Invoke the internal subprogram. */
3652 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
3653 gnu_subprog);
3654 gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
3655 gnu_subprog_addr,
3656 nreverse (gnu_param_list));
3658 /* Propagate the return value, if any. */
3659 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
3660 append_to_statement_list (gnu_subprog_call, &gnu_body);
3661 else
3662 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
3663 gnu_subprog_call),
3664 &gnu_body);
3666 gnat_poplevel ();
3668 allocate_struct_function (gnu_stub_decl, false);
3669 end_subprog_body (gnu_body, false);
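/* Illustrative shape of the stub built above, under the assumption of a
   single by-descriptor parameter (pseudo-C, not generated text):

     RET f__stub (struct desc *d)
     {
       return f (pointer_from_descriptor (d));
     }

   where pointer_from_descriptor stands for the inline expansion produced
   by convert_vms_descriptor for the parameter.  */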
3672 /* Build a type to be used to represent an aliased object whose nominal
3673 type is an unconstrained array. This consists of a RECORD_TYPE containing
3674 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3675 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3676 is used to represent an arbitrary unconstrained object. Use NAME
3677 as the name of the record. */
3679 tree
3680 build_unc_object_type (tree template_type, tree object_type, tree name)
3682 tree type = make_node (RECORD_TYPE);
3683 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3684 template_type, type, 0, 0, 0, 1);
3685 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3686 type, 0, 0, 0, 1);
3688 TYPE_NAME (type) = name;
3689 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3690 finish_record_type (type,
3691 chainon (chainon (NULL_TREE, template_field),
3692 array_field),
3693 0, false);
3695 return type;
3698 /* Same, taking a thin or fat pointer type instead of a template type. */
3700 tree
3701 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3702 tree name)
3704 tree template_type;
3706 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3708 template_type
3709 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3710 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3711 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3712 return build_unc_object_type (template_type, object_type, name);
3715 /* Shift the component offsets within an unconstrained object TYPE to make it
3716 suitable for use as a designated type for thin pointers. */
3718 void
3719 shift_unc_components_for_thin_pointers (tree type)
3721 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3722 allocated past the BOUNDS template. The designated type is adjusted to
3723 have ARRAY at position zero and the template at a negative offset, so
3724 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3726 tree bounds_field = TYPE_FIELDS (type);
3727 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
3729 DECL_FIELD_OFFSET (bounds_field)
3730 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3732 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3733 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
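/* Illustrative sketch, not part of the original source: after the shift, a
   thin pointer P designates the ARRAY data and the BOUNDS template lies
   just before it, as if one had written in C

     struct unc_object { struct bounds BOUNDS; char ARRAY[1]; } *obj;
     char *p = obj->ARRAY;          <-- the thin pointer value
     struct bounds *b = (struct bounds *) (p - sizeof (struct bounds));

   so COMPONENT_REFs for BOUNDS through *P use a negative field offset.  */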
3736 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
3737 the normal case this is just two adjustments, but we have more to do
3738 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
3740 void
3741 update_pointer_to (tree old_type, tree new_type)
3743 tree ptr = TYPE_POINTER_TO (old_type);
3744 tree ref = TYPE_REFERENCE_TO (old_type);
3745 tree ptr1, ref1;
3746 tree type;
3748 /* If this is the main variant, process all the other variants first. */
3749 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3750 for (type = TYPE_NEXT_VARIANT (old_type); type;
3751 type = TYPE_NEXT_VARIANT (type))
3752 update_pointer_to (type, new_type);
3754 /* If no pointer or reference, we are done. */
3755 if (!ptr && !ref)
3756 return;
3758 /* Merge the old type qualifiers in the new type.
3760 Each old variant has qualifiers for specific reasons, and the new
3761 designated type as well. Each set of qualifiers represents useful
3762 information grabbed at some point, and merging the two simply unifies
3763 these inputs into the final type description.
3765 Consider for instance a volatile type frozen after an access to constant
3766 type designating it. After the designated type freeze, we get here with a
3767 volatile new_type and a dummy old_type with a readonly variant, created
3768 when the access type was processed. We shall make a volatile and readonly
3769 designated type, because that's what it really is.
3771 We might also get here for a non-dummy old_type variant with different
3772 qualifiers than the new_type ones, for instance in some cases of pointers
3773 to private record type elaboration (see the comments around the call to
3774 this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
3775 qualifiers in those cases too, to avoid accidentally discarding the
3776 initial set, and will often end up with old_type == new_type then. */
3777 new_type = build_qualified_type (new_type,
3778 TYPE_QUALS (old_type)
3779 | TYPE_QUALS (new_type));
3781 /* If the new type and the old one are identical, there is nothing to
3782 update. */
3783 if (old_type == new_type)
3784 return;
3786 /* Otherwise, first handle the simple case. */
3787 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3789 TYPE_POINTER_TO (new_type) = ptr;
3790 TYPE_REFERENCE_TO (new_type) = ref;
3792 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3793 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3794 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3795 TREE_TYPE (ptr1) = new_type;
3797 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3798 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3799 ref1 = TYPE_NEXT_VARIANT (ref1))
3800 TREE_TYPE (ref1) = new_type;
3803 /* Now deal with the unconstrained array case. In this case the "pointer"
3804 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3805 Turn them into pointers to the correct types using update_pointer_to. */
3806 else if (TREE_CODE (ptr) != RECORD_TYPE || !TYPE_IS_FAT_POINTER_P (ptr))
3807 gcc_unreachable ();
3809 else
3811 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3812 tree array_field = TYPE_FIELDS (ptr);
3813 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3814 tree new_ptr = TYPE_POINTER_TO (new_type);
3815 tree new_ref;
3816 tree var;
3818 /* Make pointers to the dummy template point to the real template. */
3819 update_pointer_to
3820 (TREE_TYPE (TREE_TYPE (bounds_field)),
3821 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3823 /* The references to the template bounds present in the array type
3824 are made through a PLACEHOLDER_EXPR of type new_ptr. Since we
3825 are updating ptr to make it a full replacement for new_ptr as
3826 pointer to new_type, we must rework the PLACEHOLDER_EXPR so as
3827 to make it of type ptr. */
3828 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3829 build0 (PLACEHOLDER_EXPR, ptr),
3830 bounds_field, NULL_TREE);
3832 /* Create the new array for the new PLACEHOLDER_EXPR and make
3833 pointers to the dummy array point to it.
3835 ??? This is now the only use of substitute_in_type,
3836 which is a very "heavy" routine to do this, so it
3837 should be replaced at some point. */
3838 update_pointer_to
3839 (TREE_TYPE (TREE_TYPE (array_field)),
3840 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3841 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3843 /* Make ptr the pointer to new_type. */
3844 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3845 = TREE_TYPE (new_type) = ptr;
3847 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3848 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3850 /* Now handle updating the allocation record, what the thin pointer
3851 points to. Update all pointers from the old record into the new
3852 one, update the type of the array field, and recompute the size. */
3853 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3855 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3856 = TREE_TYPE (TREE_TYPE (array_field));
3858 /* The size recomputation needs to account for alignment constraints, so
3859 we let layout_type work it out. This will reset the field offsets to
3860 what they would be in a regular record, so we shift them back to what
3861 we want them to be for a thin pointer designated type afterwards. */
3862 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3863 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3864 TYPE_SIZE (new_obj_rec) = 0;
3865 layout_type (new_obj_rec);
3867 shift_unc_components_for_thin_pointers (new_obj_rec);
3869 /* We are done, at last. */
3870 rest_of_record_type_compilation (ptr);
3874 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3875 unconstrained one. This involves making or finding a template. */
3877 static tree
3878 convert_to_fat_pointer (tree type, tree expr)
3880 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3881 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3882 tree etype = TREE_TYPE (expr);
3883 tree template;
3885 /* If EXPR is null, make a fat pointer that contains null pointers to the
3886 template and array. */
3887 if (integer_zerop (expr))
3888 return
3889 gnat_build_constructor
3890 (type,
3891 tree_cons (TYPE_FIELDS (type),
3892 convert (p_array_type, expr),
3893 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3894 convert (build_pointer_type (template_type),
3895 expr),
3896 NULL_TREE)));
3898 /* If EXPR is a thin pointer, make the template and data from the record. */
3899 else if (TYPE_THIN_POINTER_P (etype))
3901 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3903 expr = save_expr (expr);
3904 if (TREE_CODE (expr) == ADDR_EXPR)
3905 expr = TREE_OPERAND (expr, 0);
3906 else
3907 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3909 template = build_component_ref (expr, NULL_TREE, fields, false);
3910 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3911 build_component_ref (expr, NULL_TREE,
3912 TREE_CHAIN (fields), false));
3915 /* Otherwise, build the constructor for the template. */
3916 else
3917 template = build_template (template_type, TREE_TYPE (etype), expr);
3919 /* The final result is a constructor for the fat pointer.
3921 If EXPR is an argument of a foreign convention subprogram, the type it
3922 points to is directly the component type. In this case, the expression
3923 type may not match the corresponding FIELD_DECL type at this point, so we
3924 call "convert" here to fix that up if necessary. This type consistency is
3925 required, for instance because it ensures that possible later folding of
3926 COMPONENT_REFs against this constructor always yields something of the
3927 same type as the initial reference.
3929 Note that the call to "build_template" above is still fine because it
3930 will only refer to the provided TEMPLATE_TYPE in this case. */
3931 return
3932 gnat_build_constructor
3933 (type,
3934 tree_cons (TYPE_FIELDS (type),
3935 convert (p_array_type, expr),
3936 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3937 build_unary_op (ADDR_EXPR, NULL_TREE, template),
3938 NULL_TREE)));
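/* Illustrative sketch, not part of the original source: the constructor
   returned above fills in the two fields of the fat pointer, roughly as if
   one had written in C, for a hypothetical layout,

     struct fat_ptr { char *p_array; struct bounds *p_bounds; };
     struct fat_ptr fp = { data_ptr, &template };

   with a null EXPR yielding { NULL, NULL } instead.  */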
3941 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3942 is a fat pointer, so convert EXPR to one first if it
3943 is not already a fat pointer. */
3945 static tree
3946 convert_to_thin_pointer (tree type, tree expr)
3948 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
3949 expr
3950 = convert_to_fat_pointer
3951 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3953 /* We get the pointer to the data and use a NOP_EXPR to make it the
3954 proper GCC type. */
3955 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3956 false);
3957 expr = build1 (NOP_EXPR, type, expr);
3959 return expr;
3962 /* Create an expression whose value is that of EXPR,
3963 converted to type TYPE. The TREE_TYPE of the value
3964 is always TYPE. This function implements all reasonable
3965 conversions; callers should filter out those that are
3966 not permitted by the language being compiled. */
3968 tree
3969 convert (tree type, tree expr)
3971 enum tree_code code = TREE_CODE (type);
3972 tree etype = TREE_TYPE (expr);
3973 enum tree_code ecode = TREE_CODE (etype);
3975 /* If EXPR is already the right type, we are done. */
3976 if (type == etype)
3977 return expr;
3979 /* If both input and output have padding and are of variable size, do this
3980 as an unchecked conversion. Likewise if one is a mere variant of the
3981 other, so we avoid a pointless unpad/repad sequence. */
3982 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3983 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3984 && (!TREE_CONSTANT (TYPE_SIZE (type))
3985 || !TREE_CONSTANT (TYPE_SIZE (etype))
3986 || gnat_types_compatible_p (type, etype)
3987 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3988 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3989 ;
3991 /* If the output type has padding, convert to the inner type and
3992 make a constructor to build the record. */
3993 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3995 /* If we previously converted from another type and our type is
3996 of variable size, remove the conversion to avoid the need for
3997 variable-size temporaries. Likewise for a conversion between
3998 original and packable version. */
3999 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4000 && (!TREE_CONSTANT (TYPE_SIZE (type))
4001 || (ecode == RECORD_TYPE
4002 && TYPE_NAME (etype)
4003 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4004 expr = TREE_OPERAND (expr, 0);
4006 /* If we are just removing the padding from expr, convert the original
4007 object if we have variable size in order to avoid the need for some
4008 variable-size temporaries. Likewise if the padding is a mere variant
4009 of the other, so we avoid a pointless unpad/repad sequence. */
4010 if (TREE_CODE (expr) == COMPONENT_REF
4011 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
4012 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4013 && (!TREE_CONSTANT (TYPE_SIZE (type))
4014 || gnat_types_compatible_p (type,
4015 TREE_TYPE (TREE_OPERAND (expr, 0)))
4016 || (ecode == RECORD_TYPE
4017 && TYPE_NAME (etype)
4018 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4019 return convert (type, TREE_OPERAND (expr, 0));
4021 /* If the result type is a padded type with a self-referentially-sized
4022 field and the expression type is a record, do this as an
4023 unchecked conversion. */
4024 else if (TREE_CODE (etype) == RECORD_TYPE
4025 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
4026 return unchecked_convert (type, expr, false);
4028 else
4029 return
4030 gnat_build_constructor (type,
4031 tree_cons (TYPE_FIELDS (type),
4032 convert (TREE_TYPE
4033 (TYPE_FIELDS (type)),
4034 expr),
4035 NULL_TREE));
4038 /* If the input type has padding, remove it and convert to the output type.
4039 The conditions are ordered to ensure that the output type is not
4040 a padding type here, as it is not clear whether the conversion would
4041 always be correct if that were to happen. */
4042 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
4044 tree unpadded;
4046 /* If we have just converted to this padded type, just get the
4047 inner expression. */
4048 if (TREE_CODE (expr) == CONSTRUCTOR
4049 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
4050 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
4051 == TYPE_FIELDS (etype))
4052 unpadded
4053 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
4055 /* Otherwise, build an explicit component reference. */
4056 else
4057 unpadded
4058 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4060 return convert (type, unpadded);
4063 /* If the input is a biased type, adjust first. */
4064 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4065 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4066 fold_convert (TREE_TYPE (etype),
4067 expr),
4068 TYPE_MIN_VALUE (etype)));
4070 /* If the input is a justified modular type, we need to extract the actual
4071 object before converting it to any other type, with the exception of an
4072 unconstrained array or a mere type variant. It is useful to avoid the
4073 extraction and conversion in the type variant case because it could end
4074 up replacing a VAR_DECL expr by a constructor and we might be about to
4075 take the address of the result. */
4076 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4077 && code != UNCONSTRAINED_ARRAY_TYPE
4078 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4079 return convert (type, build_component_ref (expr, NULL_TREE,
4080 TYPE_FIELDS (etype), false));
4082 /* If converting to a type that contains a template, convert to the data
4083 type and then build the template. */
4084 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4086 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
4088 /* If the source already has a template, get a reference to the
4089 associated array only, as we are going to rebuild a template
4090 for the target type anyway. */
4091 expr = maybe_unconstrained_array (expr);
4093 return
4094 gnat_build_constructor
4095 (type,
4096 tree_cons (TYPE_FIELDS (type),
4097 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4098 obj_type, NULL_TREE),
4099 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
4100 convert (obj_type, expr), NULL_TREE)));
4103 /* There are some special cases of expressions that we process
4104 specially. */
4105 switch (TREE_CODE (expr))
4107 case ERROR_MARK:
4108 return expr;
4110 case NULL_EXPR:
4111 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4112 conversion in gnat_expand_expr. NULL_EXPR does not represent
4113 an actual value, so no conversion is needed. */
4114 expr = copy_node (expr);
4115 TREE_TYPE (expr) = type;
4116 return expr;
4118 case STRING_CST:
4119 /* If we are converting a STRING_CST to another constrained array type,
4120 just make a new one in the proper type. */
4121 if (code == ecode && AGGREGATE_TYPE_P (etype)
4122 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4123 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4125 expr = copy_node (expr);
4126 TREE_TYPE (expr) = type;
4127 return expr;
4129 break;
4131 case CONSTRUCTOR:
4132 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
4133 a new one in the proper type. */
4134 if (code == ecode && gnat_types_compatible_p (type, etype))
4136 expr = copy_node (expr);
4137 TREE_TYPE (expr) = type;
4138 return expr;
4141 /* Likewise for a conversion between original and packable version, but
4142 we have to work harder in order to preserve type consistency. */
4143 if (code == ecode
4144 && code == RECORD_TYPE
4145 && TYPE_NAME (type) == TYPE_NAME (etype))
4147 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
4148 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
4149 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
4150 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4151 unsigned HOST_WIDE_INT idx;
4152 tree index, value;
4154 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4156 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
4157 /* We expect only simple constructors. Otherwise, punt. */
4158 if (!(index == efield || index == DECL_ORIGINAL_FIELD (efield)))
4159 break;
4160 elt->index = field;
4161 elt->value = convert (TREE_TYPE (field), value);
4162 efield = TREE_CHAIN (efield);
4163 field = TREE_CHAIN (field);
4166 if (idx == len)
4168 expr = copy_node (expr);
4169 TREE_TYPE (expr) = type;
4170 CONSTRUCTOR_ELTS (expr) = v;
4171 return expr;
4174 break;
4176 case UNCONSTRAINED_ARRAY_REF:
4177 /* Convert this to the type of the inner array by getting the address of
4178 the array from the template. */
4179 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4180 build_component_ref (TREE_OPERAND (expr, 0),
4181 get_identifier ("P_ARRAY"),
4182 NULL_TREE, false));
4183 etype = TREE_TYPE (expr);
4184 ecode = TREE_CODE (etype);
4185 break;
4187 case VIEW_CONVERT_EXPR:
4189 /* GCC 4.x is very sensitive to type consistency overall, and view
4190 conversions thus are very frequent. Even though just "convert"ing
4191 the inner operand to the output type is fine in most cases, it
4192 might expose unexpected input/output type mismatches in special
4193 circumstances so we avoid such recursive calls when we can. */
4194 tree op0 = TREE_OPERAND (expr, 0);
4196 /* If we are converting back to the original type, we can just
4197 lift the input conversion. This is a common occurrence with
4198 switches back-and-forth amongst type variants. */
4199 if (type == TREE_TYPE (op0))
4200 return op0;
4202 /* Otherwise, if we're converting between two aggregate types, we
4203 might be allowed to substitute the VIEW_CONVERT_EXPR target type
4204 in place or to just convert the inner expression. */
4205 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4207 /* If we are converting between mere variants, we can just
4208 substitute the VIEW_CONVERT_EXPR in place. */
4209 if (gnat_types_compatible_p (type, etype))
4210 return build1 (VIEW_CONVERT_EXPR, type, op0);
4212 /* Otherwise, we may just bypass the input view conversion unless
4213 one of the types is a fat pointer, which is handled by
4214 specialized code below which relies on exact type matching. */
4215 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4216 return convert (type, op0);
4219 break;
4221 case INDIRECT_REF:
4222 /* If both types are record types, just convert the pointer and
4223 make a new INDIRECT_REF.
4225 ??? Disable this for now since it causes problems with the
4226 code in build_binary_op for MODIFY_EXPR which wants to
4227 strip off conversions. But that code really is a mess and
4228 we need to do this a much better way some time. */
4229 if (0
4230 && (TREE_CODE (type) == RECORD_TYPE
4231 || TREE_CODE (type) == UNION_TYPE)
4232 && (TREE_CODE (etype) == RECORD_TYPE
4233 || TREE_CODE (etype) == UNION_TYPE)
4234 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4235 return build_unary_op (INDIRECT_REF, NULL_TREE,
4236 convert (build_pointer_type (type),
4237 TREE_OPERAND (expr, 0)));
4238 break;
4240 default:
4241 break;
4244 /* Check for converting to a pointer to an unconstrained array. */
4245 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4246 return convert_to_fat_pointer (type, expr);
4248 /* If we are converting between two aggregate types that are mere
4249 variants, just make a VIEW_CONVERT_EXPR. */
4250 else if (code == ecode
4251 && AGGREGATE_TYPE_P (type)
4252 && gnat_types_compatible_p (type, etype))
4253 return build1 (VIEW_CONVERT_EXPR, type, expr);
4255 /* In all other cases of related types, make a NOP_EXPR. */
4256 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4257 || (code == INTEGER_CST && ecode == INTEGER_CST
4258 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
4259 return fold_convert (type, expr);
4261 switch (code)
4263 case VOID_TYPE:
4264 return fold_build1 (CONVERT_EXPR, type, expr);
4266 case INTEGER_TYPE:
4267 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4268 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4269 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4270 return unchecked_convert (type, expr, false);
4271 else if (TYPE_BIASED_REPRESENTATION_P (type))
4272 return fold_convert (type,
4273 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4274 convert (TREE_TYPE (type), expr),
4275 TYPE_MIN_VALUE (type)));
4277 /* ... fall through ... */
4279 case ENUMERAL_TYPE:
4280 case BOOLEAN_TYPE:
4281 /* If we are converting an additive expression to an integer type
4282 with lower precision, be wary of the optimization that can be
4283 applied by convert_to_integer. There are 2 problematic cases:
4284 - if the first operand was originally of a biased type,
4285 because we could be recursively called to convert it
4286 to an intermediate type and thus rematerialize the
4287 additive operator endlessly,
4288 - if the expression contains a placeholder, because an
4289 intermediate conversion that changes the sign could
4290 be inserted and thus introduce an artificial overflow
4291 at compile time when the placeholder is substituted. */
4292 if (code == INTEGER_TYPE
4293 && ecode == INTEGER_TYPE
4294 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4295 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4297 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4299 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4300 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4301 || CONTAINS_PLACEHOLDER_P (expr))
4302 return build1 (NOP_EXPR, type, expr);
4305 return fold (convert_to_integer (type, expr));
4307 case POINTER_TYPE:
4308 case REFERENCE_TYPE:
4309 /* If converting between two pointers to records denoting
4310 both a template and type, adjust if needed to account
4311 for any differing offsets, since one might be negative. */
4312 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
4314 tree bit_diff
4315 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4316 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4317 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
4318 sbitsize_int (BITS_PER_UNIT));
4320 expr = build1 (NOP_EXPR, type, expr);
4321 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4322 if (integer_zerop (byte_diff))
4323 return expr;
4325 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4326 fold (convert (sizetype, byte_diff)));
4329 /* If converting to a thin pointer, handle specially. */
4330 if (TYPE_THIN_POINTER_P (type)
4331 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4332 return convert_to_thin_pointer (type, expr);
4334 /* If converting fat pointer to normal pointer, get the pointer to the
4335 array and then convert it. */
4336 else if (TYPE_FAT_POINTER_P (etype))
4337 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
4338 NULL_TREE, false);
4340 return fold (convert_to_pointer (type, expr));
4342 case REAL_TYPE:
4343 return fold (convert_to_real (type, expr));
4345 case RECORD_TYPE:
4346 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4347 return
4348 gnat_build_constructor
4349 (type, tree_cons (TYPE_FIELDS (type),
4350 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
4351 NULL_TREE));
4353 /* ... fall through ... */
4355 case ARRAY_TYPE:
4356 /* In these cases, assume the front-end has validated the conversion.
4357 If the conversion is valid, it will be a bit-wise conversion, so
4358 it can be viewed as an unchecked conversion. */
4359 return unchecked_convert (type, expr, false);
4361 case UNION_TYPE:
4362 /* This is either a conversion between a tagged type and some
4363 subtype, which we have to mark as a UNION_TYPE because of
4364 overlapping fields or a conversion of an Unchecked_Union. */
4365 return unchecked_convert (type, expr, false);
4367 case UNCONSTRAINED_ARRAY_TYPE:
4368 /* If EXPR is a constrained array, take its address, convert it to a
4369 fat pointer, and then dereference it. Likewise if EXPR is a
4370 record containing both a template and a constrained array.
4371 Note that a record representing a justified modular type
4372 always represents a packed constrained array. */
4373 if (ecode == ARRAY_TYPE
4374 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4375 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4376 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4377 return
4378 build_unary_op
4379 (INDIRECT_REF, NULL_TREE,
4380 convert_to_fat_pointer (TREE_TYPE (type),
4381 build_unary_op (ADDR_EXPR,
4382 NULL_TREE, expr)));
4384 /* Do something very similar for converting one unconstrained
4385 array to another. */
4386 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4387 return
4388 build_unary_op (INDIRECT_REF, NULL_TREE,
4389 convert (TREE_TYPE (type),
4390 build_unary_op (ADDR_EXPR,
4391 NULL_TREE, expr)));
4392 else
4393 gcc_unreachable ();
4395 case COMPLEX_TYPE:
4396 return fold (convert_to_complex (type, expr));
4398 default:
4399 gcc_unreachable ();
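/* Illustrative sketch, not part of the original source: for a biased
   integer subtype, the stored value and the logical value differ by the
   subtype's lower bound, so the two adjustments made in convert above
   amount to

     logical = biased + TYPE_MIN_VALUE;     (input has biased representation)
     biased  = logical - TYPE_MIN_VALUE;    (output has biased representation)

   e.g. for a subtype with range 100 .. 115 held in 4 bits, the logical
   value 103 is stored as 3.  */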
4403 /* Remove all conversions that are done in EXP. This includes converting
4404 from a padded type or to a justified modular type. If TRUE_ADDRESS
4405 is true, always return the address of the containing object even if
4406 the address is not bit-aligned. */
4408 tree
4409 remove_conversions (tree exp, bool true_address)
4411 switch (TREE_CODE (exp))
4413 case CONSTRUCTOR:
4414 if (true_address
4415 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4416 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4417 return
4418 remove_conversions (VEC_index (constructor_elt,
4419 CONSTRUCTOR_ELTS (exp), 0)->value,
4420 true);
4421 break;
4423 case COMPONENT_REF:
4424 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
4425 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4426 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4427 break;
4429 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
4430 CASE_CONVERT:
4431 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4433 default:
4434 break;
4437 return exp;
4440 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4441 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
4442 likewise return an expression pointing to the underlying array. */
4444 tree
4445 maybe_unconstrained_array (tree exp)
4447 enum tree_code code = TREE_CODE (exp);
4448 tree new;
4450 switch (TREE_CODE (TREE_TYPE (exp)))
4452 case UNCONSTRAINED_ARRAY_TYPE:
4453 if (code == UNCONSTRAINED_ARRAY_REF)
4455 new
4456 = build_unary_op (INDIRECT_REF, NULL_TREE,
4457 build_component_ref (TREE_OPERAND (exp, 0),
4458 get_identifier ("P_ARRAY"),
4459 NULL_TREE, false));
4460 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
4461 return new;
4464 else if (code == NULL_EXPR)
4465 return build1 (NULL_EXPR,
4466 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
4467 (TREE_TYPE (TREE_TYPE (exp))))),
4468 TREE_OPERAND (exp, 0));
4470 case RECORD_TYPE:
4471 /* If this is a padded type, convert to the unpadded type and see if
4472 it contains a template. */
4473 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
4475 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
4476 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
4477 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
4478 return
4479 build_component_ref (new, NULL_TREE,
4480 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
4481 0);
4483 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
4484 return
4485 build_component_ref (exp, NULL_TREE,
4486 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
4487 break;
4489 default:
4490 break;
4493 return exp;
4496 /* Return true if EXPR is an expression that can be folded as an operand
4497 of a VIEW_CONVERT_EXPR. See the head comment of unchecked_convert for
4498 the rationale. */
4500 static bool
4501 can_fold_for_view_convert_p (tree expr)
4503 tree t1, t2;
4505 /* The folder will fold NOP_EXPRs between integral types with the same
4506 precision (in the middle-end's sense). We cannot allow it if the
4507 types don't have the same precision in the Ada sense as well. */
4508 if (TREE_CODE (expr) != NOP_EXPR)
4509 return true;
4511 t1 = TREE_TYPE (expr);
4512 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4514 /* Defer to the folder for non-integral conversions. */
4515 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4516 return true;
4518 /* Only fold conversions that preserve both precisions. */
4519 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4520 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4521 return true;
4523 return false;
4526 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4527 If NOTRUNC_P is true, truncation operations should be suppressed.
4529 Special care is required with (source or target) integral types whose
4530 precision is not equal to their size, to make sure we fetch or assign
4531 the value bits whose location might depend on the endianness, e.g.
4533 Rmsize : constant := 8;
4534 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4536 type Bit_Array is array (1 .. Rmsize) of Boolean;
4537 pragma Pack (Bit_Array);
4539 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4541 Value : Int := 2#1000_0001#;
4542 Vbits : Bit_Array := To_Bit_Array (Value);
4544 we expect the 8 bits at Vbits'Address to always contain Value, while
4545 their original location depends on the endianness, at Value'Address
4546 on a little-endian architecture but not on a big-endian one.
4548 ??? There is a problematic discrepancy between what is called precision
4549 here (and more generally throughout gigi) for integral types and what is
4550 called precision in the middle-end. In the former case it's the RM size
4551 as given by TYPE_RM_SIZE (or rm_size) whereas it's TYPE_PRECISION in the
4552 latter case, the hitch being that they are not equal when they matter,
4553 that is when the number of value bits is not equal to the type's size:
4554 TYPE_RM_SIZE does give the number of value bits but TYPE_PRECISION is set
4555 to the size. The sole exception is BOOLEAN_TYPEs, for which both are 1.
4557 The consequence is that gigi must duplicate code bridging the gap between
4558 the type's size and its precision that exists for TYPE_PRECISION in the
4559 middle-end, because the latter knows nothing about TYPE_RM_SIZE, and be
4560 wary of transformations applied in the middle-end based on TYPE_PRECISION
4561 because this value doesn't reflect the actual precision for Ada. */
4563 tree
4564 unchecked_convert (tree type, tree expr, bool notrunc_p)
4566 tree etype = TREE_TYPE (expr);
4568 /* If the expression is already the right type, we are done. */
4569 if (etype == type)
4570 return expr;
4572 /* If both types are integral, just do a normal conversion.
4573 Likewise for a conversion to an unconstrained array. */
4574 if ((((INTEGRAL_TYPE_P (type)
4575 && !(TREE_CODE (type) == INTEGER_TYPE
4576 && TYPE_VAX_FLOATING_POINT_P (type)))
4577 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
4578 || (TREE_CODE (type) == RECORD_TYPE
4579 && TYPE_JUSTIFIED_MODULAR_P (type)))
4580 && ((INTEGRAL_TYPE_P (etype)
4581 && !(TREE_CODE (etype) == INTEGER_TYPE
4582 && TYPE_VAX_FLOATING_POINT_P (etype)))
4583 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
4584 || (TREE_CODE (etype) == RECORD_TYPE
4585 && TYPE_JUSTIFIED_MODULAR_P (etype))))
4586 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4588 if (TREE_CODE (etype) == INTEGER_TYPE
4589 && TYPE_BIASED_REPRESENTATION_P (etype))
4591 tree ntype = copy_type (etype);
4592 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4593 TYPE_MAIN_VARIANT (ntype) = ntype;
4594 expr = build1 (NOP_EXPR, ntype, expr);
4597 if (TREE_CODE (type) == INTEGER_TYPE
4598 && TYPE_BIASED_REPRESENTATION_P (type))
4600 tree rtype = copy_type (type);
4601 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4602 TYPE_MAIN_VARIANT (rtype) = rtype;
4603 expr = convert (rtype, expr);
4604 expr = build1 (NOP_EXPR, type, expr);
4607 /* We have another special case: if we are unchecked converting either
4608 a subtype or a type with limited range into a base type, we need to
4609 ensure that VRP doesn't propagate range information because this
4610 conversion may be done precisely to validate that the object is
4611 within the range it is supposed to have. */
4612 else if (TREE_CODE (expr) != INTEGER_CST
4613 && TREE_CODE (type) == INTEGER_TYPE && !TREE_TYPE (type)
4614 && ((TREE_CODE (etype) == INTEGER_TYPE && TREE_TYPE (etype))
4615 || TREE_CODE (etype) == ENUMERAL_TYPE
4616 || TREE_CODE (etype) == BOOLEAN_TYPE))
4618 /* The optimization barrier is a VIEW_CONVERT_EXPR node; moreover,
4619 in order not to be deemed a useless type conversion, it must
4620 be from subtype to base type.
4622 Therefore we first do the bulk of the conversion to a subtype of
4623 the final type. And this conversion must itself not be deemed
4624 useless if the source type is not a subtype because, otherwise,
4625 the final VIEW_CONVERT_EXPR will be deemed so as well. That's
4626 why we toggle the unsigned flag in this conversion, which is
4627 harmless since the final conversion is only a reinterpretation
4628 of the bit pattern.
4630 ??? This may raise addressability and/or aliasing issues because
4631 VIEW_CONVERT_EXPR gets gimplified as an lvalue, thus causing the
4632 address of its operand to be taken if it is deemed addressable
4633 and not already in GIMPLE form. */
4634 tree rtype
4635 = gnat_type_for_mode (TYPE_MODE (type), !TYPE_UNSIGNED (etype));
4636 rtype = copy_type (rtype);
4637 TYPE_MAIN_VARIANT (rtype) = rtype;
4638 TREE_TYPE (rtype) = type;
4639 expr = convert (rtype, expr);
4640 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4643 else
4644 expr = convert (type, expr);
4647 /* If we are converting to an integral type whose precision is not equal
4648 to its size, first unchecked convert to a record that contains an
4649 object of the output type. Then extract the field. */
4650 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4651 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4652 GET_MODE_BITSIZE (TYPE_MODE (type))))
4654 tree rec_type = make_node (RECORD_TYPE);
4655 tree field = create_field_decl (get_identifier ("OBJ"), type,
4656 rec_type, 1, 0, 0, 0);
4658 TYPE_FIELDS (rec_type) = field;
4659 layout_type (rec_type);
4661 expr = unchecked_convert (rec_type, expr, notrunc_p);
4662 expr = build_component_ref (expr, NULL_TREE, field, 0);
4665 /* Similarly if we are converting from an integral type whose precision
4666 is not equal to its size. */
4667 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
4668 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4669 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4671 tree rec_type = make_node (RECORD_TYPE);
4672 tree field
4673 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
4674 1, 0, 0, 0);
4676 TYPE_FIELDS (rec_type) = field;
4677 layout_type (rec_type);
4679 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
4680 expr = unchecked_convert (type, expr, notrunc_p);
4683 /* We have a special case when we are converting between two
4684 unconstrained array types. In that case, take the address,
4685 convert the fat pointer types, and dereference. */
4686 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
4687 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4688 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4689 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4690 build_unary_op (ADDR_EXPR, NULL_TREE,
4691 expr)));
4692 else
4694 expr = maybe_unconstrained_array (expr);
4695 etype = TREE_TYPE (expr);
4696 if (can_fold_for_view_convert_p (expr))
4697 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4698 else
4699 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4702 /* If the result is an integral type whose precision is not equal to its
4703 size, sign- or zero-extend the result. We need not do this if the input
4704 is an integral type of the same precision and signedness or if the output
4705 is a biased type or if both the input and output are unsigned. */
4706 if (!notrunc_p
4707 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4708 && !(TREE_CODE (type) == INTEGER_TYPE
4709 && TYPE_BIASED_REPRESENTATION_P (type))
4710 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4711 GET_MODE_BITSIZE (TYPE_MODE (type)))
4712 && !(INTEGRAL_TYPE_P (etype)
4713 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4714 && operand_equal_p (TYPE_RM_SIZE (type),
4715 (TYPE_RM_SIZE (etype) != 0
4716 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4717 0))
4718 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4720 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
4721 TYPE_UNSIGNED (type));
4722 tree shift_expr
4723 = convert (base_type,
4724 size_binop (MINUS_EXPR,
4725 bitsize_int
4726 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4727 TYPE_RM_SIZE (type)));
4728 expr
4729 = convert (type,
4730 build_binary_op (RSHIFT_EXPR, base_type,
4731 build_binary_op (LSHIFT_EXPR, base_type,
4732 convert (base_type, expr),
4733 shift_expr),
4734 shift_expr));
4737 /* An unchecked conversion should never raise Constraint_Error. The code
4738 below assumes that GCC's conversion routines overflow the same way that
4739 the underlying hardware does. This is probably true. In the rare case
4740 when it is false, we can rely on the fact that such conversions are
4741 erroneous anyway. */
4742 if (TREE_CODE (expr) == INTEGER_CST)
4743 TREE_OVERFLOW (expr) = 0;
4745 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
4746 mark the expression as no longer constant. */
4747 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4748 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4749 OEP_ONLY_CONST))
4750 TREE_CONSTANT (expr) = 0;
4752 return expr;
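/* Illustrative sketch, not part of the original source: the final sign- or
   zero-extension above is done with a pair of shifts.  For a signed type
   whose RM size is 5 bits carried in an 8-bit mode, the shift count is
   GET_MODE_BITSIZE - TYPE_RM_SIZE = 3 and the result is computed as

     signed char tmp = (signed char) (val << 3);
     signed char res = tmp >> 3;

   which propagates bit 4 of the value into the unused upper bits.  */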
4755 /* Return the appropriate GCC tree code for the specified GNAT type,
4756 the latter being a record type as predicated by Is_Record_Type. */
4758 enum tree_code
4759 tree_code_for_record_type (Entity_Id gnat_type)
4761 Node_Id component_list
4762 = Component_List (Type_Definition
4763 (Declaration_Node
4764 (Implementation_Base_Type (gnat_type))));
4765 Node_Id component;
4767 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4768 we have a non-discriminant field outside a variant. In either case,
4769 it's a RECORD_TYPE. */
4771 if (!Is_Unchecked_Union (gnat_type))
4772 return RECORD_TYPE;
4774 for (component = First_Non_Pragma (Component_Items (component_list));
4775 Present (component);
4776 component = Next_Non_Pragma (component))
4777 if (Ekind (Defining_Entity (component)) == E_Component)
4778 return RECORD_TYPE;
4780 return UNION_TYPE;
4783 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4784 component of an aggregate type. */
4786 bool
4787 type_for_nonaliased_component_p (tree gnu_type)
4789 /* If the type is passed by reference, we may have pointers to the
4790 component so it cannot be made non-aliased. */
4791 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4792 return false;
4794 /* We used to say that any component of aggregate type is aliased
4795 because the front-end may take 'Reference of it. The front-end
4796 has been enhanced in the meantime so as to use a renaming instead
4797 in most cases, but the back-end can probably take the address of
4798 such a component too so we go for the conservative stance.
4800 For instance, we might need the address of any array type, even
4801 if normally passed by copy, to construct a fat pointer if the
4802 component is used as an actual for an unconstrained formal.
4804 Likewise for record types: even if a specific record subtype is
4805 passed by copy, the parent type might be passed by ref (e.g. if
4806 it's of variable size) and we might take the address of a child
4807 component to pass to a parent formal. We have no way to check
4808 for such conditions here. */
4809 if (AGGREGATE_TYPE_P (gnu_type))
4810 return false;
4812 return true;
4815 /* Perform final processing on global variables. */
4817 void
4818 gnat_write_global_declarations (void)
4820 /* Proceed to optimize and emit assembly.
4821 FIXME: shouldn't be the front end's responsibility to call this. */
4822 cgraph_optimize ();
4824 /* Emit debug info for all global declarations. */
4825 emit_debug_global_declarations (VEC_address (tree, global_decls),
4826 VEC_length (tree, global_decls));
4829 /* ************************************************************************
4830 * * GCC builtins support *
4831 * ************************************************************************ */
4833 /* The general scheme is fairly simple:
4835 For each builtin function/type to be declared, gnat_install_builtins calls
4836 internal facilities which eventually get to gnat_push_decl, which in turn
4837 records the declared builtin function decls in the 'builtin_decls' global
4838 data structure. When an Intrinsic subprogram declaration is processed, we
4839 search this global datastructure to retrieve the associated BUILT_IN DECL
4840 node. */
4842 /* Search the chain of currently available builtin declarations for a node
4843 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4844 found, if any, or NULL_TREE otherwise. */
4845 tree
4846 builtin_decl_for (tree name)
4848 unsigned i;
4849 tree decl;
4851 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4852 if (DECL_NAME (decl) == name)
4853 return decl;
4855 return NULL_TREE;
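/* Illustrative use, not part of the original source:

     tree decl = builtin_decl_for (get_identifier ("__builtin_memcpy"));
     if (decl)
       ...  bind the Intrinsic subprogram to this BUILT_IN decl  ...

   Since identifiers are interned by get_identifier, the pointer comparison
   on DECL_NAME in the loop above is sufficient.  */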
4858 /* The code below eventually exposes gnat_install_builtins, which declares
4859 the builtin types and functions we might need, either internally or as
4860 user accessible facilities.
4862 ??? This is a first implementation shot, still in rough shape. It is
4863 heavily inspired from the "C" family implementation, with chunks copied
4864 verbatim from there.
4866 Two obvious TODO candidates are
4867 o Use a more efficient name/decl mapping scheme
4868 o Devise a middle-end infrastructure to avoid having to copy
4869 pieces between front-ends. */
4871 /* ----------------------------------------------------------------------- *
4872 * BUILTIN ELEMENTARY TYPES *
4873 * ----------------------------------------------------------------------- */
4875 /* Standard data types to be used in builtin argument declarations. */
4877 enum c_tree_index
4879 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
4880 CTI_STRING_TYPE,
4881 CTI_CONST_STRING_TYPE,
4883 CTI_MAX
4886 static tree c_global_trees[CTI_MAX];
4888 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
4889 #define string_type_node c_global_trees[CTI_STRING_TYPE]
4890 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
4892 /* ??? In addition to some attribute handlers, we currently don't support a
4893 (small) number of builtin types, which in turn inhibits support for a
4894 number of builtin functions. */
4895 #define wint_type_node void_type_node
4896 #define intmax_type_node void_type_node
4897 #define uintmax_type_node void_type_node
4899 /* Build the void_list_node (void_type_node having been created). */
4901 static tree
4902 build_void_list_node (void)
4904 tree t = build_tree_list (NULL_TREE, void_type_node);
4905 return t;
4908 /* Used to help initialize the builtin-types.def table. When a type of
4909 the correct size doesn't exist, use error_mark_node instead of NULL.
4910 The latter results in segfaults even when a decl using the type doesn't
4911 get invoked. */
4913 static tree
4914 builtin_type_for_size (int size, bool unsignedp)
4916 tree type = lang_hooks.types.type_for_size (size, unsignedp);
4917 return type ? type : error_mark_node;
/* Build/push the elementary type decls that builtin functions/types
   will need.  */

static void
install_builtin_elementary_types (void)
{
  signed_size_type_node = size_type_node;
  pid_type_node = integer_type_node;
  void_list_node = build_void_list_node ();

  string_type_node = build_pointer_type (char_type_node);
  const_string_type_node
    = build_pointer_type (build_qualified_type
                          (char_type_node, TYPE_QUAL_CONST));
}
/* ----------------------------------------------------------------------- *
 *                        BUILTIN FUNCTION TYPES                           *
 * ----------------------------------------------------------------------- */
/* Now, builtin function types per se.  */

enum c_builtin_type
{
#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  NAME,
#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
#include "builtin-types.def"
#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_POINTER_TYPE
  BT_LAST
};

typedef enum c_builtin_type builtin_type;
/* A temporary array used in communication with def_fn_type.  */
static GTY(()) tree builtin_types[(int) BT_LAST + 1];

/* A helper function for install_builtin_function_types.  Build function type
   for DEF with return type RET and N arguments.  If VAR is true, then the
   function should be variadic after those N arguments.

   Takes special care not to ICE if any of the types involved are
   error_mark_node, which indicates that said type is not in fact available
   (see builtin_type_for_size).  In which case the function type as a whole
   should be error_mark_node.  */

static void
def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
{
  tree args = NULL, t;
  va_list list;
  int i;

  va_start (list, n);
  for (i = 0; i < n; ++i)
    {
      builtin_type a = va_arg (list, builtin_type);
      t = builtin_types[a];
      if (t == error_mark_node)
        goto egress;
      args = tree_cons (NULL_TREE, t, args);
    }
  va_end (list);

  args = nreverse (args);
  if (!var)
    args = chainon (args, void_list_node);

  t = builtin_types[ret];
  if (t == error_mark_node)
    goto egress;
  t = build_function_type (t, args);

 egress:
  builtin_types[def] = t;
}
/* Build the builtin function types and install them in the builtin_types
   array for later use in builtin function decls.  */

static void
install_builtin_function_types (void)
{
  tree va_list_ref_type_node;
  tree va_list_arg_type_node;

  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
    {
      va_list_arg_type_node = va_list_ref_type_node =
        build_pointer_type (TREE_TYPE (va_list_type_node));
    }
  else
    {
      va_list_arg_type_node = va_list_type_node;
      va_list_ref_type_node = build_reference_type (va_list_type_node);
    }

#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
  builtin_types[ENUM] = VALUE;
#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
  def_fn_type (ENUM, RETURN, 0, 0);
#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
  def_fn_type (ENUM, RETURN, 0, 1, ARG1);
#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
  def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
  def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
  def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
                            ARG6)                                       \
  def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
                            ARG6, ARG7)                                 \
  def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
  def_fn_type (ENUM, RETURN, 1, 0);
#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
  def_fn_type (ENUM, RETURN, 1, 1, ARG1);
#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
  def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
  def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
  def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_POINTER_TYPE(ENUM, TYPE) \
  builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);

#include "builtin-types.def"

#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_POINTER_TYPE

  builtin_types[(int) BT_LAST] = NULL_TREE;
}
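
/* To illustrate the machinery above with a made-up type name: an entry such
   as

       DEF_FUNCTION_TYPE_2 (BT_FN_FOO, BT_PTR, BT_PTR, BT_INT)

   in builtin-types.def would expand, through the macros above, to

       def_fn_type (BT_FN_FOO, BT_PTR, 0, 2, BT_PTR, BT_INT);

   recording in builtin_types[BT_FN_FOO] the type of a function taking a
   pointer and an int and returning a pointer, or error_mark_node if any of
   the component types is unavailable.  (BT_FN_FOO is hypothetical; BT_PTR
   and BT_INT are regular builtin-types.def primitives.)  */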
/* ----------------------------------------------------------------------- *
 *                          BUILTIN ATTRIBUTES                             *
 * ----------------------------------------------------------------------- */
enum built_in_attribute
{
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
  ATTR_LAST
};

static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
static void
install_builtin_attributes (void)
{
  /* Fill in the built_in_attributes array.  */
#define DEF_ATTR_NULL_TREE(ENUM) \
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE) \
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING) \
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
  built_in_attributes[(int) ENUM]                       \
    = tree_cons (built_in_attributes[(int) PURPOSE],    \
                 built_in_attributes[(int) VALUE],      \
                 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
}
/* Handle a "const" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_const_attribute (tree *node, tree ARG_UNUSED (name),
                        tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                        bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_READONLY (*node) = 1;
  else
    *no_add_attrs = true;

  return NULL_TREE;
}
/* Handle a "nothrow" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
                          tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                          bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_NOTHROW (*node) = 1;
  else
    *no_add_attrs = true;

  return NULL_TREE;
}
/* Handle a "pure" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    DECL_PURE_P (*node) = 1;
  /* ??? TODO: Support types.  */
  else
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
/* Handle a "no vops" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
                         tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                         bool *ARG_UNUSED (no_add_attrs))
{
  gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
  DECL_IS_NOVOPS (*node) = 1;
  return NULL_TREE;
}
/* Helper for nonnull attribute handling; fetch the operand number
   from the attribute argument list.  */

static bool
get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
{
  /* Verify the arg number is a constant.  */
  if (TREE_CODE (arg_num_expr) != INTEGER_CST
      || TREE_INT_CST_HIGH (arg_num_expr) != 0)
    return false;

  *valp = TREE_INT_CST_LOW (arg_num_expr);
  return true;
}
/* Handle the "nonnull" attribute.  */

static tree
handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
                          tree args, int ARG_UNUSED (flags),
                          bool *no_add_attrs)
{
  tree type = *node;
  unsigned HOST_WIDE_INT attr_arg_num;

  /* If no arguments are specified, all pointer arguments should be
     non-null.  Verify a full prototype is given so that the arguments
     will have the correct types when we actually check them later.  */
  if (!args)
    {
      if (!TYPE_ARG_TYPES (type))
        {
          error ("nonnull attribute without arguments on a non-prototype");
          *no_add_attrs = true;
        }
      return NULL_TREE;
    }

  /* Argument list specified.  Verify that each argument number references
     a pointer argument.  */
  for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
    {
      tree argument;
      unsigned HOST_WIDE_INT arg_num = 0, ck_num;

      if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
        {
          error ("nonnull argument has invalid operand number (argument %lu)",
                 (unsigned long) attr_arg_num);
          *no_add_attrs = true;
          return NULL_TREE;
        }

      argument = TYPE_ARG_TYPES (type);
      if (argument)
        {
          for (ck_num = 1; ; ck_num++)
            {
              if (!argument || ck_num == arg_num)
                break;
              argument = TREE_CHAIN (argument);
            }

          if (!argument
              || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
            {
              error ("nonnull argument with out-of-range operand number (argument %lu, operand %lu)",
                     (unsigned long) attr_arg_num, (unsigned long) arg_num);
              *no_add_attrs = true;
              return NULL_TREE;
            }

          if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
            {
              error ("nonnull argument references non-pointer operand (argument %lu, operand %lu)",
                     (unsigned long) attr_arg_num, (unsigned long) arg_num);
              *no_add_attrs = true;
              return NULL_TREE;
            }
        }
    }

  return NULL_TREE;
}
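
/* As an illustrative use (not taken from this file), a declaration such as

       extern char *my_copy (char *, const char *)
         __attribute__ ((nonnull (1, 2)));

   passes the checks above: operand numbers 1 and 2 both reference pointer
   arguments of the prototype.  */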
/* Handle a "sentinel" attribute.  */

static tree
handle_sentinel_attribute (tree *node, tree name, tree args,
                           int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree params = TYPE_ARG_TYPES (*node);

  if (!params)
    {
      warning (OPT_Wattributes,
               "%qE attribute requires prototypes with named arguments", name);
      *no_add_attrs = true;
    }
  else
    {
      while (TREE_CHAIN (params))
        params = TREE_CHAIN (params);

      if (VOID_TYPE_P (TREE_VALUE (params)))
        {
          warning (OPT_Wattributes,
                   "%qE attribute only applies to variadic functions", name);
          *no_add_attrs = true;
        }
    }

  if (args)
    {
      tree position = TREE_VALUE (args);

      if (TREE_CODE (position) != INTEGER_CST)
        {
          warning (0, "requested position is not an integer constant");
          *no_add_attrs = true;
        }
      else
        {
          if (tree_int_cst_lt (position, integer_zero_node))
            {
              warning (0, "requested position is less than zero");
              *no_add_attrs = true;
            }
        }
    }

  return NULL_TREE;
}
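
/* As an illustrative use (not taken from this file), a variadic declaration
   such as

       extern void my_execl (const char *, ...) __attribute__ ((sentinel));

   is accepted by the checks above; with no explicit position argument, the
   trailing argument of each call is expected to be a NULL sentinel.  */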
/* Handle a "noreturn" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                           int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = TREE_TYPE (*node);

  /* See FIXME comment in c_common_attribute_table.  */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_THIS_VOLATILE (*node) = 1;
  else if (TREE_CODE (type) == POINTER_TYPE
           && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
    TREE_TYPE (*node)
      = build_pointer_type
        (build_type_variant (TREE_TYPE (type),
                             TYPE_READONLY (TREE_TYPE (type)), 1));
  else
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
/* Handle a "malloc" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                         int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL
      && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
    DECL_IS_MALLOC (*node) = 1;
  else
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
/* Fake handler for attributes we don't properly support.  */

tree
fake_attribute_handler (tree * ARG_UNUSED (node),
                        tree ARG_UNUSED (name),
                        tree ARG_UNUSED (args),
                        int ARG_UNUSED (flags),
                        bool * ARG_UNUSED (no_add_attrs))
{
  return NULL_TREE;
}
/* Handle a "type_generic" attribute.  */

static tree
handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
                               tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                               bool * ARG_UNUSED (no_add_attrs))
{
  tree params;

  /* Ensure we have a function type.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);

  params = TYPE_ARG_TYPES (*node);
  while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
    params = TREE_CHAIN (params);

  /* Ensure we have a variadic function.  */
  gcc_assert (!params);

  return NULL_TREE;
}
/* ----------------------------------------------------------------------- *
 *                           BUILTIN FUNCTIONS                             *
 * ----------------------------------------------------------------------- */
/* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
   names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
   if nonansi_p and flag_no_nonansi_builtin.  */

static void
def_builtin_1 (enum built_in_function fncode,
               const char *name,
               enum built_in_class fnclass,
               tree fntype, tree libtype,
               bool both_p, bool fallback_p,
               bool nonansi_p ATTRIBUTE_UNUSED,
               tree fnattrs, bool implicit_p)
{
  tree decl;
  const char *libname;

  /* Preserve an already installed decl.  It most likely was set up in advance
     (e.g. as part of the internal builtins) for specific reasons.  */
  if (built_in_decls[(int) fncode] != NULL_TREE)
    return;

  gcc_assert ((!both_p && !fallback_p)
              || !strncmp (name, "__builtin_",
                           strlen ("__builtin_")));

  libname = name + strlen ("__builtin_");
  decl = add_builtin_function (name, fntype, fncode, fnclass,
                               (fallback_p ? libname : NULL),
                               fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada.  */
    add_builtin_function (libname, libtype, fncode, fnclass,
                          NULL, fnattrs);

  built_in_decls[(int) fncode] = decl;
  if (implicit_p)
    implicit_built_in_decls[(int) fncode] = decl;
}
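
/* For instance (illustrative only), the builtins.def entry for memcpy is
   processed with both_p set, so the code above declares both
   "__builtin_memcpy" and the fallback name "memcpy", records the former in
   built_in_decls[BUILT_IN_MEMCPY], and also in implicit_built_in_decls when
   implicit_p is set.  */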
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;

/* Install what the common builtins.def offers.  */

static void
install_builtin_functions (void)
{
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
                    NONANSI_P, ATTRS, IMPLICIT, COND)                     \
  if (NAME && COND)                                                       \
    def_builtin_1 (ENUM, NAME, CLASS,                                     \
                   builtin_types[(int) TYPE],                             \
                   builtin_types[(int) LIBTYPE],                          \
                   BOTH_P, FALLBACK_P, NONANSI_P,                         \
                   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
#undef DEF_BUILTIN
}
/* ----------------------------------------------------------------------- *
 *                           BUILTIN FUNCTIONS                             *
 * ----------------------------------------------------------------------- */
/* Install the builtin functions we might need.  */

void
gnat_install_builtins (void)
{
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}

#include "gt-ada-utils.h"
#include "gtype-ada.h"