2007-11-26 Andreas Krebbel <krebbel1@de.ibm.com>
[official-gcc.git] / gcc / ada / utils.c
blobe11ce2bef1f32a9827bfecc8dbd09269085005e2
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2007, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "defaults.h"
33 #include "toplev.h"
34 #include "output.h"
35 #include "ggc.h"
36 #include "debug.h"
37 #include "convert.h"
38 #include "target.h"
39 #include "function.h"
40 #include "cgraph.h"
41 #include "tree-inline.h"
42 #include "tree-gimple.h"
43 #include "tree-dump.h"
44 #include "pointer-set.h"
46 #include "ada.h"
47 #include "types.h"
48 #include "atree.h"
49 #include "elists.h"
50 #include "namet.h"
51 #include "nlists.h"
52 #include "stringt.h"
53 #include "uintp.h"
54 #include "fe.h"
55 #include "sinfo.h"
56 #include "einfo.h"
57 #include "ada-tree.h"
58 #include "gigi.h"
60 #ifndef MAX_FIXED_MODE_SIZE
61 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
62 #endif
64 #ifndef MAX_BITS_PER_WORD
65 #define MAX_BITS_PER_WORD BITS_PER_WORD
66 #endif
68 /* If nonzero, pretend we are allocating at global level. */
69 int force_global;
71 /* Tree nodes for the various types and decls we create. */
72 tree gnat_std_decls[(int) ADT_LAST];
74 /* Functions to call for each of the possible raise reasons. */
75 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
77 /* Forward declarations for handlers of attributes. */
78 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
79 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
81 /* Table of machine-independent internal attributes for Ada. We support
82 this minimal set of attributes to accommodate the Alpha back-end which
83 unconditionally puts them on its builtins. */
84 const struct attribute_spec gnat_internal_attribute_table[] =
86 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
87 { "const", 0, 0, true, false, false, handle_const_attribute },
88 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute },
89 { NULL, 0, 0, false, false, false, NULL }
92 /* Associates a GNAT tree node to a GCC tree node. It is used in
93 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
94 of `save_gnu_tree' for more info. */
95 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
97 #define GET_GNU_TREE(GNAT_ENTITY) \
98 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
100 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
101 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
103 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
104 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
106 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
107 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
109 #define GET_DUMMY_NODE(GNAT_ENTITY) \
110 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
112 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
113 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
115 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
116 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
118 /* This variable keeps a table for types for each precision so that we only
119 allocate each of them once. Signed and unsigned types are kept separate.
121 Note that these types are only used when fold-const requests something
122 special. Perhaps we should NOT share these types; we'll see how it
123 goes later. */
124 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
126 /* Likewise for float types, but record these by mode. */
127 static GTY(()) tree float_types[NUM_MACHINE_MODES];
129 /* For each binding contour we allocate a binding_level structure to indicate
130 the binding depth. */
132 struct gnat_binding_level GTY((chain_next ("%h.chain")))
134 /* The binding level containing this one (the enclosing binding level). */
135 struct gnat_binding_level *chain;
136 /* The BLOCK node for this level. */
137 tree block;
138 /* If nonzero, the setjmp buffer that needs to be updated for any
139 variable-sized definition within this context. */
140 tree jmpbuf_decl;
143 /* The binding level currently in effect. */
144 static GTY(()) struct gnat_binding_level *current_binding_level;
146 /* A chain of gnat_binding_level structures awaiting reuse. */
147 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
149 /* An array of global declarations. */
150 static GTY(()) VEC(tree,gc) *global_decls;
152 /* An array of builtin declarations. */
153 static GTY(()) VEC(tree,gc) *builtin_decls;
155 /* An array of global renaming pointers. */
156 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
158 /* A chain of unused BLOCK nodes. */
159 static GTY((deletable)) tree free_block_chain;
161 static void gnat_install_builtins (void);
162 static tree merge_sizes (tree, tree, tree, bool, bool);
163 static tree compute_related_constant (tree, tree);
164 static tree split_plus (tree, tree *);
165 static void gnat_gimplify_function (tree);
166 static tree float_type_for_precision (int, enum machine_mode);
167 static tree convert_to_fat_pointer (tree, tree);
168 static tree convert_to_thin_pointer (tree, tree);
169 static tree make_descriptor_field (const char *,tree, tree, tree);
170 static bool potential_alignment_gap (tree, tree, tree);
172 /* Initialize the association of GNAT nodes to GCC trees. */
174 void
175 init_gnat_to_gnu (void)
177 associate_gnat_to_gnu
178 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
181 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
182 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
183 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
185 If GNU_DECL is zero, a previous association is to be reset. */
187 void
188 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
190 /* Check that GNAT_ENTITY is not already defined and that it is being set
191 to something which is a decl. Raise gigi 401 if not. Usually, this
192 means GNAT_ENTITY is defined twice, but occasionally is due to some
193 Gigi problem. */
194 gcc_assert (!(gnu_decl
195 && (PRESENT_GNU_TREE (gnat_entity)
196 || (!no_check && !DECL_P (gnu_decl)))));
198 SET_GNU_TREE (gnat_entity, gnu_decl);
201 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
202 Return the ..._DECL node that was associated with it. If there is no tree
203 node associated with GNAT_ENTITY, abort.
205 In some cases, such as delayed elaboration or expressions that need to
206 be elaborated only once, GNAT_ENTITY is really not an entity. */
208 tree
209 get_gnu_tree (Entity_Id gnat_entity)
211 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
212 return GET_GNU_TREE (gnat_entity);
215 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
217 bool
218 present_gnu_tree (Entity_Id gnat_entity)
220 return PRESENT_GNU_TREE (gnat_entity);
223 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
225 void
226 init_dummy_type (void)
228 dummy_node_table
229 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
232 /* Make a dummy type corresponding to GNAT_TYPE. */
234 tree
235 make_dummy_type (Entity_Id gnat_type)
237 Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
238 tree gnu_type;
240 /* If there is an equivalent type, get its underlying type. */
241 if (Present (gnat_underlying))
242 gnat_underlying = Underlying_Type (gnat_underlying);
244 /* If there was no equivalent type (can only happen when just annotating
245 types) or underlying type, go back to the original type. */
246 if (No (gnat_underlying))
247 gnat_underlying = gnat_type;
249 /* If it there already a dummy type, use that one. Else make one. */
250 if (PRESENT_DUMMY_NODE (gnat_underlying))
251 return GET_DUMMY_NODE (gnat_underlying);
253 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
254 an ENUMERAL_TYPE. */
255 gnu_type = make_node (Is_Record_Type (gnat_underlying)
256 ? tree_code_for_record_type (gnat_underlying)
257 : ENUMERAL_TYPE);
258 TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
259 TYPE_DUMMY_P (gnu_type) = 1;
260 if (AGGREGATE_TYPE_P (gnu_type))
262 TYPE_STUB_DECL (gnu_type) = build_decl (TYPE_DECL, NULL_TREE, gnu_type);
263 TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
266 SET_DUMMY_NODE (gnat_underlying, gnu_type);
268 return gnu_type;
271 /* Return nonzero if we are currently in the global binding level. */
274 global_bindings_p (void)
276 return ((force_global || !current_function_decl) ? -1 : 0);
279 /* Enter a new binding level. */
281 void
282 gnat_pushlevel ()
284 struct gnat_binding_level *newlevel = NULL;
286 /* Reuse a struct for this binding level, if there is one. */
287 if (free_binding_level)
289 newlevel = free_binding_level;
290 free_binding_level = free_binding_level->chain;
292 else
293 newlevel
294 = (struct gnat_binding_level *)
295 ggc_alloc (sizeof (struct gnat_binding_level));
297 /* Use a free BLOCK, if any; otherwise, allocate one. */
298 if (free_block_chain)
300 newlevel->block = free_block_chain;
301 free_block_chain = BLOCK_CHAIN (free_block_chain);
302 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
304 else
305 newlevel->block = make_node (BLOCK);
307 /* Point the BLOCK we just made to its parent. */
308 if (current_binding_level)
309 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
311 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
312 TREE_USED (newlevel->block) = 1;
314 /* Add this level to the front of the chain (stack) of levels that are
315 active. */
316 newlevel->chain = current_binding_level;
317 newlevel->jmpbuf_decl = NULL_TREE;
318 current_binding_level = newlevel;
321 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
322 and point FNDECL to this BLOCK. */
324 void
325 set_current_block_context (tree fndecl)
327 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
328 DECL_INITIAL (fndecl) = current_binding_level->block;
331 /* Set the jmpbuf_decl for the current binding level to DECL. */
333 void
334 set_block_jmpbuf_decl (tree decl)
336 current_binding_level->jmpbuf_decl = decl;
339 /* Get the jmpbuf_decl, if any, for the current binding level. */
341 tree
342 get_block_jmpbuf_decl ()
344 return current_binding_level->jmpbuf_decl;
347 /* Exit a binding level. Set any BLOCK into the current code group. */
349 void
350 gnat_poplevel ()
352 struct gnat_binding_level *level = current_binding_level;
353 tree block = level->block;
355 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
356 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
358 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
359 are no variables free the block and merge its subblocks into those of its
360 parent block. Otherwise, add it to the list of its parent. */
361 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
363 else if (BLOCK_VARS (block) == NULL_TREE)
365 BLOCK_SUBBLOCKS (level->chain->block)
366 = chainon (BLOCK_SUBBLOCKS (block),
367 BLOCK_SUBBLOCKS (level->chain->block));
368 BLOCK_CHAIN (block) = free_block_chain;
369 free_block_chain = block;
371 else
373 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
374 BLOCK_SUBBLOCKS (level->chain->block) = block;
375 TREE_USED (block) = 1;
376 set_block_for_group (block);
379 /* Free this binding structure. */
380 current_binding_level = level->chain;
381 level->chain = free_binding_level;
382 free_binding_level = level;
385 /* Insert BLOCK at the end of the list of subblocks of the
386 current binding level. This is used when a BIND_EXPR is expanded,
387 to handle the BLOCK node inside the BIND_EXPR. */
389 void
390 insert_block (tree block)
392 TREE_USED (block) = 1;
393 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (current_binding_level->block);
394 BLOCK_SUBBLOCKS (current_binding_level->block) = block;
397 /* Records a ..._DECL node DECL as belonging to the current lexical scope
398 and uses GNAT_NODE for location information and propagating flags. */
400 void
401 gnat_pushdecl (tree decl, Node_Id gnat_node)
403 /* If at top level, there is no context. But PARM_DECLs always go in the
404 level of its function. */
405 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
406 DECL_CONTEXT (decl) = 0;
407 else
409 DECL_CONTEXT (decl) = current_function_decl;
411 /* Functions imported in another function are not really nested. */
412 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
413 DECL_NO_STATIC_CHAIN (decl) = 1;
416 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
418 /* Set the location of DECL and emit a declaration for it. */
419 if (Present (gnat_node))
420 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
421 add_decl_expr (decl, gnat_node);
423 /* Put the declaration on the list. The list of declarations is in reverse
424 order. The list will be reversed later. Put global variables in the
425 globals list and builtin functions in a dedicated list to speed up
426 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
427 the list, as they will cause trouble with the debugger and aren't needed
428 anyway. */
429 if (TREE_CODE (decl) != TYPE_DECL
430 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
432 if (global_bindings_p ())
434 VEC_safe_push (tree, gc, global_decls, decl);
436 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
437 VEC_safe_push (tree, gc, builtin_decls, decl);
439 else
441 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
442 BLOCK_VARS (current_binding_level->block) = decl;
446 /* For the declaration of a type, set its name if it either is not already
447 set, was set to an IDENTIFIER_NODE, indicating an internal name,
448 or if the previous type name was not derived from a source name.
449 We'd rather have the type named with a real name and all the pointer
450 types to the same object have the same POINTER_TYPE node. Code in the
451 equivalent function of c-decl.c makes a copy of the type node here, but
452 that may cause us trouble with incomplete types. We make an exception
453 for fat pointer types because the compiler automatically builds them
454 for unconstrained array types and the debugger uses them to represent
455 both these and pointers to these. */
456 if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
458 tree t = TREE_TYPE (decl);
460 if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) == IDENTIFIER_NODE)
461 TYPE_NAME (t) = decl;
462 else if (TYPE_FAT_POINTER_P (t))
464 tree tt = build_variant_type_copy (t);
465 TYPE_NAME (tt) = decl;
466 TREE_USED (tt) = TREE_USED (t);
467 TREE_TYPE (decl) = tt;
468 DECL_ORIGINAL_TYPE (decl) = t;
470 else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
471 TYPE_NAME (t) = decl;
475 /* Do little here. Set up the standard declarations later after the
476 front end has been run. */
478 void
479 gnat_init_decl_processing (void)
481 /* Make the binding_level structure for global names. */
482 current_function_decl = 0;
483 current_binding_level = 0;
484 free_binding_level = 0;
485 gnat_pushlevel ();
487 build_common_tree_nodes (true, true);
489 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
490 corresponding to the size of Pmode. In most cases when ptr_mode and
491 Pmode differ, C will use the width of ptr_mode as sizetype. But we get
492 far better code using the width of Pmode. Make this here since we need
493 this before we can expand the GNAT types. */
494 size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
495 set_sizetype (size_type_node);
496 build_common_tree_nodes_2 (0);
498 ptr_void_type_node = build_pointer_type (void_type_node);
500 gnat_install_builtins ();
503 /* Install the builtin functions we might need. */
505 static void
506 gnat_install_builtins ()
508 /* Builtins used by generic middle-end optimizers. */
509 build_common_builtin_nodes ();
511 /* Target specific builtins, such as the AltiVec family on ppc. */
512 targetm.init_builtins ();
515 /* Create the predefined scalar types such as `integer_type_node' needed
516 in the gcc back-end and initialize the global binding level. */
518 void
519 init_gigi_decls (tree long_long_float_type, tree exception_type)
521 tree endlink, decl;
522 unsigned int i;
524 /* Set the types that GCC and Gigi use from the front end. We would like
525 to do this for char_type_node, but it needs to correspond to the C
526 char type. */
527 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
529 /* In this case, the builtin floating point types are VAX float,
530 so make up a type for use. */
531 longest_float_type_node = make_node (REAL_TYPE);
532 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
533 layout_type (longest_float_type_node);
534 create_type_decl (get_identifier ("longest float type"),
535 longest_float_type_node, NULL, false, true, Empty);
537 else
538 longest_float_type_node = TREE_TYPE (long_long_float_type);
540 except_type_node = TREE_TYPE (exception_type);
542 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
543 create_type_decl (get_identifier ("unsigned int"), unsigned_type_node,
544 NULL, false, true, Empty);
546 void_type_decl_node = create_type_decl (get_identifier ("void"),
547 void_type_node, NULL, false, true,
548 Empty);
550 void_ftype = build_function_type (void_type_node, NULL_TREE);
551 ptr_void_ftype = build_pointer_type (void_ftype);
553 /* Now declare runtime functions. */
554 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
556 /* malloc is a function declaration tree for a function to allocate
557 memory. */
558 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
559 NULL_TREE,
560 build_function_type (ptr_void_type_node,
561 tree_cons (NULL_TREE,
562 sizetype,
563 endlink)),
564 NULL_TREE, false, true, true, NULL,
565 Empty);
566 DECL_IS_MALLOC (malloc_decl) = 1;
568 /* free is a function declaration tree for a function to free memory. */
569 free_decl
570 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
571 build_function_type (void_type_node,
572 tree_cons (NULL_TREE,
573 ptr_void_type_node,
574 endlink)),
575 NULL_TREE, false, true, true, NULL, Empty);
577 /* Make the types and functions used for exception processing. */
578 jmpbuf_type
579 = build_array_type (gnat_type_for_mode (Pmode, 0),
580 build_index_type (build_int_cst (NULL_TREE, 5)));
581 create_type_decl (get_identifier ("JMPBUF_T"), jmpbuf_type, NULL,
582 true, true, Empty);
583 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
585 /* Functions to get and set the jumpbuf pointer for the current thread. */
586 get_jmpbuf_decl
587 = create_subprog_decl
588 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
589 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
590 NULL_TREE, false, true, true, NULL, Empty);
591 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
592 DECL_IS_PURE (get_jmpbuf_decl) = 1;
594 set_jmpbuf_decl
595 = create_subprog_decl
596 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
597 NULL_TREE,
598 build_function_type (void_type_node,
599 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
600 NULL_TREE, false, true, true, NULL, Empty);
602 /* Function to get the current exception. */
603 get_excptr_decl
604 = create_subprog_decl
605 (get_identifier ("system__soft_links__get_gnat_exception"),
606 NULL_TREE,
607 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
608 NULL_TREE, false, true, true, NULL, Empty);
609 /* Avoid creating superfluous edges to __builtin_setjmp receivers. */
610 DECL_IS_PURE (get_excptr_decl) = 1;
612 /* Functions that raise exceptions. */
613 raise_nodefer_decl
614 = create_subprog_decl
615 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
616 build_function_type (void_type_node,
617 tree_cons (NULL_TREE,
618 build_pointer_type (except_type_node),
619 endlink)),
620 NULL_TREE, false, true, true, NULL, Empty);
622 /* Dummy objects to materialize "others" and "all others" in the exception
623 tables. These are exported by a-exexpr.adb, so see this unit for the
624 types to use. */
626 others_decl
627 = create_var_decl (get_identifier ("OTHERS"),
628 get_identifier ("__gnat_others_value"),
629 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
631 all_others_decl
632 = create_var_decl (get_identifier ("ALL_OTHERS"),
633 get_identifier ("__gnat_all_others_value"),
634 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
636 /* Hooks to call when entering/leaving an exception handler. */
637 begin_handler_decl
638 = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
639 build_function_type (void_type_node,
640 tree_cons (NULL_TREE,
641 ptr_void_type_node,
642 endlink)),
643 NULL_TREE, false, true, true, NULL, Empty);
645 end_handler_decl
646 = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
647 build_function_type (void_type_node,
648 tree_cons (NULL_TREE,
649 ptr_void_type_node,
650 endlink)),
651 NULL_TREE, false, true, true, NULL, Empty);
653 /* If in no exception handlers mode, all raise statements are redirected to
654 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
655 this procedure will never be called in this mode. */
656 if (No_Exception_Handlers_Set ())
658 decl
659 = create_subprog_decl
660 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
661 build_function_type (void_type_node,
662 tree_cons (NULL_TREE,
663 build_pointer_type (char_type_node),
664 tree_cons (NULL_TREE,
665 integer_type_node,
666 endlink))),
667 NULL_TREE, false, true, true, NULL, Empty);
669 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
670 gnat_raise_decls[i] = decl;
672 else
673 /* Otherwise, make one decl for each exception reason. */
674 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
676 char name[17];
678 sprintf (name, "__gnat_rcheck_%.2d", i);
679 gnat_raise_decls[i]
680 = create_subprog_decl
681 (get_identifier (name), NULL_TREE,
682 build_function_type (void_type_node,
683 tree_cons (NULL_TREE,
684 build_pointer_type
685 (char_type_node),
686 tree_cons (NULL_TREE,
687 integer_type_node,
688 endlink))),
689 NULL_TREE, false, true, true, NULL, Empty);
692 /* Indicate that these never return. */
693 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
694 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
695 TREE_TYPE (raise_nodefer_decl)
696 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
697 TYPE_QUAL_VOLATILE);
699 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
701 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
702 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
703 TREE_TYPE (gnat_raise_decls[i])
704 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
705 TYPE_QUAL_VOLATILE);
708 /* setjmp returns an integer and has one operand, which is a pointer to
709 a jmpbuf. */
710 setjmp_decl
711 = create_subprog_decl
712 (get_identifier ("__builtin_setjmp"), NULL_TREE,
713 build_function_type (integer_type_node,
714 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
715 NULL_TREE, false, true, true, NULL, Empty);
717 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
718 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
720 /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
721 address. */
722 update_setjmp_buf_decl
723 = create_subprog_decl
724 (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
725 build_function_type (void_type_node,
726 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
727 NULL_TREE, false, true, true, NULL, Empty);
729 DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
730 DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;
732 main_identifier_node = get_identifier ("main");
735 /* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
736 finish constructing the record or union type. If REP_LEVEL is zero, this
737 record has no representation clause and so will be entirely laid out here.
738 If REP_LEVEL is one, this record has a representation clause and has been
739 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
740 this record is derived from a parent record and thus inherits its layout;
741 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
742 true, the record type is expected to be modified afterwards so it will
743 not be sent to the back-end for finalization. */
745 void
746 finish_record_type (tree record_type, tree fieldlist, int rep_level,
747 bool do_not_finalize)
749 enum tree_code code = TREE_CODE (record_type);
750 tree ada_size = bitsize_zero_node;
751 tree size = bitsize_zero_node;
752 bool var_size = false;
753 bool had_size = TYPE_SIZE (record_type) != 0;
754 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
755 tree field;
757 TYPE_FIELDS (record_type) = fieldlist;
758 TYPE_STUB_DECL (record_type)
759 = build_decl (TYPE_DECL, TYPE_NAME (record_type), record_type);
761 /* We don't need both the typedef name and the record name output in
762 the debugging information, since they are the same. */
763 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
765 /* Globally initialize the record first. If this is a rep'ed record,
766 that just means some initializations; otherwise, layout the record. */
767 if (rep_level > 0)
769 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
770 TYPE_MODE (record_type) = BLKmode;
772 if (!had_size_unit)
773 TYPE_SIZE_UNIT (record_type) = size_zero_node;
774 if (!had_size)
775 TYPE_SIZE (record_type) = bitsize_zero_node;
777 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
778 out just like a UNION_TYPE, since the size will be fixed. */
779 else if (code == QUAL_UNION_TYPE)
780 code = UNION_TYPE;
782 else
784 /* Ensure there isn't a size already set. There can be in an error
785 case where there is a rep clause but all fields have errors and
786 no longer have a position. */
787 TYPE_SIZE (record_type) = 0;
788 layout_type (record_type);
791 /* At this point, the position and size of each field is known. It was
792 either set before entry by a rep clause, or by laying out the type above.
794 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
795 to compute the Ada size; the GCC size and alignment (for rep'ed records
796 that are not padding types); and the mode (for rep'ed records). We also
797 clear the DECL_BIT_FIELD indication for the cases we know have not been
798 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
800 if (code == QUAL_UNION_TYPE)
801 fieldlist = nreverse (fieldlist);
803 for (field = fieldlist; field; field = TREE_CHAIN (field))
805 tree pos = bit_position (field);
807 tree type = TREE_TYPE (field);
808 tree this_size = DECL_SIZE (field);
809 tree this_ada_size = DECL_SIZE (field);
811 /* We need to make an XVE/XVU record if any field has variable size,
812 whether or not the record does. For example, if we have a union,
813 it may be that all fields, rounded up to the alignment, have the
814 same size, in which case we'll use that size. But the debug
815 output routines (except Dwarf2) won't be able to output the fields,
816 so we need to make the special record. */
817 if (TREE_CODE (this_size) != INTEGER_CST)
818 var_size = true;
820 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
821 || TREE_CODE (type) == QUAL_UNION_TYPE)
822 && !TYPE_IS_FAT_POINTER_P (type)
823 && !TYPE_CONTAINS_TEMPLATE_P (type)
824 && TYPE_ADA_SIZE (type))
825 this_ada_size = TYPE_ADA_SIZE (type);
827 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
828 if (DECL_BIT_FIELD (field) && !STRICT_ALIGNMENT
829 && value_factor_p (pos, BITS_PER_UNIT)
830 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
831 DECL_BIT_FIELD (field) = 0;
833 /* If we still have DECL_BIT_FIELD set at this point, we know the field
834 is technically not addressable. Except that it can actually be
835 addressed if the field is BLKmode and happens to be properly
836 aligned. */
837 DECL_NONADDRESSABLE_P (field)
838 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
840 if ((rep_level > 0) && !DECL_BIT_FIELD (field))
841 TYPE_ALIGN (record_type)
842 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
844 switch (code)
846 case UNION_TYPE:
847 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
848 size = size_binop (MAX_EXPR, size, this_size);
849 break;
851 case QUAL_UNION_TYPE:
852 ada_size
853 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
854 this_ada_size, ada_size);
855 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
856 this_size, size);
857 break;
859 case RECORD_TYPE:
860 /* Since we know here that all fields are sorted in order of
861 increasing bit position, the size of the record is one
862 higher than the ending bit of the last field processed
863 unless we have a rep clause, since in that case we might
864 have a field outside a QUAL_UNION_TYPE that has a higher ending
865 position. So use a MAX in that case. Also, if this field is a
866 QUAL_UNION_TYPE, we need to take into account the previous size in
867 the case of empty variants. */
868 ada_size
869 = merge_sizes (ada_size, pos, this_ada_size,
870 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
871 size
872 = merge_sizes (size, pos, this_size,
873 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
874 break;
876 default:
877 gcc_unreachable ();
881 if (code == QUAL_UNION_TYPE)
882 nreverse (fieldlist);
884 if (rep_level < 2)
886 /* If this is a padding record, we never want to make the size smaller
887 than what was specified in it, if any. */
888 if (TREE_CODE (record_type) == RECORD_TYPE
889 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
890 size = TYPE_SIZE (record_type);
892 /* Now set any of the values we've just computed that apply. */
893 if (!TYPE_IS_FAT_POINTER_P (record_type)
894 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
895 SET_TYPE_ADA_SIZE (record_type, ada_size);
897 if (rep_level > 0)
899 tree size_unit = had_size_unit
900 ? TYPE_SIZE_UNIT (record_type)
901 : convert (sizetype,
902 size_binop (CEIL_DIV_EXPR, size,
903 bitsize_unit_node));
904 unsigned int align = TYPE_ALIGN (record_type);
906 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
907 TYPE_SIZE_UNIT (record_type)
908 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
910 compute_record_mode (record_type);
914 if (!do_not_finalize)
915 rest_of_record_type_compilation (record_type);
/* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
   the debug information associated with it.  It need not be invoked
   directly in most cases since finish_record_type takes care of doing
   so, unless explicitly requested not to through DO_NOT_FINALIZE.  */

void
rest_of_record_type_compilation (tree record_type)
{
  tree fieldlist = TYPE_FIELDS (record_type);
  tree field;
  enum tree_code code = TREE_CODE (record_type);
  bool var_size = false;

  for (field = fieldlist; field; field = TREE_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
         whether or not the record does.  For example, if we have a union,
         it may be that all fields, rounded up to the alignment, have the
         same size, in which case we'll use that size.  But the debug
         output routines (except Dwarf2) won't be able to output the fields,
         so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
          /* If a field has a non-constant qualifier, the record will have
             variable size too.  */
          || (code == QUAL_UNION_TYPE
              && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
        {
          var_size = true;
          break;
        }
    }

  /* If this record is of variable size, rename it so that the
     debugger knows it is and make a new, parallel, record
     that tells the debugger how the record is laid out.  See
     exp_dbug.ads.  But don't do this for records that are padding
     since they confuse GDB.  */
  if (var_size
      && !(TREE_CODE (record_type) == RECORD_TYPE
           && TYPE_IS_PADDING_P (record_type)))
    {
      /* A QUAL_UNION_TYPE is encoded as a plain UNION_TYPE (XVU) for the
         debugger; other codes keep their tree code (XVE).  */
      tree new_record_type
        = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
                     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_NAME (record_type);
      tree orig_id
        = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
           : orig_name);
      tree new_id
        = concat_id_with_name (orig_id,
                               TREE_CODE (record_type) == QUAL_UNION_TYPE
                               ? "XVU" : "XVE");
      tree last_pos = bitsize_zero_node;
      tree old_field;
      tree prev_old_field = 0;

      TYPE_NAME (new_record_type) = new_id;
      TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
      TYPE_STUB_DECL (new_record_type)
        = build_decl (TYPE_DECL, new_id, new_record_type);
      DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
        = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
        = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      /* Now scan all the fields, replacing each field with a new
         field corresponding to the new encoding.  */
      for (old_field = TYPE_FIELDS (record_type); old_field;
           old_field = TREE_CHAIN (old_field))
        {
          tree field_type = TREE_TYPE (old_field);
          tree field_name = DECL_NAME (old_field);
          tree new_field;
          tree curpos = bit_position (old_field);
          bool var = false;
          unsigned int align = 0;
          tree pos;

          /* See how the position was modified from the last position.

             There are two basic cases we support: a value was added
             to the last position or the last position was rounded to
             a boundary and they something was added.  Check for the
             first case first.  If not, see if there is any evidence
             of rounding.  If so, round the last position and try
             again.

             If this is a union, the position can be taken as zero.  */

          if (TREE_CODE (new_record_type) == UNION_TYPE)
            pos = bitsize_zero_node, align = 0;
          else
            pos = compute_related_constant (curpos, last_pos);

          if (!pos && TREE_CODE (curpos) == MULT_EXPR
              && host_integerp (TREE_OPERAND (curpos, 1), 1))
            {
              tree offset = TREE_OPERAND (curpos, 0);
              align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);

              /* Strip off any conversions.  */
              while (TREE_CODE (offset) == NON_LVALUE_EXPR
                     || TREE_CODE (offset) == NOP_EXPR
                     || TREE_CODE (offset) == CONVERT_EXPR)
                offset = TREE_OPERAND (offset, 0);

              /* An offset which is a bitwise AND with a negative power of 2
                 means an alignment corresponding to this power of 2.  */
              if (TREE_CODE (offset) == BIT_AND_EXPR
                  && host_integerp (TREE_OPERAND (offset, 1), 0)
                  && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
                {
                  unsigned int pow
                    = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
                  if (exact_log2 (pow) > 0)
                    align *= pow;
                }

              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }
          else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
                   && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
                   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
                   && host_integerp (TREE_OPERAND
                                     (TREE_OPERAND (curpos, 0), 1),
                                     1))
            {
              align
                = tree_low_cst
                  (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }
          else if (potential_alignment_gap (prev_old_field, old_field,
                                            pos))
            {
              align = TYPE_ALIGN (field_type);
              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }

          /* If we can't compute a position, set it to zero.

             ??? We really should abort here, but it's too much work
             to get this correct for all cases.  */

          if (!pos)
            pos = bitsize_zero_node;

          /* See if this type is variable-sized and make a pointer type
             and indicate the indirection if so.  Beware that the debug
             back-end may adjust the position computed above according
             to the alignment of the field type, i.e. the pointer type
             in this case, if we don't preventively counter that.  */
          if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
            {
              field_type = build_pointer_type (field_type);
              if (align != 0 && TYPE_ALIGN (field_type) > align)
                {
                  field_type = copy_node (field_type);
                  TYPE_ALIGN (field_type) = align;
                }
              var = true;
            }

          /* Make a new field name, if necessary.  The XVL/XVA/XV%cN
             suffixes encode the indirection and alignment for the
             debugger; see exp_dbug.ads.  */
          if (var || align != 0)
            {
              char suffix[16];

              if (align != 0)
                sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
                         align / BITS_PER_UNIT);
              else
                strcpy (suffix, "XVL");

              field_name = concat_id_with_name (field_name, suffix);
            }

          new_field = create_field_decl (field_name, field_type,
                                         new_record_type, 0,
                                         DECL_SIZE (old_field), pos, 0);
          TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
          TYPE_FIELDS (new_record_type) = new_field;

          /* If old_field is a QUAL_UNION_TYPE, take its size as being
             zero.  The only time it's not the last field of the record
             is when there are other components at fixed positions after
             it (meaning there was a rep clause for every field) and we
             want to be able to encode them.  */
          last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
                                 (TREE_CODE (TREE_TYPE (old_field))
                                  == QUAL_UNION_TYPE)
                                 ? bitsize_zero_node
                                 : DECL_SIZE (old_field));
          prev_old_field = old_field;
        }

      /* Fields were consed in reverse; restore declaration order.  */
      TYPE_FIELDS (new_record_type)
        = nreverse (TYPE_FIELDS (new_record_type));

      rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
    }

  rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
}
1128 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1129 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1130 if this represents a QUAL_UNION_TYPE in which case we must look for
1131 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1132 is nonzero, we must take the MAX of the end position of this field
1133 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1135 We return an expression for the size. */
1137 static tree
1138 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1139 bool has_rep)
1141 tree type = TREE_TYPE (last_size);
1142 tree new;
1144 if (!special || TREE_CODE (size) != COND_EXPR)
1146 new = size_binop (PLUS_EXPR, first_bit, size);
1147 if (has_rep)
1148 new = size_binop (MAX_EXPR, last_size, new);
1151 else
1152 new = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1153 integer_zerop (TREE_OPERAND (size, 1))
1154 ? last_size : merge_sizes (last_size, first_bit,
1155 TREE_OPERAND (size, 1),
1156 1, has_rep),
1157 integer_zerop (TREE_OPERAND (size, 2))
1158 ? last_size : merge_sizes (last_size, first_bit,
1159 TREE_OPERAND (size, 2),
1160 1, has_rep));
1162 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1163 when fed through substitute_in_expr) into thinking that a constant
1164 size is not constant. */
1165 while (TREE_CODE (new) == NON_LVALUE_EXPR)
1166 new = TREE_OPERAND (new, 0);
1168 return new;
1171 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1172 related by the addition of a constant. Return that constant if so. */
1174 static tree
1175 compute_related_constant (tree op0, tree op1)
1177 tree op0_var, op1_var;
1178 tree op0_con = split_plus (op0, &op0_var);
1179 tree op1_con = split_plus (op1, &op1_var);
1180 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1182 if (operand_equal_p (op0_var, op1_var, 0))
1183 return result;
1184 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1185 return result;
1186 else
1187 return 0;
1190 /* Utility function of above to split a tree OP which may be a sum, into a
1191 constant part, which is returned, and a variable part, which is stored
1192 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1193 bitsizetype. */
1195 static tree
1196 split_plus (tree in, tree *pvar)
1198 /* Strip NOPS in order to ease the tree traversal and maximize the
1199 potential for constant or plus/minus discovery. We need to be careful
1200 to always return and set *pvar to bitsizetype trees, but it's worth
1201 the effort. */
1202 STRIP_NOPS (in);
1204 *pvar = convert (bitsizetype, in);
1206 if (TREE_CODE (in) == INTEGER_CST)
1208 *pvar = bitsize_zero_node;
1209 return convert (bitsizetype, in);
1211 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1213 tree lhs_var, rhs_var;
1214 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1215 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1217 if (lhs_var == TREE_OPERAND (in, 0)
1218 && rhs_var == TREE_OPERAND (in, 1))
1219 return bitsize_zero_node;
1221 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1222 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1224 else
1225 return bitsize_zero_node;
1228 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1229 subprogram. If it is void_type_node, then we are dealing with a procedure,
1230 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1231 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1232 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1233 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1234 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1235 RETURNS_WITH_DSP is nonzero if the function is to return with a
1236 depressed stack pointer. RETURNS_BY_TARGET_PTR is true if the function
1237 is to be passed (as its first parameter) the address of the place to copy
1238 its result. */
1240 tree
1241 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1242 bool returns_unconstrained, bool returns_by_ref,
1243 bool returns_with_dsp, bool returns_by_target_ptr)
1245 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1246 the subprogram formal parameters. This list is generated by traversing the
1247 input list of PARM_DECL nodes. */
1248 tree param_type_list = NULL;
1249 tree param_decl;
1250 tree type;
1252 for (param_decl = param_decl_list; param_decl;
1253 param_decl = TREE_CHAIN (param_decl))
1254 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1255 param_type_list);
1257 /* The list of the function parameter types has to be terminated by the void
1258 type to signal to the back-end that we are not dealing with a variable
1259 parameter subprogram, but that the subprogram has a fixed number of
1260 parameters. */
1261 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1263 /* The list of argument types has been created in reverse
1264 so nreverse it. */
1265 param_type_list = nreverse (param_type_list);
1267 type = build_function_type (return_type, param_type_list);
1269 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1270 or the new type should, make a copy of TYPE. Likewise for
1271 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1272 if (TYPE_CI_CO_LIST (type) || cico_list
1273 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1274 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1275 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1276 type = copy_type (type);
1278 TYPE_CI_CO_LIST (type) = cico_list;
1279 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1280 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1281 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1282 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1283 return type;
1286 /* Return a copy of TYPE but safe to modify in any way. */
1288 tree
1289 copy_type (tree type)
1291 tree new = copy_node (type);
1293 /* copy_node clears this field instead of copying it, because it is
1294 aliased with TREE_CHAIN. */
1295 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1297 TYPE_POINTER_TO (new) = 0;
1298 TYPE_REFERENCE_TO (new) = 0;
1299 TYPE_MAIN_VARIANT (new) = new;
1300 TYPE_NEXT_VARIANT (new) = 0;
1302 return new;
1305 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1306 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position of
1307 the decl. */
1309 tree
1310 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1312 /* First build a type for the desired range. */
1313 tree type = build_index_2_type (min, max);
1315 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1316 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1317 is set, but not to INDEX, make a copy of this type with the requested
1318 index type. Note that we have no way of sharing these types, but that's
1319 only a small hole. */
1320 if (TYPE_INDEX_TYPE (type) == index)
1321 return type;
1322 else if (TYPE_INDEX_TYPE (type))
1323 type = copy_type (type);
1325 SET_TYPE_INDEX_TYPE (type, index);
1326 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1327 return type;
1330 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1331 string) and TYPE is a ..._TYPE node giving its data type.
1332 ARTIFICIAL_P is true if this is a declaration that was generated
1333 by the compiler. DEBUG_INFO_P is true if we need to write debugging
1334 information about this type. GNAT_NODE is used for the position of
1335 the decl. */
1337 tree
1338 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1339 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1341 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1342 enum tree_code code = TREE_CODE (type);
1344 DECL_ARTIFICIAL (type_decl) = artificial_p;
1346 if (!TYPE_IS_DUMMY_P (type))
1347 gnat_pushdecl (type_decl, gnat_node);
1349 process_attributes (type_decl, attr_list);
1351 /* Pass type declaration information to the debugger unless this is an
1352 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1353 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately, or
1354 type for which debugging information was not requested. */
1355 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
1356 DECL_IGNORED_P (type_decl) = 1;
1357 else if (code != ENUMERAL_TYPE
1358 && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
1359 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1360 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1361 rest_of_type_decl_compilation (type_decl);
1363 return type_decl;
/* Helper for create_var_decl and create_true_var_decl.  Returns a GCC VAR_DECL
   or CONST_DECL node.

   VAR_NAME gives the name of the variable.  ASM_NAME is its assembler name
   (if provided).  TYPE is its data type (a GCC ..._TYPE node).  VAR_INIT is
   the GCC tree for an optional initial expression; NULL_TREE if none.

   CONST_FLAG is true if this variable is constant, in which case we might
   return a CONST_DECL node unless CONST_DECL_ALLOWED_FLAG is false.

   PUBLIC_FLAG is true if this definition is to be made visible outside of
   the current compilation unit.  This flag should be set when processing the
   variable definitions in a package specification.  EXTERN_FLAG is nonzero
   when processing an external variable declaration (as opposed to a
   definition: no storage is to be allocated for the variable here).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.

   GNAT_NODE is used for the position of the decl.  */

static tree
create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
                   bool const_flag, bool const_decl_allowed_flag,
                   bool public_flag, bool extern_flag, bool static_flag,
                   struct attrib *attr_list, Node_Id gnat_node)
{
  /* Whether the initializer is a usable constant: at the global/static
     level it must be valid for static initialization, otherwise plain
     TREE_CONSTANT suffices.  */
  bool init_const
    = (var_init != 0
       && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
       && (global_bindings_p () || static_flag
           ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
           : TREE_CONSTANT (var_init)));

  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
     case the initializer may be used in-lieu of the DECL node (as done in
     Identifier_to_gnu).  This is useful to prevent the need of elaboration
     code when an identifier for which such a decl is made is in turn used as
     an initializer.  We used to rely on CONST vs VAR_DECL for this purpose,
     but extra constraints apply to this choice (see below) and are not
     relevant to the distinction we wish to make.  */
  bool constant_p = const_flag && init_const;

  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
     and may be used for scalars in general but not for aggregates.  */
  tree var_decl
    = build_decl ((constant_p && const_decl_allowed_flag
                   && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
                  var_name, type);

  /* If this is external, throw away any initializations (they will be done
     elsewhere) unless this is a constant for which we would like to remain
     able to get the initializer.  If we are defining a global here, leave a
     constant initialization and save any variable elaborations for the
     elaboration routine.  If we are just annotating types, throw away the
     initialization if it isn't a constant.  */
  if ((extern_flag && !constant_p)
      || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
    var_init = NULL_TREE;

  /* At the global level, an initializer requiring code to be generated
     produces elaboration statements.  Check that such statements are allowed,
     that is, not violating a No_Elaboration_Code restriction.  */
  if (global_bindings_p () && var_init != 0 && ! init_const)
    Check_Elaboration_Code_Allowed (gnat_node);

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;
  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = constant_p;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* If it's public and not external, always allocate storage for it.
     At the global binding level we need to allocate static storage for the
     variable if and only if it's not external.  If we are not at the top level
     we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl)
    = public_flag || (global_bindings_p () ? !extern_flag : static_flag);

  if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  /* A volatile decl must be treated as addressable.  */
  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  if (TREE_CODE (var_decl) != CONST_DECL)
    rest_of_decl_compilation (var_decl, global_bindings_p (), 0);
  else
    expand_decl (var_decl);

  return var_decl;
}
1474 /* Wrapper around create_var_decl_1 for cases where we don't care whether
1475 a VAR or a CONST decl node is created. */
1477 tree
1478 create_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
1479 bool const_flag, bool public_flag, bool extern_flag,
1480 bool static_flag, struct attrib *attr_list,
1481 Node_Id gnat_node)
1483 return create_var_decl_1 (var_name, asm_name, type, var_init,
1484 const_flag, true,
1485 public_flag, extern_flag, static_flag,
1486 attr_list, gnat_node);
1489 /* Wrapper around create_var_decl_1 for cases where a VAR_DECL node is
1490 required. The primary intent is for DECL_CONST_CORRESPONDING_VARs, which
1491 must be VAR_DECLs and on which we want TREE_READONLY set to have them
1492 possibly assigned to a readonly data section. */
1494 tree
1495 create_true_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
1496 bool const_flag, bool public_flag, bool extern_flag,
1497 bool static_flag, struct attrib *attr_list,
1498 Node_Id gnat_node)
1500 return create_var_decl_1 (var_name, asm_name, type, var_init,
1501 const_flag, false,
1502 public_flag, extern_flag, static_flag,
1503 attr_list, gnat_node);
/* Returns a FIELD_DECL node.  FIELD_NAME the field name, FIELD_TYPE is its
   type, and RECORD_TYPE is the type of the parent.  PACKED is nonzero if
   this field is in a record type with a "pragma pack".  If SIZE is nonzero
   it is the specified size for this field.  If POS is nonzero, it is the bit
   position.  If ADDRESSABLE is nonzero, it means we are allowed to take
   the address of this field for aliasing purposes.  If it is negative, we
   should not make a bitfield, which is used by make_aligning_type.  */

tree
create_field_decl (tree field_name, tree field_type, tree record_type,
                   int packed, tree size, tree pos, int addressable)
{
  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.  */
  if (packed && TYPE_MODE (field_type) == BLKmode)
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);

      /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
         byte.  */
      if (TREE_CODE (size) == INTEGER_CST
          && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
        size = round_up (size, BITS_PER_UNIT);
    }

  /* If we may, according to ADDRESSABLE, make a bitfield if a size is
     specified for two reasons: first if the size differs from the natural
     size.  Second, if the alignment is insufficient.  There are a number of
     ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     because no such entity requiring bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (addressable >= 0
      && size
      && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
          || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
          || packed
          || (TYPE_ALIGN (record_type) != 0
              && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (!packed && !pos)
        DECL_ALIGN (field_decl)
          = (TYPE_ALIGN (record_type) != 0
             ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
             : TYPE_ALIGN (field_type));
    }

  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;

  /* A bitfield needs no extra alignment; a packed non-BLKmode field needs
     at least byte alignment; otherwise use the type's alignment.  */
  DECL_ALIGN (field_decl)
    = MAX (DECL_ALIGN (field_decl),
           DECL_BIT_FIELD (field_decl) ? 1
           : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
           : TYPE_ALIGN (field_type));

  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
         This is the lowest-order bit set in POS, but no more than
         the alignment of the record, if one is specified.  Note
         that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
        known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
        known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
          && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
        known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
                             host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
                             : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
                    &DECL_FIELD_BIT_OFFSET (field_decl),
                    DECL_OFFSET_ALIGN (field_decl), pos);

      DECL_HAS_REP_P (field_decl) = 1;
    }

  /* In addition to what our caller says, claim the field is addressable if we
     know that its type is not suitable.

     The field may also be "technically" nonaddressable, meaning that even if
     we attempt to take the field's address we will actually get the address
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
     value we have at this point is not accurate enough, so we don't account
     for this here and let finish_record_type decide.  */
  if (!type_for_nonaliased_component_p (field_type))
    addressable = 1;

  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}
1631 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1632 PARAM_TYPE is its type. READONLY is true if the parameter is
1633 readonly (either an IN parameter or an address of a pass-by-ref
1634 parameter). */
1636 tree
1637 create_param_decl (tree param_name, tree param_type, bool readonly)
1639 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1641 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1642 lead to various ABI violations. */
1643 if (targetm.calls.promote_prototypes (param_type)
1644 && (TREE_CODE (param_type) == INTEGER_TYPE
1645 || TREE_CODE (param_type) == ENUMERAL_TYPE)
1646 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1648 /* We have to be careful about biased types here. Make a subtype
1649 of integer_type_node with the proper biasing. */
1650 if (TREE_CODE (param_type) == INTEGER_TYPE
1651 && TYPE_BIASED_REPRESENTATION_P (param_type))
1653 param_type
1654 = copy_type (build_range_type (integer_type_node,
1655 TYPE_MIN_VALUE (param_type),
1656 TYPE_MAX_VALUE (param_type)));
1658 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1660 else
1661 param_type = integer_type_node;
1664 DECL_ARG_TYPE (param_decl) = param_type;
1665 TREE_READONLY (param_decl) = readonly;
1666 return param_decl;
/* Given a DECL and ATTR_LIST, process the listed attributes.  */

void
process_attributes (tree decl, struct attrib *attr_list)
{
  for (; attr_list; attr_list = attr_list->next)
    switch (attr_list->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
        /* Forward a machine attribute to the generic attribute
           machinery, applying it to the type in place.  */
        decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
                                           NULL_TREE),
                         ATTR_FLAG_TYPE_IN_PLACE);
        break;

      case ATTR_LINK_ALIAS:
        /* Only emit an alias for an actual definition, not for an
           external declaration.  */
        if (! DECL_EXTERNAL (decl))
          {
            TREE_STATIC (decl) = 1;
            assemble_alias (decl, attr_list->name);
          }
        break;

      case ATTR_WEAK_EXTERNAL:
        if (SUPPORTS_WEAK)
          declare_weak (decl);
        else
          post_error ("?weak declarations not supported on this target",
                      attr_list->error_point);
        break;

      case ATTR_LINK_SECTION:
        if (targetm.have_named_sections)
          {
            DECL_SECTION_NAME (decl)
              = build_string (IDENTIFIER_LENGTH (attr_list->name),
                              IDENTIFIER_POINTER (attr_list->name));
            /* A decl placed in an explicit section cannot be COMMON.  */
            DECL_COMMON (decl) = 0;
          }
        else
          post_error ("?section attributes are not supported for this target",
                      attr_list->error_point);
        break;

      case ATTR_LINK_CONSTRUCTOR:
        DECL_STATIC_CONSTRUCTOR (decl) = 1;
        TREE_USED (decl) = 1;
        break;

      case ATTR_LINK_DESTRUCTOR:
        DECL_STATIC_DESTRUCTOR (decl) = 1;
        TREE_USED (decl) = 1;
        break;
      }
}
1724 /* Record a global renaming pointer. */
1726 void
1727 record_global_renaming_pointer (tree decl)
1729 gcc_assert (DECL_RENAMED_OBJECT (decl));
1730 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1733 /* Invalidate the global renaming pointers. */
1735 void
1736 invalidate_global_renaming_pointers (void)
1738 unsigned int i;
1739 tree iter;
1741 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1742 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1744 VEC_free (tree, gc, global_renaming_pointers);
1747 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1748 a power of 2. */
1750 bool
1751 value_factor_p (tree value, HOST_WIDE_INT factor)
1753 if (host_integerp (value, 1))
1754 return tree_low_cst (value, 1) % factor == 0;
1756 if (TREE_CODE (value) == MULT_EXPR)
1757 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1758 || value_factor_p (TREE_OPERAND (value, 1), factor));
1760 return 0;
/* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these 2 fields are laid out in such a way that no gap
   exist between the end of PREV_FIELD and the beginning of CURR_FIELD.  OFFSET
   is the distance in bits between the end of PREV_FIELD and the starting
   position of CURR_FIELD.  It is ignored if null.

   Note that this is a conservative predicate: when nothing can be proven
   either way, it answers true ("there may be a gap").  */

static bool
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap */
  if (!prev_field)
    return false;

  /* If the previous field is a union type, then return False: The only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return false;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not null.  */
  if (offset && host_integerp (offset, 1))
    return !integer_zerop (offset);

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not multiple of the current field alignment.  */
  if (host_integerp (DECL_SIZE (prev_field), 1)
      && host_integerp (bit_position (prev_field), 1))
    return ((tree_low_cst (bit_position (prev_field), 1)
             + tree_low_cst (DECL_SIZE (prev_field), 1))
            % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return false;

  /* Fallback, return that there may be a potential gap */
  return true;
}
1809 /* Returns a LABEL_DECL node for LABEL_NAME. */
1811 tree
1812 create_label_decl (tree label_name)
1814 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1816 DECL_CONTEXT (label_decl) = current_function_decl;
1817 DECL_MODE (label_decl) = VOIDmode;
1818 DECL_SOURCE_LOCATION (label_decl) = input_location;
1820 return label_decl;
/* Returns a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the TREE_CHAIN field).

   INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
   appropriate fields in the FUNCTION_DECL.  GNAT_NODE gives the location.  */

tree
create_subprog_decl (tree subprog_name, tree asm_name,
                     tree subprog_type, tree param_decl_list, bool inline_flag,
                     bool public_flag, bool extern_flag,
                     struct attrib *attr_list, Node_Id gnat_node)
{
  tree return_type = TREE_TYPE (subprog_type);
  tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);

  /* If this is a function nested inside an inlined external function, it
     means we aren't going to compile the outer function unless it is
     actually inlined, so do the same for us.  */
  if (current_function_decl && DECL_INLINE (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))
    extern_flag = true;

  DECL_EXTERNAL (subprog_decl) = extern_flag;
  TREE_PUBLIC (subprog_decl) = public_flag;
  TREE_STATIC (subprog_decl) = 1;
  /* Purity and volatility of the subprogram are carried by its type.  */
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;
  /* The RESULT_DECL is anonymous and compiler-generated.  */
  DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
  DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
  DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;

  /* TREE_ADDRESSABLE is set on the result type to request the use of the
     target by-reference return mechanism.  This is not supported all the
     way down to RTL expansion with GCC 4, which ICEs on temporary creation
     attempts with such a type and expects DECL_BY_REFERENCE to be set on
     the RESULT_DECL instead - see gnat_genericize for more details.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
    {
      tree result_decl = DECL_RESULT (subprog_decl);

      TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
      DECL_BY_REFERENCE (result_decl) = 1;
    }

  if (inline_flag)
    DECL_DECLARED_INLINE_P (subprog_decl) = 1;

  if (asm_name)
    SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

  process_attributes (subprog_decl, attr_list);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
}
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

void
begin_subprog_body (tree subprog_decl)
{
  tree param_decl;

  /* Make SUBPROG_DECL the function being compiled and report it.  */
  current_function_decl = subprog_decl;
  announce_function (subprog_decl);

  /* Enter a new binding level and show that all the parameters belong to
     this function.  */
  gnat_pushlevel ();
  for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
       param_decl = TREE_CHAIN (param_decl))
    DECL_CONTEXT (param_decl) = subprog_decl;

  make_decl_rtl (subprog_decl);

  /* We handle pending sizes via the elaboration of types, so we don't need to
     save them.  This causes them to be marked as part of the outer function
     and then discarded.  */
  get_pending_sizes ();
}
1916 /* Helper for the genericization callback. Return a dereference of VAL
1917 if it is of a reference type. */
1919 static tree
1920 convert_from_reference (tree val)
1922 tree value_type, ref;
1924 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
1925 return val;
1927 value_type = TREE_TYPE (TREE_TYPE (val));
1928 ref = build1 (INDIRECT_REF, value_type, val);
1930 /* See if what we reference is CONST or VOLATILE, which requires
1931 looking into array types to get to the component type. */
1933 while (TREE_CODE (value_type) == ARRAY_TYPE)
1934 value_type = TREE_TYPE (value_type);
1936 TREE_READONLY (ref)
1937 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
1938 TREE_THIS_VOLATILE (ref)
1939 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
1941 TREE_SIDE_EFFECTS (ref)
1942 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
1944 return ref;
1947 /* Helper for the genericization callback. Returns true if T denotes
1948 a RESULT_DECL with DECL_BY_REFERENCE set. */
1950 static inline bool
1951 is_byref_result (tree t)
1953 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
/* Tree walking callback for gnat_genericize.  Currently ...

   o Adjust references to the function's DECL_RESULT if it is marked
     DECL_BY_REFERENCE and so has had its type turned into a reference
     type at the end of the function compilation.

   STMT_P, WALK_SUBTREES and DATA follow the walk_tree callback protocol;
   DATA is the pointer set used to avoid revisiting shared trees.  */

static tree
gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  /* This implementation is modeled after what the C++ front-end is
     doing, basis of the downstream passes behavior.  */

  tree stmt = *stmt_p;
  struct pointer_set_t *p_set = (struct pointer_set_t*) data;

  /* If we have a direct mention of the result decl, dereference.  */
  if (is_byref_result (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Otherwise, no need to walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  /* If we are taking the address of what now is a reference, just get the
     reference value.  */
  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_byref_result (TREE_OPERAND (stmt, 0)))
    {
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }

  /* Don't dereference an by-reference RESULT_DECL inside a RETURN_EXPR.  */
  else if (TREE_CODE (stmt) == RETURN_EXPR
           && TREE_OPERAND (stmt, 0)
           && is_byref_result (TREE_OPERAND (stmt, 0)))
    *walk_subtrees = 0;

  /* Don't look inside trees that cannot embed references of interest.  */
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Record the (possibly replaced) node so the pointer_set_contains check
     above skips it on any later visit.  */
  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}
/* Perform lowering of Ada trees to GENERIC.  In particular:

   o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
     and adjust all the references to this decl accordingly.

   FNDECL is the FUNCTION_DECL whose saved tree is rewritten in place.  */

static void
gnat_genericize (tree fndecl)
{
  /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
     was handled by simply setting TREE_ADDRESSABLE on the result type.
     Everything required to actually pass by invisible ref using the target
     mechanism (e.g. extra parameter) was handled at RTL expansion time.

     This doesn't work with GCC 4 any more for several reasons.  First, the
     gimplification process might need the creation of temporaries of this
     type, and the gimplifier ICEs on such attempts.  Second, the middle-end
     now relies on a different attribute for such cases (DECL_BY_REFERENCE on
     RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
     be explicitly accounted for by the front-end in the function body.

     We achieve the complete transformation in two steps:

     1/ create_subprog_decl performs early attribute tweaks: it clears
        TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
        the result decl.  The former ensures that the bit isn't set in the GCC
        tree saved for the function, so prevents ICEs on temporary creation.
        The latter we use here to trigger the rest of the processing.

     2/ This function performs the type transformation on the result decl
        and adjusts all the references to this decl from the function body
        accordingly.

     Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
     strategy, which escapes the gimplifier temporary creation issues by
     creating its own temporaries using TARGET_EXPR nodes.  Our way relies
     on simple specific support code in aggregate_value_p to look at the
     target function result decl explicitly.  */

  struct pointer_set_t *p_set;
  tree decl_result = DECL_RESULT (fndecl);

  /* Nothing to do unless create_subprog_decl flagged the result decl.  */
  if (!DECL_BY_REFERENCE (decl_result))
    return;

  /* Make the DECL_RESULT explicitly by-reference and adjust all the
     occurrences in the function body using the common tree-walking facility.
     We want to see every occurrence of the result decl to adjust the
     referencing tree, so need to use our own pointer set to control which
     trees should be visited again or not.  */

  p_set = pointer_set_create ();

  TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
  TREE_ADDRESSABLE (decl_result) = 0;
  relayout_decl (decl_result);

  walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);

  pointer_set_destroy (p_set);
}
/* Finish the definition of the current subprogram and compile it all the way
   to assembler language output.  BODY is the tree corresponding to
   the subprogram.  */

void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Mark the BLOCK for this level as being for this function and pop the
     level.  Since the vars in it are the parameters, clear them.  */
  BLOCK_VARS (current_binding_level->block) = 0;
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* Deal with inline.  If declared inline or we should default to inline,
     set the flag in the decl.  */
  DECL_INLINE (fndecl)
    = DECL_DECLARED_INLINE_P (fndecl) || flag_inline_trees == 2;

  /* We handle pending sizes via the elaboration of types, so we don't
     need to save them.  */
  get_pending_sizes ();

  /* Mark the RESULT_DECL as being in this subprogram. */
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  DECL_SAVED_TREE (fndecl) = body;

  /* Restore the enclosing function as the current one.  */
  current_function_decl = DECL_CONTEXT (fndecl);
  set_cfun (NULL);

  /* We cannot track the location of errors past this point.  */
  error_gnat_node = Empty;

  /* If we're only annotating types, don't actually compile this function.  */
  if (type_annotate_only)
    return;

  /* Perform the required pre-gimplification transformations on the tree.  */
  gnat_genericize (fndecl);

  /* We do different things for nested and non-nested functions.
     ??? This should be in cgraph.  */
  if (!DECL_CONTEXT (fndecl))
    {
      gnat_gimplify_function (fndecl);
      cgraph_finalize_function (fndecl, false);
    }
  else
    /* Register this function with cgraph just far enough to get it
       added to our parent's nested function list.  */
    (void) cgraph_node (fndecl);
}
2128 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
2130 static void
2131 gnat_gimplify_function (tree fndecl)
2133 struct cgraph_node *cgn;
2135 dump_function (TDI_original, fndecl);
2136 gimplify_function_tree (fndecl);
2137 dump_function (TDI_generic, fndecl);
2139 /* Convert all nested functions to GIMPLE now. We do things in this order
2140 so that items like VLA sizes are expanded properly in the context of the
2141 correct function. */
2142 cgn = cgraph_node (fndecl);
2143 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
2144 gnat_gimplify_function (cgn->decl);
/* Register DECL in the binding machinery and return it.  Presumably this
   is GNAT's hook for recording builtin function decls (judging by the
   name) -- confirm against the langhook that points here.  */

tree
gnat_builtin_function (tree decl)
{
  /* Add DECL to the current binding level; Empty means there is no
     corresponding GNAT source node.  */
  gnat_pushdecl (decl, Empty);
  return decl;
}
2155 /* Handle a "const" attribute; arguments as in
2156 struct attribute_spec.handler. */
2158 static tree
2159 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
2160 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
2161 bool *no_add_attrs)
2163 if (TREE_CODE (*node) == FUNCTION_DECL)
2164 TREE_READONLY (*node) = 1;
2165 else
2166 *no_add_attrs = true;
2168 return NULL_TREE;
2171 /* Handle a "nothrow" attribute; arguments as in
2172 struct attribute_spec.handler. */
2174 static tree
2175 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
2176 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
2177 bool *no_add_attrs)
2179 if (TREE_CODE (*node) == FUNCTION_DECL)
2180 TREE_NOTHROW (*node) = 1;
2181 else
2182 *no_add_attrs = true;
2184 return NULL_TREE;
2187 /* Return an integer type with the number of bits of precision given by
2188 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2189 it is a signed type. */
2191 tree
2192 gnat_type_for_size (unsigned precision, int unsignedp)
2194 tree t;
2195 char type_name[20];
2197 if (precision <= 2 * MAX_BITS_PER_WORD
2198 && signed_and_unsigned_types[precision][unsignedp])
2199 return signed_and_unsigned_types[precision][unsignedp];
2201 if (unsignedp)
2202 t = make_unsigned_type (precision);
2203 else
2204 t = make_signed_type (precision);
2206 if (precision <= 2 * MAX_BITS_PER_WORD)
2207 signed_and_unsigned_types[precision][unsignedp] = t;
2209 if (!TYPE_NAME (t))
2211 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2212 TYPE_NAME (t) = get_identifier (type_name);
2215 return t;
2218 /* Likewise for floating-point types. */
2220 static tree
2221 float_type_for_precision (int precision, enum machine_mode mode)
2223 tree t;
2224 char type_name[20];
2226 if (float_types[(int) mode])
2227 return float_types[(int) mode];
2229 float_types[(int) mode] = t = make_node (REAL_TYPE);
2230 TYPE_PRECISION (t) = precision;
2231 layout_type (t);
2233 gcc_assert (TYPE_MODE (t) == mode);
2234 if (!TYPE_NAME (t))
2236 sprintf (type_name, "FLOAT_%d", precision);
2237 TYPE_NAME (t) = get_identifier (type_name);
2240 return t;
2243 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2244 an unsigned type; otherwise a signed type is returned. */
2246 tree
2247 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2249 if (mode == BLKmode)
2250 return NULL_TREE;
2251 else if (mode == VOIDmode)
2252 return void_type_node;
2253 else if (COMPLEX_MODE_P (mode))
2254 return NULL_TREE;
2255 else if (SCALAR_FLOAT_MODE_P (mode))
2256 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2257 else if (SCALAR_INT_MODE_P (mode))
2258 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2259 else
2260 return NULL_TREE;
2263 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2265 tree
2266 gnat_unsigned_type (tree type_node)
2268 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2270 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2272 type = copy_node (type);
2273 TREE_TYPE (type) = type_node;
2275 else if (TREE_TYPE (type_node)
2276 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2277 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2279 type = copy_node (type);
2280 TREE_TYPE (type) = TREE_TYPE (type_node);
2283 return type;
2286 /* Return the signed version of a TYPE_NODE, a scalar type. */
2288 tree
2289 gnat_signed_type (tree type_node)
2291 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2293 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2295 type = copy_node (type);
2296 TREE_TYPE (type) = type_node;
2298 else if (TREE_TYPE (type_node)
2299 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2300 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2302 type = copy_node (type);
2303 TREE_TYPE (type) = TREE_TYPE (type_node);
2306 return type;
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      return exp;

    case tcc_vl_exp:
      /* Rebuild a call with each argument maximized/minimized.  */
      if (code == CALL_EXPR)
	{
	  tree *argarray;
	  int i, n = call_expr_nargs (exp);
	  gcc_assert (n > 0);

	  argarray = (tree *) alloca (n * sizeof (tree));
	  for (i = 0; i < n; i++)
	    argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
	  return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
	}
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (!CONTAINS_PLACEHOLDER_P (exp))
	return exp;

      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);

    case tcc_comparison:
      return max_p ? size_one_node : size_zero_node;

    case tcc_unary:
    case tcc_binary:
    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == NON_LVALUE_EXPR)
	    return max_size (TREE_OPERAND (exp, 0), max_p);
	  else
	    /* Negation flips which extreme of the operand we want.  */
	    return
	      fold_build1 (code, type,
			   max_size (TREE_OPERAND (exp, 0),
				     code == NEGATE_EXPR ? !max_p : max_p));

	case 2:
	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
	     may provide a tighter bound on max_size.  */
	  if (code == MINUS_EXPR
	      && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
	    {
	      tree lhs = fold_build2 (MINUS_EXPR, type,
				      TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
				      TREE_OPERAND (exp, 1));
	      tree rhs = fold_build2 (MINUS_EXPR, type,
				      TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
				      TREE_OPERAND (exp, 1));
	      return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
				  max_size (lhs, max_p),
				  max_size (rhs, max_p));
	    }

	  {
	    /* For subtraction, the extreme of the difference pairs the
	       chosen extreme of the LHS with the opposite one of the RHS.  */
	    tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	    tree rhs = max_size (TREE_OPERAND (exp, 1),
				 code == MINUS_EXPR ? !max_p : max_p);

	    /* Special-case wanting the maximum value of a MIN_EXPR.
	       In that case, if one side overflows, return the other.
	       sizetype is signed, but we know sizes are non-negative.
	       Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	       overflowing or the maximum possible value and the RHS
	       a variable.  */
	    if (max_p
		&& code == MIN_EXPR
		&& TREE_CODE (rhs) == INTEGER_CST
		&& TREE_OVERFLOW (rhs))
	      return lhs;
	    else if (max_p
		     && code == MIN_EXPR
		     && TREE_CODE (lhs) == INTEGER_CST
		     && TREE_OVERFLOW (lhs))
	      return rhs;
	    else if ((code == MINUS_EXPR || code == PLUS_EXPR)
		     && ((TREE_CODE (lhs) == INTEGER_CST
			  && TREE_OVERFLOW (lhs))
			 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
		     && !TREE_CONSTANT (rhs))
	      return lhs;
	    else
	      return fold_build2 (code, type, lhs, rhs);
	  }

	case 3:
	  if (code == SAVE_EXPR)
	    return exp;
	  else if (code == COND_EXPR)
	    return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p));
	}

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  tree template_elts = NULL_TREE;
  tree bound_list = NULL_TREE;
  tree field;

  /* Look through a padding or justified-modular wrapper record to the
     underlying array.  */
  if (TREE_CODE (array_type) == RECORD_TYPE
      && (TYPE_IS_PADDING_P (array_type)
	  || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.  */

  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = TREE_CHAIN (TREE_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gcc_unreachable ();

      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      /* Template fields come in (min, max) pairs; cons in reverse so the
	 final nreverse restores declaration order.  */
      template_elts = tree_cons (TREE_CHAIN (field), max,
				 tree_cons (field, min, template_elts));
    }

  return gnat_build_constructor (template_type, nreverse (template_elts));
}
/* Build a VMS descriptor from a Mechanism_Type, which must specify
   a descriptor type, and the GCC type of an object.  Each FIELD_DECL
   in the type contains in its DECL_INITIAL the expression to use when
   a constructor is made for the type.  GNAT_ENTITY is an entity used
   to print out an error message if the mechanism cannot be applied to
   an object of that type and also for the name.

   The DTYPE and CLASS numeric codes written below presumably follow the
   VMS descriptor convention (DSC$) -- confirm against the OpenVMS calling
   standard when modifying them.  */

tree
build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
{
  tree record_type = make_node (RECORD_TYPE);
  tree pointer32_type;
  tree field_list = 0;
  int class;
  int dtype = 0;
  tree inner_type;
  int ndim;
  int i;
  tree *idx_arr;
  tree tem;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = (tree *) alloca (ndim * sizeof (tree));

  /* Fortran convention stores dimensions in the reverse order unless the
     mechanism is non-contiguous-array (NCA).  */
  if (mech != By_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TYPE_DOMAIN (inner_type) ? TREE_TYPE (inner_type) : TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      if (TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TYPE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TYPE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TYPE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TYPE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TYPE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
      class = 4;
      break;
    case By_Descriptor_NCA:
      class = 10;
      break;
    case By_Descriptor_SB:
      class = 15;
      break;
    case By_Descriptor:
    case By_Descriptor_S:
    default:
      class = 1;
      break;
    }

  /* Make the type for a descriptor for VMS.  The first four fields
     are the same for all types.  */

  field_list
    = chainon (field_list,
	       make_descriptor_field
	       ("LENGTH", gnat_type_for_size (16, 1), record_type,
		size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));

  field_list = chainon (field_list,
			make_descriptor_field ("DTYPE",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (dtype)));
  field_list = chainon (field_list,
			make_descriptor_field ("CLASS",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (class)));

  /* Of course this will crash at run-time if the address space is not
     within the low 32 bits, but there is nothing else we can do.  */
  pointer32_type = build_pointer_type_for_mode (type, SImode, false);

  field_list
    = chainon (field_list,
	       make_descriptor_field
	       ("POINTER", pointer32_type, record_type,
		build_unary_op (ADDR_EXPR,
				pointer32_type,
				build0 (PLACEHOLDER_EXPR, type))));

  /* The remaining fields depend on the descriptor class.  */
  switch (mech)
    {
    case By_Descriptor:
    case By_Descriptor_S:
      break;

    case By_Descriptor_SB:
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L1", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_U1", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      break;

    case By_Descriptor_A:
    case By_Descriptor_NCA:
      field_list = chainon (field_list,
			    make_descriptor_field ("SCALE",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIGITS",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("AFLAGS", gnat_type_for_size (8, 1), record_type,
		    size_int (mech == By_Descriptor_NCA
			      ? 0
			      /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS.  */
			      : (TREE_CODE (type) == ARRAY_TYPE
				 && TYPE_CONVENTION_FORTRAN_P (type)
				 ? 224 : 192))));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIMCT",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_int (ndim)));

      field_list = chainon (field_list,
			    make_descriptor_field ("ARSIZE",
						   gnat_type_for_size (32, 1),
						   record_type,
						   size_in_bytes (type)));

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build0 (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
		      convert (TYPE_DOMAIN (inner_type), size_zero_node),
		      NULL_TREE, NULL_TREE);

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("A0",
		    build_pointer_type_for_mode (inner_type, SImode, false),
		    record_type,
		    build1 (ADDR_EXPR,
			    build_pointer_type_for_mode (inner_type, SImode,
							 false),
			    tem)));

      /* Next come the addressing coefficients.  */
      tem = size_one_node;
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  /* Length of dimension I: max - min + 1, scaled by the product of
	     the lengths of the previous dimensions.  */
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field (fname,
					      gnat_type_for_size (32, 1),
					      record_type, idx_length));

	  if (mech == By_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MIN_VALUE (idx_arr[i])));

	  fname[0] = 'U';
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MAX_VALUE (idx_arr[i])));
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  finish_record_type (record_type, field_list, 0, true);
  create_type_decl (create_concat_name (gnat_entity, "DESC"), record_type,
		    NULL, true, false, gnat_entity);

  return record_type;
}
2800 /* Utility routine for above code to make a field. */
2802 static tree
2803 make_descriptor_field (const char *name, tree type,
2804 tree rec_type, tree initial)
2806 tree field
2807 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2809 DECL_INITIAL (field) = initial;
2810 return field;
/* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
   pointer or fat pointer type.  GNAT_SUBPROG is the subprogram to which
   the VMS descriptor is passed.  */

static tree
convert_vms_descriptor (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
{
  tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
  tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
  /* The CLASS field is the 3rd field in the descriptor.  */
  tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
  /* The POINTER field is the 4th field in the descriptor.  */
  tree pointer = TREE_CHAIN (class);

  /* Retrieve the value of the POINTER field.  */
  gnu_expr
    = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);

  /* A regular pointer target only needs the data pointer.  */
  if (POINTER_TYPE_P (gnu_type))
    return convert (gnu_type, gnu_expr);

  /* A fat pointer target additionally needs bounds, which are either found
     in the descriptor itself or rebuilt from its length fields, depending
     on the descriptor class.  */
  else if (TYPE_FAT_POINTER_P (gnu_type))
    {
      tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
      tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
      tree template_type = TREE_TYPE (p_bounds_type);
      tree min_field = TYPE_FIELDS (template_type);
      tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
      tree template, template_addr, aflags, dimct, t, u;
      /* See the head comment of build_vms_descriptor.  */
      int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));

      /* Convert POINTER to the type of the P_ARRAY field.  */
      gnu_expr = convert (p_array_type, gnu_expr);

      switch (iclass)
        {
        case 1:  /* Class S  */
        case 15: /* Class SB */
          /* Build {1, LENGTH} template; LENGTH is the 1st field.  */
          t = TYPE_FIELDS (desc_type);
          t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
          t = tree_cons (min_field,
                         convert (TREE_TYPE (min_field), integer_one_node),
                         tree_cons (max_field,
                                    convert (TREE_TYPE (max_field), t),
                                    NULL_TREE));
          template = gnat_build_constructor (template_type, t);
          template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);

          /* For class S, we are done.  */
          if (iclass == 1)
            break;

          /* Test that we really have a SB descriptor, like DEC Ada.  */
          t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
          u = convert (TREE_TYPE (class), DECL_INITIAL (class));
          u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
          /* If so, there is already a template in the descriptor and
             it is located right after the POINTER field.  */
          t = TREE_CHAIN (pointer);
          template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
          /* Otherwise use the {1, LENGTH} template we build above.  */
          template_addr = build3 (COND_EXPR, p_bounds_type, u,
                                  build_unary_op (ADDR_EXPR, p_bounds_type,
                                                  template),
                                  template_addr);
          break;

        case 4:  /* Class A */
          /* The AFLAGS field is the 7th field in the descriptor.  */
          t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
          aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
          /* The DIMCT field is the 8th field in the descriptor.  */
          t = TREE_CHAIN (t);
          dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
          /* Raise CONSTRAINT_ERROR if either more than 1 dimension
             or FL_COEFF or FL_BOUNDS not set.  192 is the mask of the
             two flag bits tested below.  */
          u = build_int_cst (TREE_TYPE (aflags), 192);
          u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
                               build_binary_op (NE_EXPR, integer_type_node,
                                                dimct,
                                                convert (TREE_TYPE (dimct),
                                                         size_one_node)),
                               build_binary_op (NE_EXPR, integer_type_node,
                                                build2 (BIT_AND_EXPR,
                                                        TREE_TYPE (aflags),
                                                        aflags, u),
                                                u));
          add_stmt (build3 (COND_EXPR, void_type_node, u,
                            build_call_raise (CE_Length_Check_Failed, Empty,
                                              N_Raise_Constraint_Error),
                            NULL_TREE));
          /* There is already a template in the descriptor and it is
             located at the start of block 3 (12th field).  */
          t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
          template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
          template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
          break;

        case 10: /* Class NCA */
        default:
          post_error ("unsupported descriptor type for &", gnat_subprog);
          template_addr = integer_zero_node;
          break;
        }

      /* Build the fat pointer in the form of a constructor.  */
      t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr,
                     tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
                                template_addr, NULL_TREE));
      return gnat_build_constructor (gnu_type, t);
    }
  else
    gcc_unreachable ();
}
/* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
   and the GNAT node GNAT_SUBPROG.  */

void
build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
{
  tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
  tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
  tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
  tree gnu_body;

  gnu_subprog_type = TREE_TYPE (gnu_subprog);
  gnu_param_list = NULL_TREE;

  begin_subprog_body (gnu_stub_decl);
  gnat_pushlevel ();

  start_stmt_group ();

  /* Loop over the parameters of the stub and translate any of them
     passed by descriptor into a by reference one.  */
  for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
       gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
       gnu_stub_param;
       gnu_stub_param = TREE_CHAIN (gnu_stub_param),
       gnu_arg_types = TREE_CHAIN (gnu_arg_types))
    {
      if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
        gnu_param = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
                                            gnu_stub_param, gnat_subprog);
      else
        gnu_param = gnu_stub_param;

      /* The list is built in reverse and flipped by nreverse below.  */
      gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
    }

  gnu_body = end_stmt_group ();

  /* Invoke the internal subprogram.  */
  gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
                             gnu_subprog);
  gnu_subprog_call = build3 (CALL_EXPR, TREE_TYPE (gnu_subprog_type),
                             gnu_subprog_addr, nreverse (gnu_param_list),
                             NULL_TREE);

  /* Propagate the return value, if any.  */
  if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
    append_to_statement_list (gnu_subprog_call, &gnu_body);
  else
    append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
                                                 gnu_subprog_call),
                              &gnu_body);

  gnat_poplevel ();

  allocate_struct_function (gnu_stub_decl, false);
  end_subprog_body (gnu_body);
}
2990 /* Build a type to be used to represent an aliased object whose nominal
2991 type is an unconstrained array. This consists of a RECORD_TYPE containing
2992 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2993 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2994 is used to represent an arbitrary unconstrained object. Use NAME
2995 as the name of the record. */
2997 tree
2998 build_unc_object_type (tree template_type, tree object_type, tree name)
3000 tree type = make_node (RECORD_TYPE);
3001 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3002 template_type, type, 0, 0, 0, 1);
3003 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3004 type, 0, 0, 0, 1);
3006 TYPE_NAME (type) = name;
3007 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3008 finish_record_type (type,
3009 chainon (chainon (NULL_TREE, template_field),
3010 array_field),
3011 0, false);
3013 return type;
3016 /* Same, taking a thin or fat pointer type instead of a template type. */
3018 tree
3019 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3020 tree name)
3022 tree template_type;
3024 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3026 template_type
3027 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3028 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3029 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3030 return build_unc_object_type (template_type, object_type, name);
3033 /* Shift the component offsets within an unconstrained object TYPE to make it
3034 suitable for use as a designated type for thin pointers. */
3036 void
3037 shift_unc_components_for_thin_pointers (tree type)
3039 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3040 allocated past the BOUNDS template. The designated type is adjusted to
3041 have ARRAY at position zero and the template at a negative offset, so
3042 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3044 tree bounds_field = TYPE_FIELDS (type);
3045 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
3047 DECL_FIELD_OFFSET (bounds_field)
3048 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3050 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3051 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.  In
   the normal case this is just two adjustments, but we have more to do
   if NEW is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree ptr1, ref1;
  tree type;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (type = TYPE_NEXT_VARIANT (old_type); type;
         type = TYPE_NEXT_VARIANT (type))
      update_pointer_to (type, new_type);

  /* If no pointer or reference, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it.  After the designated type freeze, we get here with a
     volatile new_type and a dummy old_type with a readonly variant, created
     when the access type was processed.  We shall make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy old_type variant with different
     qualifiers than the new_type ones, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine from gnat_to_gnu_entity/E_Access_Type).  We have to merge the
     qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with old_type == new_type then.  */
  new_type = build_qualified_type (new_type,
                                   TYPE_QUALS (old_type)
                                   | TYPE_QUALS (new_type));

  /* If the new type and the old one are identical, there is nothing to
     update.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case: redirect every pointer and
     reference variant to the new designated type.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      TYPE_POINTER_TO (new_type) = ptr;
      TYPE_REFERENCE_TO (new_type) = ref;

      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
        for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
             ptr1 = TYPE_NEXT_VARIANT (ptr1))
          TREE_TYPE (ptr1) = new_type;

      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
        for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
             ref1 = TYPE_NEXT_VARIANT (ref1))
          TREE_TYPE (ref1) = new_type;
    }

  /* Now deal with the unconstrained array case.  In this case the "pointer"
     is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.  */
  else if (TREE_CODE (ptr) != RECORD_TYPE || !TYPE_IS_FAT_POINTER_P (ptr))
    gcc_unreachable ();

  else
    {
      tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
      tree array_field = TYPE_FIELDS (ptr);
      tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
      tree new_ptr = TYPE_POINTER_TO (new_type);
      tree new_ref;
      tree var;

      /* Make pointers to the dummy template point to the real template.  */
      update_pointer_to
        (TREE_TYPE (TREE_TYPE (bounds_field)),
         TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));

      /* The references to the template bounds present in the array type
         are made through a PLACEHOLDER_EXPR of type new_ptr.  Since we
         are updating ptr to make it a full replacement for new_ptr as
         pointer to new_type, we must rework the PLACEHOLDER_EXPR so as
         to make it of type ptr.  */
      new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
                        build0 (PLACEHOLDER_EXPR, ptr),
                        bounds_field, NULL_TREE);

      /* Create the new array for the new PLACEHOLDER_EXPR and make
         pointers to the dummy array point to it.

         ??? This is now the only use of substitute_in_type,
         which is a very "heavy" routine to do this, so it
         should be replaced at some point.  */
      update_pointer_to
        (TREE_TYPE (TREE_TYPE (array_field)),
         substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
                             TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));

      /* Make ptr the pointer to new_type.  */
      TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
        = TREE_TYPE (new_type) = ptr;

      for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
        SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);

      /* Now handle updating the allocation record, what the thin pointer
         points to.  Update all pointers from the old record into the new
         one, update the type of the array field, and recompute the size.  */
      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);

      TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
        = TREE_TYPE (TREE_TYPE (array_field));

      /* The size recomputation needs to account for alignment constraints, so
         we let layout_type work it out.  This will reset the field offsets to
         what they would be in a regular record, so we shift them back to what
         we want them to be for a thin pointer designated type afterwards.  */
      DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
      TYPE_SIZE (new_obj_rec) = 0;
      layout_type (new_obj_rec);

      shift_unc_components_for_thin_pointers (new_obj_rec);

      /* We are done, at last.  */
      rest_of_record_type_compilation (ptr);
    }
}
/* Convert a pointer to a constrained array into a pointer to a fat
   pointer.  This involves making or finding a template.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  /* The template type is designated by the second field of the fat
     pointer TYPE.  */
  tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
  tree template, template_addr;
  tree etype = TREE_TYPE (expr);

  /* If EXPR is a constant of zero, we make a fat pointer that has a null
     pointer to the template and array.  */
  if (integer_zerop (expr))
    return
      gnat_build_constructor
        (type,
         tree_cons (TYPE_FIELDS (type),
                    convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
                    tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
                               convert (build_pointer_type (template_type),
                                        expr),
                               NULL_TREE)));

  /* If EXPR is a thin pointer, make the template and data from the record.  */
  else if (TYPE_THIN_POINTER_P (etype))
    {
      tree fields = TYPE_FIELDS (TREE_TYPE (etype));

      expr = save_expr (expr);
      if (TREE_CODE (expr) == ADDR_EXPR)
        expr = TREE_OPERAND (expr, 0);
      else
        expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);

      /* The designated record holds the template first, then the data.  */
      template = build_component_ref (expr, NULL_TREE, fields, false);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
                             build_component_ref (expr, NULL_TREE,
                                                  TREE_CHAIN (fields), false));
    }
  else
    /* Otherwise, build the constructor for the template.  */
    template = build_template (template_type, TREE_TYPE (etype), expr);

  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);

  /* The result is a CONSTRUCTOR for the fat pointer.

     If expr is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     component_refs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine, because it
     will only refer to the provided template_type in this case.  */
  return
    gnat_build_constructor
      (type, tree_cons (TYPE_FIELDS (type),
                        convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
                        tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
                                   template_addr, NULL_TREE)));
}
/* Convert to a thin pointer type, TYPE.  The only thing we know how to
   convert from is a fat pointer, so convert EXPR to a fat pointer first
   if it is not one already.  */

static tree
convert_to_thin_pointer (tree type, tree expr)
{
  if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
    expr
      = convert_to_fat_pointer
        (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);

  /* We get the pointer to the data and use a NOP_EXPR to make it the
     proper GCC type.  */
  expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
                              false);
  expr = build1 (NOP_EXPR, type, expr);

  return expr;
}
/* Create an expression whose value is that of EXPR,
   converted to type TYPE.  The TREE_TYPE of the value
   is always TYPE.  This function implements all reasonable
   conversions; callers should filter out those that are
   not permitted by the language being compiled.  */

tree
convert (tree type, tree expr)
{
  enum tree_code code = TREE_CODE (type);
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);

  /* If EXPR is already the right type, we are done.  */
  if (type == etype)
    return expr;

  /* If both input and output have padding and are of variable size, do this
     as an unchecked conversion.  Likewise if one is a mere variant of the
     other, so we avoid a pointless unpad/repad sequence.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
           && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
           && (!TREE_CONSTANT (TYPE_SIZE (type))
               || !TREE_CONSTANT (TYPE_SIZE (etype))
               || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)))
    /* Deliberately empty: fall through to the type-code dispatch below,
       whose RECORD_TYPE arm performs the unchecked conversion.  */
    ;

  /* If the output type has padding, make a constructor to build the
     record.  */
  else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
    {
      /* If we previously converted from another type and our type is
         of variable size, remove the conversion to avoid the need for
         variable-size temporaries.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
          && !TREE_CONSTANT (TYPE_SIZE (type)))
        expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from expr, convert the original
         object if we have variable size.  That will avoid the need
         for some variable-size temporaries.  */
      if (TREE_CODE (expr) == COMPONENT_REF
          && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
          && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
          && !TREE_CONSTANT (TYPE_SIZE (type)))
        return convert (type, TREE_OPERAND (expr, 0));

      /* If the result type is a padded type with a self-referentially-sized
         field and the expression type is a record, do this as an
         unchecked conversion.  */
      else if (TREE_CODE (etype) == RECORD_TYPE
               && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
        return unchecked_convert (type, expr, false);

      else
        return
          gnat_build_constructor (type,
                                  tree_cons (TYPE_FIELDS (type),
                                             convert (TREE_TYPE
                                                      (TYPE_FIELDS (type)),
                                                      expr),
                                             NULL_TREE));
    }

  /* If the input type has padding, remove it and convert to the output type.
     The conditions ordering is arranged to ensure that the output type is not
     a padding type here, as it is not clear whether the conversion would
     always be correct if this was to happen.  */
  else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
    {
      tree unpadded;

      /* If we have just converted to this padded type, just get the
         inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR
          && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
          && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
             == TYPE_FIELDS (etype))
        unpadded
          = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;

      /* Otherwise, build an explicit component reference.  */
      else
        unpadded
          = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);

      return convert (type, unpadded);
    }

  /* If the input is a biased type, adjust first.  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
                                       fold_convert (TREE_TYPE (etype),
                                                     expr),
                                       TYPE_MIN_VALUE (etype)));

  /* If the input is a justified modular type, we need to extract the actual
     object before converting it to any other type with the exceptions of an
     unconstrained array or of a mere type variant.  It is useful to avoid the
     extraction and conversion in the type variant case because it could end
     up replacing a VAR_DECL expr by a constructor and we might be about to
     take the address of the result.  */
  if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
      && code != UNCONSTRAINED_ARRAY_TYPE
      && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
    return convert (type, build_component_ref (expr, NULL_TREE,
                                               TYPE_FIELDS (etype), false));

  /* If converting to a type that contains a template, convert to the data
     type and then build the template. */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
    {
      tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));

      /* If the source already has a template, get a reference to the
         associated array only, as we are going to rebuild a template
         for the target type anyway.  */
      expr = maybe_unconstrained_array (expr);

      return
        gnat_build_constructor
          (type,
           tree_cons (TYPE_FIELDS (type),
                      build_template (TREE_TYPE (TYPE_FIELDS (type)),
                                      obj_type, NULL_TREE),
                      tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
                                 convert (obj_type, expr), NULL_TREE)));
    }

  /* There are some special cases of expressions that we process
     specially.  */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
         conversion in gnat_expand_expr.  NULL_EXPR does not represent
         an actual value, so no conversion is needed.  */
      expr = copy_node (expr);
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
      /* If we are converting a STRING_CST to another constrained array type,
         just make a new one in the proper type.  */
      if (code == ecode && AGGREGATE_TYPE_P (etype)
          && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
               && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
        {
          expr = copy_node (expr);
          TREE_TYPE (expr) = type;
          return expr;
        }
      break;

    case CONSTRUCTOR:
      /* If we are converting a CONSTRUCTOR to another constrained array type
         with the same domain, just make a new one in the proper type.  */
      if (code == ecode && code == ARRAY_TYPE
          && TREE_TYPE (type) == TREE_TYPE (etype)
          && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (type)),
                                 TYPE_MIN_VALUE (TYPE_DOMAIN (etype)))
          && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
                                 TYPE_MAX_VALUE (TYPE_DOMAIN (etype))))
        {
          expr = copy_node (expr);
          TREE_TYPE (expr) = type;
          return expr;
        }
      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* Convert this to the type of the inner array by getting the address of
         the array from the template.  */
      expr = build_unary_op (INDIRECT_REF, NULL_TREE,
                             build_component_ref (TREE_OPERAND (expr, 0),
                                                  get_identifier ("P_ARRAY"),
                                                  NULL_TREE, false));
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      {
        /* GCC 4.x is very sensitive to type consistency overall, and view
           conversions thus are very frequent.  Even though just "convert"ing
           the inner operand to the output type is fine in most cases, it
           might expose unexpected input/output type mismatches in special
           circumstances so we avoid such recursive calls when we can.  */

        tree op0 = TREE_OPERAND (expr, 0);

        /* If we are converting back to the original type, we can just
           lift the input conversion.  This is a common occurrence with
           switches back-and-forth amongst type variants.  */
        if (type == TREE_TYPE (op0))
          return op0;

        /* Otherwise, if we're converting between two aggregate types, we
           might be allowed to substitute the VIEW_CONVERT target type in
           place or to just convert the inner expression.  */
        if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
          {
            /* If we are converting between type variants, we can just
               substitute the VIEW_CONVERT in place.  */
            if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
              return build1 (VIEW_CONVERT_EXPR, type, op0);

            /* Otherwise, we may just bypass the input view conversion unless
               one of the types is a fat pointer, which is handled by
               specialized code below which relies on exact type matching.  */
            else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
              return convert (type, op0);
          }
      }
      break;

    case INDIRECT_REF:
      /* If both types are record types, just convert the pointer and
         make a new INDIRECT_REF.

         ??? Disable this for now since it causes problems with the
         code in build_binary_op for MODIFY_EXPR which wants to
         strip off conversions.  But that code really is a mess and
         we need to do this a much better way some time.  */
      if (0
          && (TREE_CODE (type) == RECORD_TYPE
              || TREE_CODE (type) == UNION_TYPE)
          && (TREE_CODE (etype) == RECORD_TYPE
              || TREE_CODE (etype) == UNION_TYPE)
          && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
        return build_unary_op (INDIRECT_REF, NULL_TREE,
                               convert (build_pointer_type (type),
                                        TREE_OPERAND (expr, 0)));
      break;

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  /* If we're converting between two aggregate types that have the same main
     variant, just make a VIEW_CONVERT_EXPR.  */
  else if (AGGREGATE_TYPE_P (type)
           && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
    return build1 (VIEW_CONVERT_EXPR, type, expr);

  /* In all other cases of related types, make a NOP_EXPR.  */
  else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
           || (code == INTEGER_CST && ecode == INTEGER_CST
               && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
    return fold_convert (type, expr);

  /* Dispatch on the code of the target type for the generic cases.  */
  switch (code)
    {
    case VOID_TYPE:
      return fold_build1 (CONVERT_EXPR, type, expr);

    case BOOLEAN_TYPE:
      return fold_convert (type, gnat_truthvalue_conversion (expr));

    case INTEGER_TYPE:
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
          && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
              || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
        return unchecked_convert (type, expr, false);
      else if (TYPE_BIASED_REPRESENTATION_P (type))
        return fold_convert (type,
                             fold_build2 (MINUS_EXPR, TREE_TYPE (type),
                                          convert (TREE_TYPE (type), expr),
                                          TYPE_MIN_VALUE (type)));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
      return fold (convert_to_integer (type, expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two pointers to records denoting
         both a template and type, adjust if needed to account
         for any differing offsets, since one might be negative.  */
      if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
        {
          tree bit_diff
            = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
                           bit_position (TYPE_FIELDS (TREE_TYPE (type))));
          tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
                                       sbitsize_int (BITS_PER_UNIT));

          expr = build1 (NOP_EXPR, type, expr);
          TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
          if (integer_zerop (byte_diff))
            return expr;

          return build_binary_op (POINTER_PLUS_EXPR, type, expr,
                                  fold (convert (sizetype, byte_diff)));
        }

      /* If converting to a thin pointer, handle specially.  */
      if (TYPE_THIN_POINTER_P (type)
          && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
        return convert_to_thin_pointer (type, expr);

      /* If converting fat pointer to normal pointer, get the pointer to the
         array and then convert it.  */
      else if (TYPE_FAT_POINTER_P (etype))
        expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
                                    NULL_TREE, false);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
        return
          gnat_build_constructor
            (type, tree_cons (TYPE_FIELDS (type),
                              convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
                              NULL_TREE));

      /* ... fall through ... */

    case ARRAY_TYPE:
      /* In these cases, assume the front-end has validated the conversion.
         If the conversion is valid, it will be a bit-wise conversion, so
         it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, false);

    case UNION_TYPE:
      /* This is either a conversion between a tagged type and some
         subtype, which we have to mark as a UNION_TYPE because of
         overlapping fields or a conversion of an Unchecked_Union.  */
      return unchecked_convert (type, expr, false);

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If EXPR is a constrained array, take its address, convert it to a
         fat pointer, and then dereference it.  Likewise if EXPR is a
         record containing both a template and a constrained array.
         Note that a record representing a justified modular type
         always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
          || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
          || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
          || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
        return
          build_unary_op
            (INDIRECT_REF, NULL_TREE,
             convert_to_fat_pointer (TREE_TYPE (type),
                                     build_unary_op (ADDR_EXPR,
                                                     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
         array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
        return
          build_unary_op (INDIRECT_REF, NULL_TREE,
                          convert (TREE_TYPE (type),
                                   build_unary_op (ADDR_EXPR,
                                                   NULL_TREE, expr)));
      else
        gcc_unreachable ();

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gcc_unreachable ();
    }
}
3657 /* Remove all conversions that are done in EXP. This includes converting
3658 from a padded type or to a justified modular type. If TRUE_ADDRESS
3659 is true, always return the address of the containing object even if
3660 the address is not bit-aligned. */
3662 tree
3663 remove_conversions (tree exp, bool true_address)
3665 switch (TREE_CODE (exp))
3667 case CONSTRUCTOR:
3668 if (true_address
3669 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3670 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3671 return
3672 remove_conversions (VEC_index (constructor_elt,
3673 CONSTRUCTOR_ELTS (exp), 0)->value,
3674 true);
3675 break;
3677 case COMPONENT_REF:
3678 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3679 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3680 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3681 break;
3683 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3684 case NOP_EXPR: case CONVERT_EXPR:
3685 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3687 default:
3688 break;
3691 return exp;
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If its type has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.  */

tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree new;

  switch (TREE_CODE (TREE_TYPE (exp)))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
        {
          /* Dereference the P_ARRAY pointer found in the fat pointer the
             reference operates on, propagating the readonly-ness of the
             original reference.  */
          new
            = build_unary_op (INDIRECT_REF, NULL_TREE,
                              build_component_ref (TREE_OPERAND (exp, 0),
                                                   get_identifier ("P_ARRAY"),
                                                   NULL_TREE, false));
          TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
          return new;
        }

      else if (code == NULL_EXPR)
        /* Transfer the NULL_EXPR to the type of the underlying array.  */
        return build1 (NULL_EXPR,
                       TREE_TYPE (TREE_TYPE (TYPE_FIELDS
                                             (TREE_TYPE (TREE_TYPE (exp))))),
                       TREE_OPERAND (exp, 0));

      /* NOTE(review): no break here, so other expression codes of
         unconstrained array type fall through to the RECORD_TYPE
         handling below -- confirm this fallthrough is intended.  */

    case RECORD_TYPE:
      /* If this is a padded type, convert to the unpadded type and see if
         it contains a template.  */
      if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
        {
          new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
          if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
              && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
            return
              build_component_ref (new, NULL_TREE,
                                   TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
                                   0);
        }
      else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
        return
          build_component_ref (exp, NULL_TREE,
                               TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
      break;

    default:
      break;
    }

  return exp;
}
/* Return an expression that does an unchecked conversion of EXPR to TYPE.
   If NOTRUNC_P is true, truncation operations should be suppressed.  */

tree
unchecked_convert (tree type, tree expr, bool notrunc_p)
{
  tree etype = TREE_TYPE (expr);

  /* If the expression is already the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if ((((INTEGRAL_TYPE_P (type)
	 && !(TREE_CODE (type) == INTEGER_TYPE
	      && TYPE_VAX_FLOATING_POINT_P (type)))
	|| (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
	|| (TREE_CODE (type) == RECORD_TYPE
	    && TYPE_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
	    && !(TREE_CODE (etype) == INTEGER_TYPE
		 && TYPE_VAX_FLOATING_POINT_P (etype)))
	   || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
	   || (TREE_CODE (etype) == RECORD_TYPE
	       && TYPE_JUSTIFIED_MODULAR_P (etype))))
      || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    {
      tree rtype = type;
      bool final_unchecked = false;

      /* A biased source must first be converted to its unbiased variant
	 so that the normal conversion machinery sees the real value.  */
      if (TREE_CODE (etype) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (etype))
	{
	  tree ntype = copy_type (etype);

	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (NOP_EXPR, ntype, expr);
	}

      /* Likewise, convert to the unbiased variant of a biased target and
	 reapply the bias with the final NOP_EXPR below.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	}

      /* We have another special case: if we are unchecked converting subtype
	 into a base type, we need to ensure that VRP doesn't propagate range
	 information since this conversion may be done precisely to validate
	 that the object is within the range it is supposed to have.  */
      else if (TREE_CODE (expr) != INTEGER_CST
	       && TREE_CODE (type) == INTEGER_TYPE && !TREE_TYPE (type)
	       && ((TREE_CODE (etype) == INTEGER_TYPE && TREE_TYPE (etype))
		   || TREE_CODE (etype) == ENUMERAL_TYPE
		   || TREE_CODE (etype) == BOOLEAN_TYPE))
	{
	  /* The optimization barrier is a VIEW_CONVERT_EXPR node; moreover,
	     in order not to be deemed a useless type conversion, it must
	     be from subtype to base type.

	     ??? This may raise addressability and/or aliasing issues because
	     VIEW_CONVERT_EXPR gets gimplified as an lvalue, thus causing the
	     address of its operand to be taken if it is deemed addressable
	     and not already in GIMPLE form.  */
	  rtype = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
	  rtype = copy_type (rtype);
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	  TREE_TYPE (rtype) = type;
	  final_unchecked = true;
	}

      expr = convert (rtype, expr);
      if (type != rtype)
	expr = build1 (final_unchecked ? VIEW_CONVERT_EXPR : NOP_EXPR,
		       type, expr);
    }

  /* If we are converting TO an integral type whose precision is not the
     same as its size, first unchecked convert to a record that contains
     an object of the output type.  Then extract the field.  */
  else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
	   && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), type,
				      rec_type, 1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, NULL_TREE, field, 0);
    }

  /* Similarly for integral input type whose precision is not equal to its
     size.  */
  else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
	   && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
				     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field
	= create_field_decl (get_identifier ("OBJ"), etype, rec_type,
			     1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      /* Wrap EXPR in a one-field record, then unchecked convert the whole
	 record to the target type.  */
      expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* We have a special case when we are converting between two
     unconstrained array types.  In that case, take the address,
     convert the fat pointer types, and dereference.  */
  else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
	   && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));
  else
    {
      expr = maybe_unconstrained_array (expr);

      /* There's no point in doing two unchecked conversions in a row.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	expr = TREE_OPERAND (expr, 0);

      etype = TREE_TYPE (expr);
      expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose size is not equal to
     the size of the underlying machine type, sign- or zero-extend
     the result.  We need not do this in the case where the input is
     an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned.  */
  if (!notrunc_p
      && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
      && !(TREE_CODE (type) == INTEGER_TYPE
	   && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				GET_MODE_BITSIZE (TYPE_MODE (type)))
      && !(INTEGRAL_TYPE_P (etype)
	   && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
	   && operand_equal_p (TYPE_RM_SIZE (type),
			       (TYPE_RM_SIZE (etype) != 0
				? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
			       0))
      && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
    {
      /* Extend by shifting left then arithmetically/logically right by the
	 number of excess bits beyond the RM size.  */
      tree base_type = gnat_type_for_mode (TYPE_MODE (type),
					   TYPE_UNSIGNED (type));
      tree shift_expr
	= convert (base_type,
		   size_binop (MINUS_EXPR,
			       bitsize_int
			       (GET_MODE_BITSIZE (TYPE_MODE (type))),
			       TYPE_RM_SIZE (type)));
      expr
	= convert (type,
		   build_binary_op (RSHIFT_EXPR, base_type,
				    build_binary_op (LSHIFT_EXPR, base_type,
						     convert (base_type, expr),
						     shift_expr),
				    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
			   OEP_ONLY_CONST))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
3941 /* Search the chain of currently available builtin declarations for a node
3942 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
3943 found, if any, or NULL_TREE otherwise. */
3944 tree
3945 builtin_decl_for (tree name)
3947 unsigned i;
3948 tree decl;
3950 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
3951 if (DECL_NAME (decl) == name)
3952 return decl;
3954 return NULL_TREE;
3957 /* Return the appropriate GCC tree code for the specified GNAT type,
3958 the latter being a record type as predicated by Is_Record_Type. */
3960 enum tree_code
3961 tree_code_for_record_type (Entity_Id gnat_type)
3963 Node_Id component_list
3964 = Component_List (Type_Definition
3965 (Declaration_Node
3966 (Implementation_Base_Type (gnat_type))));
3967 Node_Id component;
3969 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
3970 we have a non-discriminant field outside a variant. In either case,
3971 it's a RECORD_TYPE. */
3973 if (!Is_Unchecked_Union (gnat_type))
3974 return RECORD_TYPE;
3976 for (component = First_Non_Pragma (Component_Items (component_list));
3977 Present (component);
3978 component = Next_Non_Pragma (component))
3979 if (Ekind (Defining_Entity (component)) == E_Component)
3980 return RECORD_TYPE;
3982 return UNION_TYPE;
3985 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
3986 component of an aggregate type. */
3988 bool
3989 type_for_nonaliased_component_p (tree gnu_type)
3991 /* If the type is passed by reference, we may have pointers to the
3992 component so it cannot be made non-aliased. */
3993 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
3994 return false;
3996 /* We used to say that any component of aggregate type is aliased
3997 because the front-end may take 'Reference of it. The front-end
3998 has been enhanced in the meantime so as to use a renaming instead
3999 in most cases, but the back-end can probably take the address of
4000 such a component too so we go for the conservative stance.
4002 For instance, we might need the address of any array type, even
4003 if normally passed by copy, to construct a fat pointer if the
4004 component is used as an actual for an unconstrained formal.
4006 Likewise for record types: even if a specific record subtype is
4007 passed by copy, the parent type might be passed by ref (e.g. if
4008 it's of variable size) and we might take the address of a child
4009 component to pass to a parent formal. We have no way to check
4010 for such conditions here. */
4011 if (AGGREGATE_TYPE_P (gnu_type))
4012 return false;
4014 return true;
/* Perform final processing on global variables.  */

void
gnat_write_global_declarations (void)
{
  /* Proceed to optimize and emit assembly.
     FIXME: shouldn't be the front end's responsibility to call this.  */
  cgraph_optimize ();

  /* Emit debug info for all global declarations collected so far.  */
  emit_debug_global_declarations (VEC_address (tree, global_decls),
				  VEC_length (tree, global_decls));
}
4031 #include "gt-ada-utils.h"
4032 #include "gtype-ada.h"