/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                                U T I L S                                 *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2007, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have  received  a copy of the GNU General *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "defaults.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "debug.h"
#include "convert.h"
#include "target.h"
#include "function.h"
#include "cgraph.h"
#include "tree-inline.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "pointer-set.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif

#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD  BITS_PER_WORD
#endif

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);

/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the Alpha back-end which
   unconditionally puts them on its builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "const",   0, 0, true,  false, false, handle_const_attribute   },
  { "nothrow", 0, 0, true,  false, false, handle_nothrow_attribute },
  { NULL,      0, 0, false, false, false, NULL }
};

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY) \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct gnat_binding_level GTY((chain_next ("%h.chain")))
{
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* An array of global declarations.  */
static GTY(()) VEC(tree,gc) *global_decls;

/* An array of builtin declarations.  */
static GTY(()) VEC(tree,gc) *builtin_decls;

/* An array of global renaming pointers.  */
static GTY(()) VEC(tree,gc) *global_renaming_pointers;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

static void gnat_install_builtins (void);
static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static void gnat_gimplify_function (tree);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static tree convert_to_thin_pointer (tree, tree);
static tree make_descriptor_field (const char *, tree, tree, tree);
static bool potential_alignment_gap (tree, tree, tree);

/* Initialize the association of GNAT nodes to GCC trees.  */

void
init_gnat_to_gnu (void)
{
  associate_gnat_to_gnu
    = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  GNU_DECL is the GCC tree
   which is to be associated with GNAT_ENTITY.  Such GCC tree node is always
   a ..._DECL node.  If NO_CHECK is nonzero, the latter check is suppressed.

   If GNU_DECL is zero, a previous association is to be reset.  */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  Raise gigi 401 if not.  Usually, this
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem.  */
  gcc_assert (!(gnu_decl
                && (PRESENT_GNU_TREE (gnat_entity)
                    || (!no_check && !DECL_P (gnu_decl)))));

  SET_GNU_TREE (gnat_entity, gnu_decl);
}

/* GNAT_ENTITY is a GNAT tree node for a defining identifier.
   Return the ..._DECL node that was associated with it.  If there is no tree
   node associated with GNAT_ENTITY, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}
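
/* A typical use of this association table from elsewhere in gigi looks
   roughly like the sketch below; gnat_entity and gnu_decl stand for
   whatever entity and ..._DECL node the caller happens to be processing,
   and the translation step is elided:

     if (present_gnu_tree (gnat_entity))
       gnu_decl = get_gnu_tree (gnat_entity);
     else
       {
         gnu_decl = ... translate GNAT_ENTITY into a ..._DECL node ...;
         save_gnu_tree (gnat_entity, gnu_decl, false);
       }
*/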

/* Initialize the association of GNAT nodes to GCC trees as dummies.  */

void
init_dummy_type (void)
{
  dummy_node_table
    = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
}

/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
  tree gnu_type;

  /* If there is an equivalent type, get its underlying type.  */
  if (Present (gnat_underlying))
    gnat_underlying = Underlying_Type (gnat_underlying);

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_underlying))
    gnat_underlying = gnat_type;

  /* If there is already a dummy type, use that one.  Else make one.  */
  if (PRESENT_DUMMY_NODE (gnat_underlying))
    return GET_DUMMY_NODE (gnat_underlying);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_underlying)
                        ? tree_code_for_record_type (gnat_underlying)
                        : ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  if (AGGREGATE_TYPE_P (gnu_type))
    {
      TYPE_STUB_DECL (gnu_type) = build_decl (TYPE_DECL, NULL_TREE, gnu_type);
      TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
    }

  SET_DUMMY_NODE (gnat_underlying, gnu_type);

  return gnu_type;
}
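
/* For example, while translating a type that references another Ada type
   which has not been elaborated yet, gigi can do something along the lines
   of the sketch below and rely on update_pointer_to to fix things up once
   the real type is known; gnat_type stands for the not-yet-elaborated
   entity:

     tree gnu_dummy = make_dummy_type (gnat_type);
     tree gnu_ptr = build_pointer_type (gnu_dummy);
*/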

/* Return nonzero if we are currently in the global binding level.  */

int
global_bindings_p (void)
{
  return ((force_global || !current_function_decl) ? -1 : 0);
}

/* Enter a new binding level.  */

void
gnat_pushlevel ()
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel
      = (struct gnat_binding_level *)
        ggc_alloc (sizeof (struct gnat_binding_level));

  /* Use a free BLOCK, if any; otherwise, allocate one.  */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = BLOCK_CHAIN (free_block_chain);
      BLOCK_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent.  */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of levels that are
     active.  */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}

/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
}

/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}

/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl ()
{
  return current_binding_level->jmpbuf_decl;
}

/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel ()
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK don't do anything.  Otherwise, if there
     are no variables free the block and merge its subblocks into those of its
     parent block.  Otherwise, add it to the list of its parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (BLOCK_VARS (block) == NULL_TREE)
    {
      BLOCK_SUBBLOCKS (level->chain->block)
        = chainon (BLOCK_SUBBLOCKS (block),
                   BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}

/* Insert BLOCK at the end of the list of subblocks of the
   current binding level.  This is used when a BIND_EXPR is expanded,
   to handle the BLOCK node inside the BIND_EXPR.  */

void
insert_block (tree block)
{
  TREE_USED (block) = 1;
  TREE_CHAIN (block) = BLOCK_SUBBLOCKS (current_binding_level->block);
  BLOCK_SUBBLOCKS (current_binding_level->block) = block;
}
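
/* Binding levels are used in matched pairs around the translation of a
   scope, roughly as sketched below; for the outermost level of a
   subprogram, set_current_block_context eventually ties the BLOCK to the
   subprogram's FUNCTION_DECL:

     gnat_pushlevel ();
     ... translate the declarations and statements of the scope ...
     gnat_poplevel ();
*/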

/* Records a ..._DECL node DECL as belonging to the current lexical scope
   and uses GNAT_NODE for location information and propagating flags.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  /* If at top level, there is no context.  But PARM_DECLs always go in the
     level of their function.  */
  if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
    DECL_CONTEXT (decl) = 0;
  else
    {
      DECL_CONTEXT (decl) = current_function_decl;

      /* Functions imported in another function are not really nested.  */
      if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
        DECL_NO_STATIC_CHAIN (decl) = 1;
    }

  TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global variables in the
     globals list and builtin functions in a dedicated list to speed up
     further lookups.  Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
     the list, as they will cause trouble with the debugger and aren't needed
     anyway.  */
  if (TREE_CODE (decl) != TYPE_DECL
      || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
    {
      if (global_bindings_p ())
        {
          VEC_safe_push (tree, gc, global_decls, decl);

          if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
            VEC_safe_push (tree, gc, builtin_decls, decl);
        }
      else
        {
          TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
          BLOCK_VARS (current_binding_level->block) = decl;
        }
    }

  /* For the declaration of a type, set its name if it either is not already
     set, was set to an IDENTIFIER_NODE, indicating an internal name,
     or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in the
     equivalent function of c-decl.c makes a copy of the type node here, but
     that may cause us trouble with incomplete types.  We make an exception
     for fat pointer types because the compiler automatically builds them
     for unconstrained array types and the debugger uses them to represent
     both these and pointers to these.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) == IDENTIFIER_NODE)
        TYPE_NAME (t) = decl;
      else if (TYPE_FAT_POINTER_P (t))
        {
          tree tt = build_variant_type_copy (t);
          TYPE_NAME (tt) = decl;
          TREE_USED (tt) = TREE_USED (t);
          TREE_TYPE (decl) = tt;
          DECL_ORIGINAL_TYPE (decl) = t;
        }
      else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
        TYPE_NAME (t) = decl;
    }
}

/* Do little here.  Set up the standard declarations later after the
   front end has been run.  */

void
gnat_init_decl_processing (void)
{
  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;
  gnat_pushlevel ();

  build_common_tree_nodes (true, true);

  /* In Ada, we use a signed type for SIZETYPE.  Use the signed type
     corresponding to the size of Pmode.  In most cases when ptr_mode and
     Pmode differ, C will use the width of ptr_mode as sizetype.  But we get
     far better code using the width of Pmode.  Make this here since we need
     this before we can expand the GNAT types.  */
  size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
  set_sizetype (size_type_node);
  build_common_tree_nodes_2 (0);

  ptr_void_type_node = build_pointer_type (void_type_node);

  gnat_install_builtins ();
}

/* Install the builtin functions we might need.  */

static void
gnat_install_builtins ()
{
  /* Builtins used by generic middle-end optimizers.  */
  build_common_builtin_nodes ();

  /* Target specific builtins, such as the AltiVec family on ppc.  */
  targetm.init_builtins ();
}

/* Create the predefined scalar types such as `integer_type_node' needed
   in the gcc back-end and initialize the global binding level.  */

void
init_gigi_decls (tree long_long_float_type, tree exception_type)
{
  tree endlink, decl;
  unsigned int i;

  /* Set the types that GCC and Gigi use from the front end.  We would like
     to do this for char_type_node, but it needs to correspond to the C
     char type.  */
  if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
    {
      /* In this case, the builtin floating point types are VAX float,
         so make up a type for use.  */
      longest_float_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
      layout_type (longest_float_type_node);
      create_type_decl (get_identifier ("longest float type"),
                        longest_float_type_node, NULL, false, true, Empty);
    }
  else
    longest_float_type_node = TREE_TYPE (long_long_float_type);

  except_type_node = TREE_TYPE (exception_type);

  unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
  create_type_decl (get_identifier ("unsigned int"), unsigned_type_node,
                    NULL, false, true, Empty);

  void_type_decl_node = create_type_decl (get_identifier ("void"),
                                          void_type_node, NULL, false, true,
                                          Empty);

  void_ftype = build_function_type (void_type_node, NULL_TREE);
  ptr_void_ftype = build_pointer_type (void_ftype);

  /* Now declare runtime functions.  */
  endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);

  /* malloc is a function declaration tree for a function to allocate
     memory.  */
  malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
                                     NULL_TREE,
                                     build_function_type (ptr_void_type_node,
                                                          tree_cons (NULL_TREE,
                                                                     sizetype,
                                                                     endlink)),
                                     NULL_TREE, false, true, true, NULL,
                                     Empty);
  DECL_IS_MALLOC (malloc_decl) = 1;

  /* free is a function declaration tree for a function to free memory.  */
  free_decl
    = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
                           build_function_type (void_type_node,
                                                tree_cons (NULL_TREE,
                                                           ptr_void_type_node,
                                                           endlink)),
                           NULL_TREE, false, true, true, NULL, Empty);

  /* Make the types and functions used for exception processing.  */
  jmpbuf_type
    = build_array_type (gnat_type_for_mode (Pmode, 0),
                        build_index_type (build_int_cst (NULL_TREE, 5)));
  create_type_decl (get_identifier ("JMPBUF_T"), jmpbuf_type, NULL,
                    true, true, Empty);
  jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);

  /* Functions to get and set the jumpbuf pointer for the current thread.  */
  get_jmpbuf_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
       NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
       NULL_TREE, false, true, true, NULL, Empty);
  /* Avoid creating superfluous edges to __builtin_setjmp receivers.  */
  DECL_IS_PURE (get_jmpbuf_decl) = 1;

  set_jmpbuf_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
       NULL_TREE,
       build_function_type (void_type_node,
                            tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, false, true, true, NULL, Empty);

  /* Function to get the current exception.  */
  get_excptr_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__get_gnat_exception"),
       NULL_TREE,
       build_function_type (build_pointer_type (except_type_node), NULL_TREE),
       NULL_TREE, false, true, true, NULL, Empty);
  /* Avoid creating superfluous edges to __builtin_setjmp receivers.  */
  DECL_IS_PURE (get_excptr_decl) = 1;

  /* Functions that raise exceptions.  */
  raise_nodefer_decl
    = create_subprog_decl
      (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
       build_function_type (void_type_node,
                            tree_cons (NULL_TREE,
                                       build_pointer_type (except_type_node),
                                       endlink)),
       NULL_TREE, false, true, true, NULL, Empty);

  /* Dummy objects to materialize "others" and "all others" in the exception
     tables.  These are exported by a-exexpr.adb, so see this unit for the
     types to use.  */

  others_decl
    = create_var_decl (get_identifier ("OTHERS"),
                       get_identifier ("__gnat_others_value"),
                       integer_type_node, 0, 1, 0, 1, 1, 0, Empty);

  all_others_decl
    = create_var_decl (get_identifier ("ALL_OTHERS"),
                       get_identifier ("__gnat_all_others_value"),
                       integer_type_node, 0, 1, 0, 1, 1, 0, Empty);

  /* Hooks to call when entering/leaving an exception handler.  */
  begin_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
                           build_function_type (void_type_node,
                                                tree_cons (NULL_TREE,
                                                           ptr_void_type_node,
                                                           endlink)),
                           NULL_TREE, false, true, true, NULL, Empty);

  end_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
                           build_function_type (void_type_node,
                                                tree_cons (NULL_TREE,
                                                           ptr_void_type_node,
                                                           endlink)),
                           NULL_TREE, false, true, true, NULL, Empty);

  /* If in no exception handlers mode, all raise statements are redirected to
     __gnat_last_chance_handler.  No need to redefine raise_nodefer_decl, since
     this procedure will never be called in this mode.  */
  if (No_Exception_Handlers_Set ())
    {
      decl
        = create_subprog_decl
          (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
           build_function_type (void_type_node,
                                tree_cons (NULL_TREE,
                                           build_pointer_type (char_type_node),
                                           tree_cons (NULL_TREE,
                                                      integer_type_node,
                                                      endlink))),
           NULL_TREE, false, true, true, NULL, Empty);

      for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
        gnat_raise_decls[i] = decl;
    }
  else
    /* Otherwise, make one decl for each exception reason.  */
    for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
      {
        char name[17];

        sprintf (name, "__gnat_rcheck_%.2d", i);
        gnat_raise_decls[i]
          = create_subprog_decl
            (get_identifier (name), NULL_TREE,
             build_function_type (void_type_node,
                                  tree_cons (NULL_TREE,
                                             build_pointer_type
                                             (char_type_node),
                                             tree_cons (NULL_TREE,
                                                        integer_type_node,
                                                        endlink))),
             NULL_TREE, false, true, true, NULL, Empty);
      }

  /* Indicate that these never return.  */
  TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
  TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
  TREE_TYPE (raise_nodefer_decl)
    = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
                            TYPE_QUAL_VOLATILE);

  for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
    {
      TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
      TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
      TREE_TYPE (gnat_raise_decls[i])
        = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
                                TYPE_QUAL_VOLATILE);
    }

  /* setjmp returns an integer and has one operand, which is a pointer to
     a jmpbuf.  */
  setjmp_decl
    = create_subprog_decl
      (get_identifier ("__builtin_setjmp"), NULL_TREE,
       build_function_type (integer_type_node,
                            tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, false, true, true, NULL, Empty);

  DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;

  /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
     address.  */
  update_setjmp_buf_decl
    = create_subprog_decl
      (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
       build_function_type (void_type_node,
                            tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, false, true, true, NULL, Empty);

  DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;

  main_identifier_node = get_identifier ("main");
}
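
/* The resulting runtime interface for the raise decls is, for instance,
   that reason code 7 maps to a subprogram named "__gnat_rcheck_07"; all of
   these share a profile along the lines of the sketch below, where the
   arguments are understood by the runtime as a file name and a line
   number:

     void __gnat_rcheck_NN (const char *file, int line);

   with NN the two-digit value of the reason code.  */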

/* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
   finish constructing the record or union type.  If REP_LEVEL is zero, this
   record has no representation clause and so will be entirely laid out here.
   If REP_LEVEL is one, this record has a representation clause and has been
   laid out already; only set the sizes and alignment.  If REP_LEVEL is two,
   this record is derived from a parent record and thus inherits its layout;
   only make a pass on the fields to finalize them.  If DO_NOT_FINALIZE is
   true, the record type is expected to be modified afterwards so it will
   not be sent to the back-end for finalization.  */

void
finish_record_type (tree record_type, tree fieldlist, int rep_level,
                    bool do_not_finalize)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_NAME (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  bool var_size = false;
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  tree field;

  if (name && TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);

  TYPE_FIELDS (record_type) = fieldlist;
  TYPE_STUB_DECL (record_type) = build_decl (TYPE_DECL, name, record_type);

  /* We don't need both the typedef name and the record name output in
     the debugging information, since they are the same.  */
  DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
      TYPE_MODE (record_type) = BLKmode;

      if (!had_size_unit)
        TYPE_SIZE_UNIT (record_type) = size_zero_node;
      if (!had_size)
        TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
         out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
        code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
         case where there is a rep clause but all fields have errors and
         no longer have a position.  */
      TYPE_SIZE (record_type) = 0;
      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    fieldlist = nreverse (fieldlist);

  for (field = fieldlist; field; field = TREE_CHAIN (field))
    {
      tree pos = bit_position (field);

      tree type = TREE_TYPE (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size = DECL_SIZE (field);

      /* We need to make an XVE/XVU record if any field has variable size,
         whether or not the record does.  For example, if we have a union,
         it may be that all fields, rounded up to the alignment, have the
         same size, in which case we'll use that size.  But the debug
         output routines (except Dwarf2) won't be able to output the fields,
         so we need to make the special record.  */
      if (TREE_CODE (this_size) != INTEGER_CST)
        var_size = true;

      if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
           || TREE_CODE (type) == QUAL_UNION_TYPE)
          && !TYPE_IS_FAT_POINTER_P (type)
          && !TYPE_CONTAINS_TEMPLATE_P (type)
          && TYPE_ADA_SIZE (type))
        this_ada_size = TYPE_ADA_SIZE (type);

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field) && !STRICT_ALIGNMENT
          && value_factor_p (pos, BITS_PER_UNIT)
          && operand_equal_p (this_size, TYPE_SIZE (type), 0))
        DECL_BIT_FIELD (field) = 0;

      /* If we still have DECL_BIT_FIELD set at this point, we know the field
         is technically not addressable.  Except that it can actually be
         addressed if the field is BLKmode and happens to be properly
         aligned.  */
      DECL_NONADDRESSABLE_P (field)
        |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;

      if ((rep_level > 0) && !DECL_BIT_FIELD (field))
        TYPE_ALIGN (record_type)
          = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

      switch (code)
        {
        case UNION_TYPE:
          ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
          size = size_binop (MAX_EXPR, size, this_size);
          break;

        case QUAL_UNION_TYPE:
          ada_size
            = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
                           this_ada_size, ada_size);
          size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
                              this_size, size);
          break;

        case RECORD_TYPE:
          /* Since we know here that all fields are sorted in order of
             increasing bit position, the size of the record is one
             higher than the ending bit of the last field processed
             unless we have a rep clause, since in that case we might
             have a field outside a QUAL_UNION_TYPE that has a higher ending
             position.  So use a MAX in that case.  Also, if this field is a
             QUAL_UNION_TYPE, we need to take into account the previous size in
             the case of empty variants.  */
          ada_size
            = merge_sizes (ada_size, pos, this_ada_size,
                           TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
          size
            = merge_sizes (size, pos, this_size,
                           TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
          break;

        default:
          gcc_unreachable ();
        }
    }

  if (code == QUAL_UNION_TYPE)
    nreverse (fieldlist);

  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
         than what was specified in it, if any.  */
      if (TREE_CODE (record_type) == RECORD_TYPE
          && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
        size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_IS_FAT_POINTER_P (record_type)
          && !TYPE_CONTAINS_TEMPLATE_P (record_type))
        SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
        {
          tree size_unit = had_size_unit
                           ? TYPE_SIZE_UNIT (record_type)
                           : convert (sizetype,
                                      size_binop (CEIL_DIV_EXPR, size,
                                                  bitsize_unit_node));
          unsigned int align = TYPE_ALIGN (record_type);

          TYPE_SIZE (record_type) = variable_size (round_up (size, align));
          TYPE_SIZE_UNIT (record_type)
            = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

          compute_record_mode (record_type);
        }
    }

  if (!do_not_finalize)
    rest_of_record_type_compilation (record_type);
}
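
/* A caller with no representation clause typically builds a record in
   three steps, along the lines of the following sketch; the field names
   and component types are placeholders:

     tree gnu_rec = make_node (RECORD_TYPE);
     tree gnu_f1 = create_field_decl (get_identifier ("F1"),
                                      gnat_type_for_size (32, 0),
                                      gnu_rec, 0, NULL_TREE, NULL_TREE, 0);
     tree gnu_f2 = create_field_decl (get_identifier ("F2"),
                                      gnat_type_for_size (8, 1),
                                      gnu_rec, 0, NULL_TREE, NULL_TREE, 0);

     TREE_CHAIN (gnu_f1) = gnu_f2;
     finish_record_type (gnu_rec, gnu_f1, 0, false);
*/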

/* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
   the debug information associated with it.  It need not be invoked
   directly in most cases since finish_record_type takes care of doing
   so, unless explicitly requested not to through DO_NOT_FINALIZE.  */

void
rest_of_record_type_compilation (tree record_type)
{
  tree fieldlist = TYPE_FIELDS (record_type);
  tree field;
  enum tree_code code = TREE_CODE (record_type);
  bool var_size = false;

  for (field = fieldlist; field; field = TREE_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
         whether or not the record does.  For example, if we have a union,
         it may be that all fields, rounded up to the alignment, have the
         same size, in which case we'll use that size.  But the debug
         output routines (except Dwarf2) won't be able to output the fields,
         so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
          /* If a field has a non-constant qualifier, the record will have
             variable size too.  */
          || (code == QUAL_UNION_TYPE
              && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
        {
          var_size = true;
          break;
        }
    }

  /* If this record is of variable size, rename it so that the
     debugger knows it is and make a new, parallel, record
     that tells the debugger how the record is laid out.  See
     exp_dbug.ads.  But don't do this for records that are padding
     since they confuse GDB.  */
  if (var_size
      && !(TREE_CODE (record_type) == RECORD_TYPE
           && TYPE_IS_PADDING_P (record_type)))
    {
      tree new_record_type
        = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
                     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_NAME (record_type);
      tree orig_id
        = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
           : orig_name);
      tree new_id
        = concat_id_with_name (orig_id,
                               TREE_CODE (record_type) == QUAL_UNION_TYPE
                               ? "XVU" : "XVE");
      tree last_pos = bitsize_zero_node;
      tree old_field;
      tree prev_old_field = 0;

      TYPE_NAME (new_record_type) = new_id;
      TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
      TYPE_STUB_DECL (new_record_type)
        = build_decl (TYPE_DECL, new_id, new_record_type);
      DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
        = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
        = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      /* Now scan all the fields, replacing each field with a new
         field corresponding to the new encoding.  */
      for (old_field = TYPE_FIELDS (record_type); old_field;
           old_field = TREE_CHAIN (old_field))
        {
          tree field_type = TREE_TYPE (old_field);
          tree field_name = DECL_NAME (old_field);
          tree new_field;
          tree curpos = bit_position (old_field);
          bool var = false;
          unsigned int align = 0;
          tree pos;

          /* See how the position was modified from the last position.

             There are two basic cases we support: a value was added
             to the last position or the last position was rounded to
             a boundary and then something was added.  Check for the
             first case first.  If not, see if there is any evidence
             of rounding.  If so, round the last position and try
             again.

             If this is a union, the position can be taken as zero.  */

          if (TREE_CODE (new_record_type) == UNION_TYPE)
            pos = bitsize_zero_node, align = 0;
          else
            pos = compute_related_constant (curpos, last_pos);

          if (!pos && TREE_CODE (curpos) == MULT_EXPR
              && host_integerp (TREE_OPERAND (curpos, 1), 1))
            {
              tree offset = TREE_OPERAND (curpos, 0);
              align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);

              /* Strip off any conversions.  */
              while (TREE_CODE (offset) == NON_LVALUE_EXPR
                     || TREE_CODE (offset) == NOP_EXPR
                     || TREE_CODE (offset) == CONVERT_EXPR)
                offset = TREE_OPERAND (offset, 0);

              /* An offset which is a bitwise AND with a negative power of 2
                 means an alignment corresponding to this power of 2.  */
              if (TREE_CODE (offset) == BIT_AND_EXPR
                  && host_integerp (TREE_OPERAND (offset, 1), 0)
                  && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
                {
                  unsigned int pow
                    = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
                  if (exact_log2 (pow) > 0)
                    align *= pow;
                }

              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }
          else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
                   && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
                   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
                   && host_integerp (TREE_OPERAND
                                     (TREE_OPERAND (curpos, 0), 1),
                                     1))
            {
              align
                = tree_low_cst
                  (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }
          else if (potential_alignment_gap (prev_old_field, old_field,
                                            pos))
            {
              align = TYPE_ALIGN (field_type);
              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }

          /* If we can't compute a position, set it to zero.

             ??? We really should abort here, but it's too much work
             to get this correct for all cases.  */

          if (!pos)
            pos = bitsize_zero_node;

          /* See if this type is variable-sized and make a pointer type
             and indicate the indirection if so.  Beware that the debug
             back-end may adjust the position computed above according
             to the alignment of the field type, i.e. the pointer type
             in this case, if we don't preventively counter that.  */
          if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
            {
              field_type = build_pointer_type (field_type);
              if (align != 0 && TYPE_ALIGN (field_type) > align)
                {
                  field_type = copy_node (field_type);
                  TYPE_ALIGN (field_type) = align;
                }
              var = true;
            }

          /* Make a new field name, if necessary.  */
          if (var || align != 0)
            {
              char suffix[16];

              if (align != 0)
                sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
                         align / BITS_PER_UNIT);
              else
                strcpy (suffix, "XVL");

              field_name = concat_id_with_name (field_name, suffix);
            }

          new_field = create_field_decl (field_name, field_type,
                                         new_record_type, 0,
                                         DECL_SIZE (old_field), pos, 0);
          TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
          TYPE_FIELDS (new_record_type) = new_field;

          /* If old_field is a QUAL_UNION_TYPE, take its size as being
             zero.  The only time it's not the last field of the record
             is when there are other components at fixed positions after
             it (meaning there was a rep clause for every field) and we
             want to be able to encode them.  */
          last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
                                 (TREE_CODE (TREE_TYPE (old_field))
                                  == QUAL_UNION_TYPE)
                                 ? bitsize_zero_node
                                 : DECL_SIZE (old_field));
          prev_old_field = old_field;
        }

      TYPE_FIELDS (new_record_type)
        = nreverse (TYPE_FIELDS (new_record_type));

      rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
    }

  rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
}
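
/* For instance, given a variable-sized record named REC, the code above
   produces a parallel type typically named REC___XVE (or REC___XVU when
   the layout is that of a QUAL_UNION_TYPE) whose fields describe the
   layout of the original ones, and a self-referential field F typically
   becomes a pointer-valued field named F___XVL.  The precise encoding
   conventions are the ones documented in exp_dbug.ads.  */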

/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is nonzero
   if this represents a QUAL_UNION_TYPE in which case we must look for
   COND_EXPRs and replace a value of zero with the old size.  If HAS_REP
   is nonzero, we must take the MAX of the end position of this field
   with LAST_SIZE.  In all other cases, we use FIRST_BIT plus SIZE.

   We return an expression for the size.  */

static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool special,
             bool has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new;

  if (!special || TREE_CODE (size) != COND_EXPR)
    {
      new = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
        new = size_binop (MAX_EXPR, last_size, new);
    }

  else
    new = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
                       integer_zerop (TREE_OPERAND (size, 1))
                       ? last_size : merge_sizes (last_size, first_bit,
                                                  TREE_OPERAND (size, 1),
                                                  1, has_rep),
                       integer_zerop (TREE_OPERAND (size, 2))
                       ? last_size : merge_sizes (last_size, first_bit,
                                                  TREE_OPERAND (size, 2),
                                                  1, has_rep));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new) == NON_LVALUE_EXPR)
    new = TREE_OPERAND (new, 0);

  return new;
}

/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
   related by the addition of a constant.  Return that constant if so.  */

static tree
compute_related_constant (tree op0, tree op1)
{
  tree op0_var, op1_var;
  tree op0_con = split_plus (op0, &op0_var);
  tree op1_con = split_plus (op1, &op1_var);
  tree result = size_binop (MINUS_EXPR, op0_con, op1_con);

  if (operand_equal_p (op0_var, op1_var, 0))
    return result;
  else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
    return result;
  else
    return 0;
}

/* Utility function of above to split a tree IN which may be a sum, into a
   constant part, which is returned, and a variable part, which is stored
   in *PVAR.  *PVAR may be bitsize_zero_node.  All operations must be of
   bitsizetype.  */

static tree
split_plus (tree in, tree *pvar)
{
  /* Strip NOPS in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery.  We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  STRIP_NOPS (in);

  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      if (lhs_var == TREE_OPERAND (in, 0)
          && rhs_var == TREE_OPERAND (in, 1))
        return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}
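
/* As a concrete example of the two helpers above: with OP0 = V + 64 and
   OP1 = V + 16, where V is the same variable offset expression in both and
   everything is in bitsizetype, split_plus returns 64 and 16 with *PVAR
   set to V in both cases, so compute_related_constant returns the constant
   48; had the variable parts differed, it would have returned 0 instead.  */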

/* Return a FUNCTION_TYPE node.  RETURN_TYPE is the type returned by the
   subprogram.  If it is void_type_node, then we are dealing with a procedure,
   otherwise we are dealing with a function.  PARAM_DECL_LIST is a list of
   PARM_DECL nodes that are the subprogram arguments.  CICO_LIST is the
   copy-in/copy-out list to be stored into TYPE_CICO_LIST.
   RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
   object.  RETURNS_BY_REF is nonzero if the function returns by reference.
   RETURNS_WITH_DSP is nonzero if the function is to return with a
   depressed stack pointer.  RETURNS_BY_TARGET_PTR is true if the function
   is to be passed (as its first parameter) the address of the place to copy
   its result.  */

tree
create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
                     bool returns_unconstrained, bool returns_by_ref,
                     bool returns_with_dsp, bool returns_by_target_ptr)
{
  /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
     the subprogram formal parameters.  This list is generated by traversing
     the input list of PARM_DECL nodes.  */
  tree param_type_list = NULL;
  tree param_decl;
  tree type;

  for (param_decl = param_decl_list; param_decl;
       param_decl = TREE_CHAIN (param_decl))
    param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
                                 param_type_list);

  /* The list of the function parameter types has to be terminated by the void
     type to signal to the back-end that we are not dealing with a variable
     parameter subprogram, but that the subprogram has a fixed number of
     parameters.  */
  param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);

  /* The list of argument types has been created in reverse
     so nreverse it.  */
  param_type_list = nreverse (param_type_list);

  type = build_function_type (return_type, param_type_list);

  /* TYPE may have been shared since GCC hashes types.  If it has a CICO_LIST
     or the new type should, make a copy of TYPE.  Likewise for
     RETURNS_UNCONSTRAINED and RETURNS_BY_REF.  */
  if (TYPE_CI_CO_LIST (type) || cico_list
      || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
      || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
      || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
    type = copy_type (type);

  TYPE_CI_CO_LIST (type) = cico_list;
  TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
  TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
  TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
  TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
  return type;
}
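
/* For example, the FUNCTION_TYPE for a procedure taking a single Integer
   IN parameter and using none of the special return mechanisms could be
   built along the lines of this sketch; the parameter name is a
   placeholder:

     tree gnu_param = create_param_decl (get_identifier ("x"),
                                         integer_type_node, true);
     tree gnu_type = create_subprog_type (void_type_node, gnu_param,
                                          NULL_TREE, false, false, false,
                                          false);
*/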

/* Return a copy of TYPE but safe to modify in any way.  */

tree
copy_type (tree type)
{
  tree new = copy_node (type);

  /* copy_node clears this field instead of copying it, because it is
     aliased with TREE_CHAIN.  */
  TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);

  TYPE_POINTER_TO (new) = 0;
  TYPE_REFERENCE_TO (new) = 0;
  TYPE_MAIN_VARIANT (new) = new;
  TYPE_NEXT_VARIANT (new) = 0;

  return new;
}

/* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
   TYPE_INDEX_TYPE is INDEX.  GNAT_NODE is used for the position of
   the decl.  */

tree
create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
{
  /* First build a type for the desired range.  */
  tree type = build_index_2_type (min, max);

  /* If this type has the TYPE_INDEX_TYPE we want, return it.  Otherwise, if
     it doesn't have TYPE_INDEX_TYPE set, set it to INDEX.  If TYPE_INDEX_TYPE
     is set, but not to INDEX, make a copy of this type with the requested
     index type.  Note that we have no way of sharing these types, but that's
     only a small hole.  */
  if (TYPE_INDEX_TYPE (type) == index)
    return type;
  else if (TYPE_INDEX_TYPE (type))
    type = copy_type (type);

  SET_TYPE_INDEX_TYPE (type, index);
  create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
  return type;
}

/* Return a TYPE_DECL node.  TYPE_NAME gives the name of the type (a character
   string) and TYPE is a ..._TYPE node giving its data type.
   ARTIFICIAL_P is true if this is a declaration that was generated
   by the compiler.  DEBUG_INFO_P is true if we need to write debugging
   information about this type.  GNAT_NODE is used for the position of
   the decl.  */

tree
create_type_decl (tree type_name, tree type, struct attrib *attr_list,
                  bool artificial_p, bool debug_info_p, Node_Id gnat_node)
{
  tree type_decl = build_decl (TYPE_DECL, type_name, type);
  enum tree_code code = TREE_CODE (type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;

  if (!TYPE_IS_DUMMY_P (type))
    gnat_pushdecl (type_decl, gnat_node);

  process_attributes (type_decl, attr_list);

  /* Pass type declaration information to the debugger unless this is an
     UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support, an
     ENUMERAL_TYPE or RECORD_TYPE, which is handled separately, or a
     type for which debugging information was not requested.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;
  else if (code != ENUMERAL_TYPE
           && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
           && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
                && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
    rest_of_type_decl_compilation (type_decl);

  return type_decl;
}

/* Helper for create_var_decl and create_true_var_decl.  Returns a GCC VAR_DECL
   or CONST_DECL node.

   VAR_NAME gives the name of the variable.  ASM_NAME is its assembler name
   (if provided).  TYPE is its data type (a GCC ..._TYPE node).  VAR_INIT is
   the GCC tree for an optional initial expression; NULL_TREE if none.

   CONST_FLAG is true if this variable is constant, in which case we might
   return a CONST_DECL node unless CONST_DECL_ALLOWED_FLAG is false.

   PUBLIC_FLAG is true if this definition is to be made visible outside of
   the current compilation unit.  This flag should be set when processing the
   variable definitions in a package specification.  EXTERN_FLAG is nonzero
   when processing an external variable declaration (as opposed to a
   definition: no storage is to be allocated for the variable here).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.

   GNAT_NODE is used for the position of the decl.  */

static tree
create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
                   bool const_flag, bool const_decl_allowed_flag,
                   bool public_flag, bool extern_flag, bool static_flag,
                   struct attrib *attr_list, Node_Id gnat_node)
{
  bool init_const
    = (var_init != 0
       && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
       && (global_bindings_p () || static_flag
           ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
           : TREE_CONSTANT (var_init)));

  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
     case the initializer may be used in lieu of the DECL node (as done in
     Identifier_to_gnu).  This is useful to prevent the need of elaboration
     code when an identifier for which such a decl is made is in turn used as
     an initializer.  We used to rely on CONST_DECL vs VAR_DECL for this
     purpose, but extra constraints apply to this choice (see below) and are
     not relevant to the distinction we wish to make.  */
  bool constant_p = const_flag && init_const;

  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
     and may be used for scalars in general but not for aggregates.  */
  tree var_decl
    = build_decl ((constant_p && const_decl_allowed_flag
                   && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
                  var_name, type);

  /* If this is external, throw away any initializations (they will be done
     elsewhere) unless this is a constant for which we would like to remain
     able to get the initializer.  If we are defining a global here, leave a
     constant initialization and save any variable elaborations for the
     elaboration routine.  If we are just annotating types, throw away the
     initialization if it isn't a constant.  */
  if ((extern_flag && !constant_p)
      || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
    var_init = NULL_TREE;

  /* At the global level, an initializer requiring code to be generated
     produces elaboration statements.  Check that such statements are allowed,
     that is, not violating a No_Elaboration_Code restriction.  */
  if (global_bindings_p () && var_init != 0 && ! init_const)
    Check_Elaboration_Code_Allowed (gnat_node);

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;
  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = constant_p;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* If it's public and not external, always allocate storage for it.
     At the global binding level we need to allocate static storage for the
     variable if and only if it's not external.  If we are not at the top level
     we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl)
    = public_flag || (global_bindings_p () ? !extern_flag : static_flag);

  if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  if (TREE_CODE (var_decl) != CONST_DECL)
    rest_of_decl_compilation (var_decl, global_bindings_p (), 0);
  else
    expand_decl (var_decl);

  return var_decl;
}

/* Wrapper around create_var_decl_1 for cases where we don't care whether
   a VAR or a CONST decl node is created.  */

tree
create_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
                 bool const_flag, bool public_flag, bool extern_flag,
                 bool static_flag, struct attrib *attr_list,
                 Node_Id gnat_node)
{
  return create_var_decl_1 (var_name, asm_name, type, var_init,
                            const_flag, true,
                            public_flag, extern_flag, static_flag,
                            attr_list, gnat_node);
}

/* Wrapper around create_var_decl_1 for cases where a VAR_DECL node is
   required.  The primary intent is for DECL_CONST_CORRESPONDING_VARs, which
   must be VAR_DECLs and on which we want TREE_READONLY set to have them
   possibly assigned to a readonly data section.  */

tree
create_true_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
                      bool const_flag, bool public_flag, bool extern_flag,
                      bool static_flag, struct attrib *attr_list,
                      Node_Id gnat_node)
{
  return create_var_decl_1 (var_name, asm_name, type, var_init,
                            const_flag, false,
                            public_flag, extern_flag, static_flag,
                            attr_list, gnat_node);
}
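
/* For instance, a scalar Ada constant with a compile-time known initializer
   may come out of create_var_decl as either a CONST_DECL or a VAR_DECL,
   whereas a call along the lines of the sketch below guarantees a true
   VAR_DECL, as needed for a DECL_CONST_CORRESPONDING_VAR; the gnu_* and
   gnat_node names are placeholders:

     gnu_var = create_true_var_decl (gnu_name, NULL_TREE, gnu_type, gnu_init,
                                     true, false, false, false, NULL,
                                     gnat_node);
*/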

/* Returns a FIELD_DECL node.  FIELD_NAME is the field name, FIELD_TYPE is its
   type, and RECORD_TYPE is the type of the parent.  PACKED is nonzero if
   this field is in a record type with a "pragma pack".  If SIZE is nonzero
   it is the specified size for this field.  If POS is nonzero, it is the bit
   position.  If ADDRESSABLE is nonzero, it means we are allowed to take
   the address of this field for aliasing purposes.  If it is negative, we
   should not make a bitfield, which is used by make_aligning_type.  */

tree
create_field_decl (tree field_name, tree field_type, tree record_type,
                   int packed, tree size, tree pos, int addressable)
{
  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.  */
  if (packed && TYPE_MODE (field_type) == BLKmode)
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);

      /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
         a byte.  */
      if (TREE_CODE (size) == INTEGER_CST
          && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
        size = round_up (size, BITS_PER_UNIT);
    }

  /* If we may, according to ADDRESSABLE, make a bitfield if a size is
     specified for two reasons: first if the size differs from the natural
     size.  Second, if the alignment is insufficient.  There are a number of
     ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     because no such entity requiring bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (addressable >= 0
      && size
      && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
          || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
          || packed
          || (TYPE_ALIGN (record_type) != 0
              && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (!packed && !pos)
        DECL_ALIGN (field_decl)
          = (TYPE_ALIGN (record_type) != 0
             ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
             : TYPE_ALIGN (field_type));
    }

  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;

  /* Bump the alignment if need be, either for bitfield/packing purposes or
     to satisfy the type requirements if no such consideration applies.  When
     we get the alignment from the type, indicate if this is from an explicit
     user request, which prevents stor-layout from lowering it later on.  */
  {
    int bit_align
      = (DECL_BIT_FIELD (field_decl) ? 1
         : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);

    if (bit_align > DECL_ALIGN (field_decl))
      DECL_ALIGN (field_decl) = bit_align;
    else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
      {
        DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
        DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
      }
  }

  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
         This is the lowest-order bit set in POS, but no more than
         the alignment of the record, if one is specified.  Note
         that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
        known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
        known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
          && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
        known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
                             host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
                             : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
                    &DECL_FIELD_BIT_OFFSET (field_decl),
                    DECL_OFFSET_ALIGN (field_decl), pos);

      DECL_HAS_REP_P (field_decl) = 1;
    }

  /* In addition to what our caller says, claim the field is addressable if we
     know that its type is not suitable.

     The field may also be "technically" nonaddressable, meaning that even if
     we attempt to take the field's address we will actually get the address
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
     value we have at this point is not accurate enough, so we don't account
     for this here and let finish_record_type decide.  */
  if (!type_for_nonaliased_component_p (field_type))
    addressable = 1;

  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}
1647 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1648 PARAM_TYPE is its type. READONLY is true if the parameter is
1649 readonly (either an IN parameter or an address of a pass-by-ref
1650 parameter). */
1652 tree
1653 create_param_decl (tree param_name, tree param_type, bool readonly)
1655 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1657 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1658 lead to various ABI violations. */
1659 if (targetm.calls.promote_prototypes (param_type)
1660 && (TREE_CODE (param_type) == INTEGER_TYPE
1661 || TREE_CODE (param_type) == ENUMERAL_TYPE)
1662 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1664 /* We have to be careful about biased types here. Make a subtype
1665 of integer_type_node with the proper biasing. */
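     /* (For reference: a type with TYPE_BIASED_REPRESENTATION_P set stores
        its values as unsigned offsets from its lower bound, so the biasing
        flag must be carried over to the promoted subtype below.)  */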
1666 if (TREE_CODE (param_type) == INTEGER_TYPE
1667 && TYPE_BIASED_REPRESENTATION_P (param_type))
1669 param_type
1670 = copy_type (build_range_type (integer_type_node,
1671 TYPE_MIN_VALUE (param_type),
1672 TYPE_MAX_VALUE (param_type)));
1674 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1676 else
1677 param_type = integer_type_node;
1680 DECL_ARG_TYPE (param_decl) = param_type;
1681 TREE_READONLY (param_decl) = readonly;
1682 return param_decl;
1685 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1687 void
1688 process_attributes (tree decl, struct attrib *attr_list)
1690 for (; attr_list; attr_list = attr_list->next)
1691 switch (attr_list->type)
1693 case ATTR_MACHINE_ATTRIBUTE:
1694 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1695 NULL_TREE),
1696 ATTR_FLAG_TYPE_IN_PLACE);
1697 break;
1699 case ATTR_LINK_ALIAS:
1700 if (! DECL_EXTERNAL (decl))
1702 TREE_STATIC (decl) = 1;
1703 assemble_alias (decl, attr_list->name);
1705 break;
1707 case ATTR_WEAK_EXTERNAL:
1708 if (SUPPORTS_WEAK)
1709 declare_weak (decl);
1710 else
1711 post_error ("?weak declarations not supported on this target",
1712 attr_list->error_point);
1713 break;
1715 case ATTR_LINK_SECTION:
1716 if (targetm.have_named_sections)
1718 DECL_SECTION_NAME (decl)
1719 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1720 IDENTIFIER_POINTER (attr_list->name));
1721 DECL_COMMON (decl) = 0;
1723 else
1724 post_error ("?section attributes are not supported for this target",
1725 attr_list->error_point);
1726 break;
1728 case ATTR_LINK_CONSTRUCTOR:
1729 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1730 TREE_USED (decl) = 1;
1731 break;
1733 case ATTR_LINK_DESTRUCTOR:
1734 DECL_STATIC_DESTRUCTOR (decl) = 1;
1735 TREE_USED (decl) = 1;
1736 break;
1740 /* Record a global renaming pointer. */
1742 void
1743 record_global_renaming_pointer (tree decl)
1745 gcc_assert (DECL_RENAMED_OBJECT (decl));
1746 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1749 /* Invalidate the global renaming pointers. */
1751 void
1752 invalidate_global_renaming_pointers (void)
1754 unsigned int i;
1755 tree iter;
1757 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1758 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1760 VEC_free (tree, gc, global_renaming_pointers);
1763 /* Return true if VALUE is known to be a multiple of FACTOR, which must be
1764 a power of 2. */
1766 bool
1767 value_factor_p (tree value, HOST_WIDE_INT factor)
1769 if (host_integerp (value, 1))
1770 return tree_low_cst (value, 1) % factor == 0;
1772 if (TREE_CODE (value) == MULT_EXPR)
1773 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1774 || value_factor_p (TREE_OPERAND (value, 1), factor));
1776 return 0;
1779 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1780 unless we can prove these 2 fields are laid out in such a way that no gap
1781 exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1782 is the distance in bits between the end of PREV_FIELD and the starting
1783 position of CURR_FIELD. It is ignored if null. */
1785 static bool
1786 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1788 /* If this is the first field of the record, there cannot be any gap. */
1789 if (!prev_field)
1790 return false;
1792 /* If the previous field is a union type, then return false: the only
1793 time when such a field is not the last field of the record is when
1794 there are other components at fixed positions after it (meaning there
1795 was a rep clause for every field), in which case we don't want the
1796 alignment constraint to override them. */
1797 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1798 return false;
1800 /* If the distance between the end of prev_field and the beginning of
1801 curr_field is constant, then there is a gap if the value of this
1802 constant is not zero. */
1803 if (offset && host_integerp (offset, 1))
1804 return !integer_zerop (offset);
1806 /* If the size and position of the previous field are constant,
1807 then check the sum of this size and position. There will be a gap
1808 iff it is not a multiple of the current field alignment. */
1809 if (host_integerp (DECL_SIZE (prev_field), 1)
1810 && host_integerp (bit_position (prev_field), 1))
1811 return ((tree_low_cst (bit_position (prev_field), 1)
1812 + tree_low_cst (DECL_SIZE (prev_field), 1))
1813 % DECL_ALIGN (curr_field) != 0);
1815 /* If both the position and size of the previous field are multiples
1816 of the current field alignment, there cannot be any gap. */
1817 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1818 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1819 return false;
1821 /* Fallback: return that there may be a potential gap. */
1822 return true;
1825 /* Returns a LABEL_DECL node for LABEL_NAME. */
1827 tree
1828 create_label_decl (tree label_name)
1830 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1832 DECL_CONTEXT (label_decl) = current_function_decl;
1833 DECL_MODE (label_decl) = VOIDmode;
1834 DECL_SOURCE_LOCATION (label_decl) = input_location;
1836 return label_decl;
1839 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1840 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1841 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1842 PARM_DECL nodes chained through the TREE_CHAIN field).
1844 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1845 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1847 tree
1848 create_subprog_decl (tree subprog_name, tree asm_name,
1849 tree subprog_type, tree param_decl_list, bool inline_flag,
1850 bool public_flag, bool extern_flag,
1851 struct attrib *attr_list, Node_Id gnat_node)
1853 tree return_type = TREE_TYPE (subprog_type);
1854 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1856 /* If this is a function nested inside an inlined external function, it
1857 means we aren't going to compile the outer function unless it is
1858 actually inlined, so do the same for us. */
1859 if (current_function_decl && DECL_INLINE (current_function_decl)
1860 && DECL_EXTERNAL (current_function_decl))
1861 extern_flag = true;
1863 DECL_EXTERNAL (subprog_decl) = extern_flag;
1864 TREE_PUBLIC (subprog_decl) = public_flag;
1865 TREE_STATIC (subprog_decl) = 1;
1866 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1867 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1868 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1869 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1870 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1871 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1872 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1874 /* TREE_ADDRESSABLE is set on the result type to request the use of the
1875 target by-reference return mechanism. This is not supported all the
1876 way down to RTL expansion with GCC 4, which ICEs on temporary creation
1877 attempts with such a type and expects DECL_BY_REFERENCE to be set on
1878 the RESULT_DECL instead - see gnat_genericize for more details. */
1879 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
1881 tree result_decl = DECL_RESULT (subprog_decl);
1883 TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
1884 DECL_BY_REFERENCE (result_decl) = 1;
1887 if (inline_flag)
1888 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
1890 if (asm_name)
1891 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1893 process_attributes (subprog_decl, attr_list);
1895 /* Add this decl to the current binding level. */
1896 gnat_pushdecl (subprog_decl, gnat_node);
1898 /* Output the assembler code and/or RTL for the declaration. */
1899 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1901 return subprog_decl;
1904 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1905 body. This routine needs to be invoked before processing the declarations
1906 appearing in the subprogram. */
1908 void
1909 begin_subprog_body (tree subprog_decl)
1911 tree param_decl;
1913 current_function_decl = subprog_decl;
1914 announce_function (subprog_decl);
1916 /* Enter a new binding level and show that all the parameters belong to
1917 this function. */
1918 gnat_pushlevel ();
1919 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1920 param_decl = TREE_CHAIN (param_decl))
1921 DECL_CONTEXT (param_decl) = subprog_decl;
1923 make_decl_rtl (subprog_decl);
1925 /* We handle pending sizes via the elaboration of types, so we don't need to
1926 save them. This causes them to be marked as part of the outer function
1927 and then discarded. */
1928 get_pending_sizes ();
1932 /* Helper for the genericization callback. Return a dereference of VAL
1933 if it is of a reference type. */
1935 static tree
1936 convert_from_reference (tree val)
1938 tree value_type, ref;
1940 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
1941 return val;
1943 value_type = TREE_TYPE (TREE_TYPE (val));
1944 ref = build1 (INDIRECT_REF, value_type, val);
1946 /* See if what we reference is CONST or VOLATILE, which requires
1947 looking into array types to get to the component type. */
1949 while (TREE_CODE (value_type) == ARRAY_TYPE)
1950 value_type = TREE_TYPE (value_type);
1952 TREE_READONLY (ref)
1953 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
1954 TREE_THIS_VOLATILE (ref)
1955 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
1957 TREE_SIDE_EFFECTS (ref)
1958 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
1960 return ref;
1963 /* Helper for the genericization callback. Returns true if T denotes
1964 a RESULT_DECL with DECL_BY_REFERENCE set. */
1966 static inline bool
1967 is_byref_result (tree t)
1969 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
1973 /* Tree walking callback for gnat_genericize. Currently ...
1975 o Adjust references to the function's DECL_RESULT if it is marked
1976 DECL_BY_REFERENCE and so has had its type turned into a reference
1977 type at the end of the function compilation. */
1979 static tree
1980 gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1982 /* This implementation is modeled after what the C++ front-end does,
1983 which is the basis of the downstream passes' behavior. */
1985 tree stmt = *stmt_p;
1986 struct pointer_set_t *p_set = (struct pointer_set_t*) data;
1988 /* If we have a direct mention of the result decl, dereference. */
1989 if (is_byref_result (stmt))
1991 *stmt_p = convert_from_reference (stmt);
1992 *walk_subtrees = 0;
1993 return NULL;
1996 /* Otherwise, no need to walk the same tree twice. */
1997 if (pointer_set_contains (p_set, stmt))
1999 *walk_subtrees = 0;
2000 return NULL_TREE;
2003 /* If we are taking the address of what now is a reference, just get the
2004 reference value. */
2005 if (TREE_CODE (stmt) == ADDR_EXPR
2006 && is_byref_result (TREE_OPERAND (stmt, 0)))
2008 *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
2009 *walk_subtrees = 0;
2012 /* Don't dereference a by-reference RESULT_DECL inside a RETURN_EXPR. */
2013 else if (TREE_CODE (stmt) == RETURN_EXPR
2014 && TREE_OPERAND (stmt, 0)
2015 && is_byref_result (TREE_OPERAND (stmt, 0)))
2016 *walk_subtrees = 0;
2018 /* Don't look inside trees that cannot embed references of interest. */
2019 else if (IS_TYPE_OR_DECL_P (stmt))
2020 *walk_subtrees = 0;
2022 pointer_set_insert (p_set, *stmt_p);
2024 return NULL;
2027 /* Perform lowering of Ada trees to GENERIC. In particular:
2029 o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
2030 and adjust all the references to this decl accordingly. */
2032 static void
2033 gnat_genericize (tree fndecl)
2035 /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
2036 was handled by simply setting TREE_ADDRESSABLE on the result type.
2037 Everything required to actually pass by invisible ref using the target
2038 mechanism (e.g. extra parameter) was handled at RTL expansion time.
2040 This doesn't work with GCC 4 any more for several reasons. First, the
2041 gimplification process might need the creation of temporaries of this
2042 type, and the gimplifier ICEs on such attempts. Second, the middle-end
2043 now relies on a different attribute for such cases (DECL_BY_REFERENCE on
2044 RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
2045 be explicitly accounted for by the front-end in the function body.
2047 We achieve the complete transformation in two steps:
2049 1/ create_subprog_decl performs early attribute tweaks: it clears
2050 TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
2051 the result decl. The former ensures that the bit isn't set in the GCC
2052 tree saved for the function, so prevents ICEs on temporary creation.
2053 The latter we use here to trigger the rest of the processing.
2055 2/ This function performs the type transformation on the result decl
2056 and adjusts all the references to this decl from the function body
2057 accordingly.
2059 Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
2060 strategy, which escapes the gimplifier temporary creation issues by
2061 creating its own temporaries using TARGET_EXPR nodes. Our way relies
2062 on simple specific support code in aggregate_value_p to look at the
2063 target function result decl explicitly. */
2065 struct pointer_set_t *p_set;
2066 tree decl_result = DECL_RESULT (fndecl);
2068 if (!DECL_BY_REFERENCE (decl_result))
2069 return;
2071 /* Make the DECL_RESULT explicitly by-reference and adjust all the
2072 occurrences in the function body using the common tree-walking facility.
2073 We want to see every occurrence of the result decl to adjust the
2074 referencing tree, so we need to use our own pointer set to control which
2075 trees should be visited again or not. */
2077 p_set = pointer_set_create ();
2079 TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
2080 TREE_ADDRESSABLE (decl_result) = 0;
2081 relayout_decl (decl_result);
2083 walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);
2085 pointer_set_destroy (p_set);
2088 /* Finish the definition of the current subprogram and compile it all the way
2089 to assembler language output. BODY is the tree corresponding to
2090 the subprogram. */
2092 void
2093 end_subprog_body (tree body)
2095 tree fndecl = current_function_decl;
2097 /* Mark the BLOCK for this level as being for this function and pop the
2098 level. Since the vars in it are the parameters, clear them. */
2099 BLOCK_VARS (current_binding_level->block) = 0;
2100 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2101 DECL_INITIAL (fndecl) = current_binding_level->block;
2102 gnat_poplevel ();
2104 /* Deal with inline. If declared inline or we should default to inline,
2105 set the flag in the decl. */
2106 DECL_INLINE (fndecl)
2107 = DECL_DECLARED_INLINE_P (fndecl) || flag_inline_trees == 2;
2109 /* We handle pending sizes via the elaboration of types, so we don't
2110 need to save them. */
2111 get_pending_sizes ();
2113 /* Mark the RESULT_DECL as being in this subprogram. */
2114 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2116 DECL_SAVED_TREE (fndecl) = body;
2118 current_function_decl = DECL_CONTEXT (fndecl);
2119 set_cfun (NULL);
2121 /* We cannot track the location of errors past this point. */
2122 error_gnat_node = Empty;
2124 /* If we're only annotating types, don't actually compile this function. */
2125 if (type_annotate_only)
2126 return;
2128 /* Perform the required pre-gimplification transformations on the tree. */
2129 gnat_genericize (fndecl);
2131 /* We do different things for nested and non-nested functions.
2132 ??? This should be in cgraph. */
2133 if (!DECL_CONTEXT (fndecl))
2135 gnat_gimplify_function (fndecl);
2136 cgraph_finalize_function (fndecl, false);
2138 else
2139 /* Register this function with cgraph just far enough to get it
2140 added to our parent's nested function list. */
2141 (void) cgraph_node (fndecl);
2144 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
2146 static void
2147 gnat_gimplify_function (tree fndecl)
2149 struct cgraph_node *cgn;
2151 dump_function (TDI_original, fndecl);
2152 gimplify_function_tree (fndecl);
2153 dump_function (TDI_generic, fndecl);
2155 /* Convert all nested functions to GIMPLE now. We do things in this order
2156 so that items like VLA sizes are expanded properly in the context of the
2157 correct function. */
2158 cgn = cgraph_node (fndecl);
2159 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
2160 gnat_gimplify_function (cgn->decl);
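/* Install the builtin function DECL in the current binding level and
   return it.  */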
2164 tree
2165 gnat_builtin_function (tree decl)
2167 gnat_pushdecl (decl, Empty);
2168 return decl;
2171 /* Handle a "const" attribute; arguments as in
2172 struct attribute_spec.handler. */
2174 static tree
2175 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
2176 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
2177 bool *no_add_attrs)
2179 if (TREE_CODE (*node) == FUNCTION_DECL)
2180 TREE_READONLY (*node) = 1;
2181 else
2182 *no_add_attrs = true;
2184 return NULL_TREE;
2187 /* Handle a "nothrow" attribute; arguments as in
2188 struct attribute_spec.handler. */
2190 static tree
2191 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
2192 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
2193 bool *no_add_attrs)
2195 if (TREE_CODE (*node) == FUNCTION_DECL)
2196 TREE_NOTHROW (*node) = 1;
2197 else
2198 *no_add_attrs = true;
2200 return NULL_TREE;
2203 /* Return an integer type with the number of bits of precision given by
2204 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2205 it is a signed type. */
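/* For instance, gnat_type_for_size (8, 1) returns the 8-bit unsigned
   integer type, naming it UNSIGNED_8 if it had to be created here.  */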
2207 tree
2208 gnat_type_for_size (unsigned precision, int unsignedp)
2210 tree t;
2211 char type_name[20];
2213 if (precision <= 2 * MAX_BITS_PER_WORD
2214 && signed_and_unsigned_types[precision][unsignedp])
2215 return signed_and_unsigned_types[precision][unsignedp];
2217 if (unsignedp)
2218 t = make_unsigned_type (precision);
2219 else
2220 t = make_signed_type (precision);
2222 if (precision <= 2 * MAX_BITS_PER_WORD)
2223 signed_and_unsigned_types[precision][unsignedp] = t;
2225 if (!TYPE_NAME (t))
2227 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2228 TYPE_NAME (t) = get_identifier (type_name);
2231 return t;
2234 /* Likewise for floating-point types. */
2236 static tree
2237 float_type_for_precision (int precision, enum machine_mode mode)
2239 tree t;
2240 char type_name[20];
2242 if (float_types[(int) mode])
2243 return float_types[(int) mode];
2245 float_types[(int) mode] = t = make_node (REAL_TYPE);
2246 TYPE_PRECISION (t) = precision;
2247 layout_type (t);
2249 gcc_assert (TYPE_MODE (t) == mode);
2250 if (!TYPE_NAME (t))
2252 sprintf (type_name, "FLOAT_%d", precision);
2253 TYPE_NAME (t) = get_identifier (type_name);
2256 return t;
2259 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2260 an unsigned type; otherwise a signed type is returned. */
2262 tree
2263 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2265 if (mode == BLKmode)
2266 return NULL_TREE;
2267 else if (mode == VOIDmode)
2268 return void_type_node;
2269 else if (COMPLEX_MODE_P (mode))
2270 return NULL_TREE;
2271 else if (SCALAR_FLOAT_MODE_P (mode))
2272 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2273 else if (SCALAR_INT_MODE_P (mode))
2274 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2275 else
2276 return NULL_TREE;
2279 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2281 tree
2282 gnat_unsigned_type (tree type_node)
2284 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2286 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2288 type = copy_node (type);
2289 TREE_TYPE (type) = type_node;
2291 else if (TREE_TYPE (type_node)
2292 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2293 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2295 type = copy_node (type);
2296 TREE_TYPE (type) = TREE_TYPE (type_node);
2299 return type;
2302 /* Return the signed version of a TYPE_NODE, a scalar type. */
2304 tree
2305 gnat_signed_type (tree type_node)
2307 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2309 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2311 type = copy_node (type);
2312 TREE_TYPE (type) = type_node;
2314 else if (TREE_TYPE (type_node)
2315 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2316 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2318 type = copy_node (type);
2319 TREE_TYPE (type) = TREE_TYPE (type_node);
2322 return type;
2326 /* EXP is an expression for the size of an object. If this size contains
2327 discriminant references, replace them with the maximum (if MAX_P) or
2328 minimum (if !MAX_P) possible value of the discriminant. */
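/* As an illustration, for a discriminated record along the lines of

        type Buf (Len : Natural) is record
           S : String (1 .. Len);
        end record;

   the size of component S references the discriminant Len, so computing the
   maximum size amounts to evaluating that size with Len replaced by
   Natural'Last.  */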
2330 tree
2331 max_size (tree exp, bool max_p)
2333 enum tree_code code = TREE_CODE (exp);
2334 tree type = TREE_TYPE (exp);
2336 switch (TREE_CODE_CLASS (code))
2338 case tcc_declaration:
2339 case tcc_constant:
2340 return exp;
2342 case tcc_vl_exp:
2343 if (code == CALL_EXPR)
2345 tree *argarray;
2346 int i, n = call_expr_nargs (exp);
2347 gcc_assert (n > 0);
2349 argarray = (tree *) alloca (n * sizeof (tree));
2350 for (i = 0; i < n; i++)
2351 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
2352 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
2354 break;
2356 case tcc_reference:
2357 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2358 modify. Otherwise, we treat it like a variable. */
2359 if (!CONTAINS_PLACEHOLDER_P (exp))
2360 return exp;
2362 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2363 return
2364 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
2366 case tcc_comparison:
2367 return max_p ? size_one_node : size_zero_node;
2369 case tcc_unary:
2370 case tcc_binary:
2371 case tcc_expression:
2372 switch (TREE_CODE_LENGTH (code))
2374 case 1:
2375 if (code == NON_LVALUE_EXPR)
2376 return max_size (TREE_OPERAND (exp, 0), max_p);
2377 else
2378 return
2379 fold_build1 (code, type,
2380 max_size (TREE_OPERAND (exp, 0),
2381 code == NEGATE_EXPR ? !max_p : max_p));
2383 case 2:
2384 if (code == COMPOUND_EXPR)
2385 return max_size (TREE_OPERAND (exp, 1), max_p);
2387 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2388 may provide a tighter bound on max_size. */
2389 if (code == MINUS_EXPR
2390 && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
2392 tree lhs = fold_build2 (MINUS_EXPR, type,
2393 TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
2394 TREE_OPERAND (exp, 1));
2395 tree rhs = fold_build2 (MINUS_EXPR, type,
2396 TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
2397 TREE_OPERAND (exp, 1));
2398 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2399 max_size (lhs, max_p),
2400 max_size (rhs, max_p));
2404 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2405 tree rhs = max_size (TREE_OPERAND (exp, 1),
2406 code == MINUS_EXPR ? !max_p : max_p);
2408 /* Special-case wanting the maximum value of a MIN_EXPR.
2409 In that case, if one side overflows, return the other.
2410 sizetype is signed, but we know sizes are non-negative.
2411 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2412 overflowing or the maximum possible value and the RHS
2413 a variable. */
2414 if (max_p
2415 && code == MIN_EXPR
2416 && TREE_CODE (rhs) == INTEGER_CST
2417 && TREE_OVERFLOW (rhs))
2418 return lhs;
2419 else if (max_p
2420 && code == MIN_EXPR
2421 && TREE_CODE (lhs) == INTEGER_CST
2422 && TREE_OVERFLOW (lhs))
2423 return rhs;
2424 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2425 && ((TREE_CODE (lhs) == INTEGER_CST
2426 && TREE_OVERFLOW (lhs))
2427 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2428 && !TREE_CONSTANT (rhs))
2429 return lhs;
2430 else
2431 return fold_build2 (code, type, lhs, rhs);
2434 case 3:
2435 if (code == SAVE_EXPR)
2436 return exp;
2437 else if (code == COND_EXPR)
2438 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
2439 max_size (TREE_OPERAND (exp, 1), max_p),
2440 max_size (TREE_OPERAND (exp, 2), max_p));
2443 /* Other tree classes cannot happen. */
2444 default:
2445 break;
2448 gcc_unreachable ();
2451 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2452 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2453 Return a constructor for the template. */
2455 tree
2456 build_template (tree template_type, tree array_type, tree expr)
2458 tree template_elts = NULL_TREE;
2459 tree bound_list = NULL_TREE;
2460 tree field;
2462 if (TREE_CODE (array_type) == RECORD_TYPE
2463 && (TYPE_IS_PADDING_P (array_type)
2464 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2465 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2467 if (TREE_CODE (array_type) == ARRAY_TYPE
2468 || (TREE_CODE (array_type) == INTEGER_TYPE
2469 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2470 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2472 /* First make the list for a CONSTRUCTOR for the template. Go down the
2473 field list of the template instead of the type chain because this
2474 array might be an Ada array of arrays and we can't tell where the
2475 nested arrays stop being the underlying object. */
2477 for (field = TYPE_FIELDS (template_type); field;
2478 (bound_list
2479 ? (bound_list = TREE_CHAIN (bound_list))
2480 : (array_type = TREE_TYPE (array_type))),
2481 field = TREE_CHAIN (TREE_CHAIN (field)))
2483 tree bounds, min, max;
2485 /* If we have a bound list, get the bounds from there. Likewise
2486 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2487 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2488 This will give us a maximum range. */
2489 if (bound_list)
2490 bounds = TREE_VALUE (bound_list);
2491 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2492 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2493 else if (expr && TREE_CODE (expr) == PARM_DECL
2494 && DECL_BY_COMPONENT_PTR_P (expr))
2495 bounds = TREE_TYPE (field);
2496 else
2497 gcc_unreachable ();
2499 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
2500 max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));
2502 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2503 substitute it from OBJECT. */
2504 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2505 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2507 template_elts = tree_cons (TREE_CHAIN (field), max,
2508 tree_cons (field, min, template_elts));
2511 return gnat_build_constructor (template_type, nreverse (template_elts));
2514 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2515 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2516 in the type contains in its DECL_INITIAL the expression to use when
2517 a constructor is made for the type. GNAT_ENTITY is an entity used
2518 to print out an error message if the mechanism cannot be applied to
2519 an object of that type and also for the name. */
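/* The common prologue of such a descriptor, built first below, consists of
   a 16-bit LENGTH, an 8-bit DTYPE code, an 8-bit CLASS code and a 32-bit
   POINTER to the data.  */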
2521 tree
2522 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2524 tree record_type = make_node (RECORD_TYPE);
2525 tree pointer32_type;
2526 tree field_list = 0;
2527 int class;
2528 int dtype = 0;
2529 tree inner_type;
2530 int ndim;
2531 int i;
2532 tree *idx_arr;
2533 tree tem;
2535 /* If TYPE is an unconstrained array, use the underlying array type. */
2536 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2537 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2539 /* If this is an array, compute the number of dimensions in the array,
2540 get the index types, and point to the inner type. */
2541 if (TREE_CODE (type) != ARRAY_TYPE)
2542 ndim = 0;
2543 else
2544 for (ndim = 1, inner_type = type;
2545 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2546 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2547 ndim++, inner_type = TREE_TYPE (inner_type))
2550 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2552 if (mech != By_Descriptor_NCA
2553 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2554 for (i = ndim - 1, inner_type = type;
2555 i >= 0;
2556 i--, inner_type = TREE_TYPE (inner_type))
2557 idx_arr[i] = TYPE_DOMAIN (inner_type);
2558 else
2559 for (i = 0, inner_type = type;
2560 i < ndim;
2561 i++, inner_type = TREE_TYPE (inner_type))
2562 idx_arr[i] = TYPE_DOMAIN (inner_type);
2564 /* Now get the DTYPE value. */
2565 switch (TREE_CODE (type))
2567 case INTEGER_TYPE:
2568 case ENUMERAL_TYPE:
2569 if (TYPE_VAX_FLOATING_POINT_P (type))
2570 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2572 case 6:
2573 dtype = 10;
2574 break;
2575 case 9:
2576 dtype = 11;
2577 break;
2578 case 15:
2579 dtype = 27;
2580 break;
2582 else
2583 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2585 case 8:
2586 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2587 break;
2588 case 16:
2589 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2590 break;
2591 case 32:
2592 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2593 break;
2594 case 64:
2595 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2596 break;
2597 case 128:
2598 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2599 break;
2601 break;
2603 case REAL_TYPE:
2604 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2605 break;
2607 case COMPLEX_TYPE:
2608 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2609 && TYPE_VAX_FLOATING_POINT_P (type))
2610 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2612 case 6:
2613 dtype = 12;
2614 break;
2615 case 9:
2616 dtype = 13;
2617 break;
2618 case 15:
2619 dtype = 29;
2621 else
2622 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2623 break;
2625 case ARRAY_TYPE:
2626 dtype = 14;
2627 break;
2629 default:
2630 break;
2633 /* Get the CLASS value. */
2634 switch (mech)
2636 case By_Descriptor_A:
2637 class = 4;
2638 break;
2639 case By_Descriptor_NCA:
2640 class = 10;
2641 break;
2642 case By_Descriptor_SB:
2643 class = 15;
2644 break;
2645 case By_Descriptor:
2646 case By_Descriptor_S:
2647 default:
2648 class = 1;
2649 break;
2652 /* Make the type for a descriptor for VMS. The first four fields
2653 are the same for all types. */
2655 field_list
2656 = chainon (field_list,
2657 make_descriptor_field
2658 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2659 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2661 field_list = chainon (field_list,
2662 make_descriptor_field ("DTYPE",
2663 gnat_type_for_size (8, 1),
2664 record_type, size_int (dtype)));
2665 field_list = chainon (field_list,
2666 make_descriptor_field ("CLASS",
2667 gnat_type_for_size (8, 1),
2668 record_type, size_int (class)));
2670 /* Of course this will crash at run-time if the address space is not
2671 within the low 32 bits, but there is nothing else we can do. */
2672 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2674 field_list
2675 = chainon (field_list,
2676 make_descriptor_field
2677 ("POINTER", pointer32_type, record_type,
2678 build_unary_op (ADDR_EXPR,
2679 pointer32_type,
2680 build0 (PLACEHOLDER_EXPR, type))));
2682 switch (mech)
2684 case By_Descriptor:
2685 case By_Descriptor_S:
2686 break;
2688 case By_Descriptor_SB:
2689 field_list
2690 = chainon (field_list,
2691 make_descriptor_field
2692 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2693 TREE_CODE (type) == ARRAY_TYPE
2694 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2695 field_list
2696 = chainon (field_list,
2697 make_descriptor_field
2698 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2699 TREE_CODE (type) == ARRAY_TYPE
2700 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2701 break;
2703 case By_Descriptor_A:
2704 case By_Descriptor_NCA:
2705 field_list = chainon (field_list,
2706 make_descriptor_field ("SCALE",
2707 gnat_type_for_size (8, 1),
2708 record_type,
2709 size_zero_node));
2711 field_list = chainon (field_list,
2712 make_descriptor_field ("DIGITS",
2713 gnat_type_for_size (8, 1),
2714 record_type,
2715 size_zero_node));
2717 field_list
2718 = chainon (field_list,
2719 make_descriptor_field
2720 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2721 size_int (mech == By_Descriptor_NCA
2723 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2724 : (TREE_CODE (type) == ARRAY_TYPE
2725 && TYPE_CONVENTION_FORTRAN_P (type)
2726 ? 224 : 192))));
2728 field_list = chainon (field_list,
2729 make_descriptor_field ("DIMCT",
2730 gnat_type_for_size (8, 1),
2731 record_type,
2732 size_int (ndim)));
2734 field_list = chainon (field_list,
2735 make_descriptor_field ("ARSIZE",
2736 gnat_type_for_size (32, 1),
2737 record_type,
2738 size_in_bytes (type)));
2740 /* Now build a pointer to the 0,0,0... element. */
2741 tem = build0 (PLACEHOLDER_EXPR, type);
2742 for (i = 0, inner_type = type; i < ndim;
2743 i++, inner_type = TREE_TYPE (inner_type))
2744 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2745 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2746 NULL_TREE, NULL_TREE);
2748 field_list
2749 = chainon (field_list,
2750 make_descriptor_field
2751 ("A0",
2752 build_pointer_type_for_mode (inner_type, SImode, false),
2753 record_type,
2754 build1 (ADDR_EXPR,
2755 build_pointer_type_for_mode (inner_type, SImode,
2756 false),
2757 tem)));
2759 /* Next come the addressing coefficients. */
2760 tem = size_one_node;
2761 for (i = 0; i < ndim; i++)
2763 char fname[3];
2764 tree idx_length
2765 = size_binop (MULT_EXPR, tem,
2766 size_binop (PLUS_EXPR,
2767 size_binop (MINUS_EXPR,
2768 TYPE_MAX_VALUE (idx_arr[i]),
2769 TYPE_MIN_VALUE (idx_arr[i])),
2770 size_int (1)));
2772 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2773 fname[1] = '0' + i, fname[2] = 0;
2774 field_list
2775 = chainon (field_list,
2776 make_descriptor_field (fname,
2777 gnat_type_for_size (32, 1),
2778 record_type, idx_length));
2780 if (mech == By_Descriptor_NCA)
2781 tem = idx_length;
2784 /* Finally here are the bounds. */
2785 for (i = 0; i < ndim; i++)
2787 char fname[3];
2789 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2790 field_list
2791 = chainon (field_list,
2792 make_descriptor_field
2793 (fname, gnat_type_for_size (32, 1), record_type,
2794 TYPE_MIN_VALUE (idx_arr[i])));
2796 fname[0] = 'U';
2797 field_list
2798 = chainon (field_list,
2799 make_descriptor_field
2800 (fname, gnat_type_for_size (32, 1), record_type,
2801 TYPE_MAX_VALUE (idx_arr[i])));
2803 break;
2805 default:
2806 post_error ("unsupported descriptor type for &", gnat_entity);
2809 finish_record_type (record_type, field_list, 0, true);
2810 create_type_decl (create_concat_name (gnat_entity, "DESC"), record_type,
2811 NULL, true, false, gnat_entity);
2813 return record_type;
2816 /* Utility routine for above code to make a field. */
2818 static tree
2819 make_descriptor_field (const char *name, tree type,
2820 tree rec_type, tree initial)
2822 tree field
2823 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2825 DECL_INITIAL (field) = initial;
2826 return field;
2829 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
2830 pointer or fat pointer type. GNAT_SUBPROG is the subprogram to which
2831 the VMS descriptor is passed. */
2833 static tree
2834 convert_vms_descriptor (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
2836 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
2837 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
2838 /* The CLASS field is the 3rd field in the descriptor. */
2839 tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
2840 /* The POINTER field is the 4th field in the descriptor. */
2841 tree pointer = TREE_CHAIN (class);
2843 /* Retrieve the value of the POINTER field. */
2844 gnu_expr
2845 = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);
2847 if (POINTER_TYPE_P (gnu_type))
2848 return convert (gnu_type, gnu_expr);
2850 else if (TYPE_FAT_POINTER_P (gnu_type))
2852 tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
2853 tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
2854 tree template_type = TREE_TYPE (p_bounds_type);
2855 tree min_field = TYPE_FIELDS (template_type);
2856 tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
2857 tree template, template_addr, aflags, dimct, t, u;
2858 /* See the head comment of build_vms_descriptor. */
2859 int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
2861 /* Convert POINTER to the type of the P_ARRAY field. */
2862 gnu_expr = convert (p_array_type, gnu_expr);
2864 switch (iclass)
2866 case 1: /* Class S */
2867 case 15: /* Class SB */
2868 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
2869 t = TYPE_FIELDS (desc_type);
2870 t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2871 t = tree_cons (min_field,
2872 convert (TREE_TYPE (min_field), integer_one_node),
2873 tree_cons (max_field,
2874 convert (TREE_TYPE (max_field), t),
2875 NULL_TREE));
2876 template = gnat_build_constructor (template_type, t);
2877 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2879 /* For class S, we are done. */
2880 if (iclass == 1)
2881 break;
2883 /* Test that we really have a SB descriptor, like DEC Ada. */
2884 t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
2885 u = convert (TREE_TYPE (class), DECL_INITIAL (class));
2886 u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
2887 /* If so, there is already a template in the descriptor and
2888 it is located right after the POINTER field. */
2889 t = TREE_CHAIN (pointer);
2890 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2891 /* Otherwise use the {1, LENGTH} template we build above. */
2892 template_addr = build3 (COND_EXPR, p_bounds_type, u,
2893 build_unary_op (ADDR_EXPR, p_bounds_type,
2894 template),
2895 template_addr);
2896 break;
2898 case 4: /* Class A */
2899 /* The AFLAGS field is the 7th field in the descriptor. */
2900 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
2901 aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2902 /* The DIMCT field is the 8th field in the descriptor. */
2903 t = TREE_CHAIN (t);
2904 dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2905 /* Raise CONSTRAINT_ERROR if there is more than one dimension,
2906 or if FL_COEFF or FL_BOUNDS is not set. */
2907 u = build_int_cst (TREE_TYPE (aflags), 192);
2908 u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
2909 build_binary_op (NE_EXPR, integer_type_node,
2910 dimct,
2911 convert (TREE_TYPE (dimct),
2912 size_one_node)),
2913 build_binary_op (NE_EXPR, integer_type_node,
2914 build2 (BIT_AND_EXPR,
2915 TREE_TYPE (aflags),
2916 aflags, u),
2917 u));
2918 add_stmt (build3 (COND_EXPR, void_type_node, u,
2919 build_call_raise (CE_Length_Check_Failed, Empty,
2920 N_Raise_Constraint_Error),
2921 NULL_TREE));
2922 /* There is already a template in the descriptor and it is
2923 located at the start of block 3 (12th field). */
2924 t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
2925 template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
2926 template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
2927 break;
2929 case 10: /* Class NCA */
2930 default:
2931 post_error ("unsupported descriptor type for &", gnat_subprog);
2932 template_addr = integer_zero_node;
2933 break;
2936 /* Build the fat pointer in the form of a constructor. */
2937 t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr,
2938 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
2939 template_addr, NULL_TREE));
2940 return gnat_build_constructor (gnu_type, t);
2943 else
2944 gcc_unreachable ();
2947 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
2948 and the GNAT node GNAT_SUBPROG. */
2950 void
2951 build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
2953 tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
2954 tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
2955 tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
2956 tree gnu_body;
2958 gnu_subprog_type = TREE_TYPE (gnu_subprog);
2959 gnu_param_list = NULL_TREE;
2961 begin_subprog_body (gnu_stub_decl);
2962 gnat_pushlevel ();
2964 start_stmt_group ();
2966 /* Loop over the parameters of the stub and translate any of them
2967 passed by descriptor into a by-reference one. */
2968 for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
2969 gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
2970 gnu_stub_param;
2971 gnu_stub_param = TREE_CHAIN (gnu_stub_param),
2972 gnu_arg_types = TREE_CHAIN (gnu_arg_types))
2974 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
2975 gnu_param = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
2976 gnu_stub_param, gnat_subprog);
2977 else
2978 gnu_param = gnu_stub_param;
2980 gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
2983 gnu_body = end_stmt_group ();
2985 /* Invoke the internal subprogram. */
2986 gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
2987 gnu_subprog);
2988 gnu_subprog_call = build3 (CALL_EXPR, TREE_TYPE (gnu_subprog_type),
2989 gnu_subprog_addr, nreverse (gnu_param_list),
2990 NULL_TREE);
2992 /* Propagate the return value, if any. */
2993 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
2994 append_to_statement_list (gnu_subprog_call, &gnu_body);
2995 else
2996 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
2997 gnu_subprog_call),
2998 &gnu_body);
3000 gnat_poplevel ();
3002 allocate_struct_function (gnu_stub_decl, false);
3003 end_subprog_body (gnu_body);
3006 /* Build a type to be used to represent an aliased object whose nominal
3007 type is an unconstrained array. This consists of a RECORD_TYPE containing
3008 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3009 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3010 is used to represent an arbitrary unconstrained object. Use NAME
3011 as the name of the record. */
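/* The record built below consists of a BOUNDS field of TEMPLATE_TYPE
   immediately followed by an ARRAY field of OBJECT_TYPE.  */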
3013 tree
3014 build_unc_object_type (tree template_type, tree object_type, tree name)
3016 tree type = make_node (RECORD_TYPE);
3017 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3018 template_type, type, 0, 0, 0, 1);
3019 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3020 type, 0, 0, 0, 1);
3022 TYPE_NAME (type) = name;
3023 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3024 finish_record_type (type,
3025 chainon (chainon (NULL_TREE, template_field),
3026 array_field),
3027 0, false);
3029 return type;
3032 /* Same, taking a thin or fat pointer type instead of a template type. */
3034 tree
3035 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3036 tree name)
3038 tree template_type;
3040 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3042 template_type
3043 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3044 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3045 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3046 return build_unc_object_type (template_type, object_type, name);
3049 /* Shift the component offsets within an unconstrained object TYPE to make it
3050 suitable for use as a designated type for thin pointers. */
3052 void
3053 shift_unc_components_for_thin_pointers (tree type)
3055 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3056 allocated past the BOUNDS template. The designated type is adjusted to
3057 have ARRAY at position zero and the template at a negative offset, so
3058 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
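  /* Concretely, after the shift the ARRAY field is at offset zero and the
     BOUNDS template sits just before it, at a negative offset.  */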
3060 tree bounds_field = TYPE_FIELDS (type);
3061 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
3063 DECL_FIELD_OFFSET (bounds_field)
3064 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3066 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3067 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3070 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
3071 the normal case this is just two adjustments, but we have more to do
3072 if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3074 void
3075 update_pointer_to (tree old_type, tree new_type)
3077 tree ptr = TYPE_POINTER_TO (old_type);
3078 tree ref = TYPE_REFERENCE_TO (old_type);
3079 tree ptr1, ref1;
3080 tree type;
3082 /* If this is the main variant, process all the other variants first. */
3083 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3084 for (type = TYPE_NEXT_VARIANT (old_type); type;
3085 type = TYPE_NEXT_VARIANT (type))
3086 update_pointer_to (type, new_type);
3088 /* If no pointer or reference, we are done. */
3089 if (!ptr && !ref)
3090 return;
3092 /* Merge the old type qualifiers in the new type.
3094 Each old variant has qualifiers for specific reasons, and the new
3095 designated type as well. Each set of qualifiers represents useful
3096 information grabbed at some point, and merging the two simply unifies
3097 these inputs into the final type description.
3099 Consider for instance a volatile type frozen after an access to constant
3100 type designating it. After the designated type freeze, we get here with a
3101 volatile new_type and a dummy old_type with a readonly variant, created
3102 when the access type was processed. We shall make a volatile and readonly
3103 designated type, because that's what it really is.
3105 We might also get here for a non-dummy old_type variant with different
3106 qualifiers than the new_type ones, for instance in some cases of pointers
3107 to private record type elaboration (see the comments around the call to
3108 this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
3109 qualifiers in those cases too, to avoid accidentally discarding the
3110 initial set, and will often end up with old_type == new_type then. */
3111 new_type = build_qualified_type (new_type,
3112 TYPE_QUALS (old_type)
3113 | TYPE_QUALS (new_type));
3115 /* If the new type and the old one are identical, there is nothing to
3116 update. */
3117 if (old_type == new_type)
3118 return;
3120 /* Otherwise, first handle the simple case. */
3121 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3123 TYPE_POINTER_TO (new_type) = ptr;
3124 TYPE_REFERENCE_TO (new_type) = ref;
3126 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3127 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
3128 ptr1 = TYPE_NEXT_VARIANT (ptr1))
3129 TREE_TYPE (ptr1) = new_type;
3131 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3132 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
3133 ref1 = TYPE_NEXT_VARIANT (ref1))
3134 TREE_TYPE (ref1) = new_type;
3137 /* Now deal with the unconstrained array case. In this case the "pointer"
3138 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3139 Turn them into pointers to the correct types using update_pointer_to. */
3140 else if (TREE_CODE (ptr) != RECORD_TYPE || !TYPE_IS_FAT_POINTER_P (ptr))
3141 gcc_unreachable ();
3143 else
3145 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
3146 tree array_field = TYPE_FIELDS (ptr);
3147 tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
3148 tree new_ptr = TYPE_POINTER_TO (new_type);
3149 tree new_ref;
3150 tree var;
3152 /* Make pointers to the dummy template point to the real template. */
3153 update_pointer_to
3154 (TREE_TYPE (TREE_TYPE (bounds_field)),
3155 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));
3157 /* The references to the template bounds present in the array type
3158 are made through a PLACEHOLDER_EXPR of type new_ptr. Since we
3159 are updating ptr to make it a full replacement for new_ptr as
3160 pointer to new_type, we must rework the PLACEHOLDER_EXPR so as
3161 to make it of type ptr. */
3162 new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
3163 build0 (PLACEHOLDER_EXPR, ptr),
3164 bounds_field, NULL_TREE);
3166 /* Create the new array for the new PLACEHOLDER_EXPR and make
3167 pointers to the dummy array point to it.
3169 ??? This is now the only use of substitute_in_type,
3170 which is a very "heavy" routine to do this, so it
3171 should be replaced at some point. */
3172 update_pointer_to
3173 (TREE_TYPE (TREE_TYPE (array_field)),
3174 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
3175 TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));
3177 /* Make ptr the pointer to new_type. */
3178 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
3179 = TREE_TYPE (new_type) = ptr;
3181 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
3182 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
3184 /* Now handle updating the allocation record, what the thin pointer
3185 points to. Update all pointers from the old record into the new
3186 one, update the type of the array field, and recompute the size. */
3187 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
3189 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
3190 = TREE_TYPE (TREE_TYPE (array_field));
3192 /* The size recomputation needs to account for alignment constraints, so
3193 we let layout_type work it out. This will reset the field offsets to
3194 what they would be in a regular record, so we shift them back to what
3195 we want them to be for a thin pointer designated type afterwards. */
3196 DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
3197 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
3198 TYPE_SIZE (new_obj_rec) = 0;
3199 layout_type (new_obj_rec);
3201 shift_unc_components_for_thin_pointers (new_obj_rec);
3203 /* We are done, at last. */
3204 rest_of_record_type_compilation (ptr);
3208 /* Convert a pointer to a constrained array into a pointer to a fat
3209 pointer. This involves making or finding a template. */
3211 static tree
3212 convert_to_fat_pointer (tree type, tree expr)
3214 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
3215 tree template, template_addr;
3216 tree etype = TREE_TYPE (expr);
3218 /* If EXPR is a constant of zero, we make a fat pointer that has a null
3219 pointer to the template and array. */
3220 if (integer_zerop (expr))
3221 return
3222 gnat_build_constructor
3223 (type,
3224 tree_cons (TYPE_FIELDS (type),
3225 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3226 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3227 convert (build_pointer_type (template_type),
3228 expr),
3229 NULL_TREE)));
3231 /* If EXPR is a thin pointer, make the template and data from the record. */
3233 else if (TYPE_THIN_POINTER_P (etype))
3235 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3237 expr = save_expr (expr);
3238 if (TREE_CODE (expr) == ADDR_EXPR)
3239 expr = TREE_OPERAND (expr, 0);
3240 else
3241 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3243 template = build_component_ref (expr, NULL_TREE, fields, false);
3244 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3245 build_component_ref (expr, NULL_TREE,
3246 TREE_CHAIN (fields), false));
3248 else
3249 /* Otherwise, build the constructor for the template. */
3250 template = build_template (template_type, TREE_TYPE (etype), expr);
3252 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
3254 /* The result is a CONSTRUCTOR for the fat pointer.
3256 If expr is an argument of a foreign convention subprogram, the type it
3257 points to is directly the component type. In this case, the expression
3258 type may not match the corresponding FIELD_DECL type at this point, so we
3259 call "convert" here to fix that up if necessary. This type consistency is
3260 required, for instance because it ensures that possible later folding of
3261 component_refs against this constructor always yields something of the
3262 same type as the initial reference.
3264 Note that the call to "build_template" above is still fine, because it
3265 will only refer to the provided template_type in this case. */
3266 return
3267 gnat_build_constructor
3268 (type, tree_cons (TYPE_FIELDS (type),
3269 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3270 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3271 template_addr, NULL_TREE)));
3274 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3275 is something that is a fat pointer, so convert to it first if EXPR
3276 is not already a fat pointer. */
3278 static tree
3279 convert_to_thin_pointer (tree type, tree expr)
3281 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
3282 expr
3283 = convert_to_fat_pointer
3284 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3286 /* We get the pointer to the data and use a NOP_EXPR to make it the
3287 proper GCC type. */
3288 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3289 false);
3290 expr = build1 (NOP_EXPR, type, expr);
3292 return expr;
3295 /* Create an expression whose value is that of EXPR,
3296 converted to type TYPE. The TREE_TYPE of the value
3297 is always TYPE. This function implements all reasonable
3298 conversions; callers should filter out those that are
3299 not permitted by the language being compiled. */
3301 tree
3302 convert (tree type, tree expr)
3304 enum tree_code code = TREE_CODE (type);
3305 tree etype = TREE_TYPE (expr);
3306 enum tree_code ecode = TREE_CODE (etype);
3308 /* If EXPR is already the right type, we are done. */
3309 if (type == etype)
3310 return expr;
3312 /* If both input and output have padding and are of variable size, do this
3313 as an unchecked conversion. Likewise if one is a mere variant of the
3314 other, so we avoid a pointless unpad/repad sequence. */
3315 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
3316 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3317 && (!TREE_CONSTANT (TYPE_SIZE (type))
3318 || !TREE_CONSTANT (TYPE_SIZE (etype))
3319 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)))
3322 /* If the output type has padding, make a constructor to build the
3323 record. */
3324 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3326 /* If we previously converted from another type and our type is
3327 of variable size, remove the conversion to avoid the need for
3328 variable-size temporaries. */
3329 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3330 && !TREE_CONSTANT (TYPE_SIZE (type)))
3331 expr = TREE_OPERAND (expr, 0);
3333 /* If we are just removing the padding from expr, convert the original
3334 object if we have variable size. That will avoid the need
3335 for some variable-size temporaries. */
3336 if (TREE_CODE (expr) == COMPONENT_REF
3337 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
3338 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3339 && !TREE_CONSTANT (TYPE_SIZE (type)))
3340 return convert (type, TREE_OPERAND (expr, 0));
3342 /* If the result type is a padded type with a self-referentially-sized
3343 field and the expression type is a record, do this as an
3344 unchecked conversion. */
3345 else if (TREE_CODE (etype) == RECORD_TYPE
3346 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3347 return unchecked_convert (type, expr, false);
3349 else
3350 return
3351 gnat_build_constructor (type,
3352 tree_cons (TYPE_FIELDS (type),
3353 convert (TREE_TYPE
3354 (TYPE_FIELDS (type)),
3355 expr),
3356 NULL_TREE));
3359 /* If the input type has padding, remove it and convert to the output type.
3360 The conditions are ordered so that the output type is never a padding
3361 type here, as it is not clear whether the conversion would
3362 always be correct if that were to happen. */
3363 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
3365 tree unpadded;
3367 /* If we have just converted to this padded type, just get the
3368 inner expression. */
3369 if (TREE_CODE (expr) == CONSTRUCTOR
3370 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3371 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3372 == TYPE_FIELDS (etype))
3373 unpadded
3374 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3376 /* Otherwise, build an explicit component reference. */
3377 else
3378 unpadded
3379 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3381 return convert (type, unpadded);
3384 /* If the input is a biased type, adjust first. */
3385 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3386 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3387 fold_convert (TREE_TYPE (etype),
3388 expr),
3389 TYPE_MIN_VALUE (etype)));
3391 /* If the input is a justified modular type, we need to extract the actual
3392 object before converting it to any other type, with the exception of an
3393 unconstrained array or of a mere type variant. It is useful to avoid the
3394 extraction and conversion in the type variant case because it could end
3395 up replacing a VAR_DECL expr by a constructor and we might be about to
3396 take the address of the result. */
3397 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3398 && code != UNCONSTRAINED_ARRAY_TYPE
3399 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3400 return convert (type, build_component_ref (expr, NULL_TREE,
3401 TYPE_FIELDS (etype), false));
3403 /* If converting to a type that contains a template, convert to the data
3404 type and then build the template. */
3405 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3407 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3409 /* If the source already has a template, get a reference to the
3410 associated array only, as we are going to rebuild a template
3411 for the target type anyway. */
3412 expr = maybe_unconstrained_array (expr);
3414 return
3415 gnat_build_constructor
3416 (type,
3417 tree_cons (TYPE_FIELDS (type),
3418 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3419 obj_type, NULL_TREE),
3420 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3421 convert (obj_type, expr), NULL_TREE)));
3424 /* There are some special cases of expressions that we process
3425 specially. */
3426 switch (TREE_CODE (expr))
3428 case ERROR_MARK:
3429 return expr;
3431 case NULL_EXPR:
3432 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3433 conversion in gnat_expand_expr. NULL_EXPR does not represent
3434 an actual value, so no conversion is needed. */
3435 expr = copy_node (expr);
3436 TREE_TYPE (expr) = type;
3437 return expr;
3439 case STRING_CST:
3440 /* If we are converting a STRING_CST to another constrained array type,
3441 just make a new one in the proper type. */
3442 if (code == ecode && AGGREGATE_TYPE_P (etype)
3443 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3444 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3446 expr = copy_node (expr);
3447 TREE_TYPE (expr) = type;
3448 return expr;
3450 break;
3452 case CONSTRUCTOR:
3453 /* If we are converting a CONSTRUCTOR to another constrained array type
3454 with the same domain, just make a new one in the proper type. */
3455 if (code == ecode && code == ARRAY_TYPE
3456 && TREE_TYPE (type) == TREE_TYPE (etype)
3457 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (type)),
3458 TYPE_MIN_VALUE (TYPE_DOMAIN (etype)))
3459 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
3460 TYPE_MAX_VALUE (TYPE_DOMAIN (etype))))
3462 expr = copy_node (expr);
3463 TREE_TYPE (expr) = type;
3464 return expr;
3466 break;
3468 case UNCONSTRAINED_ARRAY_REF:
3469 /* Convert this to the type of the inner array by getting the address of
3470 the array from the template. */
3471 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3472 build_component_ref (TREE_OPERAND (expr, 0),
3473 get_identifier ("P_ARRAY"),
3474 NULL_TREE, false));
3475 etype = TREE_TYPE (expr);
3476 ecode = TREE_CODE (etype);
3477 break;
3479 case VIEW_CONVERT_EXPR:
3481 /* GCC 4.x is very sensitive to type consistency overall, and view
3482 conversions thus are very frequent. Even though just "convert"ing
3483 the inner operand to the output type is fine in most cases, it
3484 might expose unexpected input/output type mismatches in special
3485 circumstances so we avoid such recursive calls when we can. */
3487 tree op0 = TREE_OPERAND (expr, 0);
3489 /* If we are converting back to the original type, we can just
3490 lift the input conversion. This is a common occurrence with
3491 switches back-and-forth amongst type variants. */
3492 if (type == TREE_TYPE (op0))
3493 return op0;
3495 /* Otherwise, if we're converting between two aggregate types, we
3496 might be allowed to substitute the VIEW_CONVERT target type in
3497 place or to just convert the inner expression. */
3498 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
3500 /* If we are converting between type variants, we can just
3501 substitute the VIEW_CONVERT in place. */
3502 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
3503 return build1 (VIEW_CONVERT_EXPR, type, op0);
3505 /* Otherwise, we may just bypass the input view conversion unless
3506 one of the types is a fat pointer, which is handled by
3507 specialized code below which relies on exact type matching. */
3508 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3509 return convert (type, op0);
3512 break;
3514 case INDIRECT_REF:
3515 /* If both types are record types, just convert the pointer and
3516 make a new INDIRECT_REF.
3518 ??? Disable this for now since it causes problems with the
3519 code in build_binary_op for MODIFY_EXPR which wants to
3520 strip off conversions. But that code really is a mess and
3521 we need to do this a much better way some time. */
3522 if (0
3523 && (TREE_CODE (type) == RECORD_TYPE
3524 || TREE_CODE (type) == UNION_TYPE)
3525 && (TREE_CODE (etype) == RECORD_TYPE
3526 || TREE_CODE (etype) == UNION_TYPE)
3527 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3528 return build_unary_op (INDIRECT_REF, NULL_TREE,
3529 convert (build_pointer_type (type),
3530 TREE_OPERAND (expr, 0)));
3531 break;
3533 default:
3534 break;
3537 /* Check for converting to a pointer to an unconstrained array. */
3538 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
3539 return convert_to_fat_pointer (type, expr);
3541 /* If we're converting between two aggregate types that have the same main
3542 variant, just make a VIEW_CONVERT_EXPR. */
3543 else if (AGGREGATE_TYPE_P (type)
3544 && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
3545 return build1 (VIEW_CONVERT_EXPR, type, expr);
3547 /* In all other cases of related types, make a NOP_EXPR. */
3548 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
3549 || (code == INTEGER_CST && ecode == INTEGER_CST
3550 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
3551 return fold_convert (type, expr);
3553 switch (code)
3555 case VOID_TYPE:
3556 return fold_build1 (CONVERT_EXPR, type, expr);
3558 case BOOLEAN_TYPE:
3559 return fold_convert (type, gnat_truthvalue_conversion (expr));
3561 case INTEGER_TYPE:
3562 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
3563 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
3564 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
3565 return unchecked_convert (type, expr, false);
3566 else if (TYPE_BIASED_REPRESENTATION_P (type))
3567 return fold_convert (type,
3568 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
3569 convert (TREE_TYPE (type), expr),
3570 TYPE_MIN_VALUE (type)));
3572 /* ... fall through ... */
3574 case ENUMERAL_TYPE:
3575 return fold (convert_to_integer (type, expr));
3577 case POINTER_TYPE:
3578 case REFERENCE_TYPE:
3579 /* If converting between two pointers to records denoting
3580 both a template and type, adjust if needed to account
3581 for any differing offsets, since one might be negative. */
3582 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
3584 tree bit_diff
3585 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
3586 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
3587 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
3588 sbitsize_int (BITS_PER_UNIT));
3590 expr = build1 (NOP_EXPR, type, expr);
3591 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
3592 if (integer_zerop (byte_diff))
3593 return expr;
3595 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
3596 fold (convert (sizetype, byte_diff)));
3599 /* If converting to a thin pointer, handle specially. */
3600 if (TYPE_THIN_POINTER_P (type)
3601 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
3602 return convert_to_thin_pointer (type, expr);
3604 /* If converting fat pointer to normal pointer, get the pointer to the
3605 array and then convert it. */
3606 else if (TYPE_FAT_POINTER_P (etype))
3607 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3608 NULL_TREE, false);
3610 return fold (convert_to_pointer (type, expr));
3612 case REAL_TYPE:
3613 return fold (convert_to_real (type, expr));
3615 case RECORD_TYPE:
3616 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
3617 return
3618 gnat_build_constructor
3619 (type, tree_cons (TYPE_FIELDS (type),
3620 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3621 NULL_TREE));
3623 /* ... fall through ... */
3625 case ARRAY_TYPE:
3626 /* In these cases, assume the front-end has validated the conversion.
3627 If the conversion is valid, it will be a bit-wise conversion, so
3628 it can be viewed as an unchecked conversion. */
3629 return unchecked_convert (type, expr, false);
3631 case UNION_TYPE:
3632 /* This is either a conversion between a tagged type and some
3633 subtype, which we have to mark as a UNION_TYPE because of
3634 overlapping fields, or a conversion of an Unchecked_Union. */
3635 return unchecked_convert (type, expr, false);
3637 case UNCONSTRAINED_ARRAY_TYPE:
3638 /* If EXPR is a constrained array, take its address, convert it to a
3639 fat pointer, and then dereference it. Likewise if EXPR is a
3640 record containing both a template and a constrained array.
3641 Note that a record representing a justified modular type
3642 always represents a packed constrained array. */
3643 if (ecode == ARRAY_TYPE
3644 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3645 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3646 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
3647 return
3648 build_unary_op
3649 (INDIRECT_REF, NULL_TREE,
3650 convert_to_fat_pointer (TREE_TYPE (type),
3651 build_unary_op (ADDR_EXPR,
3652 NULL_TREE, expr)));
3654 /* Do something very similar for converting one unconstrained
3655 array to another. */
3656 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3657 return
3658 build_unary_op (INDIRECT_REF, NULL_TREE,
3659 convert (TREE_TYPE (type),
3660 build_unary_op (ADDR_EXPR,
3661 NULL_TREE, expr)));
3662 else
3663 gcc_unreachable ();
3665 case COMPLEX_TYPE:
3666 return fold (convert_to_complex (type, expr));
3668 default:
3669 gcc_unreachable ();
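/* Worked example for the biased-representation adjustments above (the range
   and values are assumed, for exposition only): for a biased INTEGER_TYPE
   covering 100 .. 115 and stored in 4 bits, the representation is the value
   minus TYPE_MIN_VALUE.  Converting 103 to the biased type computes
   103 - 100 = 3 via the MINUS_EXPR, and converting the biased value 3 back
   to a normal integer type computes 3 + 100 = 103 via the PLUS_EXPR.  */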
3673 /* Remove all conversions that are done in EXP. This includes converting
3674 from a padded type or to a justified modular type. If TRUE_ADDRESS
3675 is true, always return the address of the containing object even if
3676 the address is not bit-aligned. */
3678 tree
3679 remove_conversions (tree exp, bool true_address)
3681 switch (TREE_CODE (exp))
3683 case CONSTRUCTOR:
3684 if (true_address
3685 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3686 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3687 return
3688 remove_conversions (VEC_index (constructor_elt,
3689 CONSTRUCTOR_ELTS (exp), 0)->value,
3690 true);
3691 break;
3693 case COMPONENT_REF:
3694 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3695 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3696 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3697 break;
3699 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3700 case NOP_EXPR: case CONVERT_EXPR:
3701 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3703 default:
3704 break;
3707 return exp;
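/* For instance (illustrative only): applied to a NOP_EXPR wrapped around a
   COMPONENT_REF that extracts the single field of a padded record, the
   function peels the NOP_EXPR, then the COMPONENT_REF, and hands back the
   underlying padded object itself.  */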
3710 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3711 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3712 likewise return an expression pointing to the underlying array. */
3714 tree
3715 maybe_unconstrained_array (tree exp)
3717 enum tree_code code = TREE_CODE (exp);
3718 tree new;
3720 switch (TREE_CODE (TREE_TYPE (exp)))
3722 case UNCONSTRAINED_ARRAY_TYPE:
3723 if (code == UNCONSTRAINED_ARRAY_REF)
3725 new
3726 = build_unary_op (INDIRECT_REF, NULL_TREE,
3727 build_component_ref (TREE_OPERAND (exp, 0),
3728 get_identifier ("P_ARRAY"),
3729 NULL_TREE, false));
3730 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3731 return new;
3734 else if (code == NULL_EXPR)
3735 return build1 (NULL_EXPR,
3736 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3737 (TREE_TYPE (TREE_TYPE (exp))))),
3738 TREE_OPERAND (exp, 0));
3740 case RECORD_TYPE:
3741 /* If this is a padded type, convert to the unpadded type and see if
3742 it contains a template. */
3743 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
3745 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
3746 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3747 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
3748 return
3749 build_component_ref (new, NULL_TREE,
3750 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
3751 0);
3753 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3754 return
3755 build_component_ref (exp, NULL_TREE,
3756 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
3757 break;
3759 default:
3760 break;
3763 return exp;
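/* Illustrative sketch (layout shown for exposition only): a record type with
   TYPE_CONTAINS_TEMPLATE_P set pairs the bounds with the data, roughly

       struct constrained_object
       {
         struct bounds template;   first field: the bounds template
         T data[N];                second field: the constrained array
       };

   which is why the code above fetches TREE_CHAIN (TYPE_FIELDS (...)) to get
   at the array part.  */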
3766 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
3767 If NOTRUNC_P is true, truncation operations should be suppressed. */
3769 tree
3770 unchecked_convert (tree type, tree expr, bool notrunc_p)
3772 tree etype = TREE_TYPE (expr);
3774 /* If the expression is already the right type, we are done. */
3775 if (etype == type)
3776 return expr;
3778 /* If both types are integral, just do a normal conversion.
3779 Likewise for a conversion to an unconstrained array. */
3780 if ((((INTEGRAL_TYPE_P (type)
3781 && !(TREE_CODE (type) == INTEGER_TYPE
3782 && TYPE_VAX_FLOATING_POINT_P (type)))
3783 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3784 || (TREE_CODE (type) == RECORD_TYPE
3785 && TYPE_JUSTIFIED_MODULAR_P (type)))
3786 && ((INTEGRAL_TYPE_P (etype)
3787 && !(TREE_CODE (etype) == INTEGER_TYPE
3788 && TYPE_VAX_FLOATING_POINT_P (etype)))
3789 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
3790 || (TREE_CODE (etype) == RECORD_TYPE
3791 && TYPE_JUSTIFIED_MODULAR_P (etype))))
3792 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3794 tree rtype = type;
3795 bool final_unchecked = false;
3797 if (TREE_CODE (etype) == INTEGER_TYPE
3798 && TYPE_BIASED_REPRESENTATION_P (etype))
3800 tree ntype = copy_type (etype);
3802 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3803 TYPE_MAIN_VARIANT (ntype) = ntype;
3804 expr = build1 (NOP_EXPR, ntype, expr);
3807 if (TREE_CODE (type) == INTEGER_TYPE
3808 && TYPE_BIASED_REPRESENTATION_P (type))
3810 rtype = copy_type (type);
3811 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3812 TYPE_MAIN_VARIANT (rtype) = rtype;
3815 /* We have another special case: if we are unchecked converting a subtype
3816 into a base type, we need to ensure that VRP doesn't propagate range
3817 information since this conversion may be done precisely to validate
3818 that the object is within the range it is supposed to have. */
3819 else if (TREE_CODE (expr) != INTEGER_CST
3820 && TREE_CODE (type) == INTEGER_TYPE && !TREE_TYPE (type)
3821 && ((TREE_CODE (etype) == INTEGER_TYPE && TREE_TYPE (etype))
3822 || TREE_CODE (etype) == ENUMERAL_TYPE
3823 || TREE_CODE (etype) == BOOLEAN_TYPE))
3825 /* The optimization barrier is a VIEW_CONVERT_EXPR node; moreover,
3826 in order not to be deemed a useless type conversion, it must
3827 be from subtype to base type.
3829 ??? This may raise addressability and/or aliasing issues because
3830 VIEW_CONVERT_EXPR gets gimplified as an lvalue, thus causing the
3831 address of its operand to be taken if it is deemed addressable
3832 and not already in GIMPLE form. */
3833 rtype = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
3834 rtype = copy_type (rtype);
3835 TYPE_MAIN_VARIANT (rtype) = rtype;
3836 TREE_TYPE (rtype) = type;
3837 final_unchecked = true;
3840 expr = convert (rtype, expr);
3841 if (type != rtype)
3842 expr = build1 (final_unchecked ? VIEW_CONVERT_EXPR : NOP_EXPR,
3843 type, expr);
3846 /* If we are converting TO an integral type whose precision is not the
3847 same as its size, first unchecked convert to a record that contains
3848 an object of the output type. Then extract the field. */
3849 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
3850 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3851 GET_MODE_BITSIZE (TYPE_MODE (type))))
3853 tree rec_type = make_node (RECORD_TYPE);
3854 tree field = create_field_decl (get_identifier ("OBJ"), type,
3855 rec_type, 1, 0, 0, 0);
3857 TYPE_FIELDS (rec_type) = field;
3858 layout_type (rec_type);
3860 expr = unchecked_convert (rec_type, expr, notrunc_p);
3861 expr = build_component_ref (expr, NULL_TREE, field, 0);
3864 /* Similarly for an integral input type whose precision is not equal to
3865 its size. */
3866 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
3867 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3868 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3870 tree rec_type = make_node (RECORD_TYPE);
3871 tree field
3872 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3873 1, 0, 0, 0);
3875 TYPE_FIELDS (rec_type) = field;
3876 layout_type (rec_type);
3878 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
3879 expr = unchecked_convert (type, expr, notrunc_p);
3882 /* We have a special case when we are converting between two
3883 unconstrained array types. In that case, take the address,
3884 convert the fat pointer types, and dereference. */
3885 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3886 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3887 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3888 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
3889 build_unary_op (ADDR_EXPR, NULL_TREE,
3890 expr)));
3891 else
3893 expr = maybe_unconstrained_array (expr);
3895 /* There's no point in doing two unchecked conversions in a row. */
3896 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3897 expr = TREE_OPERAND (expr, 0);
3899 etype = TREE_TYPE (expr);
3900 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
3903 /* If the result is an integral type whose size is not equal to
3904 the size of the underlying machine type, sign- or zero-extend
3905 the result. We need not do this in the case where the input is
3906 an integral type of the same precision and signedness or if the output
3907 is a biased type or if both the input and output are unsigned. */
3908 if (!notrunc_p
3909 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
3910 && !(TREE_CODE (type) == INTEGER_TYPE
3911 && TYPE_BIASED_REPRESENTATION_P (type))
3912 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3913 GET_MODE_BITSIZE (TYPE_MODE (type)))
3914 && !(INTEGRAL_TYPE_P (etype)
3915 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
3916 && operand_equal_p (TYPE_RM_SIZE (type),
3917 (TYPE_RM_SIZE (etype) != 0
3918 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
3919 0))
3920 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
3922 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
3923 TYPE_UNSIGNED (type));
3924 tree shift_expr
3925 = convert (base_type,
3926 size_binop (MINUS_EXPR,
3927 bitsize_int
3928 (GET_MODE_BITSIZE (TYPE_MODE (type))),
3929 TYPE_RM_SIZE (type)));
3930 expr
3931 = convert (type,
3932 build_binary_op (RSHIFT_EXPR, base_type,
3933 build_binary_op (LSHIFT_EXPR, base_type,
3934 convert (base_type, expr),
3935 shift_expr),
3936 shift_expr));
3939 /* An unchecked conversion should never raise Constraint_Error. The code
3940 below assumes that GCC's conversion routines overflow the same way that
3941 the underlying hardware does. This is probably true. In the rare case
3942 when it is false, we can rely on the fact that such conversions are
3943 erroneous anyway. */
3944 if (TREE_CODE (expr) == INTEGER_CST)
3945 TREE_OVERFLOW (expr) = 0;
3947 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
3948 mark the expression as no longer constant. */
3949 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3950 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
3951 OEP_ONLY_CONST))
3952 TREE_CONSTANT (expr) = 0;
3954 return expr;
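/* Worked example for the extension code above (sizes and values assumed, for
   exposition only): for a signed type with a TYPE_RM_SIZE of 5 bits whose
   mode is 32 bits wide, the shift count is 32 - 5 = 27.  Starting from the
   5-bit pattern 10110 (22 when read as unsigned), shifting left by 27 and
   arithmetically right by 27 sign-extends it to -10, whereas for an unsigned
   type the logical right shift gives back 22.  */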
3957 /* Search the chain of currently available builtin declarations for a node
3958 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
3959 found, if any, or NULL_TREE otherwise. */
3960 tree
3961 builtin_decl_for (tree name)
3963 unsigned i;
3964 tree decl;
3966 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
3967 if (DECL_NAME (decl) == name)
3968 return decl;
3970 return NULL_TREE;
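/* Hypothetical usage sketch (the builtin name is assumed): a caller that
   expects a builtin to have been recorded in builtin_decls might write

       tree gnu_decl = builtin_decl_for (get_identifier ("memcmp"));
       if (gnu_decl)
         ... build a call to gnu_decl ...

   The lookup is purely by DECL_NAME, so NULL_TREE is returned when no
   builtin with that name was recorded.  */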
3973 /* Return the appropriate GCC tree code for the specified GNAT type,
3974 the latter being a record type as predicated by Is_Record_Type. */
3976 enum tree_code
3977 tree_code_for_record_type (Entity_Id gnat_type)
3979 Node_Id component_list
3980 = Component_List (Type_Definition
3981 (Declaration_Node
3982 (Implementation_Base_Type (gnat_type))));
3983 Node_Id component;
3985 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
3986 we have a non-discriminant field outside a variant. In either case,
3987 it's a RECORD_TYPE. */
3989 if (!Is_Unchecked_Union (gnat_type))
3990 return RECORD_TYPE;
3992 for (component = First_Non_Pragma (Component_Items (component_list));
3993 Present (component);
3994 component = Next_Non_Pragma (component))
3995 if (Ekind (Defining_Entity (component)) == E_Component)
3996 return RECORD_TYPE;
3998 return UNION_TYPE;
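/* For instance (illustrative only): an Unchecked_Union all of whose
   non-discriminant components are declared inside the variant part is
   translated as a UNION_TYPE, whereas a single such component outside the
   variant part forces RECORD_TYPE, per the loop above.  */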
4001 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4002 component of an aggregate type. */
4004 bool
4005 type_for_nonaliased_component_p (tree gnu_type)
4007 /* If the type is passed by reference, we may have pointers to the
4008 component so it cannot be made non-aliased. */
4009 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4010 return false;
4012 /* We used to say that any component of aggregate type is aliased
4013 because the front-end may take 'Reference of it. The front-end
4014 has been enhanced in the meantime so as to use a renaming instead
4015 in most cases, but the back-end can probably take the address of
4016 such a component too so we go for the conservative stance.
4018 For instance, we might need the address of any array type, even
4019 if normally passed by copy, to construct a fat pointer if the
4020 component is used as an actual for an unconstrained formal.
4022 Likewise for record types: even if a specific record subtype is
4023 passed by copy, the parent type might be passed by ref (e.g. if
4024 it's of variable size) and we might take the address of a child
4025 component to pass to a parent formal. We have no way to check
4026 for such conditions here. */
4027 if (AGGREGATE_TYPE_P (gnu_type))
4028 return false;
4030 return true;
4033 /* Perform final processing on global variables. */
4035 void
4036 gnat_write_global_declarations (void)
4038 /* Proceed to optimize and emit assembly.
4039 FIXME: shouldn't be the front end's responsibility to call this. */
4040 cgraph_optimize ();
4042 /* Emit debug info for all global declarations. */
4043 emit_debug_global_declarations (VEC_address (tree, global_decls),
4044 VEC_length (tree, global_decls));
4047 #include "gt-ada-utils.h"
4048 #include "gtype-ada.h"