/****************************************************************************
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2007, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for more details.  You should have received a copy of the GNU General    *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/
#include "coretypes.h"
#include "tree-inline.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "pointer-set.h"
#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif

#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD  BITS_PER_WORD
#endif
/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the Alpha back-end which
   unconditionally puts them on its builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "const",   0, 0, true,  false, false, handle_const_attribute },
  { "nothrow", 0, 0, true,  false, false, handle_nothrow_attribute },
  { NULL,      0, 0, false, false, false, NULL }
};
/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY) \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
/* This variable keeps a table of types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];
/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct gnat_binding_level GTY((chain_next ("%h.chain")))
{
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;
/* An array of global declarations.  */
static GTY(()) VEC(tree,gc) *global_decls;

/* An array of builtin declarations.  */
static GTY(()) VEC(tree,gc) *builtin_decls;

/* An array of global renaming pointers.  */
static GTY(()) VEC(tree,gc) *global_renaming_pointers;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;
static void gnat_install_builtins (void);
static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static void gnat_gimplify_function (tree);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static tree convert_to_thin_pointer (tree, tree);
static tree make_descriptor_field (const char *, tree, tree, tree);
static bool potential_alignment_gap (tree, tree, tree);
/* Initialize the association of GNAT nodes to GCC trees.  */

void
init_gnat_to_gnu (void)
{
  associate_gnat_to_gnu
    = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
}
/* GNAT_ENTITY is a GNAT tree node for an entity.  GNU_DECL is the GCC tree
   which is to be associated with GNAT_ENTITY.  Such a GCC tree node is always
   a ..._DECL node.  If NO_CHECK is nonzero, the latter check is suppressed.

   If GNU_DECL is zero, a previous association is to be reset.  */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  Raise gigi 401 if not.  Usually, this
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem.  */
  gcc_assert (!(gnu_decl
                && (PRESENT_GNU_TREE (gnat_entity)
                    || (!no_check && !DECL_P (gnu_decl)))));

  SET_GNU_TREE (gnat_entity, gnu_decl);
}
/* GNAT_ENTITY is a GNAT tree node for a defining identifier.
   Return the ..._DECL node that was associated with it.  If there is no tree
   node associated with GNAT_ENTITY, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}
/* Initialize the association of GNAT nodes to GCC trees as dummies.  */

void
init_dummy_type (void)
{
  dummy_node_table
    = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
}
/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
  tree gnu_type;

  /* If there is an equivalent type, get its underlying type.  */
  if (Present (gnat_underlying))
    gnat_underlying = Underlying_Type (gnat_underlying);

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_underlying))
    gnat_underlying = gnat_type;

  /* If there already is a dummy type, use that one.  Else make one.  */
  if (PRESENT_DUMMY_NODE (gnat_underlying))
    return GET_DUMMY_NODE (gnat_underlying);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_underlying)
                        ? tree_code_for_record_type (gnat_underlying)
                        : ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  if (AGGREGATE_TYPE_P (gnu_type))
    {
      TYPE_STUB_DECL (gnu_type) = build_decl (TYPE_DECL, NULL_TREE, gnu_type);
      TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);
    }

  SET_DUMMY_NODE (gnat_underlying, gnu_type);

  return gnu_type;
}
/* Return nonzero if we are currently in the global binding level.  */

int
global_bindings_p (void)
{
  return ((force_global || !current_function_decl) ? -1 : 0);
}
/* Enter a new binding level.  */

void
gnat_pushlevel ()
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel
      = (struct gnat_binding_level *)
        ggc_alloc (sizeof (struct gnat_binding_level));

  /* Use a free BLOCK, if any; otherwise, allocate one.  */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = TREE_CHAIN (free_block_chain);
      TREE_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent.  */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of active levels.  */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}
/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
}

/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}

/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl ()
{
  return current_binding_level->jmpbuf_decl;
}
/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel ()
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK don't do anything.  Otherwise, if there
     are no variables, free the block and merge its subblocks into those of
     its parent block.  Otherwise, add it to the list of its parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (BLOCK_VARS (block) == NULL_TREE)
    {
      BLOCK_SUBBLOCKS (level->chain->block)
        = chainon (BLOCK_SUBBLOCKS (block),
                   BLOCK_SUBBLOCKS (level->chain->block));
      TREE_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      TREE_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
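
/* Usage sketch (illustrative, not from the original source): the push/pop
   entry points above are meant to bracket a lexical scope, e.g.

       gnat_pushlevel ();
       ... create decls with gnat_pushdecl; they land in the new BLOCK ...
       gnat_poplevel ();

   Binding levels and BLOCK nodes are recycled through free_binding_level
   and free_block_chain rather than being reallocated each time.  */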
/* Insert BLOCK at the end of the list of subblocks of the
   current binding level.  This is used when a BIND_EXPR is expanded,
   to handle the BLOCK node inside the BIND_EXPR.  */

void
insert_block (tree block)
{
  TREE_USED (block) = 1;
  TREE_CHAIN (block) = BLOCK_SUBBLOCKS (current_binding_level->block);
  BLOCK_SUBBLOCKS (current_binding_level->block) = block;
}
/* Records a ..._DECL node DECL as belonging to the current lexical scope
   and uses GNAT_NODE for location information and propagating flags.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  /* If at top level, there is no context.  But a PARM_DECL always goes in
     the level of its function.  */
  if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
    DECL_CONTEXT (decl) = 0;
  else
    {
      DECL_CONTEXT (decl) = current_function_decl;

      /* Functions imported in another function are not really nested.  */
      if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
        DECL_NO_STATIC_CHAIN (decl) = 1;
    }

  TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global variables in the
     globals list and builtin functions in a dedicated list to speed up
     further lookups.  Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
     the list, as they will cause trouble with the debugger and aren't needed
     anyway.  */
  if (TREE_CODE (decl) != TYPE_DECL
      || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
    {
      if (global_bindings_p ())
        {
          VEC_safe_push (tree, gc, global_decls, decl);

          if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
            VEC_safe_push (tree, gc, builtin_decls, decl);
        }
      else
        {
          TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
          BLOCK_VARS (current_binding_level->block) = decl;
        }
    }

  /* For the declaration of a type, set its name if it either is not already
     set, was set to an IDENTIFIER_NODE, indicating an internal name,
     or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in the
     equivalent function of c-decl.c makes a copy of the type node here, but
     that may cause us trouble with incomplete types.  We make an exception
     for fat pointer types because the compiler automatically builds them
     for unconstrained array types and the debugger uses them to represent
     both these and pointers to these.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      if (!TYPE_NAME (t) || TREE_CODE (TYPE_NAME (t)) == IDENTIFIER_NODE)
        TYPE_NAME (t) = decl;
      else if (TYPE_FAT_POINTER_P (t))
        {
          tree tt = build_variant_type_copy (t);
          TYPE_NAME (tt) = decl;
          TREE_USED (tt) = TREE_USED (t);
          TREE_TYPE (decl) = tt;
          DECL_ORIGINAL_TYPE (decl) = t;
        }
      else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
        TYPE_NAME (t) = decl;
    }
}
/* Do little here.  Set up the standard declarations later after the
   front end has been run.  */

void
gnat_init_decl_processing (void)
{
  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;

  build_common_tree_nodes (true, true);

  /* In Ada, we use a signed type for SIZETYPE.  Use the signed type
     corresponding to the size of Pmode.  In most cases when ptr_mode and
     Pmode differ, C will use the width of ptr_mode as sizetype.  But we get
     far better code using the width of Pmode.  Make this here since we need
     this before we can expand the GNAT types.  */
  size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
  set_sizetype (size_type_node);
  build_common_tree_nodes_2 (0);

  /* Give names and make TYPE_DECLs for common types.  */
  create_type_decl (get_identifier (SIZE_TYPE), sizetype,
                    NULL, false, true, Empty);
  create_type_decl (get_identifier ("integer"), integer_type_node,
                    NULL, false, true, Empty);
  create_type_decl (get_identifier ("unsigned char"), char_type_node,
                    NULL, false, true, Empty);
  create_type_decl (get_identifier ("long integer"), long_integer_type_node,
                    NULL, false, true, Empty);

  ptr_void_type_node = build_pointer_type (void_type_node);

  gnat_install_builtins ();
}
/* Install the builtin functions we might need.  */

static void
gnat_install_builtins ()
{
  /* Builtins used by generic middle-end optimizers.  */
  build_common_builtin_nodes ();

  /* Target specific builtins, such as the AltiVec family on ppc.  */
  targetm.init_builtins ();
}
/* Create the predefined scalar types such as `integer_type_node' needed
   in the gcc back-end and initialize the global binding level.  */

void
init_gigi_decls (tree long_long_float_type, tree exception_type)
{
  tree endlink, decl;
  unsigned int i;

  /* Set the types that GCC and Gigi use from the front end.  We would like
     to do this for char_type_node, but it needs to correspond to the C
     char type.  */
  if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
    {
      /* In this case, the builtin floating point types are VAX float,
         so make up a type for use.  */
      longest_float_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
      layout_type (longest_float_type_node);
      create_type_decl (get_identifier ("longest float type"),
                        longest_float_type_node, NULL, false, true, Empty);
    }
  else
    longest_float_type_node = TREE_TYPE (long_long_float_type);

  except_type_node = TREE_TYPE (exception_type);

  unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
  create_type_decl (get_identifier ("unsigned int"), unsigned_type_node,
                    NULL, false, true, Empty);

  void_type_decl_node = create_type_decl (get_identifier ("void"),
                                          void_type_node, NULL, false, true,
                                          Empty);

  void_ftype = build_function_type (void_type_node, NULL_TREE);
  ptr_void_ftype = build_pointer_type (void_ftype);

  /* Now declare runtime functions.  */
  endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);

  /* malloc is a function declaration tree for a function to allocate
     memory.  */
  malloc_decl
    = create_subprog_decl (get_identifier ("__gnat_malloc"), NULL_TREE,
                           build_function_type (ptr_void_type_node,
                                                tree_cons (NULL_TREE,
                                                           sizetype,
                                                           endlink)),
                           NULL_TREE, false, true, true, NULL,
                           Empty);
  DECL_IS_MALLOC (malloc_decl) = 1;

  /* free is a function declaration tree for a function to free memory.  */
  free_decl
    = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
                           build_function_type (void_type_node,
                                                tree_cons (NULL_TREE,
                                                           ptr_void_type_node,
                                                           endlink)),
                           NULL_TREE, false, true, true, NULL, Empty);

  /* Make the types and functions used for exception processing.  */
  jmpbuf_type
    = build_array_type (gnat_type_for_mode (Pmode, 0),
                        build_index_type (build_int_cst (NULL_TREE, 5)));
  create_type_decl (get_identifier ("JMPBUF_T"), jmpbuf_type, NULL,
                    true, true, Empty);
  jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);

  /* Functions to get and set the jumpbuf pointer for the current thread.  */
  get_jmpbuf_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
       NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
       NULL_TREE, false, true, true, NULL, Empty);
  /* Avoid creating superfluous edges to __builtin_setjmp receivers.  */
  DECL_IS_PURE (get_jmpbuf_decl) = 1;

  set_jmpbuf_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
       NULL_TREE,
       build_function_type (void_type_node,
                            tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, false, true, true, NULL, Empty);

  /* Function to get the current exception.  */
  get_excptr_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__get_gnat_exception"),
       NULL_TREE,
       build_function_type (build_pointer_type (except_type_node), NULL_TREE),
       NULL_TREE, false, true, true, NULL, Empty);
  /* Avoid creating superfluous edges to __builtin_setjmp receivers.  */
  DECL_IS_PURE (get_excptr_decl) = 1;

  /* Functions that raise exceptions.  */
  raise_nodefer_decl
    = create_subprog_decl
      (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
       build_function_type (void_type_node,
                            tree_cons (NULL_TREE,
                                       build_pointer_type (except_type_node),
                                       endlink)),
       NULL_TREE, false, true, true, NULL, Empty);

  /* Dummy objects to materialize "others" and "all others" in the exception
     tables.  These are exported by a-exexpr.adb, so see this unit for the
     types to use.  */
  others_decl
    = create_var_decl (get_identifier ("OTHERS"),
                       get_identifier ("__gnat_others_value"),
                       integer_type_node, 0, 1, 0, 1, 1, 0, Empty);

  all_others_decl
    = create_var_decl (get_identifier ("ALL_OTHERS"),
                       get_identifier ("__gnat_all_others_value"),
                       integer_type_node, 0, 1, 0, 1, 1, 0, Empty);

  /* Hooks to call when entering/leaving an exception handler.  */
  begin_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
                           build_function_type (void_type_node,
                                                tree_cons (NULL_TREE,
                                                           ptr_void_type_node,
                                                           endlink)),
                           NULL_TREE, false, true, true, NULL, Empty);

  end_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
                           build_function_type (void_type_node,
                                                tree_cons (NULL_TREE,
                                                           ptr_void_type_node,
                                                           endlink)),
                           NULL_TREE, false, true, true, NULL, Empty);

  /* If in no exception handlers mode, all raise statements are redirected to
     __gnat_last_chance_handler.  No need to redefine raise_nodefer_decl since
     this procedure will never be called in this mode.  */
  if (No_Exception_Handlers_Set ())
    {
      decl
        = create_subprog_decl
          (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
           build_function_type (void_type_node,
                                tree_cons (NULL_TREE,
                                           build_pointer_type (char_type_node),
                                           tree_cons (NULL_TREE,
                                                      integer_type_node,
                                                      endlink))),
           NULL_TREE, false, true, true, NULL, Empty);

      for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
        gnat_raise_decls[i] = decl;
    }
  else
    /* Otherwise, make one decl for each exception reason.  */
    for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
      {
        char name[17];

        sprintf (name, "__gnat_rcheck_%.2d", i);
        gnat_raise_decls[i]
          = create_subprog_decl
            (get_identifier (name), NULL_TREE,
             build_function_type (void_type_node,
                                  tree_cons (NULL_TREE,
                                             build_pointer_type
                                             (char_type_node),
                                             tree_cons (NULL_TREE,
                                                        integer_type_node,
                                                        endlink))),
             NULL_TREE, false, true, true, NULL, Empty);
      }

  /* Indicate that these never return.  */
  TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
  TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
  TREE_TYPE (raise_nodefer_decl)
    = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
                            TYPE_QUAL_VOLATILE);

  for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
    {
      TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
      TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
      TREE_TYPE (gnat_raise_decls[i])
        = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
                                TYPE_QUAL_VOLATILE);
    }

  /* setjmp returns an integer and has one operand, which is a pointer to
     a jmpbuf.  */
  setjmp_decl
    = create_subprog_decl
      (get_identifier ("__builtin_setjmp"), NULL_TREE,
       build_function_type (integer_type_node,
                            tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, false, true, true, NULL, Empty);

  DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;

  /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
     address.  */
  update_setjmp_buf_decl
    = create_subprog_decl
      (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
       build_function_type (void_type_node,
                            tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, false, true, true, NULL, Empty);

  DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;

  main_identifier_node = get_identifier ("main");
}
/* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
   finish constructing the record or union type.  If REP_LEVEL is zero, this
   record has no representation clause and so will be entirely laid out here.
   If REP_LEVEL is one, this record has a representation clause and has been
   laid out already; only set the sizes and alignment.  If REP_LEVEL is two,
   this record is derived from a parent record and thus inherits its layout;
   only make a pass on the fields to finalize them.  If DO_NOT_FINALIZE is
   true, the record type is expected to be modified afterwards so it will
   not be sent to the back-end for finalization.  */
void
finish_record_type (tree record_type, tree fieldlist, int rep_level,
                    bool do_not_finalize)
{
  enum tree_code code = TREE_CODE (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  bool var_size = false;
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = fieldlist;
  TYPE_STUB_DECL (record_type)
    = build_decl (TYPE_DECL, TYPE_NAME (record_type), record_type);

  /* We don't need both the typedef name and the record name output in
     the debugging information, since they are the same.  */
  DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
      TYPE_MODE (record_type) = BLKmode;

      if (!had_size_unit)
        TYPE_SIZE_UNIT (record_type) = size_zero_node;
      if (!had_size)
        TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
         out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
        code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
         case where there is a rep clause but all fields have errors and
         no longer have a position.  */
      TYPE_SIZE (record_type) = 0;
      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    fieldlist = nreverse (fieldlist);

  for (field = fieldlist; field; field = TREE_CHAIN (field))
    {
      tree pos = bit_position (field);
      tree type = TREE_TYPE (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size = DECL_SIZE (field);

      /* We need to make an XVE/XVU record if any field has variable size,
         whether or not the record does.  For example, if we have a union,
         it may be that all fields, rounded up to the alignment, have the
         same size, in which case we'll use that size.  But the debug
         output routines (except Dwarf2) won't be able to output the fields,
         so we need to make the special record.  */
      if (TREE_CODE (this_size) != INTEGER_CST)
        var_size = true;

      if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
           || TREE_CODE (type) == QUAL_UNION_TYPE)
          && !TYPE_IS_FAT_POINTER_P (type)
          && !TYPE_CONTAINS_TEMPLATE_P (type)
          && TYPE_ADA_SIZE (type))
        this_ada_size = TYPE_ADA_SIZE (type);

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field) && !STRICT_ALIGNMENT
          && value_factor_p (pos, BITS_PER_UNIT)
          && operand_equal_p (this_size, TYPE_SIZE (type), 0))
        DECL_BIT_FIELD (field) = 0;

      /* If we still have DECL_BIT_FIELD set at this point, we know the field
         is technically not addressable.  Except that it can actually be
         addressed if the field is BLKmode and happens to be properly
         aligned.  */
      DECL_NONADDRESSABLE_P (field)
        |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;

      if ((rep_level > 0) && !DECL_BIT_FIELD (field))
        TYPE_ALIGN (record_type)
          = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

      switch (code)
        {
        case UNION_TYPE:
          ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
          size = size_binop (MAX_EXPR, size, this_size);
          break;

        case QUAL_UNION_TYPE:
          ada_size
            = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
                           this_ada_size, ada_size);
          size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
                              this_size, size);
          break;

        case RECORD_TYPE:
          /* Since we know here that all fields are sorted in order of
             increasing bit position, the size of the record is one
             higher than the ending bit of the last field processed
             unless we have a rep clause, since in that case we might
             have a field outside a QUAL_UNION_TYPE that has a higher ending
             position.  So use a MAX in that case.  Also, if this field is a
             QUAL_UNION_TYPE, we need to take into account the previous size in
             the case of empty variants.  */
          ada_size
            = merge_sizes (ada_size, pos, this_ada_size,
                           TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
          size
            = merge_sizes (size, pos, this_size,
                           TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
          break;

        default:
          gcc_unreachable ();
        }
    }

  if (code == QUAL_UNION_TYPE)
    nreverse (fieldlist);

  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
         than what was specified in it, if any.  */
      if (TREE_CODE (record_type) == RECORD_TYPE
          && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
        size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_IS_FAT_POINTER_P (record_type)
          && !TYPE_CONTAINS_TEMPLATE_P (record_type))
        SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
        {
          tree size_unit = had_size_unit
                           ? TYPE_SIZE_UNIT (record_type)
                           : convert (sizetype,
                                      size_binop (CEIL_DIV_EXPR, size,
                                                  bitsize_unit_node));
          unsigned int align = TYPE_ALIGN (record_type);

          TYPE_SIZE (record_type) = variable_size (round_up (size, align));
          TYPE_SIZE_UNIT (record_type)
            = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

          compute_record_mode (record_type);
        }
    }

  if (!do_not_finalize)
    rest_of_record_type_compilation (record_type);
}
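
/* Usage sketch (illustrative, not from the original source): a caller with
   no representation clause chains FIELD_DECLs and lets the record be laid
   out here, e.g.

       tree gnu_field = create_field_decl (get_identifier ("f"),
                                           integer_type_node, gnu_record,
                                           0, NULL_TREE, NULL_TREE, 0);
       TREE_CHAIN (gnu_field) = gnu_field_list;
       gnu_field_list = gnu_field;
       finish_record_type (gnu_record, gnu_field_list, 0, false);

   With REP_LEVEL 0 the call runs layout_type; with 1 it only sets the sizes,
   alignment and mode from the field positions already assigned.  */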
/* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
   the debug information associated with it.  It need not be invoked
   directly in most cases since finish_record_type takes care of doing
   so, unless explicitly requested not to through DO_NOT_FINALIZE.  */

void
rest_of_record_type_compilation (tree record_type)
{
  tree fieldlist = TYPE_FIELDS (record_type);
  tree field;
  enum tree_code code = TREE_CODE (record_type);
  bool var_size = false;

  for (field = fieldlist; field; field = TREE_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
         whether or not the record does.  For example, if we have a union,
         it may be that all fields, rounded up to the alignment, have the
         same size, in which case we'll use that size.  But the debug
         output routines (except Dwarf2) won't be able to output the fields,
         so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
          /* If a field has a non-constant qualifier, the record will have
             variable size too.  */
          || (code == QUAL_UNION_TYPE
              && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
        {
          var_size = true;
          break;
        }
    }

  /* If this record is of variable size, rename it so that the
     debugger knows it is and make a new, parallel, record
     that tells the debugger how the record is laid out.  See
     exp_dbug.ads.  But don't do this for records that are padding
     since they confuse GDB.  */
  if (var_size
      && !(TREE_CODE (record_type) == RECORD_TYPE
           && TYPE_IS_PADDING_P (record_type)))
    {
      tree new_record_type
        = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
                     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_NAME (record_type);
      tree orig_id
        = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
           : orig_name);
      tree new_id
        = concat_id_with_name (orig_id,
                               TREE_CODE (record_type) == QUAL_UNION_TYPE
                               ? "XVU" : "XVE");
      tree last_pos = bitsize_zero_node;
      tree old_field;
      tree prev_old_field = 0;

      TYPE_NAME (new_record_type) = new_id;
      TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
      TYPE_STUB_DECL (new_record_type)
        = build_decl (TYPE_DECL, new_id, new_record_type);
      DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
        = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
        = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      /* Now scan all the fields, replacing each field with a new
         field corresponding to the new encoding.  */
      for (old_field = TYPE_FIELDS (record_type); old_field;
           old_field = TREE_CHAIN (old_field))
        {
          tree field_type = TREE_TYPE (old_field);
          tree field_name = DECL_NAME (old_field);
          tree new_field;
          tree curpos = bit_position (old_field);
          bool var = false;
          unsigned int align = 0;
          tree pos;

          /* See how the position was modified from the last position.

             There are two basic cases we support: a value was added
             to the last position or the last position was rounded to
             a boundary and then something was added.  Check for the
             first case first.  If not, see if there is any evidence
             of rounding.  If so, round the last position and try
             again.

             If this is a union, the position can be taken as zero.  */

          if (TREE_CODE (new_record_type) == UNION_TYPE)
            pos = bitsize_zero_node, align = 0;
          else
            pos = compute_related_constant (curpos, last_pos);

          if (!pos && TREE_CODE (curpos) == MULT_EXPR
              && host_integerp (TREE_OPERAND (curpos, 1), 1))
            {
              tree offset = TREE_OPERAND (curpos, 0);
              align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);

              /* Strip off any conversions.  */
              while (TREE_CODE (offset) == NON_LVALUE_EXPR
                     || TREE_CODE (offset) == NOP_EXPR
                     || TREE_CODE (offset) == CONVERT_EXPR)
                offset = TREE_OPERAND (offset, 0);

              /* An offset which is a bitwise AND with a negative power of 2
                 means an alignment corresponding to this power of 2.  */
              if (TREE_CODE (offset) == BIT_AND_EXPR
                  && host_integerp (TREE_OPERAND (offset, 1), 0)
                  && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
                {
                  unsigned int pow
                    = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
                  if (exact_log2 (pow) > 0)
                    align *= pow;
                }

              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }
          else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
                   && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
                   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
                   && host_integerp (TREE_OPERAND
                                     (TREE_OPERAND (curpos, 0), 1),
                                     1))
            {
              align
                = tree_low_cst
                  (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }
          else if (potential_alignment_gap (prev_old_field, old_field,
                                            pos))
            {
              align = TYPE_ALIGN (field_type);
              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }

          /* If we can't compute a position, set it to zero.

             ??? We really should abort here, but it's too much work
             to get this correct for all cases.  */

          if (!pos)
            pos = bitsize_zero_node;

          /* See if this type is variable-sized and make a pointer type
             and indicate the indirection if so.  Beware that the debug
             back-end may adjust the position computed above according
             to the alignment of the field type, i.e. the pointer type
             in this case, if we don't preventively counter that.  */
          if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
            {
              field_type = build_pointer_type (field_type);
              if (align != 0 && TYPE_ALIGN (field_type) > align)
                {
                  field_type = copy_node (field_type);
                  TYPE_ALIGN (field_type) = align;
                }
              var = true;
            }

          /* Make a new field name, if necessary.  */
          if (var || align != 0)
            {
              char suffix[16];

              if (align != 0)
                sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
                         align / BITS_PER_UNIT);
              else
                strcpy (suffix, "XVL");

              field_name = concat_id_with_name (field_name, suffix);
            }

          new_field = create_field_decl (field_name, field_type,
                                         new_record_type, 0,
                                         DECL_SIZE (old_field), pos, 0);
          TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
          TYPE_FIELDS (new_record_type) = new_field;

          /* If old_field is a QUAL_UNION_TYPE, take its size as being
             zero.  The only time it's not the last field of the record
             is when there are other components at fixed positions after
             it (meaning there was a rep clause for every field) and we
             want to be able to encode them.  */
          last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
                                 (TREE_CODE (TREE_TYPE (old_field))
                                  == QUAL_UNION_TYPE)
                                 ? bitsize_zero_node
                                 : DECL_SIZE (old_field));
          prev_old_field = old_field;
        }

      TYPE_FIELDS (new_record_type)
        = nreverse (TYPE_FIELDS (new_record_type));

      rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
    }

  rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
}
/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is nonzero
   if this represents a QUAL_UNION_TYPE in which case we must look for
   COND_EXPRs and replace a value of zero with the old size.  If HAS_REP
   is nonzero, we must take the MAX of the end position of this field
   with LAST_SIZE.  In all other cases, we use FIRST_BIT plus SIZE.

   We return an expression for the size.  */

static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool special,
             bool has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new;

  if (!special || TREE_CODE (size) != COND_EXPR)
    {
      new = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
        new = size_binop (MAX_EXPR, last_size, new);
    }
  else
    new = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
                       integer_zerop (TREE_OPERAND (size, 1))
                       ? last_size : merge_sizes (last_size, first_bit,
                                                  TREE_OPERAND (size, 1),
                                                  1, has_rep),
                       integer_zerop (TREE_OPERAND (size, 2))
                       ? last_size : merge_sizes (last_size, first_bit,
                                                  TREE_OPERAND (size, 2),
                                                  1, has_rep));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new) == NON_LVALUE_EXPR)
    new = TREE_OPERAND (new, 0);

  return new;
}
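
/* Worked example (illustrative): for a rep'ed record (HAS_REP true) with a
   field at FIRST_BIT 64 of SIZE 32, merge_sizes returns
   MAX (LAST_SIZE, 64 + 32), so the accumulated size can only grow; without
   a rep clause the result is simply FIRST_BIT + SIZE of the field being
   processed.  */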
/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
   related by the addition of a constant.  Return that constant if so.  */

static tree
compute_related_constant (tree op0, tree op1)
{
  tree op0_var, op1_var;
  tree op0_con = split_plus (op0, &op0_var);
  tree op1_con = split_plus (op1, &op1_var);
  tree result = size_binop (MINUS_EXPR, op0_con, op1_con);

  if (operand_equal_p (op0_var, op1_var, 0))
    return result;
  else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
    return result;
  else
    return 0;
}
/* Utility function of above to split a tree OP which may be a sum, into a
   constant part, which is returned, and a variable part, which is stored
   in *PVAR.  *PVAR may be bitsize_zero_node.  All operations must be of
   sizetype.  */

static tree
split_plus (tree in, tree *pvar)
{
  /* Strip NOPS in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery.  We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  STRIP_NOPS (in);

  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      if (lhs_var == TREE_OPERAND (in, 0)
          && rhs_var == TREE_OPERAND (in, 1))
        return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}
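
/* Worked example (illustrative): for OP0 = N + 8 and OP1 = N + 4,
   split_plus yields constant parts 8 and 4 with the same variable part N,
   so compute_related_constant returns 8 - 4 = 4; if the variable parts
   differ, it returns 0 to signal that no constant relation was found.  */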
/* Return a FUNCTION_TYPE node.  RETURN_TYPE is the type returned by the
   subprogram.  If it is void_type_node, then we are dealing with a procedure,
   otherwise we are dealing with a function.  PARAM_DECL_LIST is a list of
   PARM_DECL nodes that are the subprogram arguments.  CICO_LIST is the
   copy-in/copy-out list to be stored into TYPE_CICO_LIST.
   RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
   object.  RETURNS_BY_REF is nonzero if the function returns by reference.
   RETURNS_WITH_DSP is nonzero if the function is to return with a
   depressed stack pointer.  RETURNS_BY_TARGET_PTR is true if the function
   is to be passed (as its first parameter) the address of the place to copy
   its result.  */

tree
create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
                     bool returns_unconstrained, bool returns_by_ref,
                     bool returns_with_dsp, bool returns_by_target_ptr)
{
  /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
     the subprogram formal parameters.  This list is generated by traversing
     the input list of PARM_DECL nodes.  */
  tree param_type_list = NULL;
  tree param_decl;
  tree type;

  for (param_decl = param_decl_list; param_decl;
       param_decl = TREE_CHAIN (param_decl))
    param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
                                 param_type_list);

  /* The list of the function parameter types has to be terminated by the void
     type to signal to the back-end that we are not dealing with a variable
     parameter subprogram, but that the subprogram has a fixed number of
     parameters.  */
  param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);

  /* The list of argument types has been created in reverse
     so nreverse it.  */
  param_type_list = nreverse (param_type_list);

  type = build_function_type (return_type, param_type_list);

  /* TYPE may have been shared since GCC hashes types.  If it has a CICO_LIST
     or the new type should, make a copy of TYPE.  Likewise for
     RETURNS_UNCONSTRAINED and RETURNS_BY_REF.  */
  if (TYPE_CI_CO_LIST (type) || cico_list
      || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
      || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
      || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
    type = copy_type (type);

  TYPE_CI_CO_LIST (type) = cico_list;
  TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
  TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
  TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
  TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;

  return type;
}
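
/* Usage sketch (illustrative, not from the original source): a procedure
   with plain IN parameters and no copy-in/copy-out list would be typed as

       tree gnu_type
         = create_subprog_type (void_type_node, gnu_param_list, NULL_TREE,
                                false, false, false, false);

   where gnu_param_list is the chain of PARM_DECLs built with
   create_param_decl.  */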
/* Return a copy of TYPE but safe to modify in any way.  */

tree
copy_type (tree type)
{
  tree new = copy_node (type);

  /* copy_node clears this field instead of copying it, because it is
     aliased with TREE_CHAIN.  */
  TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);

  TYPE_POINTER_TO (new) = 0;
  TYPE_REFERENCE_TO (new) = 0;
  TYPE_MAIN_VARIANT (new) = new;
  TYPE_NEXT_VARIANT (new) = 0;

  return new;
}
/* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
   TYPE_INDEX_TYPE is INDEX.  GNAT_NODE is used for the position of
   the decl.  */

tree
create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
{
  /* First build a type for the desired range.  */
  tree type = build_index_2_type (min, max);

  /* If this type has the TYPE_INDEX_TYPE we want, return it.  Otherwise, if it
     doesn't have TYPE_INDEX_TYPE set, set it to INDEX.  If TYPE_INDEX_TYPE
     is set, but not to INDEX, make a copy of this type with the requested
     index type.  Note that we have no way of sharing these types, but that's
     only a small hole.  */
  if (TYPE_INDEX_TYPE (type) == index)
    return type;
  else if (TYPE_INDEX_TYPE (type))
    type = copy_type (type);

  SET_TYPE_INDEX_TYPE (type, index);
  create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);

  return type;
}
/* Return a TYPE_DECL node.  TYPE_NAME gives the name of the type (a character
   string) and TYPE is a ..._TYPE node giving its data type.
   ARTIFICIAL_P is true if this is a declaration that was generated
   by the compiler.  DEBUG_INFO_P is true if we need to write debugging
   information about this type.  GNAT_NODE is used for the position of
   the decl.  */

tree
create_type_decl (tree type_name, tree type, struct attrib *attr_list,
                  bool artificial_p, bool debug_info_p, Node_Id gnat_node)
{
  tree type_decl = build_decl (TYPE_DECL, type_name, type);
  enum tree_code code = TREE_CODE (type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;

  if (!TYPE_IS_DUMMY_P (type))
    gnat_pushdecl (type_decl, gnat_node);

  process_attributes (type_decl, attr_list);

  /* Pass type declaration information to the debugger unless this is an
     UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
     an ENUMERAL_TYPE or RECORD_TYPE which is handled separately, or
     a type for which debugging information was not requested.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;
  else if (code != ENUMERAL_TYPE
           && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
           && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
                && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
    rest_of_type_decl_compilation (type_decl);

  return type_decl;
}
/* Helper for create_var_decl and create_true_var_decl.  Returns a GCC VAR_DECL
   or CONST_DECL node.

   VAR_NAME gives the name of the variable.  ASM_NAME is its assembler name
   (if provided).  TYPE is its data type (a GCC ..._TYPE node).  VAR_INIT is
   the GCC tree for an optional initial expression; NULL_TREE if none.

   CONST_FLAG is true if this variable is constant, in which case we might
   return a CONST_DECL node unless CONST_DECL_ALLOWED_FLAG is false.

   PUBLIC_FLAG is true if this definition is to be made visible outside of
   the current compilation unit.  This flag should be set when processing the
   variable definitions in a package specification.  EXTERN_FLAG is nonzero
   when processing an external variable declaration (as opposed to a
   definition: no storage is to be allocated for the variable here).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.

   GNAT_NODE is used for the position of the decl.  */

static tree
create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
                   bool const_flag, bool const_decl_allowed_flag,
                   bool public_flag, bool extern_flag, bool static_flag,
                   struct attrib *attr_list, Node_Id gnat_node)
{
  bool init_const
    = (var_init != 0
       && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
       && (global_bindings_p () || static_flag
           ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
           : TREE_CONSTANT (var_init)));

  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
     case the initializer may be used in lieu of the DECL node (as done in
     Identifier_to_gnu).  This is useful to prevent the need of elaboration
     code when an identifier for which such a decl is made is in turn used as
     an initializer.  We used to rely on CONST vs VAR_DECL for this purpose,
     but extra constraints apply to this choice (see below) and are not
     relevant to the distinction we wish to make.  */
  bool constant_p = const_flag && init_const;

  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
     and may be used for scalars in general but not for aggregates.  */
  tree var_decl
    = build_decl ((constant_p && const_decl_allowed_flag
                   && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
                  var_name, type);

  /* If this is external, throw away any initializations (they will be done
     elsewhere) unless this is a constant for which we would like to remain
     able to get the initializer.  If we are defining a global here, leave a
     constant initialization and save any variable elaborations for the
     elaboration routine.  If we are just annotating types, throw away the
     initialization if it isn't a constant.  */
  if ((extern_flag && !constant_p)
      || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
    var_init = NULL_TREE;

  /* At the global level, an initializer requiring code to be generated
     produces elaboration statements.  Check that such statements are allowed,
     that is, not violating a No_Elaboration_Code restriction.  */
  if (global_bindings_p () && var_init != 0 && ! init_const)
    Check_Elaboration_Code_Allowed (gnat_node);

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;
  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = constant_p;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* If it's public and not external, always allocate storage for it.
     At the global binding level we need to allocate static storage for the
     variable if and only if it's not external.  If we are not at the top
     level we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl)
    = public_flag || (global_bindings_p () ? !extern_flag : static_flag);

  if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  if (TREE_CODE (var_decl) != CONST_DECL)
    rest_of_decl_compilation (var_decl, global_bindings_p (), 0);
  else
    expand_decl (var_decl);

  return var_decl;
}
/* Wrapper around create_var_decl_1 for cases where we don't care whether
   a VAR or a CONST decl node is created.  */

tree
create_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
                 bool const_flag, bool public_flag, bool extern_flag,
                 bool static_flag, struct attrib *attr_list,
                 Node_Id gnat_node)
{
  return create_var_decl_1 (var_name, asm_name, type, var_init,
                            const_flag, true,
                            public_flag, extern_flag, static_flag,
                            attr_list, gnat_node);
}

/* Wrapper around create_var_decl_1 for cases where a VAR_DECL node is
   required.  The primary intent is for DECL_CONST_CORRESPONDING_VARs, which
   must be VAR_DECLs and on which we want TREE_READONLY set to have them
   possibly assigned to a readonly data section.  */

tree
create_true_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
                      bool const_flag, bool public_flag, bool extern_flag,
                      bool static_flag, struct attrib *attr_list,
                      Node_Id gnat_node)
{
  return create_var_decl_1 (var_name, asm_name, type, var_init,
                            const_flag, false,
                            public_flag, extern_flag, static_flag,
                            attr_list, gnat_node);
}
/* Returns a FIELD_DECL node.  FIELD_NAME is the field name, FIELD_TYPE is its
   type, and RECORD_TYPE is the type of the parent.  PACKED is nonzero if
   this field is in a record type with a "pragma pack".  If SIZE is nonzero
   it is the specified size for this field.  If POS is nonzero, it is the bit
   position.  If ADDRESSABLE is nonzero, it means we are allowed to take
   the address of this field for aliasing purposes.  If it is negative, we
   should not make a bitfield, which is used by make_aligning_type.  */

tree
create_field_decl (tree field_name, tree field_type, tree record_type,
                   int packed, tree size, tree pos, int addressable)
{
  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.  */
  if (packed && TYPE_MODE (field_type) == BLKmode)
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);

      /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
         byte.  */
      if (TREE_CODE (size) == INTEGER_CST
          && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
        size = round_up (size, BITS_PER_UNIT);
    }

  /* If we may, according to ADDRESSABLE, make a bitfield if a size is
     specified for two reasons: first if the size differs from the natural
     size.  Second, if the alignment is insufficient.  There are a number of
     ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     because no such entity requiring bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (addressable >= 0
      && size
      && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
          || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
          || packed
          || (TYPE_ALIGN (record_type) != 0
              && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (!packed && !pos)
        DECL_ALIGN (field_decl)
          = (TYPE_ALIGN (record_type) != 0
             ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
             : TYPE_ALIGN (field_type));
    }

  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
  DECL_ALIGN (field_decl)
    = MAX (DECL_ALIGN (field_decl),
           DECL_BIT_FIELD (field_decl) ? 1
           : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
           : TYPE_ALIGN (field_type));

  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
         This is the lowest-order bit set in POS, but no more than
         the alignment of the record, if one is specified.  Note
         that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
        known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
        known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
          && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
        known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
                             host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
                             : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
                    &DECL_FIELD_BIT_OFFSET (field_decl),
                    DECL_OFFSET_ALIGN (field_decl), pos);

      DECL_HAS_REP_P (field_decl) = 1;
    }

  /* In addition to what our caller says, claim the field is addressable if we
     know that its type is not suitable.

     The field may also be "technically" nonaddressable, meaning that even if
     we attempt to take the field's address we will actually get the address
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
     value we have at this point is not accurate enough, so we don't account
     for this here and let finish_record_type decide.  */
  if (!type_for_nonaliased_component_p (field_type))
    addressable = 1;

  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}
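
/* Worked example (illustrative): in a packed record, a component of a 32-bit
   integer type with a Size clause of 17 bits gets SIZE = 17 while
   TYPE_SIZE (field_type) is 32, so the test above sets DECL_BIT_FIELD;
   layout_decl or finish_record_type may later clear the indication again if
   the field turns out to be byte-aligned and of its natural size.  */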
/* Returns a PARM_DECL node.  PARAM_NAME is the name of the parameter,
   PARAM_TYPE is its type.  READONLY is true if the parameter is
   readonly (either an In parameter or an address of a pass-by-ref
   parameter).  */

tree
create_param_decl (tree param_name, tree param_type, bool readonly)
{
  tree param_decl = build_decl (PARM_DECL, param_name, param_type);

  /* Honor targetm.calls.promote_prototypes(), as not doing so can
     lead to various ABI violations.  */
  if (targetm.calls.promote_prototypes (param_type)
      && (TREE_CODE (param_type) == INTEGER_TYPE
          || TREE_CODE (param_type) == ENUMERAL_TYPE)
      && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
    {
      /* We have to be careful about biased types here.  Make a subtype
         of integer_type_node with the proper biasing.  */
      if (TREE_CODE (param_type) == INTEGER_TYPE
          && TYPE_BIASED_REPRESENTATION_P (param_type))
        {
          param_type
            = copy_type (build_range_type (integer_type_node,
                                           TYPE_MIN_VALUE (param_type),
                                           TYPE_MAX_VALUE (param_type)));

          TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
        }
      else
        param_type = integer_type_node;
    }

  DECL_ARG_TYPE (param_decl) = param_type;
  TREE_READONLY (param_decl) = readonly;
  return param_decl;
}
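/* For illustration: on a target whose promote_prototypes hook returns true,
   a formal of an 8-bit integer or enumeration type gets integer_type_node
   (or a biased subtype rebuilt around it) as its DECL_ARG_TYPE above, so the
   argument is passed as a full int, mirroring what C front-ends do for small
   scalar arguments.  */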
/* Given a DECL and ATTR_LIST, process the listed attributes.  */

void
process_attributes (tree decl, struct attrib *attr_list)
{
  for (; attr_list; attr_list = attr_list->next)
    switch (attr_list->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
        decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
                                           NULL_TREE),
                         ATTR_FLAG_TYPE_IN_PLACE);
        break;

      case ATTR_LINK_ALIAS:
        if (! DECL_EXTERNAL (decl))
          {
            TREE_STATIC (decl) = 1;
            assemble_alias (decl, attr_list->name);
          }
        break;

      case ATTR_WEAK_EXTERNAL:
        if (SUPPORTS_WEAK)
          declare_weak (decl);
        else
          post_error ("?weak declarations not supported on this target",
                      attr_list->error_point);
        break;

      case ATTR_LINK_SECTION:
        if (targetm.have_named_sections)
          {
            DECL_SECTION_NAME (decl)
              = build_string (IDENTIFIER_LENGTH (attr_list->name),
                              IDENTIFIER_POINTER (attr_list->name));
            DECL_COMMON (decl) = 0;
          }
        else
          post_error ("?section attributes are not supported for this target",
                      attr_list->error_point);
        break;

      case ATTR_LINK_CONSTRUCTOR:
        DECL_STATIC_CONSTRUCTOR (decl) = 1;
        TREE_USED (decl) = 1;
        break;

      case ATTR_LINK_DESTRUCTOR:
        DECL_STATIC_DESTRUCTOR (decl) = 1;
        TREE_USED (decl) = 1;
        break;
      }
}
/* Record a global renaming pointer.  */

void
record_global_renaming_pointer (tree decl)
{
  gcc_assert (DECL_RENAMED_OBJECT (decl));
  VEC_safe_push (tree, gc, global_renaming_pointers, decl);
}

/* Invalidate the global renaming pointers.  */

void
invalidate_global_renaming_pointers (void)
{
  unsigned int i;
  tree iter;

  for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
    SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);

  VEC_free (tree, gc, global_renaming_pointers);
}
/* Return true if VALUE is known to be a multiple of FACTOR, which must be
   a power of 2.  */

bool
value_factor_p (tree value, HOST_WIDE_INT factor)
{
  if (host_integerp (value, 1))
    return tree_low_cst (value, 1) % factor == 0;

  if (TREE_CODE (value) == MULT_EXPR)
    return (value_factor_p (TREE_OPERAND (value, 0), factor)
            || value_factor_p (TREE_OPERAND (value, 1), factor));

  return false;
}
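/* For example, value_factor_p (bitsize_int (24), 8) is true since 24 is a
   literal multiple of 8, and a MULT_EXPR is accepted as soon as one of its
   operands is itself a multiple of the factor; any other non-constant VALUE
   conservatively yields false.  */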
/* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these 2 fields are laid out in such a way that no gap
   exists between the end of PREV_FIELD and the beginning of CURR_FIELD.
   OFFSET is the distance in bits between the end of PREV_FIELD and the
   starting position of CURR_FIELD.  It is ignored if null.  */

static bool
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap.  */
  if (!prev_field)
    return false;

  /* If the previous field is a union type, then return false: the only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return false;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not null.  */
  if (offset && host_integerp (offset, 1))
    return !integer_zerop (offset);

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not a multiple of the current field alignment.  */
  if (host_integerp (DECL_SIZE (prev_field), 1)
      && host_integerp (bit_position (prev_field), 1))
    return ((tree_low_cst (bit_position (prev_field), 1)
             + tree_low_cst (DECL_SIZE (prev_field), 1))
            % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return false;

  /* Fallback, return that there may be a potential gap.  */
  return true;
}
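/* For example, if PREV_FIELD is a 1-byte component placed at bit position 0
   and CURR_FIELD requires 32-bit alignment, the sum of position and size (8)
   is not a multiple of 32, so a potential gap is reported; had PREV_FIELD
   covered bits 0 .. 31, the function would return false instead.  */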
/* Returns a LABEL_DECL node for LABEL_NAME.  */

tree
create_label_decl (tree label_name)
{
  tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);

  DECL_CONTEXT (label_decl)     = current_function_decl;
  DECL_MODE (label_decl)        = VOIDmode;
  DECL_SOURCE_LOCATION (label_decl) = input_location;

  return label_decl;
}
/* Returns a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the TREE_CHAIN field).

   INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
   appropriate fields in the FUNCTION_DECL.  GNAT_NODE gives the location.  */

tree
create_subprog_decl (tree subprog_name, tree asm_name,
                     tree subprog_type, tree param_decl_list, bool inline_flag,
                     bool public_flag, bool extern_flag,
                     struct attrib *attr_list, Node_Id gnat_node)
{
  tree return_type  = TREE_TYPE (subprog_type);
  tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);

  /* If this is a function nested inside an inlined external function, it
     means we aren't going to compile the outer function unless it is
     actually inlined, so do the same for us.  */
  if (current_function_decl && DECL_INLINE (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))
    extern_flag = true;

  DECL_EXTERNAL (subprog_decl)  = extern_flag;
  TREE_PUBLIC (subprog_decl)    = public_flag;
  TREE_STATIC (subprog_decl)    = 1;
  TREE_READONLY (subprog_decl)  = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;
  DECL_RESULT (subprog_decl)    = build_decl (RESULT_DECL, 0, return_type);
  DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
  DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;

  /* TREE_ADDRESSABLE is set on the result type to request the use of the
     target by-reference return mechanism.  This is not supported all the
     way down to RTL expansion with GCC 4, which ICEs on temporary creation
     attempts with such a type and expects DECL_BY_REFERENCE to be set on
     the RESULT_DECL instead - see gnat_genericize for more details.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
    {
      tree result_decl = DECL_RESULT (subprog_decl);

      TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
      DECL_BY_REFERENCE (result_decl) = 1;
    }

  if (inline_flag)
    DECL_DECLARED_INLINE_P (subprog_decl) = 1;

  if (asm_name)
    SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

  process_attributes (subprog_decl, attr_list);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
}
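/* For illustration, a function for which an explicit By_Reference result
   mechanism is requested gets TREE_ADDRESSABLE set on its return type by the
   front-end; the code above transfers that property to DECL_BY_REFERENCE on
   the RESULT_DECL, and gnat_genericize completes the transformation when the
   body is lowered (see its head comment for the details).  */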
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

void
begin_subprog_body (tree subprog_decl)
{
  tree param_decl;

  current_function_decl = subprog_decl;
  announce_function (subprog_decl);

  /* Enter a new binding level and show that all the parameters belong to
     this function.  */
  gnat_pushlevel ();
  for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
       param_decl = TREE_CHAIN (param_decl))
    DECL_CONTEXT (param_decl) = subprog_decl;

  make_decl_rtl (subprog_decl);

  /* We handle pending sizes via the elaboration of types, so we don't need to
     save them.  This causes them to be marked as part of the outer function
     and then discarded.  */
  get_pending_sizes ();
}
/* Helper for the genericization callback.  Return a dereference of VAL
   if it is of a reference type.  */

static tree
convert_from_reference (tree val)
{
  tree value_type, ref;

  if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
    return val;

  value_type = TREE_TYPE (TREE_TYPE (val));
  ref = build1 (INDIRECT_REF, value_type, val);

  /* See if what we reference is CONST or VOLATILE, which requires
     looking into array types to get to the component type.  */

  while (TREE_CODE (value_type) == ARRAY_TYPE)
    value_type = TREE_TYPE (value_type);

  TREE_READONLY (ref)
    = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
  TREE_THIS_VOLATILE (ref)
    = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);

  TREE_SIDE_EFFECTS (ref)
    = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));

  return ref;
}
/* Helper for the genericization callback.  Returns true if T denotes
   a RESULT_DECL with DECL_BY_REFERENCE set.  */

static bool
is_byref_result (tree t)
{
  return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
}
/* Tree walking callback for gnat_genericize.  Currently ...

   o Adjust references to the function's DECL_RESULT if it is marked
     DECL_BY_REFERENCE and so has had its type turned into a reference
     type at the end of the function compilation.  */

static tree
gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  /* This implementation is modeled after what the C++ front-end is
     doing, which is the basis of the downstream passes' behavior.  */

  tree stmt = *stmt_p;
  struct pointer_set_t *p_set = (struct pointer_set_t *) data;

  /* If we have a direct mention of the result decl, dereference.  */
  if (is_byref_result (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Otherwise, no need to walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  /* If we are taking the address of what now is a reference, just get the
     reference value.  */
  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_byref_result (TREE_OPERAND (stmt, 0)))
    {
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }

  /* Don't dereference a by-reference RESULT_DECL inside a RETURN_EXPR.  */
  else if (TREE_CODE (stmt) == RETURN_EXPR
           && TREE_OPERAND (stmt, 0)
           && is_byref_result (TREE_OPERAND (stmt, 0)))
    *walk_subtrees = 0;

  /* Don't look inside trees that cannot embed references of interest.  */
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}
/* Perform lowering of Ada trees to GENERIC.  In particular:

   o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
     and adjust all the references to this decl accordingly.  */

static void
gnat_genericize (tree fndecl)
{
  /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
     was handled by simply setting TREE_ADDRESSABLE on the result type.
     Everything required to actually pass by invisible ref using the target
     mechanism (e.g. extra parameter) was handled at RTL expansion time.

     This doesn't work with GCC 4 any more for several reasons.  First, the
     gimplification process might need the creation of temporaries of this
     type, and the gimplifier ICEs on such attempts.  Second, the middle-end
     now relies on a different attribute for such cases (DECL_BY_REFERENCE on
     RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
     be explicitly accounted for by the front-end in the function body.

     We achieve the complete transformation in two steps:

     1/ create_subprog_decl performs early attribute tweaks: it clears
        TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
        the result decl.  The former ensures that the bit isn't set in the GCC
        tree saved for the function, so prevents ICEs on temporary creation.
        The latter we use here to trigger the rest of the processing.

     2/ This function performs the type transformation on the result decl
        and adjusts all the references to this decl from the function body
        accordingly.

     Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
     strategy, which escapes the gimplifier temporary creation issues by
     creating its own temporaries using TARGET_EXPR nodes.  Our way relies
     on simple specific support code in aggregate_value_p to look at the
     target function result decl explicitly.  */

  struct pointer_set_t *p_set;
  tree decl_result = DECL_RESULT (fndecl);

  if (!DECL_BY_REFERENCE (decl_result))
    return;

  /* Make the DECL_RESULT explicitly by-reference and adjust all the
     occurrences in the function body using the common tree-walking facility.
     We want to see every occurrence of the result decl to adjust the
     referencing tree, so need to use our own pointer set to control which
     trees should be visited again or not.  */

  p_set = pointer_set_create ();

  TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
  TREE_ADDRESSABLE (decl_result) = 0;
  relayout_decl (decl_result);

  walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);

  pointer_set_destroy (p_set);
}
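/* As a sketch of the effect: for a function whose RESULT_DECL R of type T has
   DECL_BY_REFERENCE set, the type of R becomes a REFERENCE_TYPE to T and
   every plain mention of R in the saved body is rewritten by
   gnat_genericize_r into an INDIRECT_REF of R, except inside RETURN_EXPR and
   ADDR_EXPR nodes, which are adjusted as described in that callback.  */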
/* Finish the definition of the current subprogram and compile it all the way
   to assembler language output.  BODY is the tree corresponding to
   the subprogram.  */

void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Mark the BLOCK for this level as being for this function and pop the
     level.  Since the vars in it are the parameters, clear them.  */
  BLOCK_VARS (current_binding_level->block) = 0;
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* Deal with inline.  If declared inline or we should default to inline,
     set the flag in the decl.  */
  DECL_INLINE (fndecl)
    = DECL_DECLARED_INLINE_P (fndecl) || flag_inline_trees == 2;

  /* We handle pending sizes via the elaboration of types, so we don't
     need to save them.  */
  get_pending_sizes ();

  /* Mark the RESULT_DECL as being in this subprogram.  */
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  DECL_SAVED_TREE (fndecl) = body;

  current_function_decl = DECL_CONTEXT (fndecl);

  /* We cannot track the location of errors past this point.  */
  error_gnat_node = Empty;

  /* If we're only annotating types, don't actually compile this function.  */
  if (type_annotate_only)
    return;

  /* Perform the required pre-gimplification transformations on the tree.  */
  gnat_genericize (fndecl);

  /* We do different things for nested and non-nested functions.
     ??? This should be in cgraph.  */
  if (!DECL_CONTEXT (fndecl))
    {
      gnat_gimplify_function (fndecl);
      cgraph_finalize_function (fndecl, false);
    }
  else
    /* Register this function with cgraph just far enough to get it
       added to our parent's nested function list.  */
    (void) cgraph_node (fndecl);
}
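/* Only top-level subprograms are handed to cgraph_finalize_function above;
   nested ones are merely registered with cgraph so that they end up on their
   parent's nested-function list and get converted to GIMPLE from
   gnat_gimplify_function when the parent is processed.  */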
/* Convert FNDECL's code to GIMPLE and handle any nested functions.  */

static void
gnat_gimplify_function (tree fndecl)
{
  struct cgraph_node *cgn;

  dump_function (TDI_original, fndecl);
  gimplify_function_tree (fndecl);
  dump_function (TDI_generic, fndecl);

  /* Convert all nested functions to GIMPLE now.  We do things in this order
     so that items like VLA sizes are expanded properly in the context of the
     correct function.  */
  cgn = cgraph_node (fndecl);
  for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
    gnat_gimplify_function (cgn->decl);
}


tree
gnat_builtin_function (tree decl)
{
  gnat_pushdecl (decl, Empty);
  return decl;
}
/* Handle a "const" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_const_attribute (tree *node, tree ARG_UNUSED (name),
                        tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                        bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_READONLY (*node) = 1;
  else
    *no_add_attrs = true;

  return NULL_TREE;
}

/* Handle a "nothrow" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
                          tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                          bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_NOTHROW (*node) = 1;
  else
    *no_add_attrs = true;

  return NULL_TREE;
}
/* Return an integer type with the number of bits of precision given by
   PRECISION.  UNSIGNEDP is nonzero if the type is unsigned; otherwise
   it is a signed type.  */

tree
gnat_type_for_size (unsigned precision, int unsignedp)
{
  tree t;
  char type_name[20];

  if (precision <= 2 * MAX_BITS_PER_WORD
      && signed_and_unsigned_types[precision][unsignedp])
    return signed_and_unsigned_types[precision][unsignedp];

  if (unsignedp)
    t = make_unsigned_type (precision);
  else
    t = make_signed_type (precision);

  if (precision <= 2 * MAX_BITS_PER_WORD)
    signed_and_unsigned_types[precision][unsignedp] = t;

  if (!TYPE_NAME (t))
    {
      sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
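/* For example, gnat_type_for_size (8, 1) yields (and caches) an unsigned
   8-bit integer type, named "UNSIGNED_8" if it had no name yet, while
   gnat_type_for_size (32, 0) yields a signed 32-bit type named "SIGNED_32";
   later calls with the same precision and signedness return the cached
   node.  */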
/* Likewise for floating-point types.  */

static tree
float_type_for_precision (int precision, enum machine_mode mode)
{
  tree t;
  char type_name[20];

  if (float_types[(int) mode])
    return float_types[(int) mode];

  float_types[(int) mode] = t = make_node (REAL_TYPE);
  TYPE_PRECISION (t) = precision;
  layout_type (t);

  gcc_assert (TYPE_MODE (t) == mode);

  if (!TYPE_NAME (t))
    {
      sprintf (type_name, "FLOAT_%d", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
/* Return a data type that has machine mode MODE.  UNSIGNEDP selects
   an unsigned type; otherwise a signed type is returned.  */

tree
gnat_type_for_mode (enum machine_mode mode, int unsignedp)
{
  if (mode == BLKmode)
    return NULL_TREE;
  else if (mode == VOIDmode)
    return void_type_node;
  else if (COMPLEX_MODE_P (mode))
    return NULL_TREE;
  else if (SCALAR_FLOAT_MODE_P (mode))
    return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
  else if (SCALAR_INT_MODE_P (mode))
    return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
  else
    return NULL_TREE;
}
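/* For example, gnat_type_for_mode (SImode, 1) goes through gnat_type_for_size
   (GET_MODE_BITSIZE (SImode), 1), while gnat_type_for_mode (DFmode, 0) goes
   through float_type_for_precision and yields the REAL_TYPE with mode DFmode;
   BLKmode, complex modes and other unsupported modes yield NULL_TREE.  */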
/* Return the unsigned version of a TYPE_NODE, a scalar type.  */

tree
gnat_unsigned_type (tree type_node)
{
  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);

  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
    {
      type = copy_node (type);
      TREE_TYPE (type) = type_node;
    }
  else if (TREE_TYPE (type_node)
           && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
           && TYPE_MODULAR_P (TREE_TYPE (type_node)))
    {
      type = copy_node (type);
      TREE_TYPE (type) = TREE_TYPE (type_node);
    }

  return type;
}

/* Return the signed version of a TYPE_NODE, a scalar type.  */

tree
gnat_signed_type (tree type_node)
{
  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);

  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
    {
      type = copy_node (type);
      TREE_TYPE (type) = type_node;
    }
  else if (TREE_TYPE (type_node)
           && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
           && TYPE_MODULAR_P (TREE_TYPE (type_node)))
    {
      type = copy_node (type);
      TREE_TYPE (type) = TREE_TYPE (type_node);
    }

  return type;
}
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
        {
          tree *argarray;
          int i, n = call_expr_nargs (exp);

          argarray = (tree *) alloca (n * sizeof (tree));
          for (i = 0; i < n; i++)
            argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
          return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
        }
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
         modify.  Otherwise, we treat it like a variable.  */
      if (!CONTAINS_PLACEHOLDER_P (exp))
        return exp;

      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
        max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);

    case tcc_comparison:
      return max_p ? size_one_node : size_zero_node;

    case tcc_unary:
    case tcc_binary:
    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
        {
        case 1:
          if (code == NON_LVALUE_EXPR)
            return max_size (TREE_OPERAND (exp, 0), max_p);
          else
            return
              fold_build1 (code, type,
                           max_size (TREE_OPERAND (exp, 0),
                                     code == NEGATE_EXPR ? !max_p : max_p));

        case 2:
          if (code == COMPOUND_EXPR)
            return max_size (TREE_OPERAND (exp, 1), max_p);

          /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
             may provide a tighter bound on max_size.  */
          if (code == MINUS_EXPR
              && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
            {
              tree lhs = fold_build2 (MINUS_EXPR, type,
                                      TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
                                      TREE_OPERAND (exp, 1));
              tree rhs = fold_build2 (MINUS_EXPR, type,
                                      TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
                                      TREE_OPERAND (exp, 1));
              return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
                                  max_size (lhs, max_p),
                                  max_size (rhs, max_p));
            }

          {
            tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
            tree rhs = max_size (TREE_OPERAND (exp, 1),
                                 code == MINUS_EXPR ? !max_p : max_p);

            /* Special-case wanting the maximum value of a MIN_EXPR.
               In that case, if one side overflows, return the other.
               sizetype is signed, but we know sizes are non-negative.
               Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
               overflowing or the maximum possible value and the RHS
               a variable.  */
            if (max_p
                && code == MIN_EXPR
                && TREE_CODE (rhs) == INTEGER_CST
                && TREE_OVERFLOW (rhs))
              return lhs;
            else if (max_p
                     && code == MIN_EXPR
                     && TREE_CODE (lhs) == INTEGER_CST
                     && TREE_OVERFLOW (lhs))
              return rhs;
            else if ((code == MINUS_EXPR || code == PLUS_EXPR)
                     && ((TREE_CODE (lhs) == INTEGER_CST
                          && TREE_OVERFLOW (lhs))
                         || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
                     && !TREE_CONSTANT (rhs))
              return lhs;
            else
              return fold_build2 (code, type, lhs, rhs);
          }

        case 3:
          if (code == SAVE_EXPR)
            return exp;
          else if (code == COND_EXPR)
            return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
                                max_size (TREE_OPERAND (exp, 1), max_p),
                                max_size (TREE_OPERAND (exp, 2), max_p));
        }

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
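/* As an illustration of the recursion above (example expression made up for
   this comment), for a size such as 8 * (D + 1) where D is a discriminant
   reference wrapped in a PLACEHOLDER_EXPR-based COMPONENT_REF, max_size with
   MAX_P true replaces the reference by TYPE_MAX_VALUE of the discriminant
   subtype and folds the result, giving the largest size any object of the
   type can have; with MAX_P false the minimum value is used instead.  */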
2445 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2446 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2447 Return a constructor for the template. */
2450 build_template (tree template_type
, tree array_type
, tree expr
)
2452 tree template_elts
= NULL_TREE
;
2453 tree bound_list
= NULL_TREE
;
2456 if (TREE_CODE (array_type
) == RECORD_TYPE
2457 && (TYPE_IS_PADDING_P (array_type
)
2458 || TYPE_JUSTIFIED_MODULAR_P (array_type
)))
2459 array_type
= TREE_TYPE (TYPE_FIELDS (array_type
));
2461 if (TREE_CODE (array_type
) == ARRAY_TYPE
2462 || (TREE_CODE (array_type
) == INTEGER_TYPE
2463 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type
)))
2464 bound_list
= TYPE_ACTUAL_BOUNDS (array_type
);
2466 /* First make the list for a CONSTRUCTOR for the template. Go down the
2467 field list of the template instead of the type chain because this
2468 array might be an Ada array of arrays and we can't tell where the
2469 nested arrays stop being the underlying object. */
2471 for (field
= TYPE_FIELDS (template_type
); field
;
2473 ? (bound_list
= TREE_CHAIN (bound_list
))
2474 : (array_type
= TREE_TYPE (array_type
))),
2475 field
= TREE_CHAIN (TREE_CHAIN (field
)))
2477 tree bounds
, min
, max
;
2479 /* If we have a bound list, get the bounds from there. Likewise
2480 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2481 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2482 This will give us a maximum range. */
2484 bounds
= TREE_VALUE (bound_list
);
2485 else if (TREE_CODE (array_type
) == ARRAY_TYPE
)
2486 bounds
= TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type
));
2487 else if (expr
&& TREE_CODE (expr
) == PARM_DECL
2488 && DECL_BY_COMPONENT_PTR_P (expr
))
2489 bounds
= TREE_TYPE (field
);
2493 min
= convert (TREE_TYPE (field
), TYPE_MIN_VALUE (bounds
));
2494 max
= convert (TREE_TYPE (TREE_CHAIN (field
)), TYPE_MAX_VALUE (bounds
));
2496 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2497 substitute it from OBJECT. */
2498 min
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (min
, expr
);
2499 max
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (max
, expr
);
2501 template_elts
= tree_cons (TREE_CHAIN (field
), max
,
2502 tree_cons (field
, min
, template_elts
));
2505 return gnat_build_constructor (template_type
, nreverse (template_elts
));
2508 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2509 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2510 in the type contains in its DECL_INITIAL the expression to use when
2511 a constructor is made for the type. GNAT_ENTITY is an entity used
2512 to print out an error message if the mechanism cannot be applied to
2513 an object of that type and also for the name. */
2516 build_vms_descriptor (tree type
, Mechanism_Type mech
, Entity_Id gnat_entity
)
2518 tree record_type
= make_node (RECORD_TYPE
);
2519 tree pointer32_type
;
2520 tree field_list
= 0;
2529 /* If TYPE is an unconstrained array, use the underlying array type. */
2530 if (TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
2531 type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
))));
2533 /* If this is an array, compute the number of dimensions in the array,
2534 get the index types, and point to the inner type. */
2535 if (TREE_CODE (type
) != ARRAY_TYPE
)
2538 for (ndim
= 1, inner_type
= type
;
2539 TREE_CODE (TREE_TYPE (inner_type
)) == ARRAY_TYPE
2540 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type
));
2541 ndim
++, inner_type
= TREE_TYPE (inner_type
))
2544 idx_arr
= (tree
*) alloca (ndim
* sizeof (tree
));
2546 if (mech
!= By_Descriptor_NCA
2547 && TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_CONVENTION_FORTRAN_P (type
))
2548 for (i
= ndim
- 1, inner_type
= type
;
2550 i
--, inner_type
= TREE_TYPE (inner_type
))
2551 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
2553 for (i
= 0, inner_type
= type
;
2555 i
++, inner_type
= TREE_TYPE (inner_type
))
2556 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
2558 /* Now get the DTYPE value. */
2559 switch (TREE_CODE (type
))
2563 if (TYPE_VAX_FLOATING_POINT_P (type
))
2564 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
2577 switch (GET_MODE_BITSIZE (TYPE_MODE (type
)))
2580 dtype
= TYPE_UNSIGNED (type
) ? 2 : 6;
2583 dtype
= TYPE_UNSIGNED (type
) ? 3 : 7;
2586 dtype
= TYPE_UNSIGNED (type
) ? 4 : 8;
2589 dtype
= TYPE_UNSIGNED (type
) ? 5 : 9;
2592 dtype
= TYPE_UNSIGNED (type
) ? 25 : 26;
2598 dtype
= GET_MODE_BITSIZE (TYPE_MODE (type
)) == 32 ? 52 : 53;
2602 if (TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
2603 && TYPE_VAX_FLOATING_POINT_P (type
))
2604 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
2616 dtype
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) == 32 ? 54: 55;
2627 /* Get the CLASS value. */
2630 case By_Descriptor_A
:
2633 case By_Descriptor_NCA
:
2636 case By_Descriptor_SB
:
2640 case By_Descriptor_S
:
2646 /* Make the type for a descriptor for VMS. The first four fields
2647 are the same for all types. */
2650 = chainon (field_list
,
2651 make_descriptor_field
2652 ("LENGTH", gnat_type_for_size (16, 1), record_type
,
2653 size_in_bytes (mech
== By_Descriptor_A
? inner_type
: type
)));
2655 field_list
= chainon (field_list
,
2656 make_descriptor_field ("DTYPE",
2657 gnat_type_for_size (8, 1),
2658 record_type
, size_int (dtype
)));
2659 field_list
= chainon (field_list
,
2660 make_descriptor_field ("CLASS",
2661 gnat_type_for_size (8, 1),
2662 record_type
, size_int (class)));
2664 /* Of course this will crash at run-time if the address space is not
2665 within the low 32 bits, but there is nothing else we can do. */
2666 pointer32_type
= build_pointer_type_for_mode (type
, SImode
, false);
2669 = chainon (field_list
,
2670 make_descriptor_field
2671 ("POINTER", pointer32_type
, record_type
,
2672 build_unary_op (ADDR_EXPR
,
2674 build0 (PLACEHOLDER_EXPR
, type
))));
2679 case By_Descriptor_S
:
2682 case By_Descriptor_SB
:
2684 = chainon (field_list
,
2685 make_descriptor_field
2686 ("SB_L1", gnat_type_for_size (32, 1), record_type
,
2687 TREE_CODE (type
) == ARRAY_TYPE
2688 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) : size_zero_node
));
2690 = chainon (field_list
,
2691 make_descriptor_field
2692 ("SB_U1", gnat_type_for_size (32, 1), record_type
,
2693 TREE_CODE (type
) == ARRAY_TYPE
2694 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) : size_zero_node
));
2697 case By_Descriptor_A
:
2698 case By_Descriptor_NCA
:
2699 field_list
= chainon (field_list
,
2700 make_descriptor_field ("SCALE",
2701 gnat_type_for_size (8, 1),
2705 field_list
= chainon (field_list
,
2706 make_descriptor_field ("DIGITS",
2707 gnat_type_for_size (8, 1),
2712 = chainon (field_list
,
2713 make_descriptor_field
2714 ("AFLAGS", gnat_type_for_size (8, 1), record_type
,
2715 size_int (mech
== By_Descriptor_NCA
2717 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2718 : (TREE_CODE (type
) == ARRAY_TYPE
2719 && TYPE_CONVENTION_FORTRAN_P (type
)
2722 field_list
= chainon (field_list
,
2723 make_descriptor_field ("DIMCT",
2724 gnat_type_for_size (8, 1),
2728 field_list
= chainon (field_list
,
2729 make_descriptor_field ("ARSIZE",
2730 gnat_type_for_size (32, 1),
2732 size_in_bytes (type
)));
2734 /* Now build a pointer to the 0,0,0... element. */
2735 tem
= build0 (PLACEHOLDER_EXPR
, type
);
2736 for (i
= 0, inner_type
= type
; i
< ndim
;
2737 i
++, inner_type
= TREE_TYPE (inner_type
))
2738 tem
= build4 (ARRAY_REF
, TREE_TYPE (inner_type
), tem
,
2739 convert (TYPE_DOMAIN (inner_type
), size_zero_node
),
2740 NULL_TREE
, NULL_TREE
);
2743 = chainon (field_list
,
2744 make_descriptor_field
2746 build_pointer_type_for_mode (inner_type
, SImode
, false),
2749 build_pointer_type_for_mode (inner_type
, SImode
,
2753 /* Next come the addressing coefficients. */
2754 tem
= size_one_node
;
2755 for (i
= 0; i
< ndim
; i
++)
2759 = size_binop (MULT_EXPR
, tem
,
2760 size_binop (PLUS_EXPR
,
2761 size_binop (MINUS_EXPR
,
2762 TYPE_MAX_VALUE (idx_arr
[i
]),
2763 TYPE_MIN_VALUE (idx_arr
[i
])),
2766 fname
[0] = (mech
== By_Descriptor_NCA
? 'S' : 'M');
2767 fname
[1] = '0' + i
, fname
[2] = 0;
2769 = chainon (field_list
,
2770 make_descriptor_field (fname
,
2771 gnat_type_for_size (32, 1),
2772 record_type
, idx_length
));
2774 if (mech
== By_Descriptor_NCA
)
2778 /* Finally here are the bounds. */
2779 for (i
= 0; i
< ndim
; i
++)
2783 fname
[0] = 'L', fname
[1] = '0' + i
, fname
[2] = 0;
2785 = chainon (field_list
,
2786 make_descriptor_field
2787 (fname
, gnat_type_for_size (32, 1), record_type
,
2788 TYPE_MIN_VALUE (idx_arr
[i
])));
2792 = chainon (field_list
,
2793 make_descriptor_field
2794 (fname
, gnat_type_for_size (32, 1), record_type
,
2795 TYPE_MAX_VALUE (idx_arr
[i
])));
2800 post_error ("unsupported descriptor type for &", gnat_entity
);
2803 finish_record_type (record_type
, field_list
, 0, true);
2804 create_type_decl (create_concat_name (gnat_entity
, "DESC"), record_type
,
2805 NULL
, true, false, gnat_entity
);
2810 /* Utility routine for above code to make a field. */
2813 make_descriptor_field (const char *name
, tree type
,
2814 tree rec_type
, tree initial
)
2817 = create_field_decl (get_identifier (name
), type
, rec_type
, 0, 0, 0, 0);
2819 DECL_INITIAL (field
) = initial
;
2823 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
2824 pointer or fat pointer type. GNAT_SUBPROG is the subprogram to which
2825 the VMS descriptor is passed. */
2828 convert_vms_descriptor (tree gnu_type
, tree gnu_expr
, Entity_Id gnat_subprog
)
2830 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
2831 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
2832 /* The CLASS field is the 3rd field in the descriptor. */
2833 tree
class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type
)));
2834 /* The POINTER field is the 4th field in the descriptor. */
2835 tree pointer
= TREE_CHAIN (class);
2837 /* Retrieve the value of the POINTER field. */
2839 = build3 (COMPONENT_REF
, TREE_TYPE (pointer
), desc
, pointer
, NULL_TREE
);
2841 if (POINTER_TYPE_P (gnu_type
))
2842 return convert (gnu_type
, gnu_expr
);
2844 else if (TYPE_FAT_POINTER_P (gnu_type
))
2846 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (gnu_type
));
2847 tree p_bounds_type
= TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type
)));
2848 tree template_type
= TREE_TYPE (p_bounds_type
);
2849 tree min_field
= TYPE_FIELDS (template_type
);
2850 tree max_field
= TREE_CHAIN (TYPE_FIELDS (template_type
));
2851 tree
template, template_addr
, aflags
, dimct
, t
, u
;
2852 /* See the head comment of build_vms_descriptor. */
2853 int iclass
= TREE_INT_CST_LOW (DECL_INITIAL (class));
2855 /* Convert POINTER to the type of the P_ARRAY field. */
2856 gnu_expr
= convert (p_array_type
, gnu_expr
);
2860 case 1: /* Class S */
2861 case 15: /* Class SB */
2862 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
2863 t
= TYPE_FIELDS (desc_type
);
2864 t
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
2865 t
= tree_cons (min_field
,
2866 convert (TREE_TYPE (min_field
), integer_one_node
),
2867 tree_cons (max_field
,
2868 convert (TREE_TYPE (max_field
), t
),
2870 template = gnat_build_constructor (template_type
, t
);
2871 template_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, template);
2873 /* For class S, we are done. */
2877 /* Test that we really have a SB descriptor, like DEC Ada. */
2878 t
= build3 (COMPONENT_REF
, TREE_TYPE (class), desc
, class, NULL
);
2879 u
= convert (TREE_TYPE (class), DECL_INITIAL (class));
2880 u
= build_binary_op (EQ_EXPR
, integer_type_node
, t
, u
);
2881 /* If so, there is already a template in the descriptor and
2882 it is located right after the POINTER field. */
2883 t
= TREE_CHAIN (pointer
);
2884 template = build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
2885 /* Otherwise use the {1, LENGTH} template we build above. */
2886 template_addr
= build3 (COND_EXPR
, p_bounds_type
, u
,
2887 build_unary_op (ADDR_EXPR
, p_bounds_type
,
2892 case 4: /* Class A */
2893 /* The AFLAGS field is the 7th field in the descriptor. */
2894 t
= TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer
)));
2895 aflags
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
2896 /* The DIMCT field is the 8th field in the descriptor. */
2898 dimct
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
2899 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
2900 or FL_COEFF or FL_BOUNDS not set. */
2901 u
= build_int_cst (TREE_TYPE (aflags
), 192);
2902 u
= build_binary_op (TRUTH_OR_EXPR
, integer_type_node
,
2903 build_binary_op (NE_EXPR
, integer_type_node
,
2905 convert (TREE_TYPE (dimct
),
2907 build_binary_op (NE_EXPR
, integer_type_node
,
2908 build2 (BIT_AND_EXPR
,
2912 add_stmt (build3 (COND_EXPR
, void_type_node
, u
,
2913 build_call_raise (CE_Length_Check_Failed
, Empty
,
2914 N_Raise_Constraint_Error
),
2916 /* There is already a template in the descriptor and it is
2917 located at the start of block 3 (12th field). */
2918 t
= TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t
))));
2919 template = build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
2920 template_addr
= build_unary_op (ADDR_EXPR
, p_bounds_type
, template);
2923 case 10: /* Class NCA */
2925 post_error ("unsupported descriptor type for &", gnat_subprog
);
2926 template_addr
= integer_zero_node
;
2930 /* Build the fat pointer in the form of a constructor. */
2931 t
= tree_cons (TYPE_FIELDS (gnu_type
), gnu_expr
,
2932 tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type
)),
2933 template_addr
, NULL_TREE
));
2934 return gnat_build_constructor (gnu_type
, t
);
2941 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
2942 and the GNAT node GNAT_SUBPROG. */
2945 build_function_stub (tree gnu_subprog
, Entity_Id gnat_subprog
)
2947 tree gnu_subprog_type
, gnu_subprog_addr
, gnu_subprog_call
;
2948 tree gnu_stub_param
, gnu_param_list
, gnu_arg_types
, gnu_param
;
2949 tree gnu_stub_decl
= DECL_FUNCTION_STUB (gnu_subprog
);
2952 gnu_subprog_type
= TREE_TYPE (gnu_subprog
);
2953 gnu_param_list
= NULL_TREE
;
2955 begin_subprog_body (gnu_stub_decl
);
2958 start_stmt_group ();
2960 /* Loop over the parameters of the stub and translate any of them
2961 passed by descriptor into a by reference one. */
2962 for (gnu_stub_param
= DECL_ARGUMENTS (gnu_stub_decl
),
2963 gnu_arg_types
= TYPE_ARG_TYPES (gnu_subprog_type
);
2965 gnu_stub_param
= TREE_CHAIN (gnu_stub_param
),
2966 gnu_arg_types
= TREE_CHAIN (gnu_arg_types
))
2968 if (DECL_BY_DESCRIPTOR_P (gnu_stub_param
))
2969 gnu_param
= convert_vms_descriptor (TREE_VALUE (gnu_arg_types
),
2970 gnu_stub_param
, gnat_subprog
);
2972 gnu_param
= gnu_stub_param
;
2974 gnu_param_list
= tree_cons (NULL_TREE
, gnu_param
, gnu_param_list
);
2977 gnu_body
= end_stmt_group ();
2979 /* Invoke the internal subprogram. */
2980 gnu_subprog_addr
= build1 (ADDR_EXPR
, build_pointer_type (gnu_subprog_type
),
2982 gnu_subprog_call
= build3 (CALL_EXPR
, TREE_TYPE (gnu_subprog_type
),
2983 gnu_subprog_addr
, nreverse (gnu_param_list
),
2986 /* Propagate the return value, if any. */
2987 if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type
)))
2988 append_to_statement_list (gnu_subprog_call
, &gnu_body
);
2990 append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl
),
2996 allocate_struct_function (gnu_stub_decl
);
2997 end_subprog_body (gnu_body
);
3000 /* Build a type to be used to represent an aliased object whose nominal
3001 type is an unconstrained array. This consists of a RECORD_TYPE containing
3002 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3003 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3004 is used to represent an arbitrary unconstrained object. Use NAME
3005 as the name of the record. */
3008 build_unc_object_type (tree template_type
, tree object_type
, tree name
)
3010 tree type
= make_node (RECORD_TYPE
);
3011 tree template_field
= create_field_decl (get_identifier ("BOUNDS"),
3012 template_type
, type
, 0, 0, 0, 1);
3013 tree array_field
= create_field_decl (get_identifier ("ARRAY"), object_type
,
3016 TYPE_NAME (type
) = name
;
3017 TYPE_CONTAINS_TEMPLATE_P (type
) = 1;
3018 finish_record_type (type
,
3019 chainon (chainon (NULL_TREE
, template_field
),
3026 /* Same, taking a thin or fat pointer type instead of a template type. */
3029 build_unc_object_type_from_ptr (tree thin_fat_ptr_type
, tree object_type
,
3034 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type
));
3037 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type
)
3038 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type
))))
3039 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type
))));
3040 return build_unc_object_type (template_type
, object_type
, name
);
3043 /* Shift the component offsets within an unconstrained object TYPE to make it
3044 suitable for use as a designated type for thin pointers. */
3047 shift_unc_components_for_thin_pointers (tree type
)
3049 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3050 allocated past the BOUNDS template. The designated type is adjusted to
3051 have ARRAY at position zero and the template at a negative offset, so
3052 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3054 tree bounds_field
= TYPE_FIELDS (type
);
3055 tree array_field
= TREE_CHAIN (TYPE_FIELDS (type
));
3057 DECL_FIELD_OFFSET (bounds_field
)
3058 = size_binop (MINUS_EXPR
, size_zero_node
, byte_position (array_field
));
3060 DECL_FIELD_OFFSET (array_field
) = size_zero_node
;
3061 DECL_FIELD_BIT_OFFSET (array_field
) = bitsize_zero_node
;
3064 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
3065 the normal case this is just two adjustments, but we have more to do
3066 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
3069 update_pointer_to (tree old_type
, tree new_type
)
3071 tree ptr
= TYPE_POINTER_TO (old_type
);
3072 tree ref
= TYPE_REFERENCE_TO (old_type
);
3076 /* If this is the main variant, process all the other variants first. */
3077 if (TYPE_MAIN_VARIANT (old_type
) == old_type
)
3078 for (type
= TYPE_NEXT_VARIANT (old_type
); type
;
3079 type
= TYPE_NEXT_VARIANT (type
))
3080 update_pointer_to (type
, new_type
);
3082 /* If no pointer or reference, we are done. */
  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it.  After the designated type freeze, we get here with
     a volatile new_type and a dummy old_type with a readonly variant, created
     when the access type was processed.  We shall make a volatile and
     readonly designated type, because that's what it really is.

     We might also get here for a non-dummy old_type variant with different
     qualifiers than the new_type ones, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine from gnat_to_gnu_entity/E_Access_Type).  We have to merge
     the qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with old_type == new_type then.  */
  new_type = build_qualified_type (new_type,
                                   TYPE_QUALS (old_type)
                                   | TYPE_QUALS (new_type));
3109 /* If the new type and the old one are identical, there is nothing to
3111 if (old_type
== new_type
)
3114 /* Otherwise, first handle the simple case. */
3115 if (TREE_CODE (new_type
) != UNCONSTRAINED_ARRAY_TYPE
)
3117 TYPE_POINTER_TO (new_type
) = ptr
;
3118 TYPE_REFERENCE_TO (new_type
) = ref
;
3120 for (; ptr
; ptr
= TYPE_NEXT_PTR_TO (ptr
))
3121 for (ptr1
= TYPE_MAIN_VARIANT (ptr
); ptr1
;
3122 ptr1
= TYPE_NEXT_VARIANT (ptr1
))
3123 TREE_TYPE (ptr1
) = new_type
;
3125 for (; ref
; ref
= TYPE_NEXT_REF_TO (ref
))
3126 for (ref1
= TYPE_MAIN_VARIANT (ref
); ref1
;
3127 ref1
= TYPE_NEXT_VARIANT (ref1
))
3128 TREE_TYPE (ref1
) = new_type
;
3131 /* Now deal with the unconstrained array case. In this case the "pointer"
3132 is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
3133 Turn them into pointers to the correct types using update_pointer_to. */
3134 else if (TREE_CODE (ptr
) != RECORD_TYPE
|| !TYPE_IS_FAT_POINTER_P (ptr
))
3139 tree new_obj_rec
= TYPE_OBJECT_RECORD_TYPE (new_type
);
3140 tree array_field
= TYPE_FIELDS (ptr
);
3141 tree bounds_field
= TREE_CHAIN (TYPE_FIELDS (ptr
));
3142 tree new_ptr
= TYPE_POINTER_TO (new_type
);
3146 /* Make pointers to the dummy template point to the real template. */
3148 (TREE_TYPE (TREE_TYPE (bounds_field
)),
3149 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr
)))));
3151 /* The references to the template bounds present in the array type
3152 are made through a PLACEHOLDER_EXPR of type new_ptr. Since we
3153 are updating ptr to make it a full replacement for new_ptr as
3154 pointer to new_type, we must rework the PLACEHOLDER_EXPR so as
3155 to make it of type ptr. */
3156 new_ref
= build3 (COMPONENT_REF
, TREE_TYPE (bounds_field
),
3157 build0 (PLACEHOLDER_EXPR
, ptr
),
3158 bounds_field
, NULL_TREE
);
3160 /* Create the new array for the new PLACEHOLDER_EXPR and make
3161 pointers to the dummy array point to it.
3163 ??? This is now the only use of substitute_in_type,
3164 which is a very "heavy" routine to do this, so it
3165 should be replaced at some point. */
3167 (TREE_TYPE (TREE_TYPE (array_field
)),
3168 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr
))),
3169 TREE_CHAIN (TYPE_FIELDS (new_ptr
)), new_ref
));
3171 /* Make ptr the pointer to new_type. */
3172 TYPE_POINTER_TO (new_type
) = TYPE_REFERENCE_TO (new_type
)
3173 = TREE_TYPE (new_type
) = ptr
;
3175 for (var
= TYPE_MAIN_VARIANT (ptr
); var
; var
= TYPE_NEXT_VARIANT (var
))
3176 SET_TYPE_UNCONSTRAINED_ARRAY (var
, new_type
);
3178 /* Now handle updating the allocation record, what the thin pointer
3179 points to. Update all pointers from the old record into the new
3180 one, update the type of the array field, and recompute the size. */
3181 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type
), new_obj_rec
);
3183 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec
)))
3184 = TREE_TYPE (TREE_TYPE (array_field
));
3186 /* The size recomputation needs to account for alignment constraints, so
3187 we let layout_type work it out. This will reset the field offsets to
3188 what they would be in a regular record, so we shift them back to what
3189 we want them to be for a thin pointer designated type afterwards. */
3190 DECL_SIZE (TYPE_FIELDS (new_obj_rec
)) = 0;
3191 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec
))) = 0;
3192 TYPE_SIZE (new_obj_rec
) = 0;
3193 layout_type (new_obj_rec
);
3195 shift_unc_components_for_thin_pointers (new_obj_rec
);
3197 /* We are done, at last. */
3198 rest_of_record_type_compilation (ptr
);
3202 /* Convert a pointer to a constrained array into a pointer to a fat
3203 pointer. This involves making or finding a template. */
3206 convert_to_fat_pointer (tree type
, tree expr
)
3208 tree template_type
= TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type
))));
3209 tree
template, template_addr
;
3210 tree etype
= TREE_TYPE (expr
);
3212 /* If EXPR is a constant of zero, we make a fat pointer that has a null
3213 pointer to the template and array. */
3214 if (integer_zerop (expr
))
3216 gnat_build_constructor
3218 tree_cons (TYPE_FIELDS (type
),
3219 convert (TREE_TYPE (TYPE_FIELDS (type
)), expr
),
3220 tree_cons (TREE_CHAIN (TYPE_FIELDS (type
)),
3221 convert (build_pointer_type (template_type
),
3225 /* If EXPR is a thin pointer, make the template and data from the record. */
3227 else if (TYPE_THIN_POINTER_P (etype
))
3229 tree fields
= TYPE_FIELDS (TREE_TYPE (etype
));
3231 expr
= save_expr (expr
);
3232 if (TREE_CODE (expr
) == ADDR_EXPR
)
3233 expr
= TREE_OPERAND (expr
, 0);
3235 expr
= build1 (INDIRECT_REF
, TREE_TYPE (etype
), expr
);
3237 template = build_component_ref (expr
, NULL_TREE
, fields
, false);
3238 expr
= build_unary_op (ADDR_EXPR
, NULL_TREE
,
3239 build_component_ref (expr
, NULL_TREE
,
3240 TREE_CHAIN (fields
), false));
3243 /* Otherwise, build the constructor for the template. */
3244 template = build_template (template_type
, TREE_TYPE (etype
), expr
);
3246 template_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, template);
3248 /* The result is a CONSTRUCTOR for the fat pointer.
3250 If expr is an argument of a foreign convention subprogram, the type it
3251 points to is directly the component type. In this case, the expression
3252 type may not match the corresponding FIELD_DECL type at this point, so we
3253 call "convert" here to fix that up if necessary. This type consistency is
3254 required, for instance because it ensures that possible later folding of
3255 component_refs against this constructor always yields something of the
3256 same type as the initial reference.
3258 Note that the call to "build_template" above is still fine, because it
3259 will only refer to the provided template_type in this case. */
3261 gnat_build_constructor
3262 (type
, tree_cons (TYPE_FIELDS (type
),
3263 convert (TREE_TYPE (TYPE_FIELDS (type
)), expr
),
3264 tree_cons (TREE_CHAIN (TYPE_FIELDS (type
)),
3265 template_addr
, NULL_TREE
)));
/* Convert to a thin pointer type, TYPE.  The only thing we know how to convert
   is something that is a fat pointer, so convert to it first if EXPR
   is not already a fat pointer.  */
3273 convert_to_thin_pointer (tree type
, tree expr
)
3275 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr
)))
3277 = convert_to_fat_pointer
3278 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type
))), expr
);
3280 /* We get the pointer to the data and use a NOP_EXPR to make it the
3282 expr
= build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (TREE_TYPE (expr
)),
3284 expr
= build1 (NOP_EXPR
, type
, expr
);
3289 /* Create an expression whose value is that of EXPR,
3290 converted to type TYPE. The TREE_TYPE of the value
3291 is always TYPE. This function implements all reasonable
3292 conversions; callers should filter out those that are
3293 not permitted by the language being compiled. */
3296 convert (tree type
, tree expr
)
3298 enum tree_code code
= TREE_CODE (type
);
3299 tree etype
= TREE_TYPE (expr
);
3300 enum tree_code ecode
= TREE_CODE (etype
);
3302 /* If EXPR is already the right type, we are done. */
3306 /* If both input and output have padding and are of variable size, do this
3307 as an unchecked conversion. Likewise if one is a mere variant of the
3308 other, so we avoid a pointless unpad/repad sequence. */
3309 else if (ecode
== RECORD_TYPE
&& code
== RECORD_TYPE
3310 && TYPE_IS_PADDING_P (type
) && TYPE_IS_PADDING_P (etype
)
3311 && (!TREE_CONSTANT (TYPE_SIZE (type
))
3312 || !TREE_CONSTANT (TYPE_SIZE (etype
))
3313 || TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (etype
)))
3316 /* If the output type has padding, make a constructor to build the
3318 else if (code
== RECORD_TYPE
&& TYPE_IS_PADDING_P (type
))
3320 /* If we previously converted from another type and our type is
3321 of variable size, remove the conversion to avoid the need for
3322 variable-size temporaries. */
3323 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
3324 && !TREE_CONSTANT (TYPE_SIZE (type
)))
3325 expr
= TREE_OPERAND (expr
, 0);
3327 /* If we are just removing the padding from expr, convert the original
3328 object if we have variable size. That will avoid the need
3329 for some variable-size temporaries. */
3330 if (TREE_CODE (expr
) == COMPONENT_REF
3331 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr
, 0))) == RECORD_TYPE
3332 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr
, 0)))
3333 && !TREE_CONSTANT (TYPE_SIZE (type
)))
3334 return convert (type
, TREE_OPERAND (expr
, 0));
3336 /* If the result type is a padded type with a self-referentially-sized
3337 field and the expression type is a record, do this as an
3338 unchecked conversion. */
3339 else if (TREE_CODE (etype
) == RECORD_TYPE
3340 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type
))))
3341 return unchecked_convert (type
, expr
, false);
3345 gnat_build_constructor (type
,
3346 tree_cons (TYPE_FIELDS (type
),
3348 (TYPE_FIELDS (type
)),
3353 /* If the input type has padding, remove it and convert to the output type.
3354 The conditions ordering is arranged to ensure that the output type is not
3355 a padding type here, as it is not clear whether the conversion would
3356 always be correct if this was to happen. */
3357 else if (ecode
== RECORD_TYPE
&& TYPE_IS_PADDING_P (etype
))
3361 /* If we have just converted to this padded type, just get the
3362 inner expression. */
3363 if (TREE_CODE (expr
) == CONSTRUCTOR
3364 && !VEC_empty (constructor_elt
, CONSTRUCTOR_ELTS (expr
))
3365 && VEC_index (constructor_elt
, CONSTRUCTOR_ELTS (expr
), 0)->index
3366 == TYPE_FIELDS (etype
))
3368 = VEC_index (constructor_elt
, CONSTRUCTOR_ELTS (expr
), 0)->value
;
3370 /* Otherwise, build an explicit component reference. */
3373 = build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (etype
), false);
3375 return convert (type
, unpadded
);
3378 /* If the input is a biased type, adjust first. */
3379 if (ecode
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (etype
))
3380 return convert (type
, fold_build2 (PLUS_EXPR
, TREE_TYPE (etype
),
3381 fold_convert (TREE_TYPE (etype
),
3383 TYPE_MIN_VALUE (etype
)));
  /* If the input is a justified modular type, we need to extract the actual
     object before converting it to any other type with the exceptions of an
     unconstrained array or of a mere type variant.  It is useful to avoid the
     extraction and conversion in the type variant case because it could end
     up replacing a VAR_DECL expr by a constructor and we might be about to
     take the address of the result.  */
3391 if (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
)
3392 && code
!= UNCONSTRAINED_ARRAY_TYPE
3393 && TYPE_MAIN_VARIANT (type
) != TYPE_MAIN_VARIANT (etype
))
3394 return convert (type
, build_component_ref (expr
, NULL_TREE
,
3395 TYPE_FIELDS (etype
), false));
3397 /* If converting to a type that contains a template, convert to the data
3398 type and then build the template. */
3399 if (code
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (type
))
3401 tree obj_type
= TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type
)));
3403 /* If the source already has a template, get a reference to the
3404 associated array only, as we are going to rebuild a template
3405 for the target type anyway. */
3406 expr
= maybe_unconstrained_array (expr
);
3409 gnat_build_constructor
3411 tree_cons (TYPE_FIELDS (type
),
3412 build_template (TREE_TYPE (TYPE_FIELDS (type
)),
3413 obj_type
, NULL_TREE
),
3414 tree_cons (TREE_CHAIN (TYPE_FIELDS (type
)),
3415 convert (obj_type
, expr
), NULL_TREE
)));
3418 /* There are some special cases of expressions that we process
3420 switch (TREE_CODE (expr
))
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
         conversion in gnat_expand_expr.  NULL_EXPR does not represent
         an actual value, so no conversion is needed.  */
3429 expr
= copy_node (expr
);
3430 TREE_TYPE (expr
) = type
;
3434 /* If we are converting a STRING_CST to another constrained array type,
3435 just make a new one in the proper type. */
3436 if (code
== ecode
&& AGGREGATE_TYPE_P (etype
)
3437 && !(TREE_CODE (TYPE_SIZE (etype
)) == INTEGER_CST
3438 && TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
))
3440 expr
= copy_node (expr
);
3441 TREE_TYPE (expr
) = type
;
3447 /* If we are converting a CONSTRUCTOR to another constrained array type
3448 with the same domain, just make a new one in the proper type. */
3449 if (code
== ecode
&& code
== ARRAY_TYPE
3450 && TREE_TYPE (type
) == TREE_TYPE (etype
)
3451 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (type
)),
3452 TYPE_MIN_VALUE (TYPE_DOMAIN (etype
)))
3453 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (type
)),
3454 TYPE_MAX_VALUE (TYPE_DOMAIN (etype
))))
3456 expr
= copy_node (expr
);
3457 TREE_TYPE (expr
) = type
;
3462 case UNCONSTRAINED_ARRAY_REF
:
3463 /* Convert this to the type of the inner array by getting the address of
3464 the array from the template. */
3465 expr
= build_unary_op (INDIRECT_REF
, NULL_TREE
,
3466 build_component_ref (TREE_OPERAND (expr
, 0),
3467 get_identifier ("P_ARRAY"),
3469 etype
= TREE_TYPE (expr
);
3470 ecode
= TREE_CODE (etype
);
3473 case VIEW_CONVERT_EXPR
:
3475 /* GCC 4.x is very sensitive to type consistency overall, and view
3476 conversions thus are very frequent. Even though just "convert"ing
3477 the inner operand to the output type is fine in most cases, it
3478 might expose unexpected input/output type mismatches in special
3479 circumstances so we avoid such recursive calls when we can. */
3481 tree op0
= TREE_OPERAND (expr
, 0);
3483 /* If we are converting back to the original type, we can just
3484 lift the input conversion. This is a common occurrence with
3485 switches back-and-forth amongst type variants. */
3486 if (type
== TREE_TYPE (op0
))
3489 /* Otherwise, if we're converting between two aggregate types, we
3490 might be allowed to substitute the VIEW_CONVERT target type in
3491 place or to just convert the inner expression. */
3492 if (AGGREGATE_TYPE_P (type
) && AGGREGATE_TYPE_P (etype
))
3494 /* If we are converting between type variants, we can just
3495 substitute the VIEW_CONVERT in place. */
3496 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (etype
))
3497 return build1 (VIEW_CONVERT_EXPR
, type
, op0
);
3499 /* Otherwise, we may just bypass the input view conversion unless
3500 one of the types is a fat pointer, which is handled by
3501 specialized code below which relies on exact type matching. */
3502 else if (!TYPE_FAT_POINTER_P (type
) && !TYPE_FAT_POINTER_P (etype
))
3503 return convert (type
, op0
);
    case INDIRECT_REF:
      /* If both types are record types, just convert the pointer and
         make a new INDIRECT_REF.

         ??? Disable this for now since it causes problems with the
         code in build_binary_op for MODIFY_EXPR which wants to
         strip off conversions.  But that code really is a mess and
         we need to do this a much better way some time.  */
      if (0
          && (TREE_CODE (type) == RECORD_TYPE
              || TREE_CODE (type) == UNION_TYPE)
          && (TREE_CODE (etype) == RECORD_TYPE
              || TREE_CODE (etype) == UNION_TYPE)
          && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
        return build_unary_op (INDIRECT_REF, NULL_TREE,
                               convert (build_pointer_type (type),
                                        TREE_OPERAND (expr, 0)));
      break;

    default:
      break;
    }
  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  /* If we're converting between two aggregate types that have the same main
     variant, just make a VIEW_CONVERT_EXPR.  */
  else if (AGGREGATE_TYPE_P (type)
           && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
    return build1 (VIEW_CONVERT_EXPR, type, expr);

  /* In all other cases of related types, make a NOP_EXPR.  */
  else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
           || (code == INTEGER_CST && ecode == INTEGER_CST
               && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
    return fold_convert (type, expr);

  switch (code)
    {
    case VOID_TYPE:
      return fold_build1 (CONVERT_EXPR, type, expr);

    case BOOLEAN_TYPE:
      return fold_convert (type, gnat_truthvalue_conversion (expr));
    case INTEGER_TYPE:
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
          && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
              || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
        return unchecked_convert (type, expr, false);
      else if (TYPE_BIASED_REPRESENTATION_P (type))
        return fold_convert (type,
                             fold_build2 (MINUS_EXPR, TREE_TYPE (type),
                                          convert (TREE_TYPE (type), expr),
                                          TYPE_MIN_VALUE (type)));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
      return fold (convert_to_integer (type, expr));
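    /* Illustration only, not part of the original sources: with a biased
       representation the stored bits encode the logical value minus the
       type's lower bound.  For a hypothetical subtype with range 100 .. 107
       held in 3 bits, converting the logical value 103 to the biased type
       stores 103 - 100 = 3, which is what the MINUS_EXPR with
       TYPE_MIN_VALUE above computes; converting out of a biased type
       (handled earlier in this function) adds the bound back.  */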
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two pointers to records denoting
         both a template and type, adjust if needed to account
         for any differing offsets, since one might be negative.  */
      if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
        {
          tree bit_diff
            = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
                           bit_position (TYPE_FIELDS (TREE_TYPE (type))));
          tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
                                       sbitsize_int (BITS_PER_UNIT));

          expr = build1 (NOP_EXPR, type, expr);
          TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
          if (integer_zerop (byte_diff))
            return expr;

          return build_binary_op (POINTER_PLUS_EXPR, type, expr,
                                  fold (convert (sizetype, byte_diff)));
        }
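      /* Illustration only, not part of the original sources: a thin pointer
         designates a record of the form { template; data } and, depending
         on the type, its first field may sit at a different bit position
         (one of them can even be negative).  If the first field is at bit 0
         in one record type and at bit -64 in the other, the difference of
         64 bits (8 bytes) computed above is added to the pointer so that
         both views designate the same underlying object.  The concrete
         offsets are made up for the sketch.  */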
      /* If converting to a thin pointer, handle specially.  */
      if (TYPE_THIN_POINTER_P (type)
          && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
        return convert_to_thin_pointer (type, expr);

      /* If converting fat pointer to normal pointer, get the pointer to the
         array and then convert it.  */
      else if (TYPE_FAT_POINTER_P (etype))
        expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
                                    NULL_TREE, false);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));
    case RECORD_TYPE:
      if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
        return
          gnat_build_constructor
            (type, tree_cons (TYPE_FIELDS (type),
                              convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
                              NULL_TREE));

      /* ... fall through ... */

    case ARRAY_TYPE:
      /* In these cases, assume the front-end has validated the conversion.
         If the conversion is valid, it will be a bit-wise conversion, so
         it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, false);

    case UNION_TYPE:
      /* This is either a conversion between a tagged type and some
         subtype, which we have to mark as a UNION_TYPE because of
         overlapping fields, or a conversion of an Unchecked_Union.  */
      return unchecked_convert (type, expr, false);
    case UNCONSTRAINED_ARRAY_TYPE:
      /* If EXPR is a constrained array, take its address, convert it to a
         fat pointer, and then dereference it.  Likewise if EXPR is a
         record containing both a template and a constrained array.
         Note that a record representing a justified modular type
         always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
          || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
          || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
          || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
        return
          build_unary_op
            (INDIRECT_REF, NULL_TREE,
             convert_to_fat_pointer (TREE_TYPE (type),
                                     build_unary_op (ADDR_EXPR,
                                                     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
         array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
        return
          build_unary_op (INDIRECT_REF, NULL_TREE,
                          convert (TREE_TYPE (type),
                                   build_unary_op (ADDR_EXPR,
                                                   NULL_TREE, expr)));
      else
        gcc_unreachable ();

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gcc_unreachable ();
    }
}
/* Remove all conversions that are done in EXP.  This includes converting
   from a padded type or to a justified modular type.  If TRUE_ADDRESS
   is true, always return the address of the containing object even if
   the address is not bit-aligned.  */

tree
remove_conversions (tree exp, bool true_address)
{
  switch (TREE_CODE (exp))
    {
    case CONSTRUCTOR:
      if (true_address
          && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
        return
          remove_conversions (VEC_index (constructor_elt,
                                         CONSTRUCTOR_ELTS (exp), 0)->value,
                              true);
      break;

    case COMPONENT_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
          && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
        return remove_conversions (TREE_OPERAND (exp, 0), true_address);
      break;

    case VIEW_CONVERT_EXPR:  case NON_LVALUE_EXPR:
    case NOP_EXPR:  case CONVERT_EXPR:
      return remove_conversions (TREE_OPERAND (exp, 0), true_address);

    default:
      break;
    }

  return exp;
}
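/* Illustration only, not part of the original sources: given a tree of the
   form NOP_EXPR (VIEW_CONVERT_EXPR (COMPONENT_REF (padded_object, F))),
   remove_conversions peels off the NOP_EXPR and VIEW_CONVERT_EXPR and,
   because the COMPONENT_REF extracts the field of a padding record,
   continues into the padded object itself, so the caller gets back the
   underlying object rather than any of the wrappers.  The exact nesting
   shown here is made up for the sketch.  */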
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If its type has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.  */

tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree new;

  switch (TREE_CODE (TREE_TYPE (exp)))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
        {
          new
            = build_unary_op (INDIRECT_REF, NULL_TREE,
                              build_component_ref (TREE_OPERAND (exp, 0),
                                                   get_identifier ("P_ARRAY"),
                                                   NULL_TREE, false));
          TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
          return new;
        }

      else if (code == NULL_EXPR)
        return build1 (NULL_EXPR,
                       TREE_TYPE (TREE_TYPE (TYPE_FIELDS
                                             (TREE_TYPE (TREE_TYPE (exp))))),
                       TREE_OPERAND (exp, 0));

    case RECORD_TYPE:
      /* If this is a padded type, convert to the unpadded type and see if
         it contains a template.  */
      if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
        {
          new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
          if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
              && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
            return
              build_component_ref (new, NULL_TREE,
                                   TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
                                   0);
        }
      else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
        return
          build_component_ref (exp, NULL_TREE,
                               TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
      break;

    default:
      break;
    }

  return exp;
}
/* Return an expression that does an unchecked conversion of EXPR to TYPE.
   If NOTRUNC_P is true, truncation operations should be suppressed.  */

tree
unchecked_convert (tree type, tree expr, bool notrunc_p)
{
  tree etype = TREE_TYPE (expr);

  /* If the expression is already the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both types are integral, just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if ((((INTEGRAL_TYPE_P (type)
         && !(TREE_CODE (type) == INTEGER_TYPE
              && TYPE_VAX_FLOATING_POINT_P (type)))
        || (POINTER_TYPE_P (type) && !TYPE_THIN_POINTER_P (type))
        || (TREE_CODE (type) == RECORD_TYPE
            && TYPE_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
            && !(TREE_CODE (etype) == INTEGER_TYPE
                 && TYPE_VAX_FLOATING_POINT_P (etype)))
           || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
           || (TREE_CODE (etype) == RECORD_TYPE
               && TYPE_JUSTIFIED_MODULAR_P (etype))))
      || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    {
      tree rtype = type;
      bool final_unchecked = false;

      if (TREE_CODE (etype) == INTEGER_TYPE
          && TYPE_BIASED_REPRESENTATION_P (etype))
        {
          tree ntype = copy_type (etype);

          TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
          TYPE_MAIN_VARIANT (ntype) = ntype;

          expr = build1 (NOP_EXPR, ntype, expr);
        }

      if (TREE_CODE (type) == INTEGER_TYPE
          && TYPE_BIASED_REPRESENTATION_P (type))
        {
          rtype = copy_type (type);
          TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
          TYPE_MAIN_VARIANT (rtype) = rtype;
        }

      /* We have another special case: if we are unchecked converting a
         subtype into a base type, we need to ensure that VRP doesn't
         propagate range information since this conversion may be done
         precisely to validate that the object is within the range it is
         supposed to have.  */
      else if (TREE_CODE (expr) != INTEGER_CST
               && TREE_CODE (type) == INTEGER_TYPE && !TREE_TYPE (type)
               && ((TREE_CODE (etype) == INTEGER_TYPE && TREE_TYPE (etype))
                   || TREE_CODE (etype) == ENUMERAL_TYPE
                   || TREE_CODE (etype) == BOOLEAN_TYPE))
        {
          /* The optimization barrier is a VIEW_CONVERT_EXPR node; moreover,
             in order not to be deemed a useless type conversion, it must
             be from subtype to base type.

             ??? This may raise addressability and/or aliasing issues because
             VIEW_CONVERT_EXPR gets gimplified as an lvalue, thus causing the
             address of its operand to be taken if it is deemed addressable
             and not already in GIMPLE form.  */
          rtype = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
          rtype = copy_type (rtype);
          TYPE_MAIN_VARIANT (rtype) = rtype;
          TREE_TYPE (rtype) = type;
          final_unchecked = true;
        }

      expr = convert (rtype, expr);
      if (type != rtype)
        expr = build1 (final_unchecked ? VIEW_CONVERT_EXPR : NOP_EXPR,
                       type, expr);
    }
  /* If we are converting TO an integral type whose precision is not the
     same as its size, first unchecked convert to a record that contains
     an object of the output type.  Then extract the field.  */
  else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
           && 0 != compare_tree_int (TYPE_RM_SIZE (type),
                                     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), type,
                                      rec_type, 1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, NULL_TREE, field, 0);
    }
  /* Similarly for an integral input type whose precision is not equal to
     its size.  */
  else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
           && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
                                     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field
        = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
                             1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
      expr = unchecked_convert (type, expr, notrunc_p);
    }
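  /* Illustration only, not part of the original sources: for a hypothetical
     24-bit integer type carried in a 32-bit machine mode, the two branches
     above wrap the value in a one-field record, conceptually

         struct { INT24 OBJ; }

     so that the unchecked (bit-for-bit) conversion is applied to a record
     wrapping the value rather than directly to an integral type whose
     precision and mode size disagree; the OBJ field is then extracted (or
     the wrapping constructor built) on the other side.  The 24/32 figures
     are made up for the sketch.  */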
  /* We have a special case when we are converting between two
     unconstrained array types.  In that case, take the address,
     convert the fat pointer types, and dereference.  */
  else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
           && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
                           build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
                                   build_unary_op (ADDR_EXPR, NULL_TREE,
                                                   expr)));
  else
    {
      expr = maybe_unconstrained_array (expr);

      /* There's no point in doing two unchecked conversions in a row.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
        expr = TREE_OPERAND (expr, 0);

      etype = TREE_TYPE (expr);
      expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }
  /* If the result is an integral type whose size is not equal to
     the size of the underlying machine type, sign- or zero-extend
     the result.  We need not do this in the case where the input is
     an integral type of the same precision and signedness, or if the output
     is a biased type, or if both the input and output are unsigned.  */
  if (!notrunc_p
      && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
      && !(TREE_CODE (type) == INTEGER_TYPE
           && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
                                GET_MODE_BITSIZE (TYPE_MODE (type)))
      && !(INTEGRAL_TYPE_P (etype)
           && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
           && operand_equal_p (TYPE_RM_SIZE (type),
                               (TYPE_RM_SIZE (etype) != 0
                                ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
                               0))
      && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
    {
      tree base_type = gnat_type_for_mode (TYPE_MODE (type),
                                           TYPE_UNSIGNED (type));
      tree shift_expr
        = convert (base_type,
                   size_binop (MINUS_EXPR,
                               bitsize_int
                               (GET_MODE_BITSIZE (TYPE_MODE (type))),
                               TYPE_RM_SIZE (type)));
      expr
        = convert (type,
                   build_binary_op (RSHIFT_EXPR, base_type,
                                    build_binary_op (LSHIFT_EXPR, base_type,
                                                     convert (base_type,
                                                              expr),
                                                     shift_expr),
                                    shift_expr));
    }
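  /* Illustration only, not part of the original sources: for a signed type
     with a 5-bit RM size held in an 8-bit mode, the shift count computed
     above is 8 - 5 = 3; shifting the value left by 3 and then (arithmetic)
     right by 3 replicates bit 4 into the top three bits, i.e. sign-extends
     the 5-bit value, while the same pair of shifts on an unsigned base type
     zero-extends it.  The 5/8 figures are made up for the sketch.  */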
  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
                           OEP_ONLY_CONST))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
/* Search the chain of currently available builtin declarations for a node
   corresponding to function NAME (an IDENTIFIER_NODE).  Return the first node
   found, if any, or NULL_TREE otherwise.  */
tree
builtin_decl_for (tree name)
{
  unsigned i;
  tree decl;

  for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
    if (DECL_NAME (decl) == name)
      return decl;

  return NULL_TREE;
}
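/* Illustration only, not part of the original sources: a typical call would
   be builtin_decl_for (get_identifier ("memcpy")), which walks the
   builtin_decls vector and returns the corresponding FUNCTION_DECL if that
   builtin has been registered, or NULL_TREE otherwise; "memcpy" is only an
   example name here.  */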
/* Return the appropriate GCC tree code for the specified GNAT type,
   the latter being a record type as predicated by Is_Record_Type.  */

enum tree_code
tree_code_for_record_type (Entity_Id gnat_type)
{
  Node_Id component_list
    = Component_List (Type_Definition
                      (Declaration_Node
                       (Implementation_Base_Type (gnat_type))));
  Node_Id component;

  /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
     we have a non-discriminant field outside a variant.  In either case,
     it's a RECORD_TYPE.  */

  if (!Is_Unchecked_Union (gnat_type))
    return RECORD_TYPE;

  for (component = First_Non_Pragma (Component_Items (component_list));
       Present (component);
       component = Next_Non_Pragma (component))
    if (Ekind (Defining_Entity (component)) == E_Component)
      return RECORD_TYPE;

  return UNION_TYPE;
}
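/* Illustration only, not part of the original sources: for an Ada
   Unchecked_Union whose components all live inside the variant part, the
   loop above finds no E_Component outside a variant and the type is laid
   out as a C-style union (UNION_TYPE); any ordinary component declared
   before the variant part forces a RECORD_TYPE instead.  */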
/* Return true if GNU_TYPE is suitable as the type of a non-aliased
   component of an aggregate type.  */

bool
type_for_nonaliased_component_p (tree gnu_type)
{
  /* If the type is passed by reference, we may have pointers to the
     component so it cannot be made non-aliased.  */
  if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
    return false;

  /* We used to say that any component of aggregate type is aliased
     because the front-end may take 'Reference of it.  The front-end
     has been enhanced in the meantime so as to use a renaming instead
     in most cases, but the back-end can probably take the address of
     such a component too so we go for the conservative stance.

     For instance, we might need the address of any array type, even
     if normally passed by copy, to construct a fat pointer if the
     component is used as an actual for an unconstrained formal.

     Likewise for record types: even if a specific record subtype is
     passed by copy, the parent type might be passed by ref (e.g. if
     it's of variable size) and we might take the address of a child
     component to pass to a parent formal.  We have no way to check
     for such conditions here.  */
  if (AGGREGATE_TYPE_P (gnu_type))
    return false;

  return true;
}
/* Perform final processing on global variables.  */

void
gnat_write_global_declarations (void)
{
  /* Proceed to optimize and emit assembly.
     FIXME: shouldn't be the front end's responsibility to call this.  */
  cgraph_optimize ();

  /* Emit debug info for all global declarations.  */
  emit_debug_global_declarations (VEC_address (tree, global_decls),
                                  VEC_length (tree, global_decls));
}
#include "gt-ada-utils.h"
#include "gtype-ada.h"