1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2004, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
20 * MA 02111-1307, USA. *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
25 ****************************************************************************/
29 #include "coretypes.h"
42 #include "tree-inline.h"
43 #include "tree-gimple.h"
44 #include "tree-dump.h"
60 #ifndef MAX_FIXED_MODE_SIZE
61 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
64 #ifndef MAX_BITS_PER_WORD
65 #define MAX_BITS_PER_WORD BITS_PER_WORD
68 /* If nonzero, pretend we are allocating at global level. */
71 /* Tree nodes for the various types and decls we create. */
72 tree gnat_std_decls
[(int) ADT_LAST
];
74 /* Functions to call for each of the possible raise reasons. */
75 tree gnat_raise_decls
[(int) LAST_REASON_CODE
+ 1];
77 /* Associates a GNAT tree node to a GCC tree node. It is used in
78 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
79 of `save_gnu_tree' for more info. */
80 static GTY((length ("max_gnat_nodes"))) tree
*associate_gnat_to_gnu
;
82 /* This variable keeps a table for types for each precision so that we only
83 allocate each of them once. Signed and unsigned types are kept separate.
85 Note that these types are only used when fold-const requests something
86 special. Perhaps we should NOT share these types; we'll see how it
88 static GTY(()) tree signed_and_unsigned_types
[2 * MAX_BITS_PER_WORD
+ 1][2];
90 /* Likewise for float types, but record these by mode. */
91 static GTY(()) tree float_types
[NUM_MACHINE_MODES
];
93 /* For each binding contour we allocate a binding_level structure to indicate
96 struct gnat_binding_level
GTY((chain_next ("%h.chain")))
/* REVIEW: extraction gap -- the struct's opening brace is not visible in
   this view, and the member declarations matching the two field comments
   below (presumably a BLOCK-valued field and a setjmp-buffer decl field)
   are missing; confirm against the upstream source.  */
98 /* The binding level containing this one (the enclosing binding level). */
99 struct gnat_binding_level
*chain
;
100 /* The BLOCK node for this level. */
102 /* If nonzero, the setjmp buffer that needs to be updated for any
103 variable-sized definition within this context. */
107 /* The binding level currently in effect. */
108 static GTY(()) struct gnat_binding_level
*current_binding_level
;
110 /* A chain of gnat_binding_level structures awaiting reuse. */
111 static GTY((deletable
)) struct gnat_binding_level
*free_binding_level
;
113 /* A chain of unused BLOCK nodes. */
114 static GTY((deletable
)) tree free_block_chain
;
116 struct language_function
GTY(())
/* REVIEW: the body of this GC-marked per-function structure is not
   visible in this extract -- confirm its members against upstream.  */
121 static void gnat_define_builtin (const char *, tree
, int, const char *, bool);
122 static void gnat_install_builtins (void);
123 static tree
merge_sizes (tree
, tree
, tree
, bool, bool);
124 static tree
compute_related_constant (tree
, tree
);
125 static tree
split_plus (tree
, tree
*);
126 static bool value_zerop (tree
);
127 static void gnat_gimplify_function (tree
);
128 static void gnat_finalize (tree
);
129 static tree
float_type_for_precision (int, enum machine_mode
);
130 static tree
convert_to_fat_pointer (tree
, tree
);
131 static tree
convert_to_thin_pointer (tree
, tree
);
132 static tree
make_descriptor_field (const char *,tree
, tree
, tree
);
133 static bool value_factor_p (tree
, HOST_WIDE_INT
);
134 static bool potential_alignment_gap (tree
, tree
, tree
);
136 /* Initialize the association of GNAT nodes to GCC trees. */
139 init_gnat_to_gnu (void)
141 associate_gnat_to_gnu
142 = (tree
*) ggc_alloc_cleared (max_gnat_nodes
* sizeof (tree
));
145 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
146 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
147 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
149 If GNU_DECL is zero, a previous association is to be reset. */
152 save_gnu_tree (Entity_Id gnat_entity
, tree gnu_decl
, bool no_check
)
/* REVIEW: extraction gap -- the opening brace, the `if (gnu_decl'
   beginning of the sanity check, and the abort statement the comment
   below refers to (gigi 401) are not visible in this view.  */
154 /* Check that GNAT_ENTITY is not already defined and that it is being set
155 to something which is a decl. Raise gigi 401 if not. Usually, this
156 means GNAT_ENTITY is defined twice, but occasionally is due to some
159 && (associate_gnat_to_gnu
[gnat_entity
- First_Node_Id
]
160 || (!no_check
&& !DECL_P (gnu_decl
))))
/* Record (or clear, when GNU_DECL is zero) the association; entity ids
   are offset by First_Node_Id to index the table from zero.  */
163 associate_gnat_to_gnu
[gnat_entity
- First_Node_Id
] = gnu_decl
;
166 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
167 Return the ..._DECL node that was associated with it. If there is no tree
168 node associated with GNAT_ENTITY, abort.
170 In some cases, such as delayed elaboration or expressions that need to
171 be elaborated only once, GNAT_ENTITY is really not an entity. */
174 get_gnu_tree (Entity_Id gnat_entity
)
/* Guard: no association recorded for this entity.  REVIEW: the body of
   this `if' (the abort mentioned in the comment above) is not visible
   in this extract.  */
176 if (!associate_gnat_to_gnu
[gnat_entity
- First_Node_Id
])
179 return associate_gnat_to_gnu
[gnat_entity
- First_Node_Id
];
182 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
185 present_gnu_tree (Entity_Id gnat_entity
)
187 return (associate_gnat_to_gnu
[gnat_entity
- First_Node_Id
]) != 0;
191 /* Return non-zero if we are currently in the global binding level. */
194 global_bindings_p (void)
196 return ((force_global
|| !current_function_decl
) ? -1 : 0);
199 /* Enter a new binding level. */
/* REVIEW: the function header (return type, name -- presumably
   gnat_pushlevel -- and opening brace) is not visible in this extract.  */
204 struct gnat_binding_level
*newlevel
= NULL
;
206 /* Reuse a struct for this binding level, if there is one. */
207 if (free_binding_level
)
209 newlevel
= free_binding_level
;
210 free_binding_level
= free_binding_level
->chain
;
/* REVIEW: the `else' branch header and the start of the assignment to
   the new level (original lines 211-213) are not visible here; the
   GC allocation below is presumably that branch's body.  */
214 = (struct gnat_binding_level
*)
215 ggc_alloc (sizeof (struct gnat_binding_level
));
217 /* Use a free BLOCK, if any; otherwise, allocate one. */
218 if (free_block_chain
)
220 newlevel
->block
= free_block_chain
;
221 free_block_chain
= TREE_CHAIN (free_block_chain
);
/* Detach the recycled BLOCK from the free chain.  */
222 TREE_CHAIN (newlevel
->block
) = NULL_TREE
;
225 newlevel
->block
= make_node (BLOCK
);
227 /* Point the BLOCK we just made to its parent. */
228 if (current_binding_level
)
229 BLOCK_SUPERCONTEXT (newlevel
->block
) = current_binding_level
->block
;
/* Start with empty variable and subblock chains, and mark the BLOCK
   used so it is kept.  */
231 BLOCK_VARS (newlevel
->block
) = BLOCK_SUBBLOCKS (newlevel
->block
) = NULL_TREE
;
232 TREE_USED (newlevel
->block
) = 1;
234 /* Add this level to the front of the chain (stack) of levels that are
236 newlevel
->chain
= current_binding_level
;
237 newlevel
->jmpbuf_decl
= NULL_TREE
;
238 current_binding_level
= newlevel
;
241 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
242 and point FNDECL to this BLOCK. */
245 set_current_block_context (tree fndecl
)
247 BLOCK_SUPERCONTEXT (current_binding_level
->block
) = fndecl
;
248 DECL_INITIAL (fndecl
) = current_binding_level
->block
;
251 /* Set the jmpbuf_decl for the current binding level to DECL. */
254 set_block_jmpbuf_decl (tree decl
)
256 current_binding_level
->jmpbuf_decl
= decl
;
259 /* Get the jmpbuf_decl, if any, for the current binding level. */
262 get_block_jmpbuf_decl ()
264 return current_binding_level
->jmpbuf_decl
;
267 /* Exit a binding level. Set any BLOCK into the current code group. */
/* REVIEW: the function header (return type, name -- presumably
   gnat_poplevel -- and opening brace) is not visible in this extract,
   nor are several interior brace lines.  */
272 struct gnat_binding_level
*level
= current_binding_level
;
273 tree block
= level
->block
;
/* The chains were built in reverse; restore source order.  */
275 BLOCK_VARS (block
) = nreverse (BLOCK_VARS (block
));
276 BLOCK_SUBBLOCKS (block
) = nreverse (BLOCK_SUBBLOCKS (block
));
278 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
279 are no variables free the block and merge its subblocks into those of its
280 parent block. Otherwise, add it to the list of its parent. */
281 if (TREE_CODE (BLOCK_SUPERCONTEXT (block
)) == FUNCTION_DECL
)
283 else if (BLOCK_VARS (block
) == NULL_TREE
)
/* Variable-less BLOCK: hoist its subblocks into the parent ...  */
285 BLOCK_SUBBLOCKS (level
->chain
->block
)
286 = chainon (BLOCK_SUBBLOCKS (block
),
287 BLOCK_SUBBLOCKS (level
->chain
->block
));
/* ... and recycle the now-empty BLOCK node.  */
288 TREE_CHAIN (block
) = free_block_chain
;
289 free_block_chain
= block
;
/* Otherwise keep the BLOCK, prepending it to the parent's subblocks.  */
293 TREE_CHAIN (block
) = BLOCK_SUBBLOCKS (level
->chain
->block
);
294 BLOCK_SUBBLOCKS (level
->chain
->block
) = block
;
295 TREE_USED (block
) = 1;
296 set_block_for_group (block
);
299 /* Free this binding structure. */
300 current_binding_level
= level
->chain
;
301 level
->chain
= free_binding_level
;
302 free_binding_level
= level
;
305 /* Insert BLOCK at the end of the list of subblocks of the
306 current binding level. This is used when a BIND_EXPR is expanded,
307 to handle the BLOCK node inside the BIND_EXPR. */
310 insert_block (tree block
)
312 TREE_USED (block
) = 1;
313 TREE_CHAIN (block
) = BLOCK_SUBBLOCKS (current_binding_level
->block
);
314 BLOCK_SUBBLOCKS (current_binding_level
->block
) = block
;
317 /* Records a ..._DECL node DECL as belonging to the current lexical scope
318 and uses GNAT_NODE for location information. */
321 gnat_pushdecl (tree decl
, Node_Id gnat_node
)
/* REVIEW: extraction gap -- the opening brace and the `else' keyword
   between the two DECL_CONTEXT assignments (original line 327) are not
   visible in this view.  */
323 /* If at top level, there is no context. But PARM_DECLs always go in the
324 level of its function. */
325 if (global_bindings_p () && TREE_CODE (decl
) != PARM_DECL
)
326 DECL_CONTEXT (decl
) = 0;
328 DECL_CONTEXT (decl
) = current_function_decl
;
330 /* Set the location of DECL and emit a declaration for it. */
331 if (Present (gnat_node
))
332 Sloc_to_locus (Sloc (gnat_node
), &DECL_SOURCE_LOCATION (decl
));
333 add_decl_expr (decl
, gnat_node
);
335 /* Put the declaration on the list. The list of declarations is in reverse
336 order. The list will be reversed later. We don't do this for global
337 variables. Also, don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
338 the list. They will cause trouble with the debugger and aren't needed
340 if (!global_bindings_p ()
341 && (TREE_CODE (decl
) != TYPE_DECL
342 || TREE_CODE (TREE_TYPE (decl
)) != UNCONSTRAINED_ARRAY_TYPE
))
/* Prepend DECL to the current BLOCK's variable chain.  */
344 TREE_CHAIN (decl
) = BLOCK_VARS (current_binding_level
->block
);
345 BLOCK_VARS (current_binding_level
->block
) = decl
;
348 /* For the declaration of a type, set its name if it either is not already
349 set, was set to an IDENTIFIER_NODE, indicating an internal name,
350 or if the previous type name was not derived from a source name.
351 We'd rather have the type named with a real name and all the pointer
352 types to the same object have the same POINTER_TYPE node. Code in this
353 function in c-decl.c makes a copy of the type node here, but that may
354 cause us trouble with incomplete types, so let's not try it (at least
357 if (TREE_CODE (decl
) == TYPE_DECL
/* REVIEW: an additional conjunct (original line 358) is missing from
   this view between the TYPE_DECL test and the clauses below.  */
359 && (!TYPE_NAME (TREE_TYPE (decl
))
360 || TREE_CODE (TYPE_NAME (TREE_TYPE (decl
))) == IDENTIFIER_NODE
361 || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl
))) == TYPE_DECL
362 && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl
)))
363 && !DECL_ARTIFICIAL (decl
))))
364 TYPE_NAME (TREE_TYPE (decl
)) = decl
;
/* CONST_DECLs need no code or debug info of their own.  */
366 if (TREE_CODE (decl
) != CONST_DECL
)
367 rest_of_decl_compilation (decl
, global_bindings_p (), 0);
370 /* Do little here. Set up the standard declarations later after the
371 front end has been run. */
374 gnat_init_decl_processing (void)
/* REVIEW: extraction gap -- the return type line, opening brace, and
   several argument lines of the gnat_pushdecl calls below (the type
   operands and trailing Empty arguments) are not visible in this view.  */
378 /* Make the binding_level structure for global names. */
379 current_function_decl
= 0;
380 current_binding_level
= 0;
381 free_binding_level
= 0;
384 build_common_tree_nodes (true, true);
386 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
387 corresponding to the size of Pmode. In most cases when ptr_mode and
388 Pmode differ, C will use the width of ptr_mode as sizetype. But we get
389 far better code using the width of Pmode. Make this here since we need
390 this before we can expand the GNAT types. */
391 size_type_node
= gnat_type_for_size (GET_MODE_BITSIZE (Pmode
), 0);
392 set_sizetype (size_type_node
);
393 build_common_tree_nodes_2 (0);
395 /* Give names and make TYPE_DECLs for common types. */
396 gnat_pushdecl (build_decl (TYPE_DECL
, get_identifier (SIZE_TYPE
), sizetype
),
398 gnat_pushdecl (build_decl (TYPE_DECL
, get_identifier ("integer"),
401 gnat_pushdecl (build_decl (TYPE_DECL
, get_identifier ("unsigned char"),
404 gnat_pushdecl (build_decl (TYPE_DECL
, get_identifier ("long integer"),
405 long_integer_type_node
),
/* Generic void pointer used throughout for runtime interfaces.  */
408 ptr_void_type_node
= build_pointer_type (void_type_node
);
410 gnat_install_builtins ();
413 /* Define a builtin function. This is temporary and is just being done
414 to initialize *_built_in_decls for the middle-end. We'll want
415 to do full builtin processing soon. */
418 gnat_define_builtin (const char *name
, tree type
,
419 int function_code
, const char *library_name
, bool const_p
)
/* Build an external, public FUNCTION_DECL named NAME of type TYPE.  */
421 tree decl
= build_decl (FUNCTION_DECL
, get_identifier (name
), type
);
423 DECL_EXTERNAL (decl
) = 1;
424 TREE_PUBLIC (decl
) = 1;
/* REVIEW: a line between TREE_PUBLIC and the assembler-name setting
   (original line 425) is missing from this view -- likely a null check
   of library_name; confirm against upstream.  */
426 SET_DECL_ASSEMBLER_NAME (decl
, get_identifier (library_name
));
427 make_decl_rtl (decl
);
428 gnat_pushdecl (decl
, Empty
);
/* Register the decl as the normal builtin for FUNCTION_CODE and mark
   it readonly (const) when CONST_P.  */
429 DECL_BUILT_IN_CLASS (decl
) = BUILT_IN_NORMAL
;
430 DECL_FUNCTION_CODE (decl
) = function_code
;
431 TREE_READONLY (decl
) = const_p
;
433 implicit_built_in_decls
[function_code
] = decl
;
434 built_in_decls
[function_code
] = decl
;
437 /* Install the builtin functions the middle-end needs. */
440 gnat_install_builtins ()
/* REVIEW: extraction gap -- the local declarations of `tmp' and `ftype',
   several brace lines, and the trailing arguments of some
   gnat_define_builtin calls (library names / const flags) are not
   visible in this view.  Each builtin below is declared by building its
   argument list back-to-front with tree_cons, then its function type.  */
445 tmp
= tree_cons (NULL_TREE
, long_integer_type_node
, void_list_node
);
446 tmp
= tree_cons (NULL_TREE
, long_integer_type_node
, tmp
);
447 ftype
= build_function_type (long_integer_type_node
, tmp
);
448 gnat_define_builtin ("__builtin_expect", ftype
, BUILT_IN_EXPECT
,
449 "__builtin_expect", true);
451 tmp
= tree_cons (NULL_TREE
, size_type_node
, void_list_node
);
452 tmp
= tree_cons (NULL_TREE
, ptr_void_type_node
, tmp
);
453 tmp
= tree_cons (NULL_TREE
, ptr_void_type_node
, tmp
);
454 ftype
= build_function_type (ptr_void_type_node
, tmp
);
455 gnat_define_builtin ("__builtin_memcpy", ftype
, BUILT_IN_MEMCPY
,
458 tmp
= tree_cons (NULL_TREE
, size_type_node
, void_list_node
);
459 tmp
= tree_cons (NULL_TREE
, ptr_void_type_node
, tmp
);
460 tmp
= tree_cons (NULL_TREE
, ptr_void_type_node
, tmp
);
461 ftype
= build_function_type (integer_type_node
, tmp
);
462 gnat_define_builtin ("__builtin_memcmp", ftype
, BUILT_IN_MEMCMP
,
465 tmp
= tree_cons (NULL_TREE
, size_type_node
, void_list_node
);
466 tmp
= tree_cons (NULL_TREE
, integer_type_node
, tmp
);
467 tmp
= tree_cons (NULL_TREE
, ptr_void_type_node
, tmp
);
468 ftype
= build_function_type (integer_type_node
, tmp
);
469 gnat_define_builtin ("__builtin_memset", ftype
, BUILT_IN_MEMSET
,
472 tmp
= tree_cons (NULL_TREE
, integer_type_node
, void_list_node
);
473 ftype
= build_function_type (integer_type_node
, tmp
);
474 gnat_define_builtin ("__builtin_clz", ftype
, BUILT_IN_CLZ
, "clz", true);
476 tmp
= tree_cons (NULL_TREE
, long_integer_type_node
, void_list_node
);
477 ftype
= build_function_type (integer_type_node
, tmp
);
478 gnat_define_builtin ("__builtin_clzl", ftype
, BUILT_IN_CLZL
, "clzl", true);
480 tmp
= tree_cons (NULL_TREE
, long_long_integer_type_node
, void_list_node
);
481 ftype
= build_function_type (integer_type_node
, tmp
);
482 gnat_define_builtin ("__builtin_clzll", ftype
, BUILT_IN_CLZLL
, "clzll",
485 /* The init_trampoline and adjust_trampoline builtins aren't used directly.
486 They are inserted during lowering of nested functions. */
488 tmp
= tree_cons (NULL_TREE
, ptr_void_type_node
, void_list_node
);
489 tmp
= tree_cons (NULL_TREE
, ptr_void_type_node
, tmp
);
490 tmp
= tree_cons (NULL_TREE
, ptr_void_type_node
, tmp
);
491 ftype
= build_function_type (void_type_node
, tmp
);
492 gnat_define_builtin ("__builtin_init_trampoline", ftype
,
493 BUILT_IN_INIT_TRAMPOLINE
, "init_trampoline", false);
495 tmp
= tree_cons (NULL_TREE
, ptr_void_type_node
, void_list_node
);
496 ftype
= build_function_type (ptr_void_type_node
, tmp
);
497 gnat_define_builtin ("__builtin_adjust_trampoline", ftype
,
498 BUILT_IN_ADJUST_TRAMPOLINE
, "adjust_trampoline", true);
500 /* The stack_save, stack_restore, and alloca builtins aren't used directly.
501 They are inserted during gimplification to implement variable sized stack
504 ftype
= build_function_type (ptr_void_type_node
, void_list_node
);
505 gnat_define_builtin ("__builtin_stack_save", ftype
, BUILT_IN_STACK_SAVE
,
506 "stack_save", false);
508 tmp
= tree_cons (NULL_TREE
, ptr_void_type_node
, void_list_node
);
509 ftype
= build_function_type (void_type_node
, tmp
);
510 gnat_define_builtin ("__builtin_stack_restore", ftype
,
511 BUILT_IN_STACK_RESTORE
, "stack_restore", false);
513 tmp
= tree_cons (NULL_TREE
, size_type_node
, void_list_node
);
514 ftype
= build_function_type (ptr_void_type_node
, tmp
);
515 gnat_define_builtin ("__builtin_alloca", ftype
, BUILT_IN_ALLOCA
,
519 /* Create the predefined scalar types such as `integer_type_node' needed
520 in the gcc back-end and initialize the global binding level. */
523 init_gigi_decls (tree long_long_float_type
, tree exception_type
)
/* REVIEW: extraction gap -- the return type line, opening brace, local
   declarations (endlink, jmpbuf_type, i, name, decl, ...), and several
   assignment-target lines (e.g. the decl each create_subprog_decl call
   is stored into) are not visible in this view.  */
528 /* Set the types that GCC and Gigi use from the front end. We would like
529 to do this for char_type_node, but it needs to correspond to the C
531 if (TREE_CODE (TREE_TYPE (long_long_float_type
)) == INTEGER_TYPE
)
533 /* In this case, the builtin floating point types are VAX float,
534 so make up a type for use. */
535 longest_float_type_node
= make_node (REAL_TYPE
);
536 TYPE_PRECISION (longest_float_type_node
) = LONG_DOUBLE_TYPE_SIZE
;
537 layout_type (longest_float_type_node
);
538 create_type_decl (get_identifier ("longest float type"),
539 longest_float_type_node
, NULL
, false, true, Empty
);
542 longest_float_type_node
= TREE_TYPE (long_long_float_type
);
544 except_type_node
= TREE_TYPE (exception_type
);
546 unsigned_type_node
= gnat_type_for_size (INT_TYPE_SIZE
, 1);
547 create_type_decl (get_identifier ("unsigned int"), unsigned_type_node
,
548 NULL
, false, true, Empty
);
550 void_type_decl_node
= create_type_decl (get_identifier ("void"),
551 void_type_node
, NULL
, false, true,
/* Type of a parameterless void function and a pointer to it.  */
554 void_ftype
= build_function_type (void_type_node
, NULL_TREE
);
555 ptr_void_ftype
= build_pointer_type (void_ftype
);
557 /* Now declare runtime functions. */
558 endlink
= tree_cons (NULL_TREE
, void_type_node
, NULL_TREE
);
560 /* malloc is a function declaration tree for a function to allocate
562 malloc_decl
= create_subprog_decl (get_identifier ("__gnat_malloc"),
564 build_function_type (ptr_void_type_node
,
565 tree_cons (NULL_TREE
,
568 NULL_TREE
, false, true, true, NULL
,
571 /* free is a function declaration tree for a function to free memory. */
573 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE
,
574 build_function_type (void_type_node
,
575 tree_cons (NULL_TREE
,
578 NULL_TREE
, false, true, true, NULL
, Empty
);
580 /* Make the types and functions used for exception processing. */
582 = build_array_type (gnat_type_for_mode (Pmode
, 0),
583 build_index_type (build_int_cst (NULL_TREE
, 5)));
584 create_type_decl (get_identifier ("JMPBUF_T"), jmpbuf_type
, NULL
,
586 jmpbuf_ptr_type
= build_pointer_type (jmpbuf_type
);
588 /* Functions to get and set the jumpbuf pointer for the current thread. */
590 = create_subprog_decl
591 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
592 NULL_TREE
, build_function_type (jmpbuf_ptr_type
, NULL_TREE
),
593 NULL_TREE
, false, true, true, NULL
, Empty
);
596 = create_subprog_decl
597 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
599 build_function_type (void_type_node
,
600 tree_cons (NULL_TREE
, jmpbuf_ptr_type
, endlink
)),
601 NULL_TREE
, false, true, true, NULL
, Empty
);
603 /* Function to get the current exception. */
605 = create_subprog_decl
606 (get_identifier ("system__soft_links__get_gnat_exception"),
608 build_function_type (build_pointer_type (except_type_node
), NULL_TREE
),
609 NULL_TREE
, false, true, true, NULL
, Empty
);
611 /* Functions that raise exceptions. */
613 = create_subprog_decl
614 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE
,
615 build_function_type (void_type_node
,
616 tree_cons (NULL_TREE
,
617 build_pointer_type (except_type_node
),
619 NULL_TREE
, false, true, true, NULL
, Empty
);
621 /* Hooks to call when entering/leaving an exception handler. */
623 = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE
,
624 build_function_type (void_type_node
,
625 tree_cons (NULL_TREE
,
628 NULL_TREE
, false, true, true, NULL
, Empty
);
631 = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE
,
632 build_function_type (void_type_node
,
633 tree_cons (NULL_TREE
,
636 NULL_TREE
, false, true, true, NULL
, Empty
);
638 /* If in no exception handlers mode, all raise statements are redirected to
639 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
640 this procedure will never be called in this mode. */
641 if (No_Exception_Handlers_Set ())
644 = create_subprog_decl
645 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE
,
646 build_function_type (void_type_node
,
647 tree_cons (NULL_TREE
,
648 build_pointer_type (char_type_node
),
649 tree_cons (NULL_TREE
,
652 NULL_TREE
, false, true, true, NULL
, Empty
);
/* In that mode, every raise reason shares the single handler decl.  */
654 for (i
= 0; i
< ARRAY_SIZE (gnat_raise_decls
); i
++)
655 gnat_raise_decls
[i
] = decl
;
658 /* Otherwise, make one decl for each exception reason. */
659 for (i
= 0; i
< ARRAY_SIZE (gnat_raise_decls
); i
++)
663 sprintf (name
, "__gnat_rcheck_%.2d", i
);
665 = create_subprog_decl
666 (get_identifier (name
), NULL_TREE
,
667 build_function_type (void_type_node
,
668 tree_cons (NULL_TREE
,
671 tree_cons (NULL_TREE
,
674 NULL_TREE
, false, true, true, NULL
, Empty
);
677 /* Indicate that these never return. */
678 TREE_THIS_VOLATILE (raise_nodefer_decl
) = 1;
679 TREE_SIDE_EFFECTS (raise_nodefer_decl
) = 1;
680 TREE_TYPE (raise_nodefer_decl
)
681 = build_qualified_type (TREE_TYPE (raise_nodefer_decl
),
684 for (i
= 0; i
< ARRAY_SIZE (gnat_raise_decls
); i
++)
686 TREE_THIS_VOLATILE (gnat_raise_decls
[i
]) = 1;
687 TREE_SIDE_EFFECTS (gnat_raise_decls
[i
]) = 1;
688 TREE_TYPE (gnat_raise_decls
[i
])
689 = build_qualified_type (TREE_TYPE (gnat_raise_decls
[i
]),
693 /* setjmp returns an integer and has one operand, which is a pointer to
696 = create_subprog_decl
697 (get_identifier ("__builtin_setjmp"), NULL_TREE
,
698 build_function_type (integer_type_node
,
699 tree_cons (NULL_TREE
, jmpbuf_ptr_type
, endlink
)),
700 NULL_TREE
, false, true, true, NULL
, Empty
);
/* Mark setjmp as the corresponding GCC builtin.  */
702 DECL_BUILT_IN_CLASS (setjmp_decl
) = BUILT_IN_NORMAL
;
703 DECL_FUNCTION_CODE (setjmp_decl
) = BUILT_IN_SETJMP
;
705 /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
707 update_setjmp_buf_decl
708 = create_subprog_decl
709 (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE
,
710 build_function_type (void_type_node
,
711 tree_cons (NULL_TREE
, jmpbuf_ptr_type
, endlink
)),
712 NULL_TREE
, false, true, true, NULL
, Empty
);
714 DECL_BUILT_IN_CLASS (update_setjmp_buf_decl
) = BUILT_IN_NORMAL
;
715 DECL_FUNCTION_CODE (update_setjmp_buf_decl
) = BUILT_IN_UPDATE_SETJMP_BUF
;
717 main_identifier_node
= get_identifier ("main");
720 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL nodes
721 (FIELDLIST), finish constructing the record or union type. If HAS_REP is
722 true, this record has a rep clause; don't call layout_type but merely set
723 the size and alignment ourselves. If DEFER_DEBUG is true, do not call
724 the debugging routines on this type; it will be done later. */
727 finish_record_type (tree record_type
, tree fieldlist
, bool has_rep
,
730 enum tree_code code
= TREE_CODE (record_type
);
731 tree ada_size
= bitsize_zero_node
;
732 tree size
= bitsize_zero_node
;
733 tree size_unit
= size_zero_node
;
734 bool var_size
= false;
737 TYPE_FIELDS (record_type
) = fieldlist
;
738 TYPE_STUB_DECL (record_type
)
739 = build_decl (TYPE_DECL
, NULL_TREE
, record_type
);
741 /* We don't need both the typedef name and the record name output in
742 the debugging information, since they are the same. */
743 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type
)) = 1;
745 /* Globally initialize the record first. If this is a rep'ed record,
746 that just means some initializations; otherwise, layout the record. */
750 TYPE_ALIGN (record_type
) = MAX (BITS_PER_UNIT
, TYPE_ALIGN (record_type
));
751 TYPE_MODE (record_type
) = BLKmode
;
752 if (!TYPE_SIZE (record_type
))
754 TYPE_SIZE (record_type
) = bitsize_zero_node
;
755 TYPE_SIZE_UNIT (record_type
) = size_zero_node
;
757 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
758 out just like a UNION_TYPE, since the size will be fixed. */
759 else if (code
== QUAL_UNION_TYPE
)
764 /* Ensure there isn't a size already set. There can be in an error
765 case where there is a rep clause but all fields have errors and
766 no longer have a position. */
767 TYPE_SIZE (record_type
) = 0;
768 layout_type (record_type
);
771 /* At this point, the position and size of each field is known. It was
772 either set before entry by a rep clause, or by laying out the type above.
774 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
775 to compute the Ada size; the GCC size and alignment (for rep'ed records
776 that are not padding types); and the mode (for rep'ed records). We also
777 clear the DECL_BIT_FIELD indication for the cases we know have not been
778 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
780 if (code
== QUAL_UNION_TYPE
)
781 fieldlist
= nreverse (fieldlist
);
783 for (field
= fieldlist
; field
; field
= TREE_CHAIN (field
))
785 tree pos
= bit_position (field
);
787 tree type
= TREE_TYPE (field
);
788 tree this_size
= DECL_SIZE (field
);
789 tree this_size_unit
= DECL_SIZE_UNIT (field
);
790 tree this_ada_size
= DECL_SIZE (field
);
792 /* We need to make an XVE/XVU record if any field has variable size,
793 whether or not the record does. For example, if we have an union,
794 it may be that all fields, rounded up to the alignment, have the
795 same size, in which case we'll use that size. But the debug
796 output routines (except Dwarf2) won't be able to output the fields,
797 so we need to make the special record. */
798 if (TREE_CODE (this_size
) != INTEGER_CST
)
801 if ((TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
802 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
803 && !TYPE_IS_FAT_POINTER_P (type
)
804 && !TYPE_CONTAINS_TEMPLATE_P (type
)
805 && TYPE_ADA_SIZE (type
))
806 this_ada_size
= TYPE_ADA_SIZE (type
);
808 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
809 if (DECL_BIT_FIELD (field
) && !STRICT_ALIGNMENT
810 && value_factor_p (pos
, BITS_PER_UNIT
)
811 && operand_equal_p (this_size
, TYPE_SIZE (type
), 0))
812 DECL_BIT_FIELD (field
) = 0;
814 /* If we still have DECL_BIT_FIELD set at this point, we know the field
815 is technically not addressable. Except that it can actually be
816 addressed if the field is BLKmode and happens to be properly
818 DECL_NONADDRESSABLE_P (field
)
819 |= DECL_BIT_FIELD (field
) && DECL_MODE (field
) != BLKmode
;
821 if (has_rep
&& !DECL_BIT_FIELD (field
))
822 TYPE_ALIGN (record_type
)
823 = MAX (TYPE_ALIGN (record_type
), DECL_ALIGN (field
));
828 ada_size
= size_binop (MAX_EXPR
, ada_size
, this_ada_size
);
829 size
= size_binop (MAX_EXPR
, size
, this_size
);
830 size_unit
= size_binop (MAX_EXPR
, size_unit
, this_size_unit
);
833 case QUAL_UNION_TYPE
:
835 = fold (build (COND_EXPR
, bitsizetype
, DECL_QUALIFIER (field
),
836 this_ada_size
, ada_size
));
837 size
= fold (build (COND_EXPR
, bitsizetype
, DECL_QUALIFIER (field
),
839 size_unit
= fold (build (COND_EXPR
, sizetype
, DECL_QUALIFIER (field
),
840 this_size_unit
, size_unit
));
844 /* Since we know here that all fields are sorted in order of
845 increasing bit position, the size of the record is one
846 higher than the ending bit of the last field processed
847 unless we have a rep clause, since in that case we might
848 have a field outside a QUAL_UNION_TYPE that has a higher ending
849 position. So use a MAX in that case. Also, if this field is a
850 QUAL_UNION_TYPE, we need to take into account the previous size in
851 the case of empty variants. */
853 = merge_sizes (ada_size
, pos
, this_ada_size
,
854 TREE_CODE (type
) == QUAL_UNION_TYPE
, has_rep
);
855 size
= merge_sizes (size
, pos
, this_size
,
856 TREE_CODE (type
) == QUAL_UNION_TYPE
, has_rep
);
858 = merge_sizes (size_unit
, byte_position (field
), this_size_unit
,
859 TREE_CODE (type
) == QUAL_UNION_TYPE
, has_rep
);
867 if (code
== QUAL_UNION_TYPE
)
868 nreverse (fieldlist
);
870 /* If this is a padding record, we never want to make the size smaller than
871 what was specified in it, if any. */
872 if (TREE_CODE (record_type
) == RECORD_TYPE
873 && TYPE_IS_PADDING_P (record_type
) && TYPE_SIZE (record_type
))
875 size
= TYPE_SIZE (record_type
);
876 size_unit
= TYPE_SIZE_UNIT (record_type
);
879 /* Now set any of the values we've just computed that apply. */
880 if (!TYPE_IS_FAT_POINTER_P (record_type
)
881 && !TYPE_CONTAINS_TEMPLATE_P (record_type
))
882 SET_TYPE_ADA_SIZE (record_type
, ada_size
);
886 if (!(TREE_CODE (record_type
) == RECORD_TYPE
887 && TYPE_IS_PADDING_P (record_type
)
888 && CONTAINS_PLACEHOLDER_P (size
)))
890 TYPE_SIZE (record_type
) = round_up (size
, TYPE_ALIGN (record_type
));
891 TYPE_SIZE_UNIT (record_type
)
892 = round_up (size_unit
,
893 TYPE_ALIGN (record_type
) / BITS_PER_UNIT
);
896 compute_record_mode (record_type
);
901 /* If this record is of variable size, rename it so that the
902 debugger knows it is and make a new, parallel, record
903 that tells the debugger how the record is laid out. See
904 exp_dbug.ads. But don't do this for records that are padding
905 since they confuse GDB. */
907 && !(TREE_CODE (record_type
) == RECORD_TYPE
908 && TYPE_IS_PADDING_P (record_type
)))
911 = make_node (TREE_CODE (record_type
) == QUAL_UNION_TYPE
912 ? UNION_TYPE
: TREE_CODE (record_type
));
913 tree orig_name
= TYPE_NAME (record_type
);
915 = (TREE_CODE (orig_name
) == TYPE_DECL
? DECL_NAME (orig_name
)
918 = concat_id_with_name (orig_id
,
919 TREE_CODE (record_type
) == QUAL_UNION_TYPE
921 tree last_pos
= bitsize_zero_node
;
923 tree prev_old_field
= 0;
925 TYPE_NAME (new_record_type
) = new_id
;
926 TYPE_ALIGN (new_record_type
) = BIGGEST_ALIGNMENT
;
927 TYPE_STUB_DECL (new_record_type
)
928 = build_decl (TYPE_DECL
, NULL_TREE
, new_record_type
);
929 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type
)) = 1;
930 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type
))
931 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type
));
932 TYPE_SIZE (new_record_type
) = size_int (TYPE_ALIGN (record_type
));
933 TYPE_SIZE_UNIT (new_record_type
)
934 = size_int (TYPE_ALIGN (record_type
) / BITS_PER_UNIT
);
936 /* Now scan all the fields, replacing each field with a new
937 field corresponding to the new encoding. */
938 for (old_field
= TYPE_FIELDS (record_type
); old_field
;
939 old_field
= TREE_CHAIN (old_field
))
941 tree field_type
= TREE_TYPE (old_field
);
942 tree field_name
= DECL_NAME (old_field
);
944 tree curpos
= bit_position (old_field
);
946 unsigned int align
= 0;
949 /* See how the position was modified from the last position.
951 There are two basic cases we support: a value was added
952 to the last position or the last position was rounded to
953 a boundary and they something was added. Check for the
954 first case first. If not, see if there is any evidence
955 of rounding. If so, round the last position and try
958 If this is a union, the position can be taken as zero. */
960 if (TREE_CODE (new_record_type
) == UNION_TYPE
)
961 pos
= bitsize_zero_node
, align
= 0;
963 pos
= compute_related_constant (curpos
, last_pos
);
965 if (!pos
&& TREE_CODE (curpos
) == MULT_EXPR
966 && TREE_CODE (TREE_OPERAND (curpos
, 1)) == INTEGER_CST
)
968 align
= TREE_INT_CST_LOW (TREE_OPERAND (curpos
, 1));
969 pos
= compute_related_constant (curpos
,
970 round_up (last_pos
, align
));
972 else if (!pos
&& TREE_CODE (curpos
) == PLUS_EXPR
973 && TREE_CODE (TREE_OPERAND (curpos
, 1)) == INTEGER_CST
974 && TREE_CODE (TREE_OPERAND (curpos
, 0)) == MULT_EXPR
975 && host_integerp (TREE_OPERAND
976 (TREE_OPERAND (curpos
, 0), 1),
981 (TREE_OPERAND (TREE_OPERAND (curpos
, 0), 1), 1);
982 pos
= compute_related_constant (curpos
,
983 round_up (last_pos
, align
));
985 else if (potential_alignment_gap (prev_old_field
, old_field
,
988 align
= TYPE_ALIGN (field_type
);
989 pos
= compute_related_constant (curpos
,
990 round_up (last_pos
, align
));
993 /* If we can't compute a position, set it to zero.
995 ??? We really should abort here, but it's too much work
996 to get this correct for all cases. */
999 pos
= bitsize_zero_node
;
1001 /* See if this type is variable-size and make a new type
1002 and indicate the indirection if so. */
1003 if (TREE_CODE (DECL_SIZE (old_field
)) != INTEGER_CST
)
1005 field_type
= build_pointer_type (field_type
);
1009 /* Make a new field name, if necessary. */
1010 if (var
|| align
!= 0)
1015 sprintf (suffix
, "XV%c%u", var
? 'L' : 'A',
1016 align
/ BITS_PER_UNIT
);
1018 strcpy (suffix
, "XVL");
1020 field_name
= concat_id_with_name (field_name
, suffix
);
1023 new_field
= create_field_decl (field_name
, field_type
,
1025 DECL_SIZE (old_field
), pos
, 0);
1026 TREE_CHAIN (new_field
) = TYPE_FIELDS (new_record_type
);
1027 TYPE_FIELDS (new_record_type
) = new_field
;
1029 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1030 zero. The only time it's not the last field of the record
1031 is when there are other components at fixed positions after
1032 it (meaning there was a rep clause for every field) and we
1033 want to be able to encode them. */
1034 last_pos
= size_binop (PLUS_EXPR
, bit_position (old_field
),
1035 (TREE_CODE (TREE_TYPE (old_field
))
1038 : DECL_SIZE (old_field
));
1039 prev_old_field
= old_field
;
1042 TYPE_FIELDS (new_record_type
)
1043 = nreverse (TYPE_FIELDS (new_record_type
));
1045 rest_of_type_compilation (new_record_type
, global_bindings_p ());
1048 rest_of_type_compilation (record_type
, global_bindings_p ());
1052 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1053 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1054 if this represents a QUAL_UNION_TYPE in which case we must look for
1055 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1056 is nonzero, we must take the MAX of the end position of this field
1057 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1059 We return an expression for the size. */
1062 merge_sizes (tree last_size
, tree first_bit
, tree size
, bool special
,
1065 tree type
= TREE_TYPE (last_size
);
1068 if (!special
|| TREE_CODE (size
) != COND_EXPR
)
1070 new = size_binop (PLUS_EXPR
, first_bit
, size
);
1072 new = size_binop (MAX_EXPR
, last_size
, new);
1076 new = fold (build (COND_EXPR
, type
, TREE_OPERAND (size
, 0),
1077 integer_zerop (TREE_OPERAND (size
, 1))
1078 ? last_size
: merge_sizes (last_size
, first_bit
,
1079 TREE_OPERAND (size
, 1),
1081 integer_zerop (TREE_OPERAND (size
, 2))
1082 ? last_size
: merge_sizes (last_size
, first_bit
,
1083 TREE_OPERAND (size
, 2),
1086 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1087 when fed through substitute_in_expr) into thinking that a constant
1088 size is not constant. */
1089 while (TREE_CODE (new) == NON_LVALUE_EXPR
)
1090 new = TREE_OPERAND (new, 0);
1095 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1096 related by the addition of a constant. Return that constant if so. */
1099 compute_related_constant (tree op0
, tree op1
)
1101 tree op0_var
, op1_var
;
1102 tree op0_con
= split_plus (op0
, &op0_var
);
1103 tree op1_con
= split_plus (op1
, &op1_var
);
1104 tree result
= size_binop (MINUS_EXPR
, op0_con
, op1_con
);
1106 if (operand_equal_p (op0_var
, op1_var
, 0))
1108 else if (operand_equal_p (op0
, size_binop (PLUS_EXPR
, op1_var
, result
), 0))
1114 /* Utility function of above to split a tree OP which may be a sum, into a
1115 constant part, which is returned, and a variable part, which is stored
1116 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1120 split_plus (tree in
, tree
*pvar
)
1122 /* Strip NOPS in order to ease the tree traversal and maximize the
1123 potential for constant or plus/minus discovery. We need to be careful
1124 to always return and set *pvar to bitsizetype trees, but it's worth
1128 *pvar
= convert (bitsizetype
, in
);
1130 if (TREE_CODE (in
) == INTEGER_CST
)
1132 *pvar
= bitsize_zero_node
;
1133 return convert (bitsizetype
, in
);
1135 else if (TREE_CODE (in
) == PLUS_EXPR
|| TREE_CODE (in
) == MINUS_EXPR
)
1137 tree lhs_var
, rhs_var
;
1138 tree lhs_con
= split_plus (TREE_OPERAND (in
, 0), &lhs_var
);
1139 tree rhs_con
= split_plus (TREE_OPERAND (in
, 1), &rhs_var
);
1141 if (lhs_var
== TREE_OPERAND (in
, 0)
1142 && rhs_var
== TREE_OPERAND (in
, 1))
1143 return bitsize_zero_node
;
1145 *pvar
= size_binop (TREE_CODE (in
), lhs_var
, rhs_var
);
1146 return size_binop (TREE_CODE (in
), lhs_con
, rhs_con
);
1149 return bitsize_zero_node
;
1152 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1153 subprogram. If it is void_type_node, then we are dealing with a procedure,
1154 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1155 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1156 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1157 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1158 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1159 RETURNS_WITH_DSP is nonzero if the function is to return with a
1160 depressed stack pointer. */
1163 create_subprog_type (tree return_type
, tree param_decl_list
, tree cico_list
,
1164 bool returns_unconstrained
, bool returns_by_ref
,
1165 bool returns_with_dsp
)
1167 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1168 the subprogram formal parameters. This list is generated by traversing the
1169 input list of PARM_DECL nodes. */
1170 tree param_type_list
= NULL
;
1174 for (param_decl
= param_decl_list
; param_decl
;
1175 param_decl
= TREE_CHAIN (param_decl
))
1176 param_type_list
= tree_cons (NULL_TREE
, TREE_TYPE (param_decl
),
1179 /* The list of the function parameter types has to be terminated by the void
1180 type to signal to the back-end that we are not dealing with a variable
1181 parameter subprogram, but that the subprogram has a fixed number of
1183 param_type_list
= tree_cons (NULL_TREE
, void_type_node
, param_type_list
);
1185 /* The list of argument types has been created in reverse
1187 param_type_list
= nreverse (param_type_list
);
1189 type
= build_function_type (return_type
, param_type_list
);
1191 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1192 or the new type should, make a copy of TYPE. Likewise for
1193 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1194 if (TYPE_CI_CO_LIST (type
) || cico_list
1195 || TYPE_RETURNS_UNCONSTRAINED_P (type
) != returns_unconstrained
1196 || TYPE_RETURNS_BY_REF_P (type
) != returns_by_ref
)
1197 type
= copy_type (type
);
1199 TYPE_CI_CO_LIST (type
) = cico_list
;
1200 TYPE_RETURNS_UNCONSTRAINED_P (type
) = returns_unconstrained
;
1201 TYPE_RETURNS_STACK_DEPRESSED (type
) = returns_with_dsp
;
1202 TYPE_RETURNS_BY_REF_P (type
) = returns_by_ref
;
1206 /* Return a copy of TYPE but safe to modify in any way. */
1209 copy_type (tree type
)
1211 tree
new = copy_node (type
);
1213 /* copy_node clears this field instead of copying it, because it is
1214 aliased with TREE_CHAIN. */
1215 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type
);
1217 TYPE_POINTER_TO (new) = 0;
1218 TYPE_REFERENCE_TO (new) = 0;
1219 TYPE_MAIN_VARIANT (new) = new;
1220 TYPE_NEXT_VARIANT (new) = 0;
1225 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1226 TYPE_INDEX_TYPE is INDEX. */
1229 create_index_type (tree min
, tree max
, tree index
)
1231 /* First build a type for the desired range. */
1232 tree type
= build_index_2_type (min
, max
);
1234 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1235 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1236 is set, but not to INDEX, make a copy of this type with the requested
1237 index type. Note that we have no way of sharing these types, but that's
1238 only a small hole. */
1239 if (TYPE_INDEX_TYPE (type
) == index
)
1241 else if (TYPE_INDEX_TYPE (type
))
1242 type
= copy_type (type
);
1244 SET_TYPE_INDEX_TYPE (type
, index
);
1245 create_type_decl (NULL_TREE
, type
, NULL
, true, false, Empty
);
1249 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1250 string) and TYPE is a ..._TYPE node giving its data type.
1251 ARTIFICIAL_P is true if this is a declaration that was generated
1252 by the compiler. DEBUG_INFO_P is true if we need to write debugging
1253 information about this type. GNAT_NODE is used for the position of
1257 create_type_decl (tree type_name
, tree type
, struct attrib
*attr_list
,
1258 bool artificial_p
, bool debug_info_p
, Node_Id gnat_node
)
1260 tree type_decl
= build_decl (TYPE_DECL
, type_name
, type
);
1261 enum tree_code code
= TREE_CODE (type
);
1263 DECL_ARTIFICIAL (type_decl
) = artificial_p
;
1265 process_attributes (type_decl
, attr_list
);
1267 /* Pass type declaration information to the debugger unless this is an
1268 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1269 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
1270 a dummy type, which will be completed later, or a type for which
1271 debugging information was not requested. */
1272 if (code
== UNCONSTRAINED_ARRAY_TYPE
|| TYPE_IS_DUMMY_P (type
)
1274 DECL_IGNORED_P (type_decl
) = 1;
1275 else if (code
!= ENUMERAL_TYPE
&& code
!= RECORD_TYPE
1276 && !((code
== POINTER_TYPE
|| code
== REFERENCE_TYPE
)
1277 && TYPE_IS_DUMMY_P (TREE_TYPE (type
))))
1278 rest_of_decl_compilation (type_decl
, global_bindings_p (), 0);
1280 if (!TYPE_IS_DUMMY_P (type
))
1281 gnat_pushdecl (type_decl
, gnat_node
);
1286 /* Returns a GCC VAR_DECL node. VAR_NAME gives the name of the variable.
1287 ASM_NAME is its assembler name (if provided). TYPE is its data type
1288 (a GCC ..._TYPE node). VAR_INIT is the GCC tree for an optional initial
1289 expression; NULL_TREE if none.
1291 CONST_FLAG is true if this variable is constant.
1293 PUBLIC_FLAG is true if this definition is to be made visible outside of
1294 the current compilation unit. This flag should be set when processing the
1295 variable definitions in a package specification. EXTERN_FLAG is nonzero
1296 when processing an external variable declaration (as opposed to a
1297 definition: no storage is to be allocated for the variable here).
1299 STATIC_FLAG is only relevant when not at top level. In that case
1300 it indicates whether to always allocate storage to the variable.
1302 GNAT_NODE is used for the position of the decl. */
1305 create_var_decl (tree var_name
, tree asm_name
, tree type
, tree var_init
,
1306 bool const_flag
, bool public_flag
, bool extern_flag
,
1307 bool static_flag
, struct attrib
*attr_list
, Node_Id gnat_node
)
1312 : (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init
))
1313 && (global_bindings_p () || static_flag
1314 ? 0 != initializer_constant_valid_p (var_init
,
1315 TREE_TYPE (var_init
))
1316 : TREE_CONSTANT (var_init
))));
1318 = build_decl ((const_flag
&& init_const
1319 /* Only make a CONST_DECL for sufficiently-small objects.
1320 We consider complex double "sufficiently-small" */
1321 && TYPE_SIZE (type
) != 0
1322 && host_integerp (TYPE_SIZE_UNIT (type
), 1)
1323 && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type
),
1324 GET_MODE_SIZE (DCmode
)))
1325 ? CONST_DECL
: VAR_DECL
, var_name
, type
);
1327 /* If this is external, throw away any initializations unless this is a
1328 CONST_DECL (meaning we have a constant); they will be done elsewhere.
1329 If we are defining a global here, leave a constant initialization and
1330 save any variable elaborations for the elaboration routine. If we are
1331 just annotating types, throw away the initialization if it isn't a
1333 if ((extern_flag
&& TREE_CODE (var_decl
) != CONST_DECL
)
1334 || (type_annotate_only
&& var_init
&& !TREE_CONSTANT (var_init
)))
1335 var_init
= NULL_TREE
;
1337 DECL_INITIAL (var_decl
) = var_init
;
1338 TREE_READONLY (var_decl
) = const_flag
;
1339 DECL_EXTERNAL (var_decl
) = extern_flag
;
1340 TREE_PUBLIC (var_decl
) = public_flag
|| extern_flag
;
1341 TREE_CONSTANT (var_decl
) = TREE_CODE (var_decl
) == CONST_DECL
;
1342 TREE_THIS_VOLATILE (var_decl
) = TREE_SIDE_EFFECTS (var_decl
)
1343 = TYPE_VOLATILE (type
);
1345 /* At the global binding level we need to allocate static storage for the
1346 variable if and only if its not external. If we are not at the top level
1347 we allocate automatic storage unless requested not to. */
1348 TREE_STATIC (var_decl
) = global_bindings_p () ? !extern_flag
: static_flag
;
1351 SET_DECL_ASSEMBLER_NAME (var_decl
, asm_name
);
1353 process_attributes (var_decl
, attr_list
);
1355 /* Add this decl to the current binding level. */
1356 gnat_pushdecl (var_decl
, gnat_node
);
1358 if (TREE_SIDE_EFFECTS (var_decl
))
1359 TREE_ADDRESSABLE (var_decl
) = 1;
1361 if (TREE_CODE (var_decl
) != CONST_DECL
)
1362 rest_of_decl_compilation (var_decl
, global_bindings_p (), 0);
1367 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1368 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1369 this field is in a record type with a "pragma pack". If SIZE is nonzero
1370 it is the specified size for this field. If POS is nonzero, it is the bit
1371 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1372 the address of this field for aliasing purposes. */
1375 create_field_decl (tree field_name
, tree field_type
, tree record_type
,
1376 int packed
, tree size
, tree pos
, int addressable
)
1378 tree field_decl
= build_decl (FIELD_DECL
, field_name
, field_type
);
1380 DECL_CONTEXT (field_decl
) = record_type
;
1381 TREE_READONLY (field_decl
) = TYPE_READONLY (field_type
);
1383 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1384 byte boundary since GCC cannot handle less-aligned BLKmode bitfields. */
1385 if (packed
&& TYPE_MODE (field_type
) == BLKmode
)
1386 DECL_ALIGN (field_decl
) = BITS_PER_UNIT
;
1388 /* If a size is specified, use it. Otherwise, if the record type is packed
1389 compute a size to use, which may differ from the object's natural size.
1390 We always set a size in this case to trigger the checks for bitfield
1391 creation below, which is typically required when no position has been
1394 size
= convert (bitsizetype
, size
);
1395 else if (packed
== 1)
1397 size
= rm_size (field_type
);
1399 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1401 if (TREE_CODE (size
) == INTEGER_CST
1402 && compare_tree_int (size
, MAX_FIXED_MODE_SIZE
) > 0)
1403 size
= round_up (size
, BITS_PER_UNIT
);
1406 /* Make a bitfield if a size is specified for two reasons: first if the size
1407 differs from the natural size. Second, if the alignment is insufficient.
1408 There are a number of ways the latter can be true.
1410 We never make a bitfield if the type of the field has a nonconstant size,
1411 or if it is claimed to be addressable, because no such entity requiring
1412 bitfield operations should reach here.
1414 We do *preventively* make a bitfield when there might be the need for it
1415 but we don't have all the necessary information to decide, as is the case
1416 of a field with no specified position in a packed record.
1418 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1419 in layout_decl or finish_record_type to clear the bit_field indication if
1420 it is in fact not needed. */
1421 if (size
&& TREE_CODE (size
) == INTEGER_CST
1422 && TREE_CODE (TYPE_SIZE (field_type
)) == INTEGER_CST
1424 && (!operand_equal_p (TYPE_SIZE (field_type
), size
, 0)
1426 && !value_zerop (size_binop (TRUNC_MOD_EXPR
, pos
,
1427 bitsize_int (TYPE_ALIGN
1430 || (TYPE_ALIGN (record_type
) != 0
1431 && TYPE_ALIGN (record_type
) < TYPE_ALIGN (field_type
))))
1433 DECL_BIT_FIELD (field_decl
) = 1;
1434 DECL_SIZE (field_decl
) = size
;
1435 if (!packed
&& !pos
)
1436 DECL_ALIGN (field_decl
)
1437 = (TYPE_ALIGN (record_type
) != 0
1438 ? MIN (TYPE_ALIGN (record_type
), TYPE_ALIGN (field_type
))
1439 : TYPE_ALIGN (field_type
));
1442 DECL_PACKED (field_decl
) = pos
? DECL_BIT_FIELD (field_decl
) : packed
;
1443 DECL_ALIGN (field_decl
)
1444 = MAX (DECL_ALIGN (field_decl
),
1445 DECL_BIT_FIELD (field_decl
) ? 1
1446 : packed
&& TYPE_MODE (field_type
) != BLKmode
? BITS_PER_UNIT
1447 : TYPE_ALIGN (field_type
));
1451 /* We need to pass in the alignment the DECL is known to have.
1452 This is the lowest-order bit set in POS, but no more than
1453 the alignment of the record, if one is specified. Note
1454 that an alignment of 0 is taken as infinite. */
1455 unsigned int known_align
;
1457 if (host_integerp (pos
, 1))
1458 known_align
= tree_low_cst (pos
, 1) & - tree_low_cst (pos
, 1);
1460 known_align
= BITS_PER_UNIT
;
1462 if (TYPE_ALIGN (record_type
)
1463 && (known_align
== 0 || known_align
> TYPE_ALIGN (record_type
)))
1464 known_align
= TYPE_ALIGN (record_type
);
1466 layout_decl (field_decl
, known_align
);
1467 SET_DECL_OFFSET_ALIGN (field_decl
,
1468 host_integerp (pos
, 1) ? BIGGEST_ALIGNMENT
1470 pos_from_bit (&DECL_FIELD_OFFSET (field_decl
),
1471 &DECL_FIELD_BIT_OFFSET (field_decl
),
1472 DECL_OFFSET_ALIGN (field_decl
), pos
);
1474 DECL_HAS_REP_P (field_decl
) = 1;
1477 /* If the field type is passed by reference, we will have pointers to the
1478 field, so it is addressable. */
1479 if (must_pass_by_ref (field_type
) || default_pass_by_ref (field_type
))
1482 /* ??? For now, we say that any field of aggregate type is addressable
1483 because the front end may take 'Reference of it. */
1484 if (AGGREGATE_TYPE_P (field_type
))
1487 /* Mark the decl as nonaddressable if it is indicated so semantically,
1488 meaning we won't ever attempt to take the address of the field.
1490 It may also be "technically" nonaddressable, meaning that even if we
1491 attempt to take the field's address we will actually get the address of a
1492 copy. This is the case for true bitfields, but the DECL_BIT_FIELD value
1493 we have at this point is not accurate enough, so we don't account for
1494 this here and let finish_record_type decide. */
1495 DECL_NONADDRESSABLE_P (field_decl
) = !addressable
;
1500 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1501 effects, has the value of zero. */
1504 value_zerop (tree exp
)
1506 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
1507 return value_zerop (TREE_OPERAND (exp
, 1));
1509 return integer_zerop (exp
);
1512 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1513 PARAM_TYPE is its type. READONLY is true if the parameter is
1514 readonly (either an IN parameter or an address of a pass-by-ref
1518 create_param_decl (tree param_name
, tree param_type
, bool readonly
)
1520 tree param_decl
= build_decl (PARM_DECL
, param_name
, param_type
);
1522 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1523 lead to various ABI violations. */
1524 if (targetm
.calls
.promote_prototypes (param_type
)
1525 && (TREE_CODE (param_type
) == INTEGER_TYPE
1526 || TREE_CODE (param_type
) == ENUMERAL_TYPE
)
1527 && TYPE_PRECISION (param_type
) < TYPE_PRECISION (integer_type_node
))
1529 /* We have to be careful about biased types here. Make a subtype
1530 of integer_type_node with the proper biasing. */
1531 if (TREE_CODE (param_type
) == INTEGER_TYPE
1532 && TYPE_BIASED_REPRESENTATION_P (param_type
))
1535 = copy_type (build_range_type (integer_type_node
,
1536 TYPE_MIN_VALUE (param_type
),
1537 TYPE_MAX_VALUE (param_type
)));
1539 TYPE_BIASED_REPRESENTATION_P (param_type
) = 1;
1542 param_type
= integer_type_node
;
1545 DECL_ARG_TYPE (param_decl
) = param_type
;
1546 DECL_ARG_TYPE_AS_WRITTEN (param_decl
) = param_type
;
1547 TREE_READONLY (param_decl
) = readonly
;
1551 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1554 process_attributes (tree decl
, struct attrib
*attr_list
)
1556 for (; attr_list
; attr_list
= attr_list
->next
)
1557 switch (attr_list
->type
)
1559 case ATTR_MACHINE_ATTRIBUTE
:
1560 decl_attributes (&decl
, tree_cons (attr_list
->name
, attr_list
->arg
,
1562 ATTR_FLAG_TYPE_IN_PLACE
);
1565 case ATTR_LINK_ALIAS
:
1566 TREE_STATIC (decl
) = 1;
1567 assemble_alias (decl
, attr_list
->name
);
1570 case ATTR_WEAK_EXTERNAL
:
1572 declare_weak (decl
);
1574 post_error ("?weak declarations not supported on this target",
1575 attr_list
->error_point
);
1578 case ATTR_LINK_SECTION
:
1579 if (targetm
.have_named_sections
)
1581 DECL_SECTION_NAME (decl
)
1582 = build_string (IDENTIFIER_LENGTH (attr_list
->name
),
1583 IDENTIFIER_POINTER (attr_list
->name
));
1586 post_error ("?section attributes are not supported for this target",
1587 attr_list
->error_point
);
1592 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1596 value_factor_p (tree value
, HOST_WIDE_INT factor
)
1598 if (host_integerp (value
, 1))
1599 return tree_low_cst (value
, 1) % factor
== 0;
1601 if (TREE_CODE (value
) == MULT_EXPR
)
1602 return (value_factor_p (TREE_OPERAND (value
, 0), factor
)
1603 || value_factor_p (TREE_OPERAND (value
, 1), factor
));
1608 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1609 unless we can prove these 2 fields are laid out in such a way that no gap
1610 exist between the end of PREV_FIELD and the begining of CURR_FIELD. OFFSET
1611 is the distance in bits between the end of PREV_FIELD and the starting
1612 position of CURR_FIELD. It is ignored if null. */
1615 potential_alignment_gap (tree prev_field
, tree curr_field
, tree offset
)
1617 /* If this is the first field of the record, there cannot be any gap */
1621 /* If the previous field is a union type, then return False: The only
1622 time when such a field is not the last field of the record is when
1623 there are other components at fixed positions after it (meaning there
1624 was a rep clause for every field), in which case we don't want the
1625 alignment constraint to override them. */
1626 if (TREE_CODE (TREE_TYPE (prev_field
)) == QUAL_UNION_TYPE
)
1629 /* If the distance between the end of prev_field and the begining of
1630 curr_field is constant, then there is a gap if the value of this
1631 constant is not null. */
1632 if (offset
&& host_integerp (offset
, 1))
1633 return !integer_zerop (offset
);
1635 /* If the size and position of the previous field are constant,
1636 then check the sum of this size and position. There will be a gap
1637 iff it is not multiple of the current field alignment. */
1638 if (host_integerp (DECL_SIZE (prev_field
), 1)
1639 && host_integerp (bit_position (prev_field
), 1))
1640 return ((tree_low_cst (bit_position (prev_field
), 1)
1641 + tree_low_cst (DECL_SIZE (prev_field
), 1))
1642 % DECL_ALIGN (curr_field
) != 0);
1644 /* If both the position and size of the previous field are multiples
1645 of the current field alignment, there can not be any gap. */
1646 if (value_factor_p (bit_position (prev_field
), DECL_ALIGN (curr_field
))
1647 && value_factor_p (DECL_SIZE (prev_field
), DECL_ALIGN (curr_field
)))
1650 /* Fallback, return that there may be a potential gap */
1654 /* Returns a LABEL_DECL node for LABEL_NAME. */
1657 create_label_decl (tree label_name
)
1659 tree label_decl
= build_decl (LABEL_DECL
, label_name
, void_type_node
);
1661 DECL_CONTEXT (label_decl
) = current_function_decl
;
1662 DECL_MODE (label_decl
) = VOIDmode
;
1663 DECL_SOURCE_LOCATION (label_decl
) = input_location
;
1668 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1669 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1670 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1671 PARM_DECL nodes chained through the TREE_CHAIN field).
1673 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1674 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1677 create_subprog_decl (tree subprog_name
, tree asm_name
,
1678 tree subprog_type
, tree param_decl_list
, bool inline_flag
,
1679 bool public_flag
, bool extern_flag
,
1680 struct attrib
*attr_list
, Node_Id gnat_node
)
1682 tree return_type
= TREE_TYPE (subprog_type
);
1683 tree subprog_decl
= build_decl (FUNCTION_DECL
, subprog_name
, subprog_type
);
1685 /* If this is a function nested inside an inlined external function, it
1686 means we aren't going to compile the outer function unless it is
1687 actually inlined, so do the same for us. */
1688 if (current_function_decl
&& DECL_INLINE (current_function_decl
)
1689 && DECL_EXTERNAL (current_function_decl
))
1692 DECL_EXTERNAL (subprog_decl
) = extern_flag
;
1693 TREE_PUBLIC (subprog_decl
) = public_flag
;
1694 TREE_STATIC (subprog_decl
) = 1;
1695 TREE_READONLY (subprog_decl
) = TYPE_READONLY (subprog_type
);
1696 TREE_THIS_VOLATILE (subprog_decl
) = TYPE_VOLATILE (subprog_type
);
1697 TREE_SIDE_EFFECTS (subprog_decl
) = TYPE_VOLATILE (subprog_type
);
1698 DECL_ARGUMENTS (subprog_decl
) = param_decl_list
;
1699 DECL_RESULT (subprog_decl
) = build_decl (RESULT_DECL
, 0, return_type
);
1700 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl
)) = 1;
1701 DECL_IGNORED_P (DECL_RESULT (subprog_decl
)) = 1;
1704 DECL_DECLARED_INLINE_P (subprog_decl
) = 1;
1707 SET_DECL_ASSEMBLER_NAME (subprog_decl
, asm_name
);
1709 process_attributes (subprog_decl
, attr_list
);
1711 /* Add this decl to the current binding level. */
1712 gnat_pushdecl (subprog_decl
, gnat_node
);
1714 /* Output the assembler code and/or RTL for the declaration. */
1715 rest_of_decl_compilation (subprog_decl
, global_bindings_p (), 0);
1717 return subprog_decl
;
1720 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1721 body. This routine needs to be invoked before processing the declarations
1722 appearing in the subprogram. */
1725 begin_subprog_body (tree subprog_decl
)
1729 current_function_decl
= subprog_decl
;
1730 announce_function (subprog_decl
);
1732 /* Enter a new binding level and show that all the parameters belong to
1735 for (param_decl
= DECL_ARGUMENTS (subprog_decl
); param_decl
;
1736 param_decl
= TREE_CHAIN (param_decl
))
1737 DECL_CONTEXT (param_decl
) = subprog_decl
;
1739 make_decl_rtl (subprog_decl
);
1741 /* We handle pending sizes via the elaboration of types, so we don't need to
1742 save them. This causes them to be marked as part of the outer function
1743 and then discarded. */
1744 get_pending_sizes ();
1747 /* Finish the definition of the current subprogram and compile it all the way
1748 to assembler language output. BODY is the tree corresponding to
1752 end_subprog_body (tree body
)
1754 tree fndecl
= current_function_decl
;
1756 /* Mark the BLOCK for this level as being for this function and pop the
1757 level. Since the vars in it are the parameters, clear them. */
1758 BLOCK_VARS (current_binding_level
->block
) = 0;
1759 BLOCK_SUPERCONTEXT (current_binding_level
->block
) = fndecl
;
1760 DECL_INITIAL (fndecl
) = current_binding_level
->block
;
1763 /* Deal with inline. If declared inline or we should default to inline,
1764 set the flag in the decl. */
1765 DECL_INLINE (fndecl
)
1766 = DECL_DECLARED_INLINE_P (fndecl
) || flag_inline_trees
== 2;
1768 /* We handle pending sizes via the elaboration of types, so we don't
1769 need to save them. */
1770 get_pending_sizes ();
1772 /* Mark the RESULT_DECL as being in this subprogram. */
1773 DECL_CONTEXT (DECL_RESULT (fndecl
)) = fndecl
;
1775 DECL_SAVED_TREE (fndecl
) = body
;
1777 current_function_decl
= DECL_CONTEXT (fndecl
);
1780 /* If we're only annotating types, don't actually compile this function. */
1781 if (type_annotate_only
)
1784 /* We do different things for nested and non-nested functions.
1785 ??? This should be in cgraph. */
1786 if (!DECL_CONTEXT (fndecl
))
1788 gnat_gimplify_function (fndecl
);
1789 lower_nested_functions (fndecl
);
1790 gnat_finalize (fndecl
);
1793 /* Register this function with cgraph just far enough to get it
1794 added to our parent's nested function list. */
1795 (void) cgraph_node (fndecl
);
1798 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
1801 gnat_gimplify_function (tree fndecl
)
1803 struct cgraph_node
*cgn
;
1805 dump_function (TDI_original
, fndecl
);
1806 gimplify_function_tree (fndecl
);
1807 dump_function (TDI_generic
, fndecl
);
1809 /* Convert all nested functions to GIMPLE now. We do things in this order
1810 so that items like VLA sizes are expanded properly in the context of the
1811 correct function. */
1812 cgn
= cgraph_node (fndecl
);
1813 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
1814 gnat_gimplify_function (cgn
->decl
);
1817 /* Give FNDECL and all its nested functions to cgraph for compilation. */
1820 gnat_finalize (tree fndecl
)
1822 struct cgraph_node
*cgn
;
1824 /* Finalize all nested functions now. */
1825 cgn
= cgraph_node (fndecl
);
1826 for (cgn
= cgn
->nested
; cgn
; cgn
= cgn
->next_nested
)
1827 gnat_finalize (cgn
->decl
);
1829 cgraph_finalize_function (fndecl
, false);
1832 /* Return a definition for a builtin function named NAME and whose data type
1833 is TYPE. TYPE should be a function type with argument types.
1834 FUNCTION_CODE tells later passes how to compile calls to this function.
1835 See tree.h for its possible values.
1837 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1838 the name to be called if we can't opencode the function. If
1839 ATTRS is nonzero, use that for the function attribute list. */
1842 builtin_function (const char *name
, tree type
, int function_code
,
1843 enum built_in_class
class, const char *library_name
,
1846 tree decl
= build_decl (FUNCTION_DECL
, get_identifier (name
), type
);
1848 DECL_EXTERNAL (decl
) = 1;
1849 TREE_PUBLIC (decl
) = 1;
1851 SET_DECL_ASSEMBLER_NAME (decl
, get_identifier (library_name
));
1853 gnat_pushdecl (decl
, Empty
);
1854 DECL_BUILT_IN_CLASS (decl
) = class;
1855 DECL_FUNCTION_CODE (decl
) = function_code
;
1857 decl_attributes (&decl
, attrs
, ATTR_FLAG_BUILT_IN
);
1861 /* Return an integer type with the number of bits of precision given by
1862 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1863 it is a signed type. */
1866 gnat_type_for_size (unsigned precision
, int unsignedp
)
1871 if (precision
<= 2 * MAX_BITS_PER_WORD
1872 && signed_and_unsigned_types
[precision
][unsignedp
])
1873 return signed_and_unsigned_types
[precision
][unsignedp
];
1876 t
= make_unsigned_type (precision
);
1878 t
= make_signed_type (precision
);
1880 if (precision
<= 2 * MAX_BITS_PER_WORD
)
1881 signed_and_unsigned_types
[precision
][unsignedp
] = t
;
1885 sprintf (type_name
, "%sSIGNED_%d", unsignedp
? "UN" : "", precision
);
1886 TYPE_NAME (t
) = get_identifier (type_name
);
1892 /* Likewise for floating-point types. */
1895 float_type_for_precision (int precision
, enum machine_mode mode
)
1900 if (float_types
[(int) mode
])
1901 return float_types
[(int) mode
];
1903 float_types
[(int) mode
] = t
= make_node (REAL_TYPE
);
1904 TYPE_PRECISION (t
) = precision
;
1907 if (TYPE_MODE (t
) != mode
)
1912 sprintf (type_name
, "FLOAT_%d", precision
);
1913 TYPE_NAME (t
) = get_identifier (type_name
);
1919 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
1920 an unsigned type; otherwise a signed type is returned. */
1923 gnat_type_for_mode (enum machine_mode mode
, int unsignedp
)
1925 if (mode
== BLKmode
)
1927 else if (mode
== VOIDmode
)
1928 return void_type_node
;
1929 else if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
1930 return float_type_for_precision (GET_MODE_PRECISION (mode
), mode
);
1932 return gnat_type_for_size (GET_MODE_BITSIZE (mode
), unsignedp
);
1935 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
1938 gnat_unsigned_type (tree type_node
)
1940 tree type
= gnat_type_for_size (TYPE_PRECISION (type_node
), 1);
1942 if (TREE_CODE (type_node
) == INTEGER_TYPE
&& TYPE_MODULAR_P (type_node
))
1944 type
= copy_node (type
);
1945 TREE_TYPE (type
) = type_node
;
1947 else if (TREE_TYPE (type_node
)
1948 && TREE_CODE (TREE_TYPE (type_node
)) == INTEGER_TYPE
1949 && TYPE_MODULAR_P (TREE_TYPE (type_node
)))
1951 type
= copy_node (type
);
1952 TREE_TYPE (type
) = TREE_TYPE (type_node
);
1958 /* Return the signed version of a TYPE_NODE, a scalar type. */
1961 gnat_signed_type (tree type_node
)
1963 tree type
= gnat_type_for_size (TYPE_PRECISION (type_node
), 0);
1965 if (TREE_CODE (type_node
) == INTEGER_TYPE
&& TYPE_MODULAR_P (type_node
))
1967 type
= copy_node (type
);
1968 TREE_TYPE (type
) = type_node
;
1970 else if (TREE_TYPE (type_node
)
1971 && TREE_CODE (TREE_TYPE (type_node
)) == INTEGER_TYPE
1972 && TYPE_MODULAR_P (TREE_TYPE (type_node
)))
1974 type
= copy_node (type
);
1975 TREE_TYPE (type
) = TREE_TYPE (type_node
);
1981 /* Return a type the same as TYPE except unsigned or signed according to
1985 gnat_signed_or_unsigned_type (int unsignedp
, tree type
)
1987 if (!INTEGRAL_TYPE_P (type
) || TYPE_UNSIGNED (type
) == unsignedp
)
1990 return gnat_type_for_size (TYPE_PRECISION (type
), unsignedp
);
1993 /* EXP is an expression for the size of an object. If this size contains
1994 discriminant references, replace them with the maximum (if MAX_P) or
1995 minimum (if !MAX_P) possible value of the discriminant. */
1998 max_size (tree exp
, bool max_p
)
2000 enum tree_code code
= TREE_CODE (exp
);
2001 tree type
= TREE_TYPE (exp
);
2003 switch (TREE_CODE_CLASS (code
))
2010 if (code
== TREE_LIST
)
2011 return tree_cons (TREE_PURPOSE (exp
),
2012 max_size (TREE_VALUE (exp
), max_p
),
2014 ? max_size (TREE_CHAIN (exp
), max_p
) : NULL_TREE
);
2018 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2019 modify. Otherwise, we treat it like a variable. */
2020 if (!CONTAINS_PLACEHOLDER_P (exp
))
2023 type
= TREE_TYPE (TREE_OPERAND (exp
, 1));
2025 max_size (max_p
? TYPE_MAX_VALUE (type
) : TYPE_MIN_VALUE (type
), true);
2028 return max_p
? size_one_node
: size_zero_node
;
2033 switch (TREE_CODE_LENGTH (code
))
2036 if (code
== NON_LVALUE_EXPR
)
2037 return max_size (TREE_OPERAND (exp
, 0), max_p
);
2040 fold (build1 (code
, type
,
2041 max_size (TREE_OPERAND (exp
, 0),
2042 code
== NEGATE_EXPR
? !max_p
: max_p
)));
2045 if (code
== COMPOUND_EXPR
)
2046 return max_size (TREE_OPERAND (exp
, 1), max_p
);
2049 tree lhs
= max_size (TREE_OPERAND (exp
, 0), max_p
);
2050 tree rhs
= max_size (TREE_OPERAND (exp
, 1),
2051 code
== MINUS_EXPR
? !max_p
: max_p
);
2053 /* Special-case wanting the maximum value of a MIN_EXPR.
2054 In that case, if one side overflows, return the other.
2055 sizetype is signed, but we know sizes are non-negative.
2056 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2057 overflowing or the maximum possible value and the RHS
2059 if (max_p
&& code
== MIN_EXPR
&& TREE_OVERFLOW (rhs
))
2061 else if (max_p
&& code
== MIN_EXPR
&& TREE_OVERFLOW (lhs
))
2063 else if ((code
== MINUS_EXPR
|| code
== PLUS_EXPR
)
2064 && ((TREE_CONSTANT (lhs
) && TREE_OVERFLOW (lhs
))
2065 || operand_equal_p (lhs
, TYPE_MAX_VALUE (type
), 0))
2066 && !TREE_CONSTANT (rhs
))
2069 return fold (build (code
, type
, lhs
, rhs
));
2073 if (code
== SAVE_EXPR
)
2075 else if (code
== COND_EXPR
)
2076 return fold (build (max_p
? MAX_EXPR
: MIN_EXPR
, type
,
2077 max_size (TREE_OPERAND (exp
, 1), max_p
),
2078 max_size (TREE_OPERAND (exp
, 2), max_p
)));
2079 else if (code
== CALL_EXPR
&& TREE_OPERAND (exp
, 1))
2080 return build (CALL_EXPR
, type
, TREE_OPERAND (exp
, 0),
2081 max_size (TREE_OPERAND (exp
, 1), max_p
), NULL
);
2088 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2089 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2090 Return a constructor for the template. */
2093 build_template (tree template_type
, tree array_type
, tree expr
)
2095 tree template_elts
= NULL_TREE
;
2096 tree bound_list
= NULL_TREE
;
2099 if (TREE_CODE (array_type
) == RECORD_TYPE
2100 && (TYPE_IS_PADDING_P (array_type
)
2101 || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type
)))
2102 array_type
= TREE_TYPE (TYPE_FIELDS (array_type
));
2104 if (TREE_CODE (array_type
) == ARRAY_TYPE
2105 || (TREE_CODE (array_type
) == INTEGER_TYPE
2106 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type
)))
2107 bound_list
= TYPE_ACTUAL_BOUNDS (array_type
);
2109 /* First make the list for a CONSTRUCTOR for the template. Go down the
2110 field list of the template instead of the type chain because this
2111 array might be an Ada array of arrays and we can't tell where the
2112 nested arrays stop being the underlying object. */
2114 for (field
= TYPE_FIELDS (template_type
); field
;
2116 ? (bound_list
= TREE_CHAIN (bound_list
))
2117 : (array_type
= TREE_TYPE (array_type
))),
2118 field
= TREE_CHAIN (TREE_CHAIN (field
)))
2120 tree bounds
, min
, max
;
2122 /* If we have a bound list, get the bounds from there. Likewise
2123 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2124 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2125 This will give us a maximum range. */
2127 bounds
= TREE_VALUE (bound_list
);
2128 else if (TREE_CODE (array_type
) == ARRAY_TYPE
)
2129 bounds
= TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type
));
2130 else if (expr
&& TREE_CODE (expr
) == PARM_DECL
2131 && DECL_BY_COMPONENT_PTR_P (expr
))
2132 bounds
= TREE_TYPE (field
);
2136 min
= convert (TREE_TYPE (TREE_CHAIN (field
)), TYPE_MIN_VALUE (bounds
));
2137 max
= convert (TREE_TYPE (field
), TYPE_MAX_VALUE (bounds
));
2139 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2140 substitute it from OBJECT. */
2141 min
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (min
, expr
);
2142 max
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (max
, expr
);
2144 template_elts
= tree_cons (TREE_CHAIN (field
), max
,
2145 tree_cons (field
, min
, template_elts
));
2148 return gnat_build_constructor (template_type
, nreverse (template_elts
));
2151 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2152 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2153 in the type contains in its DECL_INITIAL the expression to use when
2154 a constructor is made for the type. GNAT_ENTITY is an entity used
2155 to print out an error message if the mechanism cannot be applied to
2156 an object of that type and also for the name. */
2159 build_vms_descriptor (tree type
, Mechanism_Type mech
, Entity_Id gnat_entity
)
2161 tree record_type
= make_node (RECORD_TYPE
);
2162 tree field_list
= 0;
2171 /* If TYPE is an unconstrained array, use the underlying array type. */
2172 if (TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
2173 type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
))));
2175 /* If this is an array, compute the number of dimensions in the array,
2176 get the index types, and point to the inner type. */
2177 if (TREE_CODE (type
) != ARRAY_TYPE
)
2180 for (ndim
= 1, inner_type
= type
;
2181 TREE_CODE (TREE_TYPE (inner_type
)) == ARRAY_TYPE
2182 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type
));
2183 ndim
++, inner_type
= TREE_TYPE (inner_type
))
2186 idx_arr
= (tree
*) alloca (ndim
* sizeof (tree
));
2188 if (mech
!= By_Descriptor_NCA
2189 && TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_CONVENTION_FORTRAN_P (type
))
2190 for (i
= ndim
- 1, inner_type
= type
;
2192 i
--, inner_type
= TREE_TYPE (inner_type
))
2193 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
2195 for (i
= 0, inner_type
= type
;
2197 i
++, inner_type
= TREE_TYPE (inner_type
))
2198 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
2200 /* Now get the DTYPE value. */
2201 switch (TREE_CODE (type
))
2205 if (TYPE_VAX_FLOATING_POINT_P (type
))
2206 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
2219 switch (GET_MODE_BITSIZE (TYPE_MODE (type
)))
2222 dtype
= TYPE_UNSIGNED (type
) ? 2 : 6;
2225 dtype
= TYPE_UNSIGNED (type
) ? 3 : 7;
2228 dtype
= TYPE_UNSIGNED (type
) ? 4 : 8;
2231 dtype
= TYPE_UNSIGNED (type
) ? 5 : 9;
2234 dtype
= TYPE_UNSIGNED (type
) ? 25 : 26;
2240 dtype
= GET_MODE_BITSIZE (TYPE_MODE (type
)) == 32 ? 52 : 53;
2244 if (TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
2245 && TYPE_VAX_FLOATING_POINT_P (type
))
2246 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
2258 dtype
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) == 32 ? 54: 55;
2269 /* Get the CLASS value. */
2272 case By_Descriptor_A
:
2275 case By_Descriptor_NCA
:
2278 case By_Descriptor_SB
:
2285 /* Make the type for a descriptor for VMS. The first four fields
2286 are the same for all types. */
2289 = chainon (field_list
,
2290 make_descriptor_field
2291 ("LENGTH", gnat_type_for_size (16, 1), record_type
,
2292 size_in_bytes (mech
== By_Descriptor_A
? inner_type
: type
)));
2294 field_list
= chainon (field_list
,
2295 make_descriptor_field ("DTYPE",
2296 gnat_type_for_size (8, 1),
2297 record_type
, size_int (dtype
)));
2298 field_list
= chainon (field_list
,
2299 make_descriptor_field ("CLASS",
2300 gnat_type_for_size (8, 1),
2301 record_type
, size_int (class)));
2304 = chainon (field_list
,
2305 make_descriptor_field
2307 build_pointer_type_for_mode (type
, SImode
, false), record_type
,
2309 build_pointer_type_for_mode (type
, SImode
, false),
2310 build (PLACEHOLDER_EXPR
, type
))));
2315 case By_Descriptor_S
:
2318 case By_Descriptor_SB
:
2320 = chainon (field_list
,
2321 make_descriptor_field
2322 ("SB_L1", gnat_type_for_size (32, 1), record_type
,
2323 TREE_CODE (type
) == ARRAY_TYPE
2324 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) : size_zero_node
));
2326 = chainon (field_list
,
2327 make_descriptor_field
2328 ("SB_L2", gnat_type_for_size (32, 1), record_type
,
2329 TREE_CODE (type
) == ARRAY_TYPE
2330 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) : size_zero_node
));
2333 case By_Descriptor_A
:
2334 case By_Descriptor_NCA
:
2335 field_list
= chainon (field_list
,
2336 make_descriptor_field ("SCALE",
2337 gnat_type_for_size (8, 1),
2341 field_list
= chainon (field_list
,
2342 make_descriptor_field ("DIGITS",
2343 gnat_type_for_size (8, 1),
2348 = chainon (field_list
,
2349 make_descriptor_field
2350 ("AFLAGS", gnat_type_for_size (8, 1), record_type
,
2351 size_int (mech
== By_Descriptor_NCA
2353 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2354 : (TREE_CODE (type
) == ARRAY_TYPE
2355 && TYPE_CONVENTION_FORTRAN_P (type
)
2358 field_list
= chainon (field_list
,
2359 make_descriptor_field ("DIMCT",
2360 gnat_type_for_size (8, 1),
2364 field_list
= chainon (field_list
,
2365 make_descriptor_field ("ARSIZE",
2366 gnat_type_for_size (32, 1),
2368 size_in_bytes (type
)));
2370 /* Now build a pointer to the 0,0,0... element. */
2371 tem
= build (PLACEHOLDER_EXPR
, type
);
2372 for (i
= 0, inner_type
= type
; i
< ndim
;
2373 i
++, inner_type
= TREE_TYPE (inner_type
))
2374 tem
= build (ARRAY_REF
, TREE_TYPE (inner_type
), tem
,
2375 convert (TYPE_DOMAIN (inner_type
), size_zero_node
),
2376 NULL_TREE
, NULL_TREE
);
2379 = chainon (field_list
,
2380 make_descriptor_field
2382 build_pointer_type_for_mode (inner_type
, SImode
, false),
2385 build_pointer_type_for_mode (inner_type
, SImode
,
2389 /* Next come the addressing coefficients. */
2391 for (i
= 0; i
< ndim
; i
++)
2395 = size_binop (MULT_EXPR
, tem
,
2396 size_binop (PLUS_EXPR
,
2397 size_binop (MINUS_EXPR
,
2398 TYPE_MAX_VALUE (idx_arr
[i
]),
2399 TYPE_MIN_VALUE (idx_arr
[i
])),
2402 fname
[0] = (mech
== By_Descriptor_NCA
? 'S' : 'M');
2403 fname
[1] = '0' + i
, fname
[2] = 0;
2405 = chainon (field_list
,
2406 make_descriptor_field (fname
,
2407 gnat_type_for_size (32, 1),
2408 record_type
, idx_length
));
2410 if (mech
== By_Descriptor_NCA
)
2414 /* Finally here are the bounds. */
2415 for (i
= 0; i
< ndim
; i
++)
2419 fname
[0] = 'L', fname
[1] = '0' + i
, fname
[2] = 0;
2421 = chainon (field_list
,
2422 make_descriptor_field
2423 (fname
, gnat_type_for_size (32, 1), record_type
,
2424 TYPE_MIN_VALUE (idx_arr
[i
])));
2428 = chainon (field_list
,
2429 make_descriptor_field
2430 (fname
, gnat_type_for_size (32, 1), record_type
,
2431 TYPE_MAX_VALUE (idx_arr
[i
])));
2436 post_error ("unsupported descriptor type for &", gnat_entity
);
2439 finish_record_type (record_type
, field_list
, false, true);
2440 create_type_decl (create_concat_name (gnat_entity
, "DESC"), record_type
,
2441 NULL
, true, false, gnat_entity
);
2446 /* Utility routine for above code to make a field. */
2449 make_descriptor_field (const char *name
, tree type
,
2450 tree rec_type
, tree initial
)
2453 = create_field_decl (get_identifier (name
), type
, rec_type
, 0, 0, 0, 0);
2455 DECL_INITIAL (field
) = initial
;
2459 /* Build a type to be used to represent an aliased object whose nominal
2460 type is an unconstrained array. This consists of a RECORD_TYPE containing
2461 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2462 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2463 is used to represent an arbitrary unconstrained object. Use NAME
2464 as the name of the record. */
2467 build_unc_object_type (tree template_type
, tree object_type
, tree name
)
2469 tree type
= make_node (RECORD_TYPE
);
2470 tree template_field
= create_field_decl (get_identifier ("BOUNDS"),
2471 template_type
, type
, 0, 0, 0, 1);
2472 tree array_field
= create_field_decl (get_identifier ("ARRAY"), object_type
,
2475 TYPE_NAME (type
) = name
;
2476 TYPE_CONTAINS_TEMPLATE_P (type
) = 1;
2477 finish_record_type (type
,
2478 chainon (chainon (NULL_TREE
, template_field
),
2485 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2486 the normal case this is just two adjustments, but we have more to do
2487 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
2490 update_pointer_to (tree old_type
, tree new_type
)
2492 tree ptr
= TYPE_POINTER_TO (old_type
);
2493 tree ref
= TYPE_REFERENCE_TO (old_type
);
2497 /* If this is the main variant, process all the other variants first. */
2498 if (TYPE_MAIN_VARIANT (old_type
) == old_type
)
2499 for (type
= TYPE_NEXT_VARIANT (old_type
); type
;
2500 type
= TYPE_NEXT_VARIANT (type
))
2501 update_pointer_to (type
, new_type
);
2503 /* If no pointer or reference, we are done. */
2507 /* Merge the old type qualifiers in the new type.
2509 Each old variant has qualifiers for specific reasons, and the new
2510 designated type as well. Each set of qualifiers represents useful
2511 information grabbed at some point, and merging the two simply unifies
2512 these inputs into the final type description.
2514 Consider for instance a volatile type frozen after an access to constant
2515 type designating it. After the designated type freeze, we get here with a
2516 volatile new_type and a dummy old_type with a readonly variant, created
2517 when the access type was processed. We shall make a volatile and readonly
2518 designated type, because that's what it really is.
2520 We might also get here for a non-dummy old_type variant with different
2521 qualifiers than the new_type ones, for instance in some cases of pointers
2522 to private record type elaboration (see the comments around the call to
2523 this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
2524 qualifiers in thoses cases too, to avoid accidentally discarding the
2525 initial set, and will often end up with old_type == new_type then. */
2526 new_type
= build_qualified_type (new_type
,
2527 TYPE_QUALS (old_type
)
2528 | TYPE_QUALS (new_type
));
2530 /* If the new type and the old one are identical, there is nothing to
2532 if (old_type
== new_type
)
2535 /* Otherwise, first handle the simple case. */
2536 if (TREE_CODE (new_type
) != UNCONSTRAINED_ARRAY_TYPE
)
2538 TYPE_POINTER_TO (new_type
) = ptr
;
2539 TYPE_REFERENCE_TO (new_type
) = ref
;
2541 for (; ptr
; ptr
= TYPE_NEXT_PTR_TO (ptr
))
2542 for (ptr1
= TYPE_MAIN_VARIANT (ptr
); ptr1
;
2543 ptr1
= TYPE_NEXT_VARIANT (ptr1
))
2545 TREE_TYPE (ptr1
) = new_type
;
2547 if (TYPE_NAME (ptr1
)
2548 && TREE_CODE (TYPE_NAME (ptr1
)) == TYPE_DECL
2549 && TREE_CODE (new_type
) != ENUMERAL_TYPE
)
2550 rest_of_decl_compilation (TYPE_NAME (ptr1
),
2551 global_bindings_p (), 0);
2554 for (; ref
; ref
= TYPE_NEXT_PTR_TO (ref
))
2555 for (ref1
= TYPE_MAIN_VARIANT (ref
); ref1
;
2556 ref1
= TYPE_NEXT_VARIANT (ref1
))
2558 TREE_TYPE (ref1
) = new_type
;
2560 if (TYPE_NAME (ref1
)
2561 && TREE_CODE (TYPE_NAME (ref1
)) == TYPE_DECL
2562 && TREE_CODE (new_type
) != ENUMERAL_TYPE
)
2563 rest_of_decl_compilation (TYPE_NAME (ref1
),
2564 global_bindings_p (), 0);
2568 /* Now deal with the unconstrained array case. In this case the "pointer"
2569 is actually a RECORD_TYPE where the types of both fields are
2570 pointers to void. In that case, copy the field list from the
2571 old type to the new one and update the fields' context. */
2572 else if (TREE_CODE (ptr
) != RECORD_TYPE
|| !TYPE_IS_FAT_POINTER_P (ptr
))
2577 tree new_obj_rec
= TYPE_OBJECT_RECORD_TYPE (new_type
);
2582 SET_DECL_ORIGINAL_FIELD (TYPE_FIELDS (ptr
),
2583 TYPE_FIELDS (TYPE_POINTER_TO (new_type
)));
2584 SET_DECL_ORIGINAL_FIELD (TREE_CHAIN (TYPE_FIELDS (ptr
)),
2585 TREE_CHAIN (TYPE_FIELDS
2586 (TYPE_POINTER_TO (new_type
))));
2588 TYPE_FIELDS (ptr
) = TYPE_FIELDS (TYPE_POINTER_TO (new_type
));
2589 DECL_CONTEXT (TYPE_FIELDS (ptr
)) = ptr
;
2590 DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr
))) = ptr
;
2592 /* Rework the PLACEHOLDER_EXPR inside the reference to the
2595 ??? This is now the only use of gnat_substitute_in_type, which
2596 is now a very "heavy" routine to do this, so it should be replaced
2598 ptr_temp_type
= TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr
)));
2599 new_ref
= build (COMPONENT_REF
, ptr_temp_type
,
2600 build (PLACEHOLDER_EXPR
, ptr
),
2601 TREE_CHAIN (TYPE_FIELDS (ptr
)), NULL_TREE
);
2604 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr
))),
2605 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr
))),
2606 TREE_CHAIN (TYPE_FIELDS (ptr
)), new_ref
));
2608 for (var
= TYPE_MAIN_VARIANT (ptr
); var
; var
= TYPE_NEXT_VARIANT (var
))
2609 SET_TYPE_UNCONSTRAINED_ARRAY (var
, new_type
);
2611 TYPE_POINTER_TO (new_type
) = TYPE_REFERENCE_TO (new_type
)
2612 = TREE_TYPE (new_type
) = ptr
;
2614 /* Now handle updating the allocation record, what the thin pointer
2615 points to. Update all pointers from the old record into the new
2616 one, update the types of the fields, and recompute the size. */
2618 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type
), new_obj_rec
);
2620 TREE_TYPE (TYPE_FIELDS (new_obj_rec
)) = TREE_TYPE (ptr_temp_type
);
2621 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec
)))
2622 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr
)));
2623 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec
)))
2624 = TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr
))));
2625 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec
)))
2626 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr
))));
2628 TYPE_SIZE (new_obj_rec
)
2629 = size_binop (PLUS_EXPR
,
2630 DECL_SIZE (TYPE_FIELDS (new_obj_rec
)),
2631 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec
))));
2632 TYPE_SIZE_UNIT (new_obj_rec
)
2633 = size_binop (PLUS_EXPR
,
2634 DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec
)),
2635 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec
))));
2636 rest_of_type_compilation (ptr
, global_bindings_p ());
2640 /* Convert a pointer to a constrained array into a pointer to a fat
2641 pointer. This involves making or finding a template. */
2644 convert_to_fat_pointer (tree type
, tree expr
)
2646 tree template_type
= TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type
))));
2647 tree
template, template_addr
;
2648 tree etype
= TREE_TYPE (expr
);
2650 /* If EXPR is a constant of zero, we make a fat pointer that has a null
2651 pointer to the template and array. */
2652 if (integer_zerop (expr
))
2654 gnat_build_constructor
2656 tree_cons (TYPE_FIELDS (type
),
2657 convert (TREE_TYPE (TYPE_FIELDS (type
)), expr
),
2658 tree_cons (TREE_CHAIN (TYPE_FIELDS (type
)),
2659 convert (build_pointer_type (template_type
),
2663 /* If EXPR is a thin pointer, make the template and data from the record. */
2665 else if (TYPE_THIN_POINTER_P (etype
))
2667 tree fields
= TYPE_FIELDS (TREE_TYPE (etype
));
2669 expr
= save_expr (expr
);
2670 if (TREE_CODE (expr
) == ADDR_EXPR
)
2671 expr
= TREE_OPERAND (expr
, 0);
2673 expr
= build1 (INDIRECT_REF
, TREE_TYPE (etype
), expr
);
2675 template = build_component_ref (expr
, NULL_TREE
, fields
, false);
2676 expr
= build_unary_op (ADDR_EXPR
, NULL_TREE
,
2677 build_component_ref (expr
, NULL_TREE
,
2678 TREE_CHAIN (fields
), false));
2681 /* Otherwise, build the constructor for the template. */
2682 template = build_template (template_type
, TREE_TYPE (etype
), expr
);
2684 template_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, template);
2686 /* The result is a CONSTRUCTOR for the fat pointer.
2688 If expr is an argument of a foreign convention subprogram, the type it
2689 points to is directly the component type. In this case, the expression
2690 type may not match the corresponding FIELD_DECL type at this point, so we
2691 call "convert" here to fix that up if necessary. This type consistency is
2692 required, for instance because it ensures that possible later folding of
2693 component_refs against this constructor always yields something of the
2694 same type as the initial reference.
2696 Note that the call to "build_template" above is still fine, because it
2697 will only refer to the provided template_type in this case. */
2699 gnat_build_constructor
2700 (type
, tree_cons (TYPE_FIELDS (type
),
2701 convert (TREE_TYPE (TYPE_FIELDS (type
)), expr
),
2702 tree_cons (TREE_CHAIN (TYPE_FIELDS (type
)),
2703 template_addr
, NULL_TREE
)));
2706 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2707 is something that is a fat pointer, so convert to it first if it EXPR
2708 is not already a fat pointer. */
2711 convert_to_thin_pointer (tree type
, tree expr
)
2713 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr
)))
2715 = convert_to_fat_pointer
2716 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type
))), expr
);
2718 /* We get the pointer to the data and use a NOP_EXPR to make it the
2720 expr
= build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (TREE_TYPE (expr
)),
2722 expr
= build1 (NOP_EXPR
, type
, expr
);
2727 /* Create an expression whose value is that of EXPR,
2728 converted to type TYPE. The TREE_TYPE of the value
2729 is always TYPE. This function implements all reasonable
2730 conversions; callers should filter out those that are
2731 not permitted by the language being compiled. */
2734 convert (tree type
, tree expr
)
2736 enum tree_code code
= TREE_CODE (type
);
2737 tree etype
= TREE_TYPE (expr
);
2738 enum tree_code ecode
= TREE_CODE (etype
);
2741 /* If EXPR is already the right type, we are done. */
2745 /* If the input type has padding, remove it by doing a component reference
2746 to the field. If the output type has padding, make a constructor
2747 to build the record. If both input and output have padding and are
2748 of variable size, do this as an unchecked conversion. */
2749 else if (ecode
== RECORD_TYPE
&& code
== RECORD_TYPE
2750 && TYPE_IS_PADDING_P (type
) && TYPE_IS_PADDING_P (etype
)
2751 && (!TREE_CONSTANT (TYPE_SIZE (type
))
2752 || !TREE_CONSTANT (TYPE_SIZE (etype
))))
2754 else if (ecode
== RECORD_TYPE
&& TYPE_IS_PADDING_P (etype
))
2756 /* If we have just converted to this padded type, just get
2757 the inner expression. */
2758 if (TREE_CODE (expr
) == CONSTRUCTOR
2759 && CONSTRUCTOR_ELTS (expr
)
2760 && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr
)) == TYPE_FIELDS (etype
))
2761 return TREE_VALUE (CONSTRUCTOR_ELTS (expr
));
2763 return convert (type
,
2764 build_component_ref (expr
, NULL_TREE
,
2765 TYPE_FIELDS (etype
), false));
2767 else if (code
== RECORD_TYPE
&& TYPE_IS_PADDING_P (type
))
2769 /* If we previously converted from another type and our type is
2770 of variable size, remove the conversion to avoid the need for
2771 variable-size temporaries. */
2772 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
2773 && !TREE_CONSTANT (TYPE_SIZE (type
)))
2774 expr
= TREE_OPERAND (expr
, 0);
2776 /* If we are just removing the padding from expr, convert the original
2777 object if we have variable size. That will avoid the need
2778 for some variable-size temporaries. */
2779 if (TREE_CODE (expr
) == COMPONENT_REF
2780 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr
, 0))) == RECORD_TYPE
2781 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr
, 0)))
2782 && !TREE_CONSTANT (TYPE_SIZE (type
)))
2783 return convert (type
, TREE_OPERAND (expr
, 0));
2785 /* If the result type is a padded type with a self-referentially-sized
2786 field and the expression type is a record, do this as an
2787 unchecked converstion. */
2788 else if (TREE_CODE (etype
) == RECORD_TYPE
2789 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type
))))
2790 return unchecked_convert (type
, expr
, false);
2794 gnat_build_constructor (type
,
2795 tree_cons (TYPE_FIELDS (type
),
2797 (TYPE_FIELDS (type
)),
2802 /* If the input is a biased type, adjust first. */
2803 if (ecode
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (etype
))
2804 return convert (type
, fold (build (PLUS_EXPR
, TREE_TYPE (etype
),
2805 fold (build1 (NOP_EXPR
,
2806 TREE_TYPE (etype
), expr
)),
2807 TYPE_MIN_VALUE (etype
))));
2809 /* If the input is a left-justified modular type, we need to extract
2810 the actual object before converting it to any other type with the
2811 exception of an unconstrained array. */
2812 if (ecode
== RECORD_TYPE
&& TYPE_LEFT_JUSTIFIED_MODULAR_P (etype
)
2813 && code
!= UNCONSTRAINED_ARRAY_TYPE
)
2814 return convert (type
, build_component_ref (expr
, NULL_TREE
,
2815 TYPE_FIELDS (etype
), false));
2817 /* If converting to a type that contains a template, convert to the data
2818 type and then build the template. */
2819 if (code
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (type
))
2821 tree obj_type
= TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type
)));
2823 /* If the source already has a template, get a reference to the
2824 associated array only, as we are going to rebuild a template
2825 for the target type anyway. */
2826 expr
= maybe_unconstrained_array (expr
);
2829 gnat_build_constructor
2831 tree_cons (TYPE_FIELDS (type
),
2832 build_template (TREE_TYPE (TYPE_FIELDS (type
)),
2833 obj_type
, NULL_TREE
),
2834 tree_cons (TREE_CHAIN (TYPE_FIELDS (type
)),
2835 convert (obj_type
, expr
), NULL_TREE
)));
2838 /* There are some special cases of expressions that we process
2840 switch (TREE_CODE (expr
))
2846 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
2847 conversion in gnat_expand_expr. NULL_EXPR does not represent
2848 and actual value, so no conversion is needed. */
2849 expr
= copy_node (expr
);
2850 TREE_TYPE (expr
) = type
;
2854 /* If we are converting a STRING_CST to another constrained array type,
2855 just make a new one in the proper type. */
2856 if (code
== ecode
&& AGGREGATE_TYPE_P (etype
)
2857 && !(TREE_CODE (TYPE_SIZE (etype
)) == INTEGER_CST
2858 && TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
2859 && (TREE_CODE (expr
) == STRING_CST
2860 || get_alias_set (etype
) == get_alias_set (type
)))
2862 expr
= copy_node (expr
);
2863 TREE_TYPE (expr
) = type
;
2868 case UNCONSTRAINED_ARRAY_REF
:
2869 /* Convert this to the type of the inner array by getting the address of
2870 the array from the template. */
2871 expr
= build_unary_op (INDIRECT_REF
, NULL_TREE
,
2872 build_component_ref (TREE_OPERAND (expr
, 0),
2873 get_identifier ("P_ARRAY"),
2875 etype
= TREE_TYPE (expr
);
2876 ecode
= TREE_CODE (etype
);
2879 case VIEW_CONVERT_EXPR
:
2880 if (AGGREGATE_TYPE_P (type
) && AGGREGATE_TYPE_P (etype
)
2881 && !TYPE_FAT_POINTER_P (type
) && !TYPE_FAT_POINTER_P (etype
))
2882 return convert (type
, TREE_OPERAND (expr
, 0));
2886 /* If both types are record types, just convert the pointer and
2887 make a new INDIRECT_REF.
2889 ??? Disable this for now since it causes problems with the
2890 code in build_binary_op for MODIFY_EXPR which wants to
2891 strip off conversions. But that code really is a mess and
2892 we need to do this a much better way some time. */
2894 && (TREE_CODE (type
) == RECORD_TYPE
2895 || TREE_CODE (type
) == UNION_TYPE
)
2896 && (TREE_CODE (etype
) == RECORD_TYPE
2897 || TREE_CODE (etype
) == UNION_TYPE
)
2898 && !TYPE_FAT_POINTER_P (type
) && !TYPE_FAT_POINTER_P (etype
))
2899 return build_unary_op (INDIRECT_REF
, NULL_TREE
,
2900 convert (build_pointer_type (type
),
2901 TREE_OPERAND (expr
, 0)));
2908 /* Check for converting to a pointer to an unconstrained array. */
2909 if (TYPE_FAT_POINTER_P (type
) && !TYPE_FAT_POINTER_P (etype
))
2910 return convert_to_fat_pointer (type
, expr
);
2912 /* If we're converting between two aggregate types that have the same main
2913 variant, just make a VIEW_CONVER_EXPR. */
2914 else if (AGGREGATE_TYPE_P (type
)
2915 && TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (etype
))
2916 return build1 (VIEW_CONVERT_EXPR
, type
, expr
);
2918 /* In all other cases of related types, make a NOP_EXPR. */
2919 else if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (etype
)
2920 || (code
== INTEGER_CST
&& ecode
== INTEGER_CST
2921 && (type
== TREE_TYPE (etype
) || etype
== TREE_TYPE (type
))))
2922 return fold (build1 (NOP_EXPR
, type
, expr
));
2927 return build1 (CONVERT_EXPR
, type
, expr
);
2930 return fold (build1 (NOP_EXPR
, type
, gnat_truthvalue_conversion (expr
)));
2933 if (TYPE_HAS_ACTUAL_BOUNDS_P (type
)
2934 && (ecode
== ARRAY_TYPE
|| ecode
== UNCONSTRAINED_ARRAY_TYPE
2935 || (ecode
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (etype
))))
2936 return unchecked_convert (type
, expr
, false);
2937 else if (TYPE_BIASED_REPRESENTATION_P (type
))
2938 return fold (build1 (CONVERT_EXPR
, type
,
2939 fold (build (MINUS_EXPR
, TREE_TYPE (type
),
2940 convert (TREE_TYPE (type
), expr
),
2941 TYPE_MIN_VALUE (type
)))));
2943 /* ... fall through ... */
2946 return fold (convert_to_integer (type
, expr
));
2949 case REFERENCE_TYPE
:
2950 /* If converting between two pointers to records denoting
2951 both a template and type, adjust if needed to account
2952 for any differing offsets, since one might be negative. */
2953 if (TYPE_THIN_POINTER_P (etype
) && TYPE_THIN_POINTER_P (type
))
2956 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype
))),
2957 bit_position (TYPE_FIELDS (TREE_TYPE (type
))));
2958 tree byte_diff
= size_binop (CEIL_DIV_EXPR
, bit_diff
,
2959 sbitsize_int (BITS_PER_UNIT
));
2961 expr
= build1 (NOP_EXPR
, type
, expr
);
2962 TREE_CONSTANT (expr
) = TREE_CONSTANT (TREE_OPERAND (expr
, 0));
2963 if (integer_zerop (byte_diff
))
2966 return build_binary_op (PLUS_EXPR
, type
, expr
,
2967 fold (convert_to_pointer (type
, byte_diff
)));
2970 /* If converting to a thin pointer, handle specially. */
2971 if (TYPE_THIN_POINTER_P (type
)
2972 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type
)))
2973 return convert_to_thin_pointer (type
, expr
);
2975 /* If converting fat pointer to normal pointer, get the pointer to the
2976 array and then convert it. */
2977 else if (TYPE_FAT_POINTER_P (etype
))
2978 expr
= build_component_ref (expr
, get_identifier ("P_ARRAY"),
2981 return fold (convert_to_pointer (type
, expr
));
2984 return fold (convert_to_real (type
, expr
));
2987 if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type
) && !AGGREGATE_TYPE_P (etype
))
2989 gnat_build_constructor
2990 (type
, tree_cons (TYPE_FIELDS (type
),
2991 convert (TREE_TYPE (TYPE_FIELDS (type
)), expr
),
2994 /* ... fall through ... */
2997 /* In these cases, assume the front-end has validated the conversion.
2998 If the conversion is valid, it will be a bit-wise conversion, so
2999 it can be viewed as an unchecked conversion. */
3000 return unchecked_convert (type
, expr
, false);
3003 /* Just validate that the type is indeed that of a field
3004 of the type. Then make the simple conversion. */
3005 for (tem
= TYPE_FIELDS (type
); tem
; tem
= TREE_CHAIN (tem
))
3007 if (TREE_TYPE (tem
) == etype
)
3008 return build1 (CONVERT_EXPR
, type
, expr
);
3009 else if (TREE_CODE (TREE_TYPE (tem
)) == RECORD_TYPE
3010 && (TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (tem
))
3011 || TYPE_IS_PADDING_P (TREE_TYPE (tem
)))
3012 && TREE_TYPE (TYPE_FIELDS (TREE_TYPE (tem
))) == etype
)
3013 return build1 (CONVERT_EXPR
, type
,
3014 convert (TREE_TYPE (tem
), expr
));
3019 case UNCONSTRAINED_ARRAY_TYPE
:
3020 /* If EXPR is a constrained array, take its address, convert it to a
3021 fat pointer, and then dereference it. Likewise if EXPR is a
3022 record containing both a template and a constrained array.
3023 Note that a record representing a left justified modular type
3024 always represents a packed constrained array. */
3025 if (ecode
== ARRAY_TYPE
3026 || (ecode
== INTEGER_TYPE
&& TYPE_HAS_ACTUAL_BOUNDS_P (etype
))
3027 || (ecode
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (etype
))
3028 || (ecode
== RECORD_TYPE
&& TYPE_LEFT_JUSTIFIED_MODULAR_P (etype
)))
3031 (INDIRECT_REF
, NULL_TREE
,
3032 convert_to_fat_pointer (TREE_TYPE (type
),
3033 build_unary_op (ADDR_EXPR
,
3036 /* Do something very similar for converting one unconstrained
3037 array to another. */
3038 else if (ecode
== UNCONSTRAINED_ARRAY_TYPE
)
3040 build_unary_op (INDIRECT_REF
, NULL_TREE
,
3041 convert (TREE_TYPE (type
),
3042 build_unary_op (ADDR_EXPR
,
3048 return fold (convert_to_complex (type
, expr
));
3055 /* Remove all conversions that are done in EXP. This includes converting
3056 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3057 is true, always return the address of the containing object even if
3058 the address is not bit-aligned. */
3061 remove_conversions (tree exp
, bool true_address
)
3063 switch (TREE_CODE (exp
))
3067 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
3068 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp
)))
3069 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp
)), true);
3073 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == RECORD_TYPE
3074 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
3075 return remove_conversions (TREE_OPERAND (exp
, 0), true_address
);
3078 case VIEW_CONVERT_EXPR
: case NON_LVALUE_EXPR
:
3079 case NOP_EXPR
: case CONVERT_EXPR
:
3080 return remove_conversions (TREE_OPERAND (exp
, 0), true_address
);
3089 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3090 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3091 likewise return an expression pointing to the underlying array. */
3094 maybe_unconstrained_array (tree exp
)
3096 enum tree_code code
= TREE_CODE (exp
);
3099 switch (TREE_CODE (TREE_TYPE (exp
)))
3101 case UNCONSTRAINED_ARRAY_TYPE
:
3102 if (code
== UNCONSTRAINED_ARRAY_REF
)
3105 = build_unary_op (INDIRECT_REF
, NULL_TREE
,
3106 build_component_ref (TREE_OPERAND (exp
, 0),
3107 get_identifier ("P_ARRAY"),
3109 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp
);
3113 else if (code
== NULL_EXPR
)
3114 return build1 (NULL_EXPR
,
3115 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3116 (TREE_TYPE (TREE_TYPE (exp
))))),
3117 TREE_OPERAND (exp
, 0));
3120 /* If this is a padded type, convert to the unpadded type and see if
3121 it contains a template. */
3122 if (TYPE_IS_PADDING_P (TREE_TYPE (exp
)))
3124 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp
))), exp
);
3125 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3126 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
3128 build_component_ref (new, NULL_TREE
,
3129 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
3132 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp
)))
3134 build_component_ref (exp
, NULL_TREE
,
3135 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp
))), 0);
3145 /* Return an expression that does an unchecked converstion of EXPR to TYPE.
3146 If NOTRUNC_P is true, truncation operations should be suppressed. */
3149 unchecked_convert (tree type
, tree expr
, bool notrunc_p
)
3151 tree etype
= TREE_TYPE (expr
);
3153 /* If the expression is already the right type, we are done. */
3157 /* If both types types are integral just do a normal conversion.
3158 Likewise for a conversion to an unconstrained array. */
3159 if ((((INTEGRAL_TYPE_P (type
)
3160 && !(TREE_CODE (type
) == INTEGER_TYPE
3161 && TYPE_VAX_FLOATING_POINT_P (type
)))
3162 || (POINTER_TYPE_P (type
) && ! TYPE_THIN_POINTER_P (type
))
3163 || (TREE_CODE (type
) == RECORD_TYPE
3164 && TYPE_LEFT_JUSTIFIED_MODULAR_P (type
)))
3165 && ((INTEGRAL_TYPE_P (etype
)
3166 && !(TREE_CODE (etype
) == INTEGER_TYPE
3167 && TYPE_VAX_FLOATING_POINT_P (etype
)))
3168 || (POINTER_TYPE_P (etype
) && !TYPE_THIN_POINTER_P (etype
))
3169 || (TREE_CODE (etype
) == RECORD_TYPE
3170 && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype
))))
3171 || TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
3175 if (TREE_CODE (etype
) == INTEGER_TYPE
3176 && TYPE_BIASED_REPRESENTATION_P (etype
))
3178 tree ntype
= copy_type (etype
);
3180 TYPE_BIASED_REPRESENTATION_P (ntype
) = 0;
3181 TYPE_MAIN_VARIANT (ntype
) = ntype
;
3182 expr
= build1 (NOP_EXPR
, ntype
, expr
);
3185 if (TREE_CODE (type
) == INTEGER_TYPE
3186 && TYPE_BIASED_REPRESENTATION_P (type
))
3188 rtype
= copy_type (type
);
3189 TYPE_BIASED_REPRESENTATION_P (rtype
) = 0;
3190 TYPE_MAIN_VARIANT (rtype
) = rtype
;
3193 expr
= convert (rtype
, expr
);
3195 expr
= build1 (NOP_EXPR
, type
, expr
);
3198 /* If we are converting TO an integral type whose precision is not the
3199 same as its size, first unchecked convert to a record that contains
3200 an object of the output type. Then extract the field. */
3201 else if (INTEGRAL_TYPE_P (type
) && TYPE_RM_SIZE (type
)
3202 && 0 != compare_tree_int (TYPE_RM_SIZE (type
),
3203 GET_MODE_BITSIZE (TYPE_MODE (type
))))
3205 tree rec_type
= make_node (RECORD_TYPE
);
3206 tree field
= create_field_decl (get_identifier ("OBJ"), type
,
3207 rec_type
, 1, 0, 0, 0);
3209 TYPE_FIELDS (rec_type
) = field
;
3210 layout_type (rec_type
);
3212 expr
= unchecked_convert (rec_type
, expr
, notrunc_p
);
3213 expr
= build_component_ref (expr
, NULL_TREE
, field
, 0);
3216 /* Similarly for integral input type whose precision is not equal to its
3218 else if (INTEGRAL_TYPE_P (etype
) && TYPE_RM_SIZE (etype
)
3219 && 0 != compare_tree_int (TYPE_RM_SIZE (etype
),
3220 GET_MODE_BITSIZE (TYPE_MODE (etype
))))
3222 tree rec_type
= make_node (RECORD_TYPE
);
3224 = create_field_decl (get_identifier ("OBJ"), etype
, rec_type
,
3227 TYPE_FIELDS (rec_type
) = field
;
3228 layout_type (rec_type
);
3230 expr
= gnat_build_constructor (rec_type
, build_tree_list (field
, expr
));
3231 expr
= unchecked_convert (type
, expr
, notrunc_p
);
3234 /* We have a special case when we are converting between two
3235 unconstrained array types. In that case, take the address,
3236 convert the fat pointer types, and dereference. */
3237 else if (TREE_CODE (etype
) == UNCONSTRAINED_ARRAY_TYPE
3238 && TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
3239 expr
= build_unary_op (INDIRECT_REF
, NULL_TREE
,
3240 build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (type
),
3241 build_unary_op (ADDR_EXPR
, NULL_TREE
,
3245 expr
= maybe_unconstrained_array (expr
);
3247 /* There's no point in doing two unchecked conversions in a row. */
3248 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
3249 expr
= TREE_OPERAND (expr
, 0);
3251 etype
= TREE_TYPE (expr
);
3252 expr
= build1 (VIEW_CONVERT_EXPR
, type
, expr
);
3255 /* If the result is an integral type whose size is not equal to
3256 the size of the underlying machine type, sign- or zero-extend
3257 the result. We need not do this in the case where the input is
3258 an integral type of the same precision and signedness or if the output
3259 is a biased type or if both the input and output are unsigned. */
3261 && INTEGRAL_TYPE_P (type
) && TYPE_RM_SIZE (type
)
3262 && !(TREE_CODE (type
) == INTEGER_TYPE
3263 && TYPE_BIASED_REPRESENTATION_P (type
))
3264 && 0 != compare_tree_int (TYPE_RM_SIZE (type
),
3265 GET_MODE_BITSIZE (TYPE_MODE (type
)))
3266 && !(INTEGRAL_TYPE_P (etype
)
3267 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (etype
)
3268 && operand_equal_p (TYPE_RM_SIZE (type
),
3269 (TYPE_RM_SIZE (etype
) != 0
3270 ? TYPE_RM_SIZE (etype
) : TYPE_SIZE (etype
)),
3272 && !(TYPE_UNSIGNED (type
) && TYPE_UNSIGNED (etype
)))
3274 tree base_type
= gnat_type_for_mode (TYPE_MODE (type
),
3275 TYPE_UNSIGNED (type
));
3277 = convert (base_type
,
3278 size_binop (MINUS_EXPR
,
3280 (GET_MODE_BITSIZE (TYPE_MODE (type
))),
3281 TYPE_RM_SIZE (type
)));
3284 build_binary_op (RSHIFT_EXPR
, base_type
,
3285 build_binary_op (LSHIFT_EXPR
, base_type
,
3286 convert (base_type
, expr
),
3291 /* An unchecked conversion should never raise Constraint_Error. The code
3292 below assumes that GCC's conversion routines overflow the same way that
3293 the underlying hardware does. This is probably true. In the rare case
3294 when it is false, we can rely on the fact that such conversions are
3295 erroneous anyway. */
3296 if (TREE_CODE (expr
) == INTEGER_CST
)
3297 TREE_OVERFLOW (expr
) = TREE_CONSTANT_OVERFLOW (expr
) = 0;
3299 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
3300 show no longer constant. */
3301 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
3302 && !operand_equal_p (TYPE_SIZE_UNIT (type
), TYPE_SIZE_UNIT (etype
),
3304 TREE_CONSTANT (expr
) = 0;
3309 #include "gt-ada-utils.h"
3310 #include "gtype-ada.h"