/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2006, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software; you can redistribute it and/or modify it under    *
 * terms of the GNU General Public License as published by the Free Soft-   *
 * ware Foundation; either version 2, or (at your option) any later ver-    *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY   *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for more details.  You should have received a copy of the GNU General    *
 * Public License distributed with GNAT; see file COPYING.  If not, write   *
 * to the Free Software Foundation, 51 Franklin Street, Fifth Floor,        *
 * Boston, MA 02110-1301, USA.                                              *
 *                                                                          *
 * GNAT was originally developed by the GNAT team at New York University.   *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/
#include "coretypes.h"
#include "tree-inline.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif

#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD  BITS_PER_WORD
#endif

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* List of functions called automatically at the beginning and
   end of execution, on targets without .ctors/.dtors sections.  */
tree static_ctors;
tree static_dtors;

/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);

/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the Alpha back-end which
   unconditionally puts them on its builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "const",   0, 0, true,  false, false, handle_const_attribute   },
  { "nothrow", 0, 0, true,  false, false, handle_nothrow_attribute },
  { NULL,      0, 0, false, false, false, NULL }
};

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct gnat_binding_level GTY((chain_next ("%h.chain")))
{
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

struct language_function GTY(())

static void gnat_install_builtins (void);
static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static bool value_zerop (tree);
static void gnat_gimplify_function (tree);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static tree convert_to_thin_pointer (tree, tree);
static tree make_descriptor_field (const char *, tree, tree, tree);
static bool potential_alignment_gap (tree, tree, tree);

/* Initialize the association of GNAT nodes to GCC trees.  */

void
init_gnat_to_gnu (void)
{
  associate_gnat_to_gnu
    = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  GNU_DECL is the GCC tree
   which is to be associated with GNAT_ENTITY.  Such GCC tree node is always
   a ..._DECL node.  If NO_CHECK is nonzero, the latter check is suppressed.

   If GNU_DECL is zero, a previous association is to be reset.  */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  Raise gigi 401 if not.  Usually, this
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem.  */
  gcc_assert (!gnu_decl
              || (!associate_gnat_to_gnu[gnat_entity - First_Node_Id]
                  && (no_check || DECL_P (gnu_decl))));
  associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
}

/* GNAT_ENTITY is a GNAT tree node for a defining identifier.
   Return the ..._DECL node that was associated with it.  If there is no tree
   node associated with GNAT_ENTITY, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  gcc_assert (associate_gnat_to_gnu[gnat_entity - First_Node_Id]);
  return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return (associate_gnat_to_gnu[gnat_entity - First_Node_Id]) != 0;
}
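
/* Usage sketch (illustrative only; GNAT_ENTITY and GNU_DECL stand for values
   a caller would already have): elaboration code typically guards against
   re-elaboration with present_gnu_tree, records the GCC decl it builds with
   save_gnu_tree, and fetches it back later with get_gnu_tree:

     if (!present_gnu_tree (gnat_entity))
       save_gnu_tree (gnat_entity, gnu_decl, false);
     ...
     gnu_decl = get_gnu_tree (gnat_entity);  */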

/* Return nonzero if we are currently in the global binding level.  */

int
global_bindings_p (void)
{
  return ((force_global || !current_function_decl) ? -1 : 0);
}

/* Enter a new binding level.  */

void
gnat_pushlevel ()
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel
      = (struct gnat_binding_level *)
        ggc_alloc (sizeof (struct gnat_binding_level));

  /* Use a free BLOCK, if any; otherwise, allocate one.  */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = TREE_CHAIN (free_block_chain);
      TREE_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent.  */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of levels that are
     active.  */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}

/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
}

/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}

/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl ()
{
  return current_binding_level->jmpbuf_decl;
}

/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel ()
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK don't do anything.  Otherwise, if there
     are no variables, free the block and merge its subblocks into those of its
     parent block.  Otherwise, add it to the list of its parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (BLOCK_VARS (block) == NULL_TREE)
    {
      BLOCK_SUBBLOCKS (level->chain->block)
        = chainon (BLOCK_SUBBLOCKS (block),
                   BLOCK_SUBBLOCKS (level->chain->block));
      TREE_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      TREE_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}

/* Insert BLOCK at the end of the list of subblocks of the
   current binding level.  This is used when a BIND_EXPR is expanded,
   to handle the BLOCK node inside the BIND_EXPR.  */

void
insert_block (tree block)
{
  TREE_USED (block) = 1;
  TREE_CHAIN (block) = BLOCK_SUBBLOCKS (current_binding_level->block);
  BLOCK_SUBBLOCKS (current_binding_level->block) = block;
}

/* Records a ..._DECL node DECL as belonging to the current lexical scope
   and uses GNAT_NODE for location information and propagating flags.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  /* If at top level, there is no context.  But PARM_DECLs always go in the
     level of their function.  */
  if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
    DECL_CONTEXT (decl) = 0;
  else
    {
      DECL_CONTEXT (decl) = current_function_decl;

      /* Functions imported in another function are not really nested.  */
      if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
        DECL_NO_STATIC_CHAIN (decl) = 1;
    }

  TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  We don't do this for global
     variables.  Also, don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
     the list.  They will cause trouble with the debugger and aren't needed
     anyway.  */
  if (!global_bindings_p ()
      && (TREE_CODE (decl) != TYPE_DECL
          || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE))
    {
      TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
      BLOCK_VARS (current_binding_level->block) = decl;
    }

  /* For the declaration of a type, set its name if it either is not already
     set, was set to an IDENTIFIER_NODE, indicating an internal name,
     or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in this
     function in c-decl.c makes a copy of the type node here, but that may
     cause us trouble with incomplete types, so let's not try it (at least
     for now).  */

  if (TREE_CODE (decl) == TYPE_DECL
      && (!TYPE_NAME (TREE_TYPE (decl))
          || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
          || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
              && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
              && !DECL_ARTIFICIAL (decl))))
    TYPE_NAME (TREE_TYPE (decl)) = decl;

  /*  if (TREE_CODE (decl) != CONST_DECL)
      rest_of_decl_compilation (decl, global_bindings_p (), 0); */
}

/* Do little here.  Set up the standard declarations later after the
   front end has been run.  */

void
gnat_init_decl_processing (void)
{
  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;

  build_common_tree_nodes (true, true);

  /* In Ada, we use a signed type for SIZETYPE.  Use the signed type
     corresponding to the size of Pmode.  In most cases when ptr_mode and
     Pmode differ, C will use the width of ptr_mode as sizetype.  But we get
     far better code using the width of Pmode.  Make this here since we need
     this before we can expand the GNAT types.  */
  size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
  set_sizetype (size_type_node);
  build_common_tree_nodes_2 (0);

  /* Give names and make TYPE_DECLs for common types.  */
  gnat_pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype),
                 Empty);
  gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
                             integer_type_node),
                 Empty);
  gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
                             char_type_node),
                 Empty);
  gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("long integer"),
                             long_integer_type_node),
                 Empty);

  ptr_void_type_node = build_pointer_type (void_type_node);

  gnat_install_builtins ();
}

/* Install the builtin functions the middle-end needs.  */

static void
gnat_install_builtins ()
{
  /* Builtins used by generic optimizers.  */
  build_common_builtin_nodes ();

  /* Target specific builtins, such as the AltiVec family on ppc.  */
  targetm.init_builtins ();
}

/* Create the predefined scalar types such as `integer_type_node' needed
   in the gcc back-end and initialize the global binding level.  */

void
init_gigi_decls (tree long_long_float_type, tree exception_type)
{
  tree endlink, decl;
  unsigned int i;

  /* Set the types that GCC and Gigi use from the front end.  We would like
     to do this for char_type_node, but it needs to correspond to the C
     char type.  */
  if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
    {
      /* In this case, the builtin floating point types are VAX float,
         so make up a type for use.  */
      longest_float_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
      layout_type (longest_float_type_node);
      create_type_decl (get_identifier ("longest float type"),
                        longest_float_type_node, NULL, false, true, Empty);
    }
  else
    longest_float_type_node = TREE_TYPE (long_long_float_type);

  except_type_node = TREE_TYPE (exception_type);

  unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
  create_type_decl (get_identifier ("unsigned int"), unsigned_type_node,
                    NULL, false, true, Empty);

  void_type_decl_node = create_type_decl (get_identifier ("void"),
                                          void_type_node, NULL, false, true,
                                          Empty);

  void_ftype = build_function_type (void_type_node, NULL_TREE);
  ptr_void_ftype = build_pointer_type (void_ftype);

  /* Now declare runtime functions.  */
  endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);

  /* malloc is a function declaration tree for a function to allocate
     memory.  */
  malloc_decl
    = create_subprog_decl (get_identifier ("__gnat_malloc"), NULL_TREE,
                           build_function_type (ptr_void_type_node,
                                                tree_cons (NULL_TREE,
                                                           sizetype,
                                                           endlink)),
                           NULL_TREE, false, true, true, NULL,
                           Empty);

  /* free is a function declaration tree for a function to free memory.  */
  free_decl
    = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
                           build_function_type (void_type_node,
                                                tree_cons (NULL_TREE,
                                                           ptr_void_type_node,
                                                           endlink)),
                           NULL_TREE, false, true, true, NULL, Empty);

  /* Make the types and functions used for exception processing.  */
  jmpbuf_type
    = build_array_type (gnat_type_for_mode (Pmode, 0),
                        build_index_type (build_int_cst (NULL_TREE, 5)));
  create_type_decl (get_identifier ("JMPBUF_T"), jmpbuf_type, NULL,
                    true, true, Empty);
  jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);

  /* Functions to get and set the jumpbuf pointer for the current thread.  */
  get_jmpbuf_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
       NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
       NULL_TREE, false, true, true, NULL, Empty);
  /* Avoid creating superfluous edges to __builtin_setjmp receivers.  */
  DECL_IS_PURE (get_jmpbuf_decl) = 1;

  set_jmpbuf_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
       NULL_TREE,
       build_function_type (void_type_node,
                            tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, false, true, true, NULL, Empty);

  /* Function to get the current exception.  */
  get_excptr_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__get_gnat_exception"),
       NULL_TREE,
       build_function_type (build_pointer_type (except_type_node), NULL_TREE),
       NULL_TREE, false, true, true, NULL, Empty);
  /* Avoid creating superfluous edges to __builtin_setjmp receivers.  */
  DECL_IS_PURE (get_excptr_decl) = 1;

  /* Functions that raise exceptions.  */
  raise_nodefer_decl
    = create_subprog_decl
      (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
       build_function_type (void_type_node,
                            tree_cons (NULL_TREE,
                                       build_pointer_type (except_type_node),
                                       endlink)),
       NULL_TREE, false, true, true, NULL, Empty);

  /* Dummy objects to materialize "others" and "all others" in the exception
     tables.  These are exported by a-exexpr.adb, so see this unit for the
     types to use.  */

  others_decl
    = create_var_decl (get_identifier ("OTHERS"),
                       get_identifier ("__gnat_others_value"),
                       integer_type_node, 0, 1, 0, 1, 1, 0, Empty);

  all_others_decl
    = create_var_decl (get_identifier ("ALL_OTHERS"),
                       get_identifier ("__gnat_all_others_value"),
                       integer_type_node, 0, 1, 0, 1, 1, 0, Empty);

  /* Hooks to call when entering/leaving an exception handler.  */
  begin_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
                           build_function_type (void_type_node,
                                                tree_cons (NULL_TREE,
                                                           ptr_void_type_node,
                                                           endlink)),
                           NULL_TREE, false, true, true, NULL, Empty);

  end_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
                           build_function_type (void_type_node,
                                                tree_cons (NULL_TREE,
                                                           ptr_void_type_node,
                                                           endlink)),
                           NULL_TREE, false, true, true, NULL, Empty);

  /* If in no exception handlers mode, all raise statements are redirected to
     __gnat_last_chance_handler.  No need to redefine raise_nodefer_decl, since
     this procedure will never be called in this mode.  */
  if (No_Exception_Handlers_Set ())
    {
      decl
        = create_subprog_decl
          (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
           build_function_type (void_type_node,
                                tree_cons (NULL_TREE,
                                           build_pointer_type (char_type_node),
                                           tree_cons (NULL_TREE,
                                                      integer_type_node,
                                                      endlink))),
           NULL_TREE, false, true, true, NULL, Empty);

      for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
        gnat_raise_decls[i] = decl;
    }
  else
    /* Otherwise, make one decl for each exception reason.  */
    for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
      {
        char name[17];

        sprintf (name, "__gnat_rcheck_%.2d", i);
        gnat_raise_decls[i]
          = create_subprog_decl
            (get_identifier (name), NULL_TREE,
             build_function_type (void_type_node,
                                  tree_cons (NULL_TREE,
                                             build_pointer_type
                                             (char_type_node),
                                             tree_cons (NULL_TREE,
                                                        integer_type_node,
                                                        endlink))),
             NULL_TREE, false, true, true, NULL, Empty);
      }

  /* Indicate that these never return.  */
  TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
  TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
  TREE_TYPE (raise_nodefer_decl)
    = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
                            TYPE_QUAL_VOLATILE);

  for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
    {
      TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
      TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
      TREE_TYPE (gnat_raise_decls[i])
        = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
                                TYPE_QUAL_VOLATILE);
    }

  /* setjmp returns an integer and has one operand, which is a pointer to
     a jmpbuf.  */
  setjmp_decl
    = create_subprog_decl
      (get_identifier ("__builtin_setjmp"), NULL_TREE,
       build_function_type (integer_type_node,
                            tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, false, true, true, NULL, Empty);

  DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;

  /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
     address.  */
  update_setjmp_buf_decl
    = create_subprog_decl
      (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
       build_function_type (void_type_node,
                            tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, false, true, true, NULL, Empty);

  DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;

  main_identifier_node = get_identifier ("main");
}

/* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL nodes
   (FIELDLIST), finish constructing the record or union type.  If HAS_REP is
   true, this record has a rep clause; don't call layout_type but merely set
   the size and alignment ourselves.  If DEFER_DEBUG is true, do not call
   the debugging routines on this type; it will be done later.  */

void
finish_record_type (tree record_type, tree fieldlist, bool has_rep,
                    bool defer_debug)
{
  enum tree_code code = TREE_CODE (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  bool var_size = false;
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = fieldlist;
  TYPE_STUB_DECL (record_type)
    = build_decl (TYPE_DECL, NULL_TREE, record_type);

  /* We don't need both the typedef name and the record name output in
     the debugging information, since they are the same.  */
  DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (has_rep)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
      TYPE_MODE (record_type) = BLKmode;

      if (!had_size_unit)
        TYPE_SIZE_UNIT (record_type) = size_zero_node;
      if (!had_size)
        TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
         out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
        code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
         case where there is a rep clause but all fields have errors and
         no longer have a position.  */
      TYPE_SIZE (record_type) = 0;
      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    fieldlist = nreverse (fieldlist);

  for (field = fieldlist; field; field = TREE_CHAIN (field))
    {
      tree pos = bit_position (field);

      tree type = TREE_TYPE (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size = DECL_SIZE (field);

      /* We need to make an XVE/XVU record if any field has variable size,
         whether or not the record does.  For example, if we have a union,
         it may be that all fields, rounded up to the alignment, have the
         same size, in which case we'll use that size.  But the debug
         output routines (except Dwarf2) won't be able to output the fields,
         so we need to make the special record.  */
      if (TREE_CODE (this_size) != INTEGER_CST)
        var_size = true;

      if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
           || TREE_CODE (type) == QUAL_UNION_TYPE)
          && !TYPE_IS_FAT_POINTER_P (type)
          && !TYPE_CONTAINS_TEMPLATE_P (type)
          && TYPE_ADA_SIZE (type))
        this_ada_size = TYPE_ADA_SIZE (type);

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field) && !STRICT_ALIGNMENT
          && value_factor_p (pos, BITS_PER_UNIT)
          && operand_equal_p (this_size, TYPE_SIZE (type), 0))
        DECL_BIT_FIELD (field) = 0;

      /* If we still have DECL_BIT_FIELD set at this point, we know the field
         is technically not addressable.  Except that it can actually be
         addressed if the field is BLKmode and happens to be properly
         aligned.  */
      DECL_NONADDRESSABLE_P (field)
        |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;

      if (has_rep && !DECL_BIT_FIELD (field))
        TYPE_ALIGN (record_type)
          = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

      switch (code)
        {
        case UNION_TYPE:
          ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
          size = size_binop (MAX_EXPR, size, this_size);
          break;

        case QUAL_UNION_TYPE:
          ada_size
            = fold (build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
                            this_ada_size, ada_size));
          size = fold (build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
                               this_size, size));
          break;

        case RECORD_TYPE:
          /* Since we know here that all fields are sorted in order of
             increasing bit position, the size of the record is one
             higher than the ending bit of the last field processed
             unless we have a rep clause, since in that case we might
             have a field outside a QUAL_UNION_TYPE that has a higher ending
             position.  So use a MAX in that case.  Also, if this field is a
             QUAL_UNION_TYPE, we need to take into account the previous size in
             the case of empty variants.  */
          ada_size
            = merge_sizes (ada_size, pos, this_ada_size,
                           TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
          size = merge_sizes (size, pos, this_size,
                              TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
          break;

        default:
          gcc_unreachable ();
        }
    }

  if (code == QUAL_UNION_TYPE)
    nreverse (fieldlist);

  /* If this is a padding record, we never want to make the size smaller than
     what was specified in it, if any.  */
  if (TREE_CODE (record_type) == RECORD_TYPE
      && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
    size = TYPE_SIZE (record_type);

  /* Now set any of the values we've just computed that apply.  */
  if (!TYPE_IS_FAT_POINTER_P (record_type)
      && !TYPE_CONTAINS_TEMPLATE_P (record_type))
    SET_TYPE_ADA_SIZE (record_type, ada_size);

  if (has_rep)
    {
      tree size_unit
        = (had_size_unit ? TYPE_SIZE_UNIT (record_type)
           : convert (sizetype, size_binop (CEIL_DIV_EXPR, size,
                                            bitsize_unit_node)));

      TYPE_SIZE (record_type)
        = variable_size (round_up (size, TYPE_ALIGN (record_type)));
      TYPE_SIZE_UNIT (record_type)
        = variable_size (round_up (size_unit,
                                   TYPE_ALIGN (record_type) / BITS_PER_UNIT));

      compute_record_mode (record_type);
    }

  if (!defer_debug)
    write_record_type_debug_info (record_type);
}

/* Output the debug information associated to a record type.  */

void
write_record_type_debug_info (tree record_type)
{
  tree fieldlist = TYPE_FIELDS (record_type);
  tree field;
  bool var_size = false;

  for (field = fieldlist; field; field = TREE_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
         whether or not the record does.  For example, if we have a union,
         it may be that all fields, rounded up to the alignment, have the
         same size, in which case we'll use that size.  But the debug
         output routines (except Dwarf2) won't be able to output the fields,
         so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST)
        {
          var_size = true;
          break;
        }
    }

  /* If this record is of variable size, rename it so that the
     debugger knows it is and make a new, parallel, record
     that tells the debugger how the record is laid out.  See
     exp_dbug.ads.  But don't do this for records that are padding
     since they confuse GDB.  */
  if (var_size
      && !(TREE_CODE (record_type) == RECORD_TYPE
           && TYPE_IS_PADDING_P (record_type)))
    {
      tree new_record_type
        = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
                     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_NAME (record_type);
      tree orig_id
        = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
           : orig_name);
      tree new_id
        = concat_id_with_name (orig_id,
                               TREE_CODE (record_type) == QUAL_UNION_TYPE
                               ? "XVU" : "XVE");
      tree last_pos = bitsize_zero_node;
      tree old_field;
      tree prev_old_field = 0;

      TYPE_NAME (new_record_type) = new_id;
      TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
      TYPE_STUB_DECL (new_record_type)
        = build_decl (TYPE_DECL, NULL_TREE, new_record_type);
      DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
        = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
        = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      /* Now scan all the fields, replacing each field with a new
         field corresponding to the new encoding.  */
      for (old_field = TYPE_FIELDS (record_type); old_field;
           old_field = TREE_CHAIN (old_field))
        {
          tree field_type = TREE_TYPE (old_field);
          tree field_name = DECL_NAME (old_field);
          tree new_field;
          tree curpos = bit_position (old_field);
          bool var = false;
          unsigned int align = 0;
          tree pos;

          /* See how the position was modified from the last position.

             There are two basic cases we support: a value was added
             to the last position or the last position was rounded to
             a boundary and then something was added.  Check for the
             first case first.  If not, see if there is any evidence
             of rounding.  If so, round the last position and try
             again.

             If this is a union, the position can be taken as zero.  */

          if (TREE_CODE (new_record_type) == UNION_TYPE)
            pos = bitsize_zero_node, align = 0;
          else
            pos = compute_related_constant (curpos, last_pos);

          if (!pos && TREE_CODE (curpos) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
            {
              align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }
          else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
                   && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
                   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
                   && host_integerp (TREE_OPERAND
                                     (TREE_OPERAND (curpos, 0), 1),
                                     1))
            {
              align = tree_low_cst
                (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }
          else if (potential_alignment_gap (prev_old_field, old_field,
                                            pos))
            {
              align = TYPE_ALIGN (field_type);
              pos = compute_related_constant (curpos,
                                              round_up (last_pos, align));
            }

          /* If we can't compute a position, set it to zero.

             ??? We really should abort here, but it's too much work
             to get this correct for all cases.  */

          if (!pos)
            pos = bitsize_zero_node;

          /* See if this type is variable-size and make a new type
             and indicate the indirection if so.  */
          if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
            {
              field_type = build_pointer_type (field_type);
              var = true;
            }

          /* Make a new field name, if necessary.  */
          if (var || align != 0)
            {
              char suffix[16];

              if (align != 0)
                sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
                         align / BITS_PER_UNIT);
              else
                strcpy (suffix, "XVL");

              field_name = concat_id_with_name (field_name, suffix);
            }

          new_field = create_field_decl (field_name, field_type,
                                         new_record_type, 0,
                                         DECL_SIZE (old_field), pos, 0);
          TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
          TYPE_FIELDS (new_record_type) = new_field;

          /* If old_field is a QUAL_UNION_TYPE, take its size as being
             zero.  The only time it's not the last field of the record
             is when there are other components at fixed positions after
             it (meaning there was a rep clause for every field) and we
             want to be able to encode them.  */
          last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
                                 (TREE_CODE (TREE_TYPE (old_field))
                                  == QUAL_UNION_TYPE)
                                 ? bitsize_zero_node
                                 : DECL_SIZE (old_field));
          prev_old_field = old_field;
        }

      TYPE_FIELDS (new_record_type)
        = nreverse (TYPE_FIELDS (new_record_type));

      rest_of_type_compilation (new_record_type, global_bindings_p ());
    }

  rest_of_type_compilation (record_type, global_bindings_p ());
}

/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is nonzero
   if this represents a QUAL_UNION_TYPE in which case we must look for
   COND_EXPRs and replace a value of zero with the old size.  If HAS_REP
   is nonzero, we must take the MAX of the end position of this field
   with LAST_SIZE.  In all other cases, we use FIRST_BIT plus SIZE.

   We return an expression for the size.  */

static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool special,
             bool has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new;

  if (!special || TREE_CODE (size) != COND_EXPR)
    {
      new = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
        new = size_binop (MAX_EXPR, last_size, new);
    }

  else
    new = fold (build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
                        integer_zerop (TREE_OPERAND (size, 1))
                        ? last_size : merge_sizes (last_size, first_bit,
                                                   TREE_OPERAND (size, 1),
                                                   1, has_rep),
                        integer_zerop (TREE_OPERAND (size, 2))
                        ? last_size : merge_sizes (last_size, first_bit,
                                                   TREE_OPERAND (size, 2),
                                                   1, has_rep)));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new) == NON_LVALUE_EXPR)
    new = TREE_OPERAND (new, 0);

  return new;
}

/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
   related by the addition of a constant.  Return that constant if so.  */

static tree
compute_related_constant (tree op0, tree op1)
{
  tree op0_var, op1_var;
  tree op0_con = split_plus (op0, &op0_var);
  tree op1_con = split_plus (op1, &op1_var);
  tree result = size_binop (MINUS_EXPR, op0_con, op1_con);

  if (operand_equal_p (op0_var, op1_var, 0))
    return result;
  else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
    return result;
  else
    return 0;
}

/* Utility function of above to split a tree OP which may be a sum, into a
   constant part, which is returned, and a variable part, which is stored
   in *PVAR.  *PVAR may be bitsize_zero_node.  All operations must be of
   bitsizetype.  */

static tree
split_plus (tree in, tree *pvar)
{
  /* Strip NOPS in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery.  We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  STRIP_NOPS (in);

  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      if (lhs_var == TREE_OPERAND (in, 0)
          && rhs_var == TREE_OPERAND (in, 1))
        return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}
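
/* Worked example (illustrative): for a field at bit position VAR + 24 whose
   previous ending position was VAR + 8, split_plus returns the constants 24
   and 8 respectively, setting *PVAR to VAR both times, so
   compute_related_constant yields a bitsizetype constant of 16, i.e. the new
   field starts 16 bits after the previous position.  */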

/* Return a FUNCTION_TYPE node.  RETURN_TYPE is the type returned by the
   subprogram.  If it is void_type_node, then we are dealing with a procedure,
   otherwise we are dealing with a function.  PARAM_DECL_LIST is a list of
   PARM_DECL nodes that are the subprogram arguments.  CICO_LIST is the
   copy-in/copy-out list to be stored into TYPE_CICO_LIST.
   RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
   object.  RETURNS_BY_REF is nonzero if the function returns by reference.
   RETURNS_WITH_DSP is nonzero if the function is to return with a
   depressed stack pointer.  RETURNS_BY_TARGET_PTR is true if the function
   is to be passed (as its first parameter) the address of the place to copy
   its result.  */

tree
create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
                     bool returns_unconstrained, bool returns_by_ref,
                     bool returns_with_dsp, bool returns_by_target_ptr)
{
  /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
     the subprogram formal parameters.  This list is generated by traversing
     the input list of PARM_DECL nodes.  */
  tree param_type_list = NULL;
  tree param_decl;
  tree type;

  for (param_decl = param_decl_list; param_decl;
       param_decl = TREE_CHAIN (param_decl))
    param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
                                 param_type_list);

  /* The list of the function parameter types has to be terminated by the void
     type to signal to the back-end that we are not dealing with a variable
     parameter subprogram, but that the subprogram has a fixed number of
     parameters.  */
  param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);

  /* The list of argument types has been created in reverse
     so nreverse it.  */
  param_type_list = nreverse (param_type_list);

  type = build_function_type (return_type, param_type_list);

  /* TYPE may have been shared since GCC hashes types.  If it has a CICO_LIST
     or the new type should, make a copy of TYPE.  Likewise for
     RETURNS_UNCONSTRAINED and RETURNS_BY_REF.  */
  if (TYPE_CI_CO_LIST (type) || cico_list
      || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
      || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
      || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
    type = copy_type (type);

  TYPE_CI_CO_LIST (type) = cico_list;
  TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
  TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
  TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
  TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;

  return type;
}
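
/* Illustrative call (a sketch; GNU_PARAM_LIST stands for a chain of
   PARM_DECLs built elsewhere): a plain Ada procedure with no copy-in/copy-out
   parameters and a normal calling convention would map to

     tree gnu_type
       = create_subprog_type (void_type_node, gnu_param_list, NULL_TREE,
                              false, false, false, false);  */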

/* Return a copy of TYPE but safe to modify in any way.  */

tree
copy_type (tree type)
{
  tree new = copy_node (type);

  /* copy_node clears this field instead of copying it, because it is
     aliased with TREE_CHAIN.  */
  TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);

  TYPE_POINTER_TO (new) = 0;
  TYPE_REFERENCE_TO (new) = 0;
  TYPE_MAIN_VARIANT (new) = new;
  TYPE_NEXT_VARIANT (new) = 0;

  return new;
}

/* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
   TYPE_INDEX_TYPE is INDEX.  */

tree
create_index_type (tree min, tree max, tree index)
{
  /* First build a type for the desired range.  */
  tree type = build_index_2_type (min, max);

  /* If this type has the TYPE_INDEX_TYPE we want, return it.  Otherwise, if it
     doesn't have TYPE_INDEX_TYPE set, set it to INDEX.  If TYPE_INDEX_TYPE
     is set, but not to INDEX, make a copy of this type with the requested
     index type.  Note that we have no way of sharing these types, but that's
     only a small hole.  */
  if (TYPE_INDEX_TYPE (type) == index)
    return type;
  else if (TYPE_INDEX_TYPE (type))
    type = copy_type (type);

  SET_TYPE_INDEX_TYPE (type, index);
  create_type_decl (NULL_TREE, type, NULL, true, false, Empty);
  return type;
}

/* Return a TYPE_DECL node.  TYPE_NAME gives the name of the type (a character
   string) and TYPE is a ..._TYPE node giving its data type.
   ARTIFICIAL_P is true if this is a declaration that was generated
   by the compiler.  DEBUG_INFO_P is true if we need to write debugging
   information about this type.  GNAT_NODE is used for the position of
   the decl.  */

tree
create_type_decl (tree type_name, tree type, struct attrib *attr_list,
                  bool artificial_p, bool debug_info_p, Node_Id gnat_node)
{
  tree type_decl = build_decl (TYPE_DECL, type_name, type);
  enum tree_code code = TREE_CODE (type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;

  process_attributes (type_decl, attr_list);

  /* Pass type declaration information to the debugger unless this is an
     UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
     and ENUMERAL_TYPE or RECORD_TYPE which is handled separately, or
     type for which debugging information was not requested.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
      || !debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;
  else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
           && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
                && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
    rest_of_decl_compilation (type_decl, global_bindings_p (), 0);

  if (!TYPE_IS_DUMMY_P (type))
    gnat_pushdecl (type_decl, gnat_node);

  return type_decl;
}

/* Returns a GCC VAR_DECL node.  VAR_NAME gives the name of the variable.
   ASM_NAME is its assembler name (if provided).  TYPE is its data type
   (a GCC ..._TYPE node).  VAR_INIT is the GCC tree for an optional initial
   expression; NULL_TREE if none.

   CONST_FLAG is true if this variable is constant.

   PUBLIC_FLAG is true if this definition is to be made visible outside of
   the current compilation unit.  This flag should be set when processing the
   variable definitions in a package specification.  EXTERN_FLAG is nonzero
   when processing an external variable declaration (as opposed to a
   definition: no storage is to be allocated for the variable here).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.

   GNAT_NODE is used for the position of the decl.  */

tree
create_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
                 bool const_flag, bool public_flag, bool extern_flag,
                 bool static_flag, struct attrib *attr_list, Node_Id gnat_node)
{
  bool init_const
    = (var_init == 0
       ? false
       : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
          && (global_bindings_p () || static_flag
              ? 0 != initializer_constant_valid_p (var_init,
                                                   TREE_TYPE (var_init))
              : TREE_CONSTANT (var_init))));
  tree var_decl
    = build_decl ((const_flag && init_const
                   /* Only make a CONST_DECL for sufficiently-small objects.
                      We consider complex double "sufficiently-small".  */
                   && TYPE_SIZE (type) != 0
                   && host_integerp (TYPE_SIZE_UNIT (type), 1)
                   && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
                                             GET_MODE_SIZE (DCmode)))
                  ? CONST_DECL : VAR_DECL, var_name, type);

  /* If this is external, throw away any initializations unless this is a
     CONST_DECL (meaning we have a constant); they will be done elsewhere.
     If we are defining a global here, leave a constant initialization and
     save any variable elaborations for the elaboration routine.  If we are
     just annotating types, throw away the initialization if it isn't a
     constant.  */
  if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
      || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
    var_init = NULL_TREE;

  /* At the global level, an initializer requiring code to be generated
     produces elaboration statements.  Check that such statements are allowed,
     that is, not violating a No_Elaboration_Code restriction.  */
  if (global_bindings_p () && var_init != 0 && !init_const)
    Check_Elaboration_Code_Allowed (gnat_node);

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;
  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* If it's public and not external, always allocate storage for it.
     At the global binding level we need to allocate static storage for the
     variable if and only if it's not external.  If we are not at the top level
     we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl)
    = public_flag || (global_bindings_p () ? !extern_flag : static_flag);

  if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  if (TREE_CODE (var_decl) != CONST_DECL)
    rest_of_decl_compilation (var_decl, global_bindings_p (), 0);
  else
    /* expand CONST_DECLs to set their MODE, ALIGN, SIZE and SIZE_UNIT,
       which we need for later back-annotations.  */
    expand_decl (var_decl);

  return var_decl;
}
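
/* Illustrative call (a sketch; the name and GNAT node are placeholders): a
   package-level Integer variable with no initializer, no linker name and no
   attributes could be declared as

     tree gnu_var
       = create_var_decl (get_identifier ("my_var"), NULL_TREE,
                          integer_type_node, NULL_TREE,
                          false, true, false, false, NULL, gnat_node);  */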

/* Returns a FIELD_DECL node.  FIELD_NAME is the field name, FIELD_TYPE is its
   type, and RECORD_TYPE is the type of the parent.  PACKED is nonzero if
   this field is in a record type with a "pragma pack".  If SIZE is nonzero
   it is the specified size for this field.  If POS is nonzero, it is the bit
   position.  If ADDRESSABLE is nonzero, it means we are allowed to take
   the address of this field for aliasing purposes.  If it is negative, we
   should not make a bitfield, which is used by make_aligning_type.  */

tree
create_field_decl (tree field_name, tree field_type, tree record_type,
                   int packed, tree size, tree pos, int addressable)
{
  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.  */
  if (packed && TYPE_MODE (field_type) == BLKmode)
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);

      /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
         a byte boundary.  */
      if (TREE_CODE (size) == INTEGER_CST
          && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
        size = round_up (size, BITS_PER_UNIT);
    }

  /* If we may, according to ADDRESSABLE, make a bitfield if a size is
     specified for two reasons: first if the size differs from the natural
     size.  Second, if the alignment is insufficient.  There are a number of
     ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     because no such entity requiring bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (addressable >= 0
      && size
      && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (!operand_equal_p (TYPE_SIZE (field_type), size, 0)
          || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
          || packed
          || (TYPE_ALIGN (record_type) != 0
              && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (!packed && !pos)
        DECL_ALIGN (field_decl)
          = (TYPE_ALIGN (record_type) != 0
             ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
             : TYPE_ALIGN (field_type));
    }

  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
  DECL_ALIGN (field_decl)
    = MAX (DECL_ALIGN (field_decl),
           DECL_BIT_FIELD (field_decl) ? 1
           : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
           : TYPE_ALIGN (field_type));

  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
         This is the lowest-order bit set in POS, but no more than
         the alignment of the record, if one is specified.  Note
         that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
        known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
        known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
          && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
        known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
                             host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
                             : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
                    &DECL_FIELD_BIT_OFFSET (field_decl),
                    DECL_OFFSET_ALIGN (field_decl), pos);

      DECL_HAS_REP_P (field_decl) = 1;
    }

  /* If the field type is passed by reference, we will have pointers to the
     field, so it is addressable.  */
  if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
    addressable = 1;

  /* ??? For now, we say that any field of aggregate type is addressable
     because the front end may take 'Reference of it.  */
  if (AGGREGATE_TYPE_P (field_type))
    addressable = 1;

  /* Mark the decl as nonaddressable if it is indicated so semantically,
     meaning we won't ever attempt to take the address of the field.

     It may also be "technically" nonaddressable, meaning that even if we
     attempt to take the field's address we will actually get the address of a
     copy.  This is the case for true bitfields, but the DECL_BIT_FIELD value
     we have at this point is not accurate enough, so we don't account for
     this here and let finish_record_type decide.  */
  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}
1488 effects, has the value of zero. */
1491 value_zerop (tree exp
)
1493 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
1494 return value_zerop (TREE_OPERAND (exp
, 1));
1496 return integer_zerop (exp
);

/* Returns a PARM_DECL node.  PARAM_NAME is the name of the parameter,
   PARAM_TYPE is its type.  READONLY is true if the parameter is
   readonly (either an IN parameter or an address of a pass-by-ref
   parameter).  */

tree
create_param_decl (tree param_name, tree param_type, bool readonly)
{
  tree param_decl = build_decl (PARM_DECL, param_name, param_type);

  /* Honor targetm.calls.promote_prototypes(), as not doing so can
     lead to various ABI violations.  */
  if (targetm.calls.promote_prototypes (param_type)
      && (TREE_CODE (param_type) == INTEGER_TYPE
          || TREE_CODE (param_type) == ENUMERAL_TYPE)
      && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
    {
      /* We have to be careful about biased types here.  Make a subtype
         of integer_type_node with the proper biasing.  */
      if (TREE_CODE (param_type) == INTEGER_TYPE
          && TYPE_BIASED_REPRESENTATION_P (param_type))
        {
          param_type
            = copy_type (build_range_type (integer_type_node,
                                           TYPE_MIN_VALUE (param_type),
                                           TYPE_MAX_VALUE (param_type)));

          TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
        }
      else
        param_type = integer_type_node;
    }

  DECL_ARG_TYPE (param_decl) = param_type;
  TREE_READONLY (param_decl) = readonly;
  return param_decl;
}

/* Given a DECL and ATTR_LIST, process the listed attributes.  */

void
process_attributes (tree decl, struct attrib *attr_list)
{
  for (; attr_list; attr_list = attr_list->next)
    switch (attr_list->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
        decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
                                           NULL_TREE),
                         ATTR_FLAG_TYPE_IN_PLACE);
        break;

      case ATTR_LINK_ALIAS:
        if (! DECL_EXTERNAL (decl))
          {
            TREE_STATIC (decl) = 1;
            assemble_alias (decl, attr_list->name);
          }
        break;

      case ATTR_WEAK_EXTERNAL:
        if (SUPPORTS_WEAK)
          declare_weak (decl);
        else
          post_error ("?weak declarations not supported on this target",
                      attr_list->error_point);
        break;

      case ATTR_LINK_SECTION:
        if (targetm.have_named_sections)
          {
            DECL_SECTION_NAME (decl)
              = build_string (IDENTIFIER_LENGTH (attr_list->name),
                              IDENTIFIER_POINTER (attr_list->name));
            DECL_COMMON (decl) = 0;
          }
        else
          post_error ("?section attributes are not supported for this target",
                      attr_list->error_point);
        break;

      case ATTR_LINK_CONSTRUCTOR:
        DECL_STATIC_CONSTRUCTOR (decl) = 1;
        TREE_USED (decl) = 1;
        break;

      case ATTR_LINK_DESTRUCTOR:
        DECL_STATIC_DESTRUCTOR (decl) = 1;
        TREE_USED (decl) = 1;
        break;
      }
}

/* Return true if VALUE is known to be a multiple of FACTOR, which must be
   a power of 2.  */

bool
value_factor_p (tree value, HOST_WIDE_INT factor)
{
  if (host_integerp (value, 1))
    return tree_low_cst (value, 1) % factor == 0;

  if (TREE_CODE (value) == MULT_EXPR)
    return (value_factor_p (TREE_OPERAND (value, 0), factor)
            || value_factor_p (TREE_OPERAND (value, 1), factor));

  return 0;
}
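
/* Quick check of the semantics (illustrative): for a constant such as a
   bitsizetype 24 with FACTOR 8, the first test succeeds since 24 % 8 == 0;
   for a non-constant product N * 8 the MULT_EXPR branch accepts it for
   FACTOR 8 because one operand is itself a multiple of 8.  */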

/* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these 2 fields are laid out in such a way that no gap
   exists between the end of PREV_FIELD and the beginning of CURR_FIELD.
   OFFSET is the distance in bits between the end of PREV_FIELD and the
   starting position of CURR_FIELD.  It is ignored if null.  */

static bool
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap.  */
  if (!prev_field)
    return false;

  /* If the previous field is a union type, then return false: The only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return false;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not null.  */
  if (offset && host_integerp (offset, 1))
    return !integer_zerop (offset);

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not a multiple of the current field alignment.  */
  if (host_integerp (DECL_SIZE (prev_field), 1)
      && host_integerp (bit_position (prev_field), 1))
    return ((tree_low_cst (bit_position (prev_field), 1)
             + tree_low_cst (DECL_SIZE (prev_field), 1))
            % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return false;

  /* Fallback, return that there may be a potential gap.  */
  return true;
}

/* Returns a LABEL_DECL node for LABEL_NAME.  */

tree
create_label_decl (tree label_name)
{
  tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);

  DECL_CONTEXT (label_decl) = current_function_decl;
  DECL_MODE (label_decl) = VOIDmode;
  DECL_SOURCE_LOCATION (label_decl) = input_location;

  return label_decl;
}
/* Returns a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the TREE_CHAIN field).

   INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
   appropriate fields in the FUNCTION_DECL.  GNAT_NODE gives the location.  */

tree
create_subprog_decl (tree subprog_name, tree asm_name,
                     tree subprog_type, tree param_decl_list, bool inline_flag,
                     bool public_flag, bool extern_flag,
                     struct attrib *attr_list, Node_Id gnat_node)
{
  tree return_type  = TREE_TYPE (subprog_type);
  tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);

  /* If this is a function nested inside an inlined external function, it
     means we aren't going to compile the outer function unless it is
     actually inlined, so do the same for us.  */
  if (current_function_decl && DECL_INLINE (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))
    extern_flag = true;

  DECL_EXTERNAL (subprog_decl)  = extern_flag;
  TREE_PUBLIC (subprog_decl)    = public_flag;
  TREE_STATIC (subprog_decl)    = 1;
  TREE_READONLY (subprog_decl)  = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;
  DECL_RESULT (subprog_decl)    = build_decl (RESULT_DECL, 0, return_type);
  DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
  DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;

  if (inline_flag)
    DECL_DECLARED_INLINE_P (subprog_decl) = 1;

  if (asm_name)
    SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

  process_attributes (subprog_decl, attr_list);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
}
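/* Minimal usage sketch with hypothetical names: declaring an imported,
   parameterless C function "foo" returning nothing could look like

     tree ftype = build_function_type (void_type_node, NULL_TREE);
     tree fdecl = create_subprog_decl (get_identifier ("foo"),
                                       get_identifier ("foo"), ftype,
                                       NULL_TREE, false, true, true,
                                       NULL, Empty);

   i.e. not inlined, public and external, with no attributes and no GNAT
   node to take the location from.  */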
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

void
begin_subprog_body (tree subprog_decl)
{
  tree param_decl;

  current_function_decl = subprog_decl;
  announce_function (subprog_decl);

  /* Enter a new binding level and show that all the parameters belong to
     this function.  */
  gnat_pushlevel ();
  for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
       param_decl = TREE_CHAIN (param_decl))
    DECL_CONTEXT (param_decl) = subprog_decl;

  make_decl_rtl (subprog_decl);

  /* We handle pending sizes via the elaboration of types, so we don't need to
     save them.  This causes them to be marked as part of the outer function
     and then discarded.  */
  get_pending_sizes ();
}
/* Finish the definition of the current subprogram and compile it all the way
   to assembler language output.  BODY is the tree corresponding to
   the subprogram.  */

void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Mark the BLOCK for this level as being for this function and pop the
     level.  Since the vars in it are the parameters, clear them.  */
  BLOCK_VARS (current_binding_level->block) = 0;
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* Deal with inline.  If declared inline or we should default to inline,
     set the flag in the decl.  */
  DECL_INLINE (fndecl)
    = DECL_DECLARED_INLINE_P (fndecl) || flag_inline_trees == 2;

  /* We handle pending sizes via the elaboration of types, so we don't
     need to save them.  */
  get_pending_sizes ();

  /* Mark the RESULT_DECL as being in this subprogram.  */
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  DECL_SAVED_TREE (fndecl) = body;

  current_function_decl = DECL_CONTEXT (fndecl);

  /* If we're only annotating types, don't actually compile this function.  */
  if (type_annotate_only)
    return;

  /* If we don't have .ctors/.dtors sections, and this is a static
     constructor or destructor, it must be recorded now.  */
  if (DECL_STATIC_CONSTRUCTOR (fndecl) && !targetm.have_ctors_dtors)
    static_ctors = tree_cons (NULL_TREE, fndecl, static_ctors);

  if (DECL_STATIC_DESTRUCTOR (fndecl) && !targetm.have_ctors_dtors)
    static_dtors = tree_cons (NULL_TREE, fndecl, static_dtors);

  /* We do different things for nested and non-nested functions.
     ??? This should be in cgraph.  */
  if (!DECL_CONTEXT (fndecl))
    {
      gnat_gimplify_function (fndecl);
      cgraph_finalize_function (fndecl, false);
    }
  else
    /* Register this function with cgraph just far enough to get it
       added to our parent's nested function list.  */
    (void) cgraph_node (fndecl);
}
/* Convert FNDECL's code to GIMPLE and handle any nested functions.  */

static void
gnat_gimplify_function (tree fndecl)
{
  struct cgraph_node *cgn;

  dump_function (TDI_original, fndecl);
  gimplify_function_tree (fndecl);
  dump_function (TDI_generic, fndecl);

  /* Convert all nested functions to GIMPLE now.  We do things in this order
     so that items like VLA sizes are expanded properly in the context of the
     correct function.  */
  cgn = cgraph_node (fndecl);
  for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
    gnat_gimplify_function (cgn->decl);
}
/* Return a definition for a builtin function named NAME and whose data type
   is TYPE.  TYPE should be a function type with argument types.
   FUNCTION_CODE tells later passes how to compile calls to this function.
   See tree.h for its possible values.

   If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
   the name to be called if we can't opencode the function.  If
   ATTRS is nonzero, use that for the function attribute list.  */

tree
builtin_function (const char *name, tree type, int function_code,
                  enum built_in_class class, const char *library_name,
                  tree attrs)
{
  tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);

  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  if (library_name)
    SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));

  gnat_pushdecl (decl, Empty);
  DECL_BUILT_IN_CLASS (decl) = class;
  DECL_FUNCTION_CODE (decl) = function_code;

  /* Possibly apply some default attributes to this built-in function.  */
  if (attrs)
    decl_attributes (&decl, attrs, ATTR_FLAG_BUILT_IN);
  else
    decl_attributes (&decl, NULL_TREE, 0);

  return decl;
}
1858 /* Handle a "const" attribute; arguments as in
1859 struct attribute_spec.handler. */
1862 handle_const_attribute (tree
*node
, tree
ARG_UNUSED (name
),
1863 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
1866 if (TREE_CODE (*node
) == FUNCTION_DECL
)
1867 TREE_READONLY (*node
) = 1;
1869 *no_add_attrs
= true;
1874 /* Handle a "nothrow" attribute; arguments as in
1875 struct attribute_spec.handler. */
1878 handle_nothrow_attribute (tree
*node
, tree
ARG_UNUSED (name
),
1879 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
1882 if (TREE_CODE (*node
) == FUNCTION_DECL
)
1883 TREE_NOTHROW (*node
) = 1;
1885 *no_add_attrs
= true;
/* Return an integer type with the number of bits of precision given by
   PRECISION.  UNSIGNEDP is nonzero if the type is unsigned; otherwise
   it is a signed type.  */

tree
gnat_type_for_size (unsigned precision, int unsignedp)
{
  tree t;
  char type_name[20];

  if (precision <= 2 * MAX_BITS_PER_WORD
      && signed_and_unsigned_types[precision][unsignedp])
    return signed_and_unsigned_types[precision][unsignedp];

  if (unsignedp)
    t = make_unsigned_type (precision);
  else
    t = make_signed_type (precision);

  if (precision <= 2 * MAX_BITS_PER_WORD)
    signed_and_unsigned_types[precision][unsignedp] = t;

  if (!TYPE_NAME (t))
    {
      sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
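/* Illustrative example of the caching behaviour:

     tree u8 = gnat_type_for_size (8, 1);
     tree v8 = gnat_type_for_size (8, 1);

   The first call creates the type and names it UNSIGNED_8; the second simply
   returns the node recorded in signed_and_unsigned_types by the first.  */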
/* Likewise for floating-point types, but record these by mode.  */

tree
float_type_for_precision (int precision, enum machine_mode mode)
{
  tree t;
  char type_name[20];

  if (float_types[(int) mode])
    return float_types[(int) mode];

  float_types[(int) mode] = t = make_node (REAL_TYPE);
  TYPE_PRECISION (t) = precision;
  layout_type (t);

  gcc_assert (TYPE_MODE (t) == mode);

  sprintf (type_name, "FLOAT_%d", precision);
  TYPE_NAME (t) = get_identifier (type_name);

  return t;
}
/* Return a data type that has machine mode MODE.  UNSIGNEDP selects
   an unsigned type; otherwise a signed type is returned.  */

tree
gnat_type_for_mode (enum machine_mode mode, int unsignedp)
{
  if (mode == BLKmode)
    return NULL_TREE;
  else if (mode == VOIDmode)
    return void_type_node;
  else if (COMPLEX_MODE_P (mode))
    return NULL_TREE;
  else if (SCALAR_FLOAT_MODE_P (mode))
    return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
  else if (SCALAR_INT_MODE_P (mode))
    return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
  else
    return NULL_TREE;
}
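/* Illustrative sketch of the expected mapping, e.g.

     tree t = gnat_type_for_mode (DImode, 0);

   yields the 64-bit signed integer type, while BLKmode, complex and other
   unsupported modes yield NULL_TREE and VOIDmode yields void_type_node.  */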
/* Return the unsigned version of a TYPE_NODE, a scalar type.  */

tree
gnat_unsigned_type (tree type_node)
{
  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);

  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
    {
      type = copy_node (type);
      TREE_TYPE (type) = type_node;
    }
  else if (TREE_TYPE (type_node)
           && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
           && TYPE_MODULAR_P (TREE_TYPE (type_node)))
    {
      type = copy_node (type);
      TREE_TYPE (type) = TREE_TYPE (type_node);
    }

  return type;
}
/* Return the signed version of a TYPE_NODE, a scalar type.  */

tree
gnat_signed_type (tree type_node)
{
  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);

  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
    {
      type = copy_node (type);
      TREE_TYPE (type) = type_node;
    }
  else if (TREE_TYPE (type_node)
           && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
           && TYPE_MODULAR_P (TREE_TYPE (type_node)))
    {
      type = copy_node (type);
      TREE_TYPE (type) = TREE_TYPE (type_node);
    }

  return type;
}
/* Return a type the same as TYPE except unsigned or signed according to
   UNSIGNEDP.  */

tree
gnat_signed_or_unsigned_type (int unsignedp, tree type)
{
  if (!INTEGRAL_TYPE_P (type) || TYPE_UNSIGNED (type) == unsignedp)
    return type;
  else
    return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
}
2024 /* EXP is an expression for the size of an object. If this size contains
2025 discriminant references, replace them with the maximum (if MAX_P) or
2026 minimum (if !MAX_P) possible value of the discriminant. */
2029 max_size (tree exp
, bool max_p
)
2031 enum tree_code code
= TREE_CODE (exp
);
2032 tree type
= TREE_TYPE (exp
);
2034 switch (TREE_CODE_CLASS (code
))
2036 case tcc_declaration
:
2040 case tcc_exceptional
:
2041 if (code
== TREE_LIST
)
2042 return tree_cons (TREE_PURPOSE (exp
),
2043 max_size (TREE_VALUE (exp
), max_p
),
2045 ? max_size (TREE_CHAIN (exp
), max_p
) : NULL_TREE
);
2049 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2050 modify. Otherwise, we treat it like a variable. */
2051 if (!CONTAINS_PLACEHOLDER_P (exp
))
2054 type
= TREE_TYPE (TREE_OPERAND (exp
, 1));
2056 max_size (max_p
? TYPE_MAX_VALUE (type
) : TYPE_MIN_VALUE (type
), true);
2058 case tcc_comparison
:
2059 return max_p
? size_one_node
: size_zero_node
;
2063 case tcc_expression
:
2064 switch (TREE_CODE_LENGTH (code
))
2067 if (code
== NON_LVALUE_EXPR
)
2068 return max_size (TREE_OPERAND (exp
, 0), max_p
);
2071 fold (build1 (code
, type
,
2072 max_size (TREE_OPERAND (exp
, 0),
2073 code
== NEGATE_EXPR
? !max_p
: max_p
)));
2076 if (code
== COMPOUND_EXPR
)
2077 return max_size (TREE_OPERAND (exp
, 1), max_p
);
2079 /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
2080 may provide a tighter bound on max_size. */
2081 if (code
== MINUS_EXPR
2082 && TREE_CODE (TREE_OPERAND (exp
, 0)) == COND_EXPR
)
2084 tree lhs
= fold_build2 (MINUS_EXPR
, type
,
2085 TREE_OPERAND (TREE_OPERAND (exp
, 0), 1),
2086 TREE_OPERAND (exp
, 1));
2087 tree rhs
= fold_build2 (MINUS_EXPR
, type
,
2088 TREE_OPERAND (TREE_OPERAND (exp
, 0), 2),
2089 TREE_OPERAND (exp
, 1));
2090 return fold_build2 (max_p
? MAX_EXPR
: MIN_EXPR
, type
,
2091 max_size (lhs
, max_p
),
2092 max_size (rhs
, max_p
));
2096 tree lhs
= max_size (TREE_OPERAND (exp
, 0), max_p
);
2097 tree rhs
= max_size (TREE_OPERAND (exp
, 1),
2098 code
== MINUS_EXPR
? !max_p
: max_p
);
/* Special-case wanting the maximum value of a MIN_EXPR.
   In that case, if one side overflows, return the other.
   sizetype is signed, but we know sizes are non-negative.
   Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
   overflowing or the maximum possible value and the RHS
   a variable.  */
2108 && TREE_CODE (rhs
) == INTEGER_CST
2109 && TREE_OVERFLOW (rhs
))
2113 && TREE_CODE (lhs
) == INTEGER_CST
2114 && TREE_OVERFLOW (lhs
))
2116 else if ((code
== MINUS_EXPR
|| code
== PLUS_EXPR
)
2117 && ((TREE_CODE (lhs
) == INTEGER_CST
2118 && TREE_OVERFLOW (lhs
))
2119 || operand_equal_p (lhs
, TYPE_MAX_VALUE (type
), 0))
2120 && !TREE_CONSTANT (rhs
))
2123 return fold (build2 (code
, type
, lhs
, rhs
));
2127 if (code
== SAVE_EXPR
)
2129 else if (code
== COND_EXPR
)
2130 return fold (build2 (max_p
? MAX_EXPR
: MIN_EXPR
, type
,
2131 max_size (TREE_OPERAND (exp
, 1), max_p
),
2132 max_size (TREE_OPERAND (exp
, 2), max_p
)));
2133 else if (code
== CALL_EXPR
&& TREE_OPERAND (exp
, 1))
2134 return build3 (CALL_EXPR
, type
, TREE_OPERAND (exp
, 0),
2135 max_size (TREE_OPERAND (exp
, 1), max_p
), NULL
);
2138 /* Other tree classes cannot happen. */
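/* Purely illustrative example of the intent: for a size expression such as

     (B ? 40 : 8 * D) - 8

   where D is a discriminant reference with range 1 .. 10, max_size with
   MAX_P true replaces D by 10 and maximizes over the COND_EXPR branches,
   yielding 72, whereas with MAX_P false it uses the minimum 1 and the
   smaller branch, yielding 0.  */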
2146 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2147 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2148 Return a constructor for the template. */
2151 build_template (tree template_type
, tree array_type
, tree expr
)
2153 tree template_elts
= NULL_TREE
;
2154 tree bound_list
= NULL_TREE
;
2157 if (TREE_CODE (array_type
) == RECORD_TYPE
2158 && (TYPE_IS_PADDING_P (array_type
)
2159 || TYPE_JUSTIFIED_MODULAR_P (array_type
)))
2160 array_type
= TREE_TYPE (TYPE_FIELDS (array_type
));
2162 if (TREE_CODE (array_type
) == ARRAY_TYPE
2163 || (TREE_CODE (array_type
) == INTEGER_TYPE
2164 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type
)))
2165 bound_list
= TYPE_ACTUAL_BOUNDS (array_type
);
2167 /* First make the list for a CONSTRUCTOR for the template. Go down the
2168 field list of the template instead of the type chain because this
2169 array might be an Ada array of arrays and we can't tell where the
2170 nested arrays stop being the underlying object. */
2172 for (field
= TYPE_FIELDS (template_type
); field
;
2174 ? (bound_list
= TREE_CHAIN (bound_list
))
2175 : (array_type
= TREE_TYPE (array_type
))),
2176 field
= TREE_CHAIN (TREE_CHAIN (field
)))
2178 tree bounds
, min
, max
;
2180 /* If we have a bound list, get the bounds from there. Likewise
2181 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2182 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2183 This will give us a maximum range. */
2185 bounds
= TREE_VALUE (bound_list
);
2186 else if (TREE_CODE (array_type
) == ARRAY_TYPE
)
2187 bounds
= TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type
));
2188 else if (expr
&& TREE_CODE (expr
) == PARM_DECL
2189 && DECL_BY_COMPONENT_PTR_P (expr
))
2190 bounds
= TREE_TYPE (field
);
2194 min
= convert (TREE_TYPE (TREE_CHAIN (field
)), TYPE_MIN_VALUE (bounds
));
2195 max
= convert (TREE_TYPE (field
), TYPE_MAX_VALUE (bounds
));
2197 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2198 substitute it from OBJECT. */
2199 min
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (min
, expr
);
2200 max
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (max
, expr
);
2202 template_elts
= tree_cons (TREE_CHAIN (field
), max
,
2203 tree_cons (field
, min
, template_elts
));
2206 return gnat_build_constructor (template_type
, nreverse (template_elts
));
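/* Illustrative sketch: for a one-dimensional array with bounds 1 .. N the
   returned CONSTRUCTOR fills the template's low-bound and high-bound fields
   with 1 and N respectively, any PLACEHOLDER_EXPR in the bounds being
   substituted from EXPR, so that a fat pointer built around this template
   carries the actual bounds of the designated object.  */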
2209 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2210 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2211 in the type contains in its DECL_INITIAL the expression to use when
2212 a constructor is made for the type. GNAT_ENTITY is an entity used
2213 to print out an error message if the mechanism cannot be applied to
2214 an object of that type and also for the name. */
2217 build_vms_descriptor (tree type
, Mechanism_Type mech
, Entity_Id gnat_entity
)
2219 tree record_type
= make_node (RECORD_TYPE
);
2220 tree field_list
= 0;
2229 /* If TYPE is an unconstrained array, use the underlying array type. */
2230 if (TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
2231 type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
))));
2233 /* If this is an array, compute the number of dimensions in the array,
2234 get the index types, and point to the inner type. */
2235 if (TREE_CODE (type
) != ARRAY_TYPE
)
2238 for (ndim
= 1, inner_type
= type
;
2239 TREE_CODE (TREE_TYPE (inner_type
)) == ARRAY_TYPE
2240 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type
));
2241 ndim
++, inner_type
= TREE_TYPE (inner_type
))
2244 idx_arr
= (tree
*) alloca (ndim
* sizeof (tree
));
2246 if (mech
!= By_Descriptor_NCA
2247 && TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_CONVENTION_FORTRAN_P (type
))
2248 for (i
= ndim
- 1, inner_type
= type
;
2250 i
--, inner_type
= TREE_TYPE (inner_type
))
2251 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
2253 for (i
= 0, inner_type
= type
;
2255 i
++, inner_type
= TREE_TYPE (inner_type
))
2256 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
2258 /* Now get the DTYPE value. */
2259 switch (TREE_CODE (type
))
2263 if (TYPE_VAX_FLOATING_POINT_P (type
))
2264 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
2277 switch (GET_MODE_BITSIZE (TYPE_MODE (type
)))
2280 dtype
= TYPE_UNSIGNED (type
) ? 2 : 6;
2283 dtype
= TYPE_UNSIGNED (type
) ? 3 : 7;
2286 dtype
= TYPE_UNSIGNED (type
) ? 4 : 8;
2289 dtype
= TYPE_UNSIGNED (type
) ? 5 : 9;
2292 dtype
= TYPE_UNSIGNED (type
) ? 25 : 26;
2298 dtype
= GET_MODE_BITSIZE (TYPE_MODE (type
)) == 32 ? 52 : 53;
2302 if (TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
2303 && TYPE_VAX_FLOATING_POINT_P (type
))
2304 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
2316 dtype
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) == 32 ? 54: 55;
2327 /* Get the CLASS value. */
2330 case By_Descriptor_A
:
2333 case By_Descriptor_NCA
:
2336 case By_Descriptor_SB
:
2343 /* Make the type for a descriptor for VMS. The first four fields
2344 are the same for all types. */
2347 = chainon (field_list
,
2348 make_descriptor_field
2349 ("LENGTH", gnat_type_for_size (16, 1), record_type
,
2350 size_in_bytes (mech
== By_Descriptor_A
? inner_type
: type
)));
2352 field_list
= chainon (field_list
,
2353 make_descriptor_field ("DTYPE",
2354 gnat_type_for_size (8, 1),
2355 record_type
, size_int (dtype
)));
2356 field_list
= chainon (field_list
,
2357 make_descriptor_field ("CLASS",
2358 gnat_type_for_size (8, 1),
2359 record_type
, size_int (class)));
2362 = chainon (field_list
,
2363 make_descriptor_field
2365 build_pointer_type_for_mode (type
, SImode
, false), record_type
,
2367 build_pointer_type_for_mode (type
, SImode
, false),
2368 build0 (PLACEHOLDER_EXPR
, type
))));
2373 case By_Descriptor_S
:
2376 case By_Descriptor_SB
:
2378 = chainon (field_list
,
2379 make_descriptor_field
2380 ("SB_L1", gnat_type_for_size (32, 1), record_type
,
2381 TREE_CODE (type
) == ARRAY_TYPE
2382 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) : size_zero_node
));
2384 = chainon (field_list
,
2385 make_descriptor_field
2386 ("SB_L2", gnat_type_for_size (32, 1), record_type
,
2387 TREE_CODE (type
) == ARRAY_TYPE
2388 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) : size_zero_node
));
2391 case By_Descriptor_A
:
2392 case By_Descriptor_NCA
:
2393 field_list
= chainon (field_list
,
2394 make_descriptor_field ("SCALE",
2395 gnat_type_for_size (8, 1),
2399 field_list
= chainon (field_list
,
2400 make_descriptor_field ("DIGITS",
2401 gnat_type_for_size (8, 1),
2406 = chainon (field_list
,
2407 make_descriptor_field
2408 ("AFLAGS", gnat_type_for_size (8, 1), record_type
,
2409 size_int (mech
== By_Descriptor_NCA
2411 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2412 : (TREE_CODE (type
) == ARRAY_TYPE
2413 && TYPE_CONVENTION_FORTRAN_P (type
)
2416 field_list
= chainon (field_list
,
2417 make_descriptor_field ("DIMCT",
2418 gnat_type_for_size (8, 1),
2422 field_list
= chainon (field_list
,
2423 make_descriptor_field ("ARSIZE",
2424 gnat_type_for_size (32, 1),
2426 size_in_bytes (type
)));
2428 /* Now build a pointer to the 0,0,0... element. */
2429 tem
= build0 (PLACEHOLDER_EXPR
, type
);
2430 for (i
= 0, inner_type
= type
; i
< ndim
;
2431 i
++, inner_type
= TREE_TYPE (inner_type
))
2432 tem
= build4 (ARRAY_REF
, TREE_TYPE (inner_type
), tem
,
2433 convert (TYPE_DOMAIN (inner_type
), size_zero_node
),
2434 NULL_TREE
, NULL_TREE
);
2437 = chainon (field_list
,
2438 make_descriptor_field
2440 build_pointer_type_for_mode (inner_type
, SImode
, false),
2443 build_pointer_type_for_mode (inner_type
, SImode
,
2447 /* Next come the addressing coefficients. */
2449 for (i
= 0; i
< ndim
; i
++)
2453 = size_binop (MULT_EXPR
, tem
,
2454 size_binop (PLUS_EXPR
,
2455 size_binop (MINUS_EXPR
,
2456 TYPE_MAX_VALUE (idx_arr
[i
]),
2457 TYPE_MIN_VALUE (idx_arr
[i
])),
2460 fname
[0] = (mech
== By_Descriptor_NCA
? 'S' : 'M');
2461 fname
[1] = '0' + i
, fname
[2] = 0;
2463 = chainon (field_list
,
2464 make_descriptor_field (fname
,
2465 gnat_type_for_size (32, 1),
2466 record_type
, idx_length
));
2468 if (mech
== By_Descriptor_NCA
)
2472 /* Finally here are the bounds. */
2473 for (i
= 0; i
< ndim
; i
++)
2477 fname
[0] = 'L', fname
[1] = '0' + i
, fname
[2] = 0;
2479 = chainon (field_list
,
2480 make_descriptor_field
2481 (fname
, gnat_type_for_size (32, 1), record_type
,
2482 TYPE_MIN_VALUE (idx_arr
[i
])));
2486 = chainon (field_list
,
2487 make_descriptor_field
2488 (fname
, gnat_type_for_size (32, 1), record_type
,
2489 TYPE_MAX_VALUE (idx_arr
[i
])));
2494 post_error ("unsupported descriptor type for &", gnat_entity
);
2497 finish_record_type (record_type
, field_list
, false, true);
2498 create_type_decl (create_concat_name (gnat_entity
, "DESC"), record_type
,
2499 NULL
, true, false, gnat_entity
);
/* Utility routine for above code to make a field.  */

static tree
make_descriptor_field (const char *name, tree type,
                       tree rec_type, tree initial)
{
  tree field
    = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);

  DECL_INITIAL (field) = initial;
  return field;
}
/* Build a type to be used to represent an aliased object whose nominal
   type is an unconstrained array.  This consists of a RECORD_TYPE containing
   a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
   ARRAY_TYPE.  If ARRAY_TYPE is that of the unconstrained array, this
   is used to represent an arbitrary unconstrained object.  Use NAME
   as the name of the record.  */

tree
build_unc_object_type (tree template_type, tree object_type, tree name)
{
  tree type = make_node (RECORD_TYPE);
  tree template_field = create_field_decl (get_identifier ("BOUNDS"),
                                           template_type, type, 0, 0, 0, 1);
  tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
                                        type, 0, 0, 0, 1);

  TYPE_NAME (type) = name;
  TYPE_CONTAINS_TEMPLATE_P (type) = 1;
  finish_record_type (type,
                      chainon (chainon (NULL_TREE, template_field),
                               array_field),
                      false, false);

  return type;
}
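/* Illustrative layout sketch of the result, in Ada-like notation:

     record
       BOUNDS : <template with one low/high pair per dimension>;
       ARRAY  : <the array data itself>;
     end record;

   A single allocation thus provides storage for both the bounds and the
   data, and a thin pointer can designate the ARRAY part with the bounds
   found at a negative offset.  */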
/* Same, taking a thin or fat pointer type instead of a template type.  */

tree
build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
                                tree name)
{
  tree template_type;

  gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));

  template_type
    = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
       ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
       : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));

  return build_unc_object_type (template_type, object_type, name);
}
2560 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2561 the normal case this is just two adjustments, but we have more to do
2562 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
2565 update_pointer_to (tree old_type
, tree new_type
)
2567 tree ptr
= TYPE_POINTER_TO (old_type
);
2568 tree ref
= TYPE_REFERENCE_TO (old_type
);
2572 /* If this is the main variant, process all the other variants first. */
2573 if (TYPE_MAIN_VARIANT (old_type
) == old_type
)
2574 for (type
= TYPE_NEXT_VARIANT (old_type
); type
;
2575 type
= TYPE_NEXT_VARIANT (type
))
2576 update_pointer_to (type
, new_type
);
2578 /* If no pointer or reference, we are done. */
2582 /* Merge the old type qualifiers in the new type.
2584 Each old variant has qualifiers for specific reasons, and the new
2585 designated type as well. Each set of qualifiers represents useful
2586 information grabbed at some point, and merging the two simply unifies
2587 these inputs into the final type description.
2589 Consider for instance a volatile type frozen after an access to constant
2590 type designating it. After the designated type freeze, we get here with a
2591 volatile new_type and a dummy old_type with a readonly variant, created
2592 when the access type was processed. We shall make a volatile and readonly
2593 designated type, because that's what it really is.
2595 We might also get here for a non-dummy old_type variant with different
2596 qualifiers than the new_type ones, for instance in some cases of pointers
2597 to private record type elaboration (see the comments around the call to
2598 this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
qualifiers in those cases too, to avoid accidentally discarding the
2600 initial set, and will often end up with old_type == new_type then. */
2601 new_type
= build_qualified_type (new_type
,
2602 TYPE_QUALS (old_type
)
2603 | TYPE_QUALS (new_type
));
2605 /* If the new type and the old one are identical, there is nothing to
2607 if (old_type
== new_type
)
2610 /* Otherwise, first handle the simple case. */
2611 if (TREE_CODE (new_type
) != UNCONSTRAINED_ARRAY_TYPE
)
2613 TYPE_POINTER_TO (new_type
) = ptr
;
2614 TYPE_REFERENCE_TO (new_type
) = ref
;
2616 for (; ptr
; ptr
= TYPE_NEXT_PTR_TO (ptr
))
2617 for (ptr1
= TYPE_MAIN_VARIANT (ptr
); ptr1
;
2618 ptr1
= TYPE_NEXT_VARIANT (ptr1
))
2619 TREE_TYPE (ptr1
) = new_type
;
2621 for (; ref
; ref
= TYPE_NEXT_REF_TO (ref
))
2622 for (ref1
= TYPE_MAIN_VARIANT (ref
); ref1
;
2623 ref1
= TYPE_NEXT_VARIANT (ref1
))
2624 TREE_TYPE (ref1
) = new_type
;
2627 /* Now deal with the unconstrained array case. In this case the "pointer"
2628 is actually a RECORD_TYPE where the types of both fields are
2629 pointers to void. In that case, copy the field list from the
2630 old type to the new one and update the fields' context. */
2631 else if (TREE_CODE (ptr
) != RECORD_TYPE
|| !TYPE_IS_FAT_POINTER_P (ptr
))
2636 tree new_obj_rec
= TYPE_OBJECT_RECORD_TYPE (new_type
);
2641 SET_DECL_ORIGINAL_FIELD (TYPE_FIELDS (ptr
),
2642 TYPE_FIELDS (TYPE_POINTER_TO (new_type
)));
2643 SET_DECL_ORIGINAL_FIELD (TREE_CHAIN (TYPE_FIELDS (ptr
)),
2644 TREE_CHAIN (TYPE_FIELDS
2645 (TYPE_POINTER_TO (new_type
))));
2647 TYPE_FIELDS (ptr
) = TYPE_FIELDS (TYPE_POINTER_TO (new_type
));
2648 DECL_CONTEXT (TYPE_FIELDS (ptr
)) = ptr
;
2649 DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr
))) = ptr
;
/* Rework the PLACEHOLDER_EXPR inside the reference to the
   template bounds.

   ??? This is now the only use of gnat_substitute_in_type, which
   is now a very "heavy" routine to do this, so it should be replaced
   at some point.  */
2657 ptr_temp_type
= TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr
)));
2658 new_ref
= build3 (COMPONENT_REF
, ptr_temp_type
,
2659 build0 (PLACEHOLDER_EXPR
, ptr
),
2660 TREE_CHAIN (TYPE_FIELDS (ptr
)), NULL_TREE
);
2663 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr
))),
2664 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr
))),
2665 TREE_CHAIN (TYPE_FIELDS (ptr
)), new_ref
));
2667 for (var
= TYPE_MAIN_VARIANT (ptr
); var
; var
= TYPE_NEXT_VARIANT (var
))
2669 SET_TYPE_UNCONSTRAINED_ARRAY (var
, new_type
);
2671 /* This may seem a bit gross, in particular wrt DECL_CONTEXT, but
2672 actually is in keeping with what build_qualified_type does. */
2673 TYPE_FIELDS (var
) = TYPE_FIELDS (ptr
);
2676 TYPE_POINTER_TO (new_type
) = TYPE_REFERENCE_TO (new_type
)
2677 = TREE_TYPE (new_type
) = ptr
;
2679 /* Now handle updating the allocation record, what the thin pointer
2680 points to. Update all pointers from the old record into the new
2681 one, update the types of the fields, and recompute the size. */
2683 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type
), new_obj_rec
);
2685 TREE_TYPE (TYPE_FIELDS (new_obj_rec
)) = TREE_TYPE (ptr_temp_type
);
2686 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec
)))
2687 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr
)));
2688 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec
)))
2689 = TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr
))));
2690 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec
)))
2691 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr
))));
2693 TYPE_SIZE (new_obj_rec
)
2694 = size_binop (PLUS_EXPR
,
2695 DECL_SIZE (TYPE_FIELDS (new_obj_rec
)),
2696 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec
))));
2697 TYPE_SIZE_UNIT (new_obj_rec
)
2698 = size_binop (PLUS_EXPR
,
2699 DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec
)),
2700 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec
))));
2701 rest_of_type_compilation (ptr
, global_bindings_p ());
2705 /* Convert a pointer to a constrained array into a pointer to a fat
2706 pointer. This involves making or finding a template. */
2709 convert_to_fat_pointer (tree type
, tree expr
)
2711 tree template_type
= TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type
))));
2712 tree
template, template_addr
;
2713 tree etype
= TREE_TYPE (expr
);
2715 /* If EXPR is a constant of zero, we make a fat pointer that has a null
2716 pointer to the template and array. */
2717 if (integer_zerop (expr
))
2719 gnat_build_constructor
2721 tree_cons (TYPE_FIELDS (type
),
2722 convert (TREE_TYPE (TYPE_FIELDS (type
)), expr
),
2723 tree_cons (TREE_CHAIN (TYPE_FIELDS (type
)),
2724 convert (build_pointer_type (template_type
),
2728 /* If EXPR is a thin pointer, make the template and data from the record. */
2730 else if (TYPE_THIN_POINTER_P (etype
))
2732 tree fields
= TYPE_FIELDS (TREE_TYPE (etype
));
2734 expr
= save_expr (expr
);
2735 if (TREE_CODE (expr
) == ADDR_EXPR
)
2736 expr
= TREE_OPERAND (expr
, 0);
2738 expr
= build1 (INDIRECT_REF
, TREE_TYPE (etype
), expr
);
2740 template = build_component_ref (expr
, NULL_TREE
, fields
, false);
2741 expr
= build_unary_op (ADDR_EXPR
, NULL_TREE
,
2742 build_component_ref (expr
, NULL_TREE
,
2743 TREE_CHAIN (fields
), false));
2746 /* Otherwise, build the constructor for the template. */
2747 template = build_template (template_type
, TREE_TYPE (etype
), expr
);
2749 template_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, template);
2751 /* The result is a CONSTRUCTOR for the fat pointer.
2753 If expr is an argument of a foreign convention subprogram, the type it
2754 points to is directly the component type. In this case, the expression
2755 type may not match the corresponding FIELD_DECL type at this point, so we
2756 call "convert" here to fix that up if necessary. This type consistency is
2757 required, for instance because it ensures that possible later folding of
2758 component_refs against this constructor always yields something of the
2759 same type as the initial reference.
2761 Note that the call to "build_template" above is still fine, because it
2762 will only refer to the provided template_type in this case. */
2764 gnat_build_constructor
2765 (type
, tree_cons (TYPE_FIELDS (type
),
2766 convert (TREE_TYPE (TYPE_FIELDS (type
)), expr
),
2767 tree_cons (TREE_CHAIN (TYPE_FIELDS (type
)),
2768 template_addr
, NULL_TREE
)));
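/* Usage sketch (hypothetical Ada view): this is what turns the result of

     new String'("Hi")

   whose access type designates an unconstrained array, into a usable fat
   pointer: the address of the data and the address of a template holding
   the bounds are packed into the two fields of the fat pointer record.  */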
/* Convert to a thin pointer type, TYPE.  The only thing we know how to
   convert is something that is a fat pointer, so convert to it first if
   EXPR is not already a fat pointer.  */

static tree
convert_to_thin_pointer (tree type, tree expr)
{
  if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
    expr
      = convert_to_fat_pointer
        (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);

  /* We get the pointer to the data and use a NOP_EXPR to make it the
     proper GCC type.  */
  expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
                              false);
  expr = build1 (NOP_EXPR, type, expr);

  return expr;
}
2792 /* Create an expression whose value is that of EXPR,
2793 converted to type TYPE. The TREE_TYPE of the value
2794 is always TYPE. This function implements all reasonable
2795 conversions; callers should filter out those that are
2796 not permitted by the language being compiled. */
2799 convert (tree type
, tree expr
)
2801 enum tree_code code
= TREE_CODE (type
);
2802 tree etype
= TREE_TYPE (expr
);
2803 enum tree_code ecode
= TREE_CODE (etype
);
2805 /* If EXPR is already the right type, we are done. */
2809 /* If the input type has padding, remove it by doing a component reference
2810 to the field. If the output type has padding, make a constructor
2811 to build the record. If both input and output have padding and are
2812 of variable size, do this as an unchecked conversion. */
2813 else if (ecode
== RECORD_TYPE
&& code
== RECORD_TYPE
2814 && TYPE_IS_PADDING_P (type
) && TYPE_IS_PADDING_P (etype
)
2815 && (!TREE_CONSTANT (TYPE_SIZE (type
))
2816 || !TREE_CONSTANT (TYPE_SIZE (etype
))))
2818 else if (ecode
== RECORD_TYPE
&& TYPE_IS_PADDING_P (etype
))
2820 /* If we have just converted to this padded type, just get
2821 the inner expression. */
2822 if (TREE_CODE (expr
) == CONSTRUCTOR
2823 && !VEC_empty (constructor_elt
, CONSTRUCTOR_ELTS (expr
))
2824 && VEC_index (constructor_elt
, CONSTRUCTOR_ELTS (expr
), 0)->index
2825 == TYPE_FIELDS (etype
))
2826 return VEC_index (constructor_elt
, CONSTRUCTOR_ELTS (expr
), 0)->value
;
2828 return convert (type
,
2829 build_component_ref (expr
, NULL_TREE
,
2830 TYPE_FIELDS (etype
), false));
2832 else if (code
== RECORD_TYPE
&& TYPE_IS_PADDING_P (type
))
2834 /* If we previously converted from another type and our type is
2835 of variable size, remove the conversion to avoid the need for
2836 variable-size temporaries. */
2837 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
2838 && !TREE_CONSTANT (TYPE_SIZE (type
)))
2839 expr
= TREE_OPERAND (expr
, 0);
2841 /* If we are just removing the padding from expr, convert the original
2842 object if we have variable size. That will avoid the need
2843 for some variable-size temporaries. */
2844 if (TREE_CODE (expr
) == COMPONENT_REF
2845 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr
, 0))) == RECORD_TYPE
2846 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr
, 0)))
2847 && !TREE_CONSTANT (TYPE_SIZE (type
)))
2848 return convert (type
, TREE_OPERAND (expr
, 0));
2850 /* If the result type is a padded type with a self-referentially-sized
2851 field and the expression type is a record, do this as an
2852 unchecked conversion. */
2853 else if (TREE_CODE (etype
) == RECORD_TYPE
2854 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type
))))
2855 return unchecked_convert (type
, expr
, false);
2859 gnat_build_constructor (type
,
2860 tree_cons (TYPE_FIELDS (type
),
2862 (TYPE_FIELDS (type
)),
2867 /* If the input is a biased type, adjust first. */
2868 if (ecode
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (etype
))
2869 return convert (type
, fold (build2 (PLUS_EXPR
, TREE_TYPE (etype
),
2870 fold_convert (TREE_TYPE (etype
),
2872 TYPE_MIN_VALUE (etype
))));
2874 /* If the input is a justified modular type, we need to extract the actual
2875 object before converting it to any other type with the exceptions of an
2876 unconstrained array or of a mere type variant. It is useful to avoid the
2877 extraction and conversion in the type variant case because it could end
up replacing a VAR_DECL expr by a constructor and we might be about to
take the address of the result.  */
2880 if (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
)
2881 && code
!= UNCONSTRAINED_ARRAY_TYPE
2882 && TYPE_MAIN_VARIANT (type
) != TYPE_MAIN_VARIANT (etype
))
2883 return convert (type
, build_component_ref (expr
, NULL_TREE
,
2884 TYPE_FIELDS (etype
), false));
2886 /* If converting to a type that contains a template, convert to the data
2887 type and then build the template. */
2888 if (code
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (type
))
2890 tree obj_type
= TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type
)));
2892 /* If the source already has a template, get a reference to the
2893 associated array only, as we are going to rebuild a template
2894 for the target type anyway. */
2895 expr
= maybe_unconstrained_array (expr
);
2898 gnat_build_constructor
2900 tree_cons (TYPE_FIELDS (type
),
2901 build_template (TREE_TYPE (TYPE_FIELDS (type
)),
2902 obj_type
, NULL_TREE
),
2903 tree_cons (TREE_CHAIN (TYPE_FIELDS (type
)),
2904 convert (obj_type
, expr
), NULL_TREE
)));
/* There are some special cases of expressions that we process specially.  */
2909 switch (TREE_CODE (expr
))
/* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
   conversion in gnat_expand_expr.  NULL_EXPR does not represent
   an actual value, so no conversion is needed.  */
2918 expr
= copy_node (expr
);
2919 TREE_TYPE (expr
) = type
;
2923 /* If we are converting a STRING_CST to another constrained array type,
2924 just make a new one in the proper type. */
2925 if (code
== ecode
&& AGGREGATE_TYPE_P (etype
)
2926 && !(TREE_CODE (TYPE_SIZE (etype
)) == INTEGER_CST
2927 && TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
))
2929 expr
= copy_node (expr
);
2930 TREE_TYPE (expr
) = type
;
2935 case UNCONSTRAINED_ARRAY_REF
:
2936 /* Convert this to the type of the inner array by getting the address of
2937 the array from the template. */
2938 expr
= build_unary_op (INDIRECT_REF
, NULL_TREE
,
2939 build_component_ref (TREE_OPERAND (expr
, 0),
2940 get_identifier ("P_ARRAY"),
2942 etype
= TREE_TYPE (expr
);
2943 ecode
= TREE_CODE (etype
);
2946 case VIEW_CONVERT_EXPR
:
2948 /* GCC 4.x is very sensitive to type consistency overall, and view
2949 conversions thus are very frequent. Even though just "convert"ing
2950 the inner operand to the output type is fine in most cases, it
2951 might expose unexpected input/output type mismatches in special
2952 circumstances so we avoid such recursive calls when we can. */
2954 tree op0
= TREE_OPERAND (expr
, 0);
2956 /* If we are converting back to the original type, we can just
2957 lift the input conversion. This is a common occurrence with
2958 switches back-and-forth amongst type variants. */
2959 if (type
== TREE_TYPE (op0
))
2962 /* Otherwise, if we're converting between two aggregate types, we
2963 might be allowed to substitute the VIEW_CONVERT target type in
2964 place or to just convert the inner expression. */
2965 if (AGGREGATE_TYPE_P (type
) && AGGREGATE_TYPE_P (etype
))
2967 /* If we are converting between type variants, we can just
2968 substitute the VIEW_CONVERT in place. */
2969 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (etype
))
2970 return build1 (VIEW_CONVERT_EXPR
, type
, op0
);
2972 /* Otherwise, we may just bypass the input view conversion unless
2973 one of the types is a fat pointer, which is handled by
2974 specialized code below which relies on exact type matching. */
2975 else if (!TYPE_FAT_POINTER_P (type
) && !TYPE_FAT_POINTER_P (etype
))
2976 return convert (type
, op0
);
2982 /* If both types are record types, just convert the pointer and
2983 make a new INDIRECT_REF.
2985 ??? Disable this for now since it causes problems with the
2986 code in build_binary_op for MODIFY_EXPR which wants to
2987 strip off conversions. But that code really is a mess and
2988 we need to do this a much better way some time. */
2990 && (TREE_CODE (type
) == RECORD_TYPE
2991 || TREE_CODE (type
) == UNION_TYPE
)
2992 && (TREE_CODE (etype
) == RECORD_TYPE
2993 || TREE_CODE (etype
) == UNION_TYPE
)
2994 && !TYPE_FAT_POINTER_P (type
) && !TYPE_FAT_POINTER_P (etype
))
2995 return build_unary_op (INDIRECT_REF
, NULL_TREE
,
2996 convert (build_pointer_type (type
),
2997 TREE_OPERAND (expr
, 0)));
3004 /* Check for converting to a pointer to an unconstrained array. */
3005 if (TYPE_FAT_POINTER_P (type
) && !TYPE_FAT_POINTER_P (etype
))
3006 return convert_to_fat_pointer (type
, expr
);
/* If we're converting between two aggregate types that have the same main
   variant, just make a VIEW_CONVERT_EXPR.  */
3010 else if (AGGREGATE_TYPE_P (type
)
3011 && TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (etype
))
3012 return build1 (VIEW_CONVERT_EXPR
, type
, expr
);
3014 /* In all other cases of related types, make a NOP_EXPR. */
3015 else if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (etype
)
3016 || (code
== INTEGER_CST
&& ecode
== INTEGER_CST
3017 && (type
== TREE_TYPE (etype
) || etype
== TREE_TYPE (type
))))
3018 return fold_convert (type
, expr
);
3023 return build1 (CONVERT_EXPR
, type
, expr
);
3026 return fold_convert (type
, gnat_truthvalue_conversion (expr
));
3029 if (TYPE_HAS_ACTUAL_BOUNDS_P (type
)
3030 && (ecode
== ARRAY_TYPE
|| ecode
== UNCONSTRAINED_ARRAY_TYPE
3031 || (ecode
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (etype
))))
3032 return unchecked_convert (type
, expr
, false);
3033 else if (TYPE_BIASED_REPRESENTATION_P (type
))
3034 return fold_convert (type
,
3035 fold_build2 (MINUS_EXPR
, TREE_TYPE (type
),
3036 convert (TREE_TYPE (type
), expr
),
3037 TYPE_MIN_VALUE (type
)));
3039 /* ... fall through ... */
3042 return fold (convert_to_integer (type
, expr
));
3045 case REFERENCE_TYPE
:
3046 /* If converting between two pointers to records denoting
3047 both a template and type, adjust if needed to account
3048 for any differing offsets, since one might be negative. */
3049 if (TYPE_THIN_POINTER_P (etype
) && TYPE_THIN_POINTER_P (type
))
3052 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype
))),
3053 bit_position (TYPE_FIELDS (TREE_TYPE (type
))));
3054 tree byte_diff
= size_binop (CEIL_DIV_EXPR
, bit_diff
,
3055 sbitsize_int (BITS_PER_UNIT
));
3057 expr
= build1 (NOP_EXPR
, type
, expr
);
3058 TREE_CONSTANT (expr
) = TREE_CONSTANT (TREE_OPERAND (expr
, 0));
3059 if (integer_zerop (byte_diff
))
3062 return build_binary_op (PLUS_EXPR
, type
, expr
,
3063 fold (convert_to_pointer (type
, byte_diff
)));
3066 /* If converting to a thin pointer, handle specially. */
3067 if (TYPE_THIN_POINTER_P (type
)
3068 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type
)))
3069 return convert_to_thin_pointer (type
, expr
);
3071 /* If converting fat pointer to normal pointer, get the pointer to the
3072 array and then convert it. */
3073 else if (TYPE_FAT_POINTER_P (etype
))
3074 expr
= build_component_ref (expr
, get_identifier ("P_ARRAY"),
3077 return fold (convert_to_pointer (type
, expr
));
3080 return fold (convert_to_real (type
, expr
));
3083 if (TYPE_JUSTIFIED_MODULAR_P (type
) && !AGGREGATE_TYPE_P (etype
))
3085 gnat_build_constructor
3086 (type
, tree_cons (TYPE_FIELDS (type
),
3087 convert (TREE_TYPE (TYPE_FIELDS (type
)), expr
),
3090 /* ... fall through ... */
3093 /* In these cases, assume the front-end has validated the conversion.
3094 If the conversion is valid, it will be a bit-wise conversion, so
3095 it can be viewed as an unchecked conversion. */
3096 return unchecked_convert (type
, expr
, false);
/* This is either a conversion between a tagged type and some
   subtype, which we have to mark as a UNION_TYPE because of
   overlapping fields, or a conversion of an Unchecked_Union.  */
3102 return unchecked_convert (type
, expr
, false);
3104 case UNCONSTRAINED_ARRAY_TYPE
:
3105 /* If EXPR is a constrained array, take its address, convert it to a
3106 fat pointer, and then dereference it. Likewise if EXPR is a
3107 record containing both a template and a constrained array.
3108 Note that a record representing a justified modular type
3109 always represents a packed constrained array. */
3110 if (ecode
== ARRAY_TYPE
3111 || (ecode
== INTEGER_TYPE
&& TYPE_HAS_ACTUAL_BOUNDS_P (etype
))
3112 || (ecode
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (etype
))
3113 || (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
)))
3116 (INDIRECT_REF
, NULL_TREE
,
3117 convert_to_fat_pointer (TREE_TYPE (type
),
3118 build_unary_op (ADDR_EXPR
,
3121 /* Do something very similar for converting one unconstrained
3122 array to another. */
3123 else if (ecode
== UNCONSTRAINED_ARRAY_TYPE
)
3125 build_unary_op (INDIRECT_REF
, NULL_TREE
,
3126 convert (TREE_TYPE (type
),
3127 build_unary_op (ADDR_EXPR
,
3133 return fold (convert_to_complex (type
, expr
));
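/* Purely illustrative: converting away the padding of a padded record type
   (a RECORD_TYPE with TYPE_IS_PADDING_P set wrapping a single field) goes
   through the TYPE_IS_PADDING_P case near the top and amounts to

     convert (type, build_component_ref (expr, NULL_TREE,
                                         TYPE_FIELDS (etype), false));

   i.e. a component reference to the lone field followed by a conversion to
   the requested type.  */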
/* Remove all conversions that are done in EXP.  This includes converting
   from a padded type or to a justified modular type.  If TRUE_ADDRESS
   is true, always return the address of the containing object even if
   the address is not bit-aligned.  */

tree
remove_conversions (tree exp, bool true_address)
{
  switch (TREE_CODE (exp))
    {
    case CONSTRUCTOR:
      if (true_address
          && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
        return
          remove_conversions (VEC_index (constructor_elt,
                                         CONSTRUCTOR_ELTS (exp), 0)->value,
                              true);
      break;

    case COMPONENT_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
          && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
        return remove_conversions (TREE_OPERAND (exp, 0), true_address);
      break;

    case VIEW_CONVERT_EXPR:  case NON_LVALUE_EXPR:
    case NOP_EXPR:  case CONVERT_EXPR:
      return remove_conversions (TREE_OPERAND (exp, 0), true_address);

    default:
      break;
    }

  return exp;
}
3177 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3178 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3179 likewise return an expression pointing to the underlying array. */
3182 maybe_unconstrained_array (tree exp
)
3184 enum tree_code code
= TREE_CODE (exp
);
3187 switch (TREE_CODE (TREE_TYPE (exp
)))
3189 case UNCONSTRAINED_ARRAY_TYPE
:
3190 if (code
== UNCONSTRAINED_ARRAY_REF
)
3193 = build_unary_op (INDIRECT_REF
, NULL_TREE
,
3194 build_component_ref (TREE_OPERAND (exp
, 0),
3195 get_identifier ("P_ARRAY"),
3197 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp
);
3201 else if (code
== NULL_EXPR
)
3202 return build1 (NULL_EXPR
,
3203 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3204 (TREE_TYPE (TREE_TYPE (exp
))))),
3205 TREE_OPERAND (exp
, 0));
3208 /* If this is a padded type, convert to the unpadded type and see if
3209 it contains a template. */
3210 if (TYPE_IS_PADDING_P (TREE_TYPE (exp
)))
3212 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp
))), exp
);
3213 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3214 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
3216 build_component_ref (new, NULL_TREE
,
3217 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
3220 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp
)))
3222 build_component_ref (exp
, NULL_TREE
,
3223 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp
))), 0);
3233 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
3234 If NOTRUNC_P is true, truncation operations should be suppressed. */
3237 unchecked_convert (tree type
, tree expr
, bool notrunc_p
)
3239 tree etype
= TREE_TYPE (expr
);
3241 /* If the expression is already the right type, we are done. */
/* If both types are integral, just do a normal conversion.
   Likewise for a conversion to an unconstrained array.  */
3247 if ((((INTEGRAL_TYPE_P (type
)
3248 && !(TREE_CODE (type
) == INTEGER_TYPE
3249 && TYPE_VAX_FLOATING_POINT_P (type
)))
3250 || (POINTER_TYPE_P (type
) && ! TYPE_THIN_POINTER_P (type
))
3251 || (TREE_CODE (type
) == RECORD_TYPE
3252 && TYPE_JUSTIFIED_MODULAR_P (type
)))
3253 && ((INTEGRAL_TYPE_P (etype
)
3254 && !(TREE_CODE (etype
) == INTEGER_TYPE
3255 && TYPE_VAX_FLOATING_POINT_P (etype
)))
3256 || (POINTER_TYPE_P (etype
) && !TYPE_THIN_POINTER_P (etype
))
3257 || (TREE_CODE (etype
) == RECORD_TYPE
3258 && TYPE_JUSTIFIED_MODULAR_P (etype
))))
3259 || TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
3263 if (TREE_CODE (etype
) == INTEGER_TYPE
3264 && TYPE_BIASED_REPRESENTATION_P (etype
))
3266 tree ntype
= copy_type (etype
);
3268 TYPE_BIASED_REPRESENTATION_P (ntype
) = 0;
3269 TYPE_MAIN_VARIANT (ntype
) = ntype
;
3270 expr
= build1 (NOP_EXPR
, ntype
, expr
);
3273 if (TREE_CODE (type
) == INTEGER_TYPE
3274 && TYPE_BIASED_REPRESENTATION_P (type
))
3276 rtype
= copy_type (type
);
3277 TYPE_BIASED_REPRESENTATION_P (rtype
) = 0;
3278 TYPE_MAIN_VARIANT (rtype
) = rtype
;
3281 expr
= convert (rtype
, expr
);
3283 expr
= build1 (NOP_EXPR
, type
, expr
);
3286 /* If we are converting TO an integral type whose precision is not the
3287 same as its size, first unchecked convert to a record that contains
3288 an object of the output type. Then extract the field. */
3289 else if (INTEGRAL_TYPE_P (type
) && TYPE_RM_SIZE (type
)
3290 && 0 != compare_tree_int (TYPE_RM_SIZE (type
),
3291 GET_MODE_BITSIZE (TYPE_MODE (type
))))
3293 tree rec_type
= make_node (RECORD_TYPE
);
3294 tree field
= create_field_decl (get_identifier ("OBJ"), type
,
3295 rec_type
, 1, 0, 0, 0);
3297 TYPE_FIELDS (rec_type
) = field
;
3298 layout_type (rec_type
);
3300 expr
= unchecked_convert (rec_type
, expr
, notrunc_p
);
3301 expr
= build_component_ref (expr
, NULL_TREE
, field
, 0);
/* Similarly for an integral input type whose precision is not equal to its
   size.  */
3306 else if (INTEGRAL_TYPE_P (etype
) && TYPE_RM_SIZE (etype
)
3307 && 0 != compare_tree_int (TYPE_RM_SIZE (etype
),
3308 GET_MODE_BITSIZE (TYPE_MODE (etype
))))
3310 tree rec_type
= make_node (RECORD_TYPE
);
3312 = create_field_decl (get_identifier ("OBJ"), etype
, rec_type
,
3315 TYPE_FIELDS (rec_type
) = field
;
3316 layout_type (rec_type
);
3318 expr
= gnat_build_constructor (rec_type
, build_tree_list (field
, expr
));
3319 expr
= unchecked_convert (type
, expr
, notrunc_p
);
3322 /* We have a special case when we are converting between two
3323 unconstrained array types. In that case, take the address,
3324 convert the fat pointer types, and dereference. */
3325 else if (TREE_CODE (etype
) == UNCONSTRAINED_ARRAY_TYPE
3326 && TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
3327 expr
= build_unary_op (INDIRECT_REF
, NULL_TREE
,
3328 build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (type
),
3329 build_unary_op (ADDR_EXPR
, NULL_TREE
,
3333 expr
= maybe_unconstrained_array (expr
);
3335 /* There's no point in doing two unchecked conversions in a row. */
3336 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
)
3337 expr
= TREE_OPERAND (expr
, 0);
3339 etype
= TREE_TYPE (expr
);
3340 expr
= build1 (VIEW_CONVERT_EXPR
, type
, expr
);
3343 /* If the result is an integral type whose size is not equal to
3344 the size of the underlying machine type, sign- or zero-extend
3345 the result. We need not do this in the case where the input is
3346 an integral type of the same precision and signedness or if the output
3347 is a biased type or if both the input and output are unsigned. */
3349 && INTEGRAL_TYPE_P (type
) && TYPE_RM_SIZE (type
)
3350 && !(TREE_CODE (type
) == INTEGER_TYPE
3351 && TYPE_BIASED_REPRESENTATION_P (type
))
3352 && 0 != compare_tree_int (TYPE_RM_SIZE (type
),
3353 GET_MODE_BITSIZE (TYPE_MODE (type
)))
3354 && !(INTEGRAL_TYPE_P (etype
)
3355 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (etype
)
3356 && operand_equal_p (TYPE_RM_SIZE (type
),
3357 (TYPE_RM_SIZE (etype
) != 0
3358 ? TYPE_RM_SIZE (etype
) : TYPE_SIZE (etype
)),
3360 && !(TYPE_UNSIGNED (type
) && TYPE_UNSIGNED (etype
)))
3362 tree base_type
= gnat_type_for_mode (TYPE_MODE (type
),
3363 TYPE_UNSIGNED (type
));
3365 = convert (base_type
,
3366 size_binop (MINUS_EXPR
,
3368 (GET_MODE_BITSIZE (TYPE_MODE (type
))),
3369 TYPE_RM_SIZE (type
)));
3372 build_binary_op (RSHIFT_EXPR
, base_type
,
3373 build_binary_op (LSHIFT_EXPR
, base_type
,
3374 convert (base_type
, expr
),
3379 /* An unchecked conversion should never raise Constraint_Error. The code
3380 below assumes that GCC's conversion routines overflow the same way that
3381 the underlying hardware does. This is probably true. In the rare case
3382 when it is false, we can rely on the fact that such conversions are
3383 erroneous anyway. */
3384 if (TREE_CODE (expr
) == INTEGER_CST
)
3385 TREE_OVERFLOW (expr
) = TREE_CONSTANT_OVERFLOW (expr
) = 0;
/* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
   show it is no longer constant.  */
3389 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
3390 && !operand_equal_p (TYPE_SIZE_UNIT (type
), TYPE_SIZE_UNIT (etype
),
3392 TREE_CONSTANT (expr
) = 0;
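/* Illustrative sketch: an unchecked conversion between a 32-bit integer type
   and a 32-bit floating-point type, neither of which has a partial RM size,
   falls through all the special cases above and reduces to

     expr = build1 (VIEW_CONVERT_EXPR, type, expr);

   i.e. a plain reinterpretation of the bit pattern, with no extension or
   truncation required.  */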
/* Search the chain of currently reachable declarations for a builtin
   FUNCTION_DECL node corresponding to function NAME (an IDENTIFIER_NODE).
   Return the first node found, if any, or NULL_TREE otherwise.  */

tree
builtin_decl_for (tree name __attribute__ ((unused)))
{
  /* ??? not clear yet how to implement this function in tree-ssa, so
     return NULL_TREE for now.  */
  return NULL_TREE;
}

#include "gt-ada-utils.h"
#include "gtype-ada.h"